gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.chimesdkmessaging.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/chime-sdk-messaging-2021-05-15/DescribeChannelMembershipForAppInstanceUser"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeChannelMembershipForAppInstanceUserRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The ARN of the channel to which the user belongs.
     * </p>
     */
    private String channelArn;

    /**
     * <p>
     * The ARN of the user in a channel.
     * </p>
     */
    private String appInstanceUserArn;

    /**
     * <p>
     * The <code>AppInstanceUserArn</code> of the user that makes the API call.
     * </p>
     */
    private String chimeBearer;

    /**
     * <p>
     * The ARN of the channel to which the user belongs.
     * </p>
     *
     * @param channelArn
     *        The ARN of the channel to which the user belongs.
     */
    public void setChannelArn(String channelArn) {
        this.channelArn = channelArn;
    }

    /**
     * <p>
     * The ARN of the channel to which the user belongs.
     * </p>
     *
     * @return The ARN of the channel to which the user belongs.
     */
    public String getChannelArn() {
        return this.channelArn;
    }

    /**
     * <p>
     * The ARN of the channel to which the user belongs.
     * </p>
     *
     * @param channelArn
     *        The ARN of the channel to which the user belongs.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeChannelMembershipForAppInstanceUserRequest withChannelArn(String channelArn) {
        this.channelArn = channelArn;
        return this;
    }

    /**
     * <p>
     * The ARN of the user in a channel.
     * </p>
     *
     * @param appInstanceUserArn
     *        The ARN of the user in a channel.
     */
    public void setAppInstanceUserArn(String appInstanceUserArn) {
        this.appInstanceUserArn = appInstanceUserArn;
    }

    /**
     * <p>
     * The ARN of the user in a channel.
     * </p>
     *
     * @return The ARN of the user in a channel.
     */
    public String getAppInstanceUserArn() {
        return this.appInstanceUserArn;
    }

    /**
     * <p>
     * The ARN of the user in a channel.
     * </p>
     *
     * @param appInstanceUserArn
     *        The ARN of the user in a channel.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeChannelMembershipForAppInstanceUserRequest withAppInstanceUserArn(String appInstanceUserArn) {
        this.appInstanceUserArn = appInstanceUserArn;
        return this;
    }

    /**
     * <p>
     * The <code>AppInstanceUserArn</code> of the user that makes the API call.
     * </p>
     *
     * @param chimeBearer
     *        The <code>AppInstanceUserArn</code> of the user that makes the API call.
     */
    public void setChimeBearer(String chimeBearer) {
        this.chimeBearer = chimeBearer;
    }

    /**
     * <p>
     * The <code>AppInstanceUserArn</code> of the user that makes the API call.
     * </p>
     *
     * @return The <code>AppInstanceUserArn</code> of the user that makes the API call.
     */
    public String getChimeBearer() {
        return this.chimeBearer;
    }

    /**
     * <p>
     * The <code>AppInstanceUserArn</code> of the user that makes the API call.
     * </p>
     *
     * @param chimeBearer
     *        The <code>AppInstanceUserArn</code> of the user that makes the API call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeChannelMembershipForAppInstanceUserRequest withChimeBearer(String chimeBearer) {
        this.chimeBearer = chimeBearer;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("{");
        if (getChannelArn() != null) {
            builder.append("ChannelArn: ").append(getChannelArn()).append(",");
        }
        if (getAppInstanceUserArn() != null) {
            builder.append("AppInstanceUserArn: ").append(getAppInstanceUserArn()).append(",");
        }
        if (getChimeBearer() != null) {
            builder.append("ChimeBearer: ").append(getChimeBearer());
        }
        return builder.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // A null obj fails the instanceof check, so no separate null test is needed.
        if (!(obj instanceof DescribeChannelMembershipForAppInstanceUserRequest)) {
            return false;
        }
        DescribeChannelMembershipForAppInstanceUserRequest that = (DescribeChannelMembershipForAppInstanceUserRequest) obj;
        return java.util.Objects.equals(getChannelArn(), that.getChannelArn())
                && java.util.Objects.equals(getAppInstanceUserArn(), that.getAppInstanceUserArn())
                && java.util.Objects.equals(getChimeBearer(), that.getChimeBearer());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based recurrence (seed 1, null -> 0) as the
        // hand-rolled accumulator it replaces, so hash values are unchanged.
        return java.util.Objects.hash(getChannelArn(), getAppInstanceUserArn(), getChimeBearer());
    }

    @Override
    public DescribeChannelMembershipForAppInstanceUserRequest clone() {
        return (DescribeChannelMembershipForAppInstanceUserRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.datastream;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.Public;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.operators.ResourceSpec;
import org.apache.flink.api.common.operators.SlotSharingGroup;
import org.apache.flink.api.connector.sink2.Sink;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.operators.ChainingStrategy;
import org.apache.flink.streaming.api.operators.StreamSink;
import org.apache.flink.streaming.api.transformations.LegacySinkTransformation;
import org.apache.flink.streaming.api.transformations.PhysicalTransformation;
import org.apache.flink.streaming.api.transformations.SinkTransformation;
import org.apache.flink.streaming.api.transformations.SinkV1Adapter;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* A Stream Sink. This is used for emitting elements from a streaming topology.
*
* @param <T> The type of the elements in the Stream
*/
@Public
public class DataStreamSink<T> {

    private final PhysicalTransformation<T> transformation;

    protected DataStreamSink(PhysicalTransformation<T> transformation) {
        this.transformation = checkNotNull(transformation);
    }

    /**
     * Creates a sink backed by the legacy {@link SinkFunction} API and registers it with the
     * input stream's execution environment.
     */
    static <T> DataStreamSink<T> forSinkFunction(
            DataStream<T> inputStream, SinkFunction<T> sinkFunction) {
        StreamSink<T> sinkOperator = new StreamSink<>(sinkFunction);
        final StreamExecutionEnvironment executionEnvironment =
                inputStream.getExecutionEnvironment();
        PhysicalTransformation<T> transformation =
                new LegacySinkTransformation<>(
                        inputStream.getTransformation(),
                        "Unnamed",
                        sinkOperator,
                        executionEnvironment.getParallelism());
        executionEnvironment.addOperator(transformation);
        return new DataStreamSink<>(transformation);
    }

    /**
     * Creates a sink backed by the unified {@link Sink} (sink V2) API and registers it with the
     * input stream's execution environment.
     */
    @Internal
    public static <T> DataStreamSink<T> forSink(DataStream<T> inputStream, Sink<T> sink) {
        final StreamExecutionEnvironment executionEnvironment =
                inputStream.getExecutionEnvironment();
        SinkTransformation<T, T> transformation =
                new SinkTransformation<>(
                        inputStream,
                        sink,
                        inputStream.getType(),
                        "Sink",
                        executionEnvironment.getParallelism());
        executionEnvironment.addOperator(transformation);
        return new DataStreamSink<>(transformation);
    }

    /** Creates a sink for a V1 {@code Sink} by adapting it to the V2 interface. */
    @Internal
    public static <T> DataStreamSink<T> forSinkV1(
            DataStream<T> inputStream, org.apache.flink.api.connector.sink.Sink<T, ?, ?, ?> sink) {
        return forSink(inputStream, SinkV1Adapter.wrap(sink));
    }

    /** Returns the transformation that contains the actual sink operator of this sink. */
    @Internal
    public Transformation<T> getTransformation() {
        return transformation;
    }

    /**
     * Returns this sink's transformation as a {@link LegacySinkTransformation}.
     *
     * @throws IllegalStateException if this sink is not backed by a legacy transformation (e.g.
     *     it was created via {@link #forSink}).
     */
    @Internal
    public LegacySinkTransformation<T> getLegacyTransformation() {
        if (transformation instanceof LegacySinkTransformation) {
            return (LegacySinkTransformation<T>) transformation;
        } else {
            // Fixed grammatically broken message ("There is no the ...").
            throw new IllegalStateException(
                    "This sink is not based on a LegacySinkTransformation.");
        }
    }

    /**
     * Sets the name of this sink. This name is used by the visualization and logging during
     * runtime.
     *
     * @return The named sink.
     */
    public DataStreamSink<T> name(String name) {
        transformation.setName(name);
        return this;
    }

    /**
     * Sets an ID for this operator.
     *
     * <p>The specified ID is used to assign the same operator ID across job submissions (for
     * example when starting a job from a savepoint).
     *
     * <p><strong>Important</strong>: this ID needs to be unique per transformation and job.
     * Otherwise, job submission will fail.
     *
     * @param uid The unique user-specified ID of this transformation.
     * @return The operator with the specified ID.
     */
    @PublicEvolving
    public DataStreamSink<T> uid(String uid) {
        transformation.setUid(uid);
        return this;
    }

    /**
     * Sets an user provided hash for this operator. This will be used AS IS to create the
     * JobVertexID.
     *
     * <p>The user provided hash is an alternative to the generated hashes, which is considered
     * when identifying an operator through the default hash mechanics fails (e.g. because of
     * changes between Flink versions).
     *
     * <p><strong>Important</strong>: this should be used as a workaround or for trouble shooting.
     * The provided hash needs to be unique per transformation and job. Otherwise, job submission
     * will fail. Furthermore, you cannot assign user-specified hash to intermediate nodes in an
     * operator chain and trying so will let your job fail.
     *
     * <p>A use case for this is in migration between Flink versions or changing the jobs in a way
     * that changes the automatically generated hashes. In this case, providing the previous hashes
     * directly through this method (e.g. obtained from old logs) can help to reestablish a lost
     * mapping from states to their target operator.
     *
     * @param uidHash The user provided hash for this operator. This will become the JobVertexID,
     *     which is shown in the logs and web ui.
     * @return The operator with the user provided hash.
     */
    @PublicEvolving
    public DataStreamSink<T> setUidHash(String uidHash) {
        if (!(transformation instanceof LegacySinkTransformation)) {
            throw new UnsupportedOperationException(
                    "Cannot set a custom UID hash on a non-legacy sink");
        }
        transformation.setUidHash(uidHash);
        return this;
    }

    /**
     * Sets the parallelism for this sink. The degree must be higher than zero.
     *
     * @param parallelism The parallelism for this sink.
     * @return The sink with set parallelism.
     */
    public DataStreamSink<T> setParallelism(int parallelism) {
        transformation.setParallelism(parallelism);
        return this;
    }

    /**
     * Sets the description for this sink.
     *
     * <p>Description is used in json plan and web ui, but not in logging and metrics where only
     * name is available. Description is expected to provide detailed information about the sink,
     * while name is expected to be more simple, providing summary information only, so that we can
     * have more user-friendly logging messages and metric tags without losing useful messages for
     * debugging.
     *
     * @param description The description for this sink.
     * @return The sink with new description.
     */
    @PublicEvolving
    public DataStreamSink<T> setDescription(String description) {
        transformation.setDescription(description);
        return this;
    }

    // ---------------------------------------------------------------------------
    //  Fine-grained resource profiles are an incomplete work-in-progress feature
    //  The setters are hence private at this point.
    // ---------------------------------------------------------------------------

    /**
     * Sets the minimum and preferred resources for this sink, and the lower and upper resource
     * limits will be considered in resource resize feature for future plan.
     *
     * @param minResources The minimum resources for this sink.
     * @param preferredResources The preferred resources for this sink
     * @return The sink with set minimum and preferred resources.
     */
    private DataStreamSink<T> setResources(
            ResourceSpec minResources, ResourceSpec preferredResources) {
        transformation.setResources(minResources, preferredResources);
        return this;
    }

    /**
     * Sets the resources for this sink, the minimum and preferred resources are the same by
     * default.
     *
     * @param resources The resources for this sink.
     * @return The sink with set minimum and preferred resources.
     */
    private DataStreamSink<T> setResources(ResourceSpec resources) {
        transformation.setResources(resources, resources);
        return this;
    }

    /**
     * Turns off chaining for this operator so thread co-location will not be used as an
     * optimization.
     *
     * <p>Chaining can be turned off for the whole job by {@link
     * org.apache.flink.streaming.api.environment.StreamExecutionEnvironment#disableOperatorChaining()}
     * however it is not advised for performance considerations.
     *
     * @return The sink with chaining disabled
     */
    @PublicEvolving
    public DataStreamSink<T> disableChaining() {
        this.transformation.setChainingStrategy(ChainingStrategy.NEVER);
        return this;
    }

    /**
     * Sets the slot sharing group of this operation. Parallel instances of operations that are in
     * the same slot sharing group will be co-located in the same TaskManager slot, if possible.
     *
     * <p>Operations inherit the slot sharing group of input operations if all input operations are
     * in the same slot sharing group and no slot sharing group was explicitly specified.
     *
     * <p>Initially an operation is in the default slot sharing group. An operation can be put into
     * the default group explicitly by setting the slot sharing group to {@code "default"}.
     *
     * @param slotSharingGroup The slot sharing group name.
     * @return The sink with the assigned slot sharing group.
     */
    @PublicEvolving
    public DataStreamSink<T> slotSharingGroup(String slotSharingGroup) {
        transformation.setSlotSharingGroup(slotSharingGroup);
        return this;
    }

    /**
     * Sets the slot sharing group of this operation. Parallel instances of operations that are in
     * the same slot sharing group will be co-located in the same TaskManager slot, if possible.
     *
     * <p>Operations inherit the slot sharing group of input operations if all input operations are
     * in the same slot sharing group and no slot sharing group was explicitly specified.
     *
     * <p>Initially an operation is in the default slot sharing group. An operation can be put into
     * the default group explicitly by setting the slot sharing group with name {@code "default"}.
     *
     * @param slotSharingGroup which contains name and its resource spec.
     * @return The sink with the assigned slot sharing group.
     */
    @PublicEvolving
    public DataStreamSink<T> slotSharingGroup(SlotSharingGroup slotSharingGroup) {
        transformation.setSlotSharingGroup(slotSharingGroup);
        return this;
    }
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.InternalStatisticsDisabledException;
import com.gemstone.gemfire.internal.cache.lru.LRUClockNode;
import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionStamp;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: STATS
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2
/**
* Do not modify this class. It was generated.
* Instead modify LeafRegionEntry.cpp and then run
* bin/generateRegionEntryClasses.sh from the directory
* that contains your build.xml.
*/
/**
 * Region entry variant combining version stamps, per-entry access statistics,
 * LRU clock-node linkage, heap value storage, and an inline primitive {@code int}
 * key (no separate key object is allocated).
 *
 * <p>Machine-generated from LeafRegionEntry.cpp — do not edit this class by hand;
 * change the template and regenerate instead (see class comment below).
 */
public class VersionedStatsLRURegionEntryHeapIntKey extends VersionedStatsLRURegionEntryHeap {
  public VersionedStatsLRURegionEntryHeapIntKey (RegionEntryContext context, int key,
      Object value
      ) {
    super(context,
        value
        );
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    this.key = key;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code

  // Hash and chain pointer used by CustomEntryConcurrentHashMap, which stores
  // these entries directly as its hash-table nodes (see HashEntry).
  protected int hash;
  private HashEntry<Object, Object> next;

  // Updated atomically via the field updater below instead of a per-entry
  // AtomicLong, to keep the entry's memory footprint small.
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private static final AtomicLongFieldUpdater<VersionedStatsLRURegionEntryHeapIntKey> lastModifiedUpdater
    = AtomicLongFieldUpdater.newUpdater(VersionedStatsLRURegionEntryHeapIntKey.class, "lastModified");

  // The entry's value; volatile so readers see writes without locking.
  private volatile Object value;
  @Override
  protected final Object getValueField() {
    return this.value;
  }
  @Override
  protected void setValueField(Object v) {
    this.value = v;
  }
  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }
  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }
  /**
   * @see HashEntry#getEntryHash()
   */
  public final int getEntryHash() {
    return this.hash;
  }
  protected void setEntryHash(int v) {
    this.hash = v;
  }
  /**
   * @see HashEntry#getNextEntry()
   */
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }
  /**
   * @see HashEntry#setNextEntry
   */
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // lru code
  @Override
  public void setDelayedDiskId(LocalRegion r) {
    // nothing needed for LRUs with no disk
  }
  // Recomputes this entry's size and returns the delta against the previously
  // recorded size; synchronized so size accounting stays consistent.
  public final synchronized int updateEntrySize(EnableLRU capacityController) {
    return updateEntrySize(capacityController, _getValue()); // OFHEAP: _getValue ok w/o incing refcount because we are synced and only getting the size
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public final synchronized int updateEntrySize(EnableLRU capacityController,
                                                Object value) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    //   if ( debug ) log( "updateEntrySize key=" + getKey()
    //                     + (_getValue() == Token.INVALID ? " invalid" :
    //                        (_getValue() == Token.LOCAL_INVALID ? "local_invalid" :
    //                         (_getValue()==null ? " evicted" : " valid")))
    //                     + " oldSize=" + oldSize
    //                     + " newSize=" + this.size );
    return delta;
  }
  // Bit-flag accessors for the LRU clock algorithm. The flag constants and
  // setBits/clearBits/areAnyBitsSet come from a superclass; clearBits is passed
  // the complement, so it presumably ANDs with its argument — TODO confirm
  // against the superclass definition.
  public final boolean testRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }
  @Override
  public final void setRecentlyUsed() {
    setBits(RECENTLY_USED);
  }
  public final void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }
  public final boolean testEvicted() {
    return areAnyBitsSet(EVICTED);
  }
  public final void setEvicted() {
    setBits(EVICTED);
  }
  public final void unsetEvicted() {
    clearBits(~EVICTED);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  // Doubly-linked LRU clock list pointers plus the last computed entry size.
  private LRUClockNode nextLRU;
  private LRUClockNode prevLRU;
  private int size;
  public final void setNextLRUNode( LRUClockNode next ) {
    this.nextLRU = next;
  }
  public final LRUClockNode nextLRUNode() {
    return this.nextLRU;
  }
  public final void setPrevLRUNode( LRUClockNode prev ) {
    this.prevLRU = prev;
  }
  public final LRUClockNode prevLRUNode() {
    return this.prevLRU;
  }
  public final int getEntrySize() {
    return this.size;
  }
  protected final void setEntrySize(int size) {
    this.size = size;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  //@Override
  //public StringBuilder appendFieldsToString(final StringBuilder sb) {
  //  StringBuilder result = super.appendFieldsToString(sb);
  //  result.append("; prev=").append(this.prevLRU==null?"null":"not null");
  //  result.append("; next=").append(this.nextLRU==null?"null":"not null");
  //  return result;
  //}
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // stats code

  // Records a read access: always stamps the access time, then counts it as
  // either a hit or a miss.
  @Override
  public final void updateStatsForGet(boolean hit, long time)
  {
    setLastAccessed(time);
    if (hit) {
      incrementHitCount();
    } else {
      incrementMissCount();
    }
  }
  // A modification also counts as an access unless that coupling is disabled
  // by the DISABLE_ACCESS_TIME_UPDATE_ON_PUT flag (defined in a superclass).
  @Override
  protected final void setLastModified(long lastModified) {
    _setLastModified(lastModified);
    if (!DISABLE_ACCESS_TIME_UPDATE_ON_PUT) {
      setLastAccessed(lastModified);
    }
  }
  private volatile long lastAccessed;
  private volatile int hitCount;
  private volatile int missCount;

  // Field updaters avoid allocating AtomicIntegers per entry.
  private static final AtomicIntegerFieldUpdater<VersionedStatsLRURegionEntryHeapIntKey> hitCountUpdater
    = AtomicIntegerFieldUpdater.newUpdater(VersionedStatsLRURegionEntryHeapIntKey.class, "hitCount");
  private static final AtomicIntegerFieldUpdater<VersionedStatsLRURegionEntryHeapIntKey> missCountUpdater
    = AtomicIntegerFieldUpdater.newUpdater(VersionedStatsLRURegionEntryHeapIntKey.class, "missCount");
  @Override
  public final long getLastAccessed() throws InternalStatisticsDisabledException {
    return this.lastAccessed;
  }
  private void setLastAccessed(long lastAccessed) {
    this.lastAccessed = lastAccessed;
  }
  // Counters are stored as ints but exposed as unsigned 32-bit values widened
  // to long (mask keeps the low 32 bits, discarding sign extension).
  @Override
  public final long getHitCount() throws InternalStatisticsDisabledException {
    return this.hitCount & 0xFFFFFFFFL;
  }
  @Override
  public final long getMissCount() throws InternalStatisticsDisabledException {
    return this.missCount & 0xFFFFFFFFL;
  }
  private void incrementHitCount() {
    hitCountUpdater.incrementAndGet(this);
  }
  private void incrementMissCount() {
    missCountUpdater.incrementAndGet(this);
  }
  @Override
  public final void resetCounts() throws InternalStatisticsDisabledException {
    hitCountUpdater.set(this,0);
    missCountUpdater.set(this,0);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // NOTE: plain field writes here (not the updaters) — matches the generated
  // original; concurrent increments during a tx destroy could be lost.
  @Override
  public final void txDidDestroy(long currTime) {
    setLastModified(currTime);
    setLastAccessed(currTime);
    this.hitCount = 0;
    this.missCount = 0;
  }
  @Override
  public boolean hasStats() {
    return true;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // versioned code

  // Version data is packed to minimize per-entry overhead: the 24-bit entry
  // version is split into a low short and a high byte, and the 48-bit region
  // version into a high short and a low int.
  private VersionSource memberID;
  private short entryVersionLowBytes;
  private short regionVersionHighBytes;
  private int regionVersionLowBytes;
  private byte entryVersionHighByte;
  private byte distributedSystemId;

  // Reassembles the 24-bit entry version from its high byte and low short.
  public int getEntryVersion() {
    return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
  }
  // Reassembles the region version; the mask keeps the low int unsigned.
  public long getRegionVersion() {
    return (((long)regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
  }
  public long getVersionTimeStamp() {
    return getLastModified();
  }
  public void setVersionTimeStamp(long time) {
    setLastModified(time);
  }
  public VersionSource getMemberID() {
    return this.memberID;
  }
  public int getDistributedSystemId() {
    return this.distributedSystemId;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Copies the tag's version data into this entry's packed fields. For
  // non-gateway tags from the same distributed system the timestamp only moves
  // forward (and the tag is back-filled with the newer local timestamp);
  // otherwise the tag's timestamp is taken as-is.
  public void setVersions(VersionTag tag) {
    this.memberID = tag.getMemberID();
    int eVersion = tag.getEntryVersion();
    this.entryVersionLowBytes = (short)(eVersion & 0xffff);
    this.entryVersionHighByte = (byte)((eVersion & 0xff0000) >> 16);
    this.regionVersionHighBytes = tag.getRegionVersionHighBytes();
    this.regionVersionLowBytes = tag.getRegionVersionLowBytes();
    if (!(tag.isGatewayTag()) && this.distributedSystemId == tag.getDistributedSystemId()) {
      if (getVersionTimeStamp() <= tag.getVersionTimeStamp()) {
        setVersionTimeStamp(tag.getVersionTimeStamp());
      } else {
        tag.setVersionTimeStamp(getVersionTimeStamp());
      }
    } else {
      setVersionTimeStamp(tag.getVersionTimeStamp());
    }
    this.distributedSystemId = (byte)(tag.getDistributedSystemId() & 0xff);
  }
  public void setMemberID(VersionSource memberID) {
    this.memberID = memberID;
  }
  // The entry acts as its own version stamp rather than allocating one.
  @Override
  public VersionStamp getVersionStamp() {
    return this;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Materializes this entry's packed version fields into a standalone tag.
  public VersionTag asVersionTag() {
    VersionTag tag = VersionTag.create(memberID);
    tag.setEntryVersion(getEntryVersion());
    tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
    tag.setVersionTimeStamp(getVersionTimeStamp());
    tag.setDistributedSystemId(this.distributedSystemId);
    return tag;
  }
  public void processVersionTag(LocalRegion r, VersionTag tag,
      boolean isTombstoneFromGII, boolean hasDelta,
      VersionSource thisVM, InternalDistributedMember sender, boolean checkForConflicts) {
    basicProcessVersionTag(r, tag, isTombstoneFromGII, hasDelta, thisVM, sender, checkForConflicts);
  }
  @Override
  public void processVersionTag(EntryEvent cacheEvent) {
    // this keeps Eclipse happy. without it the sender chain becomes confused
    // while browsing this code
    super.processVersionTag(cacheEvent);
  }
  /** get rvv internal high byte.  Used by region entries for transferring to storage */
  public short getRegionVersionHighBytes() {
    return this.regionVersionHighBytes;
  }
  /** get rvv internal low bytes.  Used by region entries for transferring to storage */
  public int getRegionVersionLowBytes() {
    return this.regionVersionLowBytes;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // key code

  // The key is stored inline as a primitive, so no key object is retained.
  private final int key;
  @Override
  public final Object getKey() {
    return this.key;
  }
  @Override
  public boolean isKeyEqual(Object k) {
    if (k instanceof Integer) {
      return ((Integer) k).intValue() == this.key;
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
package com.sachin.filemanager.fragments;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.support.design.widget.TextInputLayout;
import android.support.v4.app.DialogFragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.EditText;
import com.sachin.filemanager.R;
import com.sachin.filemanager.activities.MainActivity;
import com.sachin.filemanager.utils.FileListSorter;
import com.sachin.filemanager.utils.FileManagerUtils;
import com.sachin.filemanager.utils.FileUtil;
import com.sachin.filemanager.utils.MainActivityHelper;
import java.io.File;
public class MainDialogFragment extends DialogFragment {
public static final int PICK_SD_PATH_CODE = 122;
public static final int CREATE_FOLDER_DIALOG = 1;
public static final int SORT_TYPE_DIALOG = 2;
public static final int SAF_TYPE_DIALOG = 3;
private File file;
private int sortType;
public static MainDialogFragment newInstance(int dialogType) {
MainDialogFragment fragment = new MainDialogFragment();
Bundle args = new Bundle();
args.putInt("type", dialogType);
fragment.setArguments(args);
return fragment;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
int type = getArguments().getInt("type");
switch (type) {
case CREATE_FOLDER_DIALOG:
return makeCreateFolderDialog();
case SORT_TYPE_DIALOG:
return makeSortTypeDialog();
case SAF_TYPE_DIALOG:
return makeSAFDialog();
default:
return super.onCreateDialog(savedInstanceState);
}
}
private Dialog makeSAFDialog() {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setTitle("Write Access Required");
final View view = View.inflate(getActivity(), R.layout.saf_dialog_layout, null);
builder.setView(view);
builder.setPositiveButton("Open", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Intent intent = new Intent();
intent.setAction(Intent.ACTION_OPEN_DOCUMENT_TREE);
intent.addCategory(Intent.CATEGORY_DEFAULT);
getActivity().startActivityForResult(intent, PICK_SD_PATH_CODE);
}
});
builder.setNegativeButton(getString(android.R.string.cancel), new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismiss();
}
});
return builder.create();
}
private Dialog makeSortTypeDialog() {
AlertDialog.Builder dialog = new AlertDialog.Builder(getActivity());
sortType = FileManagerUtils.getInstance().getSortType();
final String[] sortItems = getActivity().getResources().getStringArray(R.array.sort_types);
dialog.setSingleChoiceItems(sortItems, sortType, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
sortType = i;
}
});
dialog.setPositiveButton("Ascending", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
FileManagerUtils fileManagerUtils = FileManagerUtils.getInstance();
if (fileManagerUtils.getSortType() != sortType || fileManagerUtils.getAscending() != FileListSorter.SORT_ASCENDING)
((MainActivity) getActivity()).getHelper().updateSortSettings(sortType, FileListSorter.SORT_ASCENDING);
dismiss();
}
});
dialog.setNegativeButton("Descending", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
FileManagerUtils fileManagerUtils = FileManagerUtils.getInstance();
if (fileManagerUtils.getSortType() != sortType || fileManagerUtils.getAscending() != FileListSorter.SORT_DESCENDING)
((MainActivity) getActivity()).getHelper().updateSortSettings(sortType, FileListSorter.SORT_DESCENDING);
dismiss();
}
});
AlertDialog alertDialog = dialog.create();
return alertDialog;
}
/**
 * Builds the "create folder" dialog: a text input pre-filled with "New Folder"
 * (selected for quick overtyping), live name validation, and a Create button that
 * makes the directory in the current directory and refreshes the file list.
 *
 * FIX: the Create button's enabled state is now computed as a single predicate
 * (non-empty AND valid). Previously, when the field became empty while the button
 * was already disabled, the else-branch could re-enable it if
 * {@code FileUtil.isValidName("")} returned true.
 */
private Dialog makeCreateFolderDialog() {
    final FileManagerUtils managerUtils = FileManagerUtils.getInstance();
    final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
    final View view = View.inflate(getActivity(), R.layout.input_layout, null);
    builder.setView(view);
    builder.setTitle(R.string.myfiles_create_folder_dialog_title);
    final EditText editText = (EditText) view.findViewById(R.id.input_field);
    final TextInputLayout textInputLayout = (TextInputLayout) view.findViewById(R.id.textInput);
    builder.setPositiveButton("Create", new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int which) {
            File target = new File(managerUtils.getCurrentDirectory(), editText.getText().toString());
            boolean result = FileUtil.mkdir(target);
            MainActivityHelper helper = ((MainActivity) getActivity()).getHelper();
            View snackView = helper.getRecyclerView();
            if (result) {
                Snackbar.make(snackView, editText.getText() + " created successfully!", Snackbar.LENGTH_SHORT).show();
                // Re-read the directory so the new folder shows up immediately.
                helper.updateDirectory(managerUtils.getNextDirectory(managerUtils.getCurrentDirectory(), true));
            } else
                Snackbar.make(snackView, "Can't create folder!", Snackbar.LENGTH_SHORT).show();
            dialog.dismiss();
        }
    });
    builder.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialogInterface, int i) {
            dialogInterface.dismiss();
        }
    });
    builder.setCancelable(false);
    final AlertDialog dialog = builder.create();
    // Pop the soft keyboard as soon as the dialog appears.
    dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE);
    editText.getText().append("New Folder");
    editText.selectAll();
    editText.addTextChangedListener(new TextWatcher() {
        @Override
        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        }
        @Override
        public void onTextChanged(CharSequence s, int start, int before, int count) {
            String name = s.toString();
            boolean valid = FileUtil.isValidName(name);
            Button positive = dialog.getButton(AlertDialog.BUTTON_POSITIVE);
            // Single source of truth for the button state: name must be non-empty AND valid.
            positive.setEnabled(!name.isEmpty() && valid);
            if (!valid) {
                textInputLayout.setError("This name is not valid");
            } else if (textInputLayout.isErrorEnabled()) {
                textInputLayout.setErrorEnabled(false);
            }
        }
        @Override
        public void afterTextChanged(Editable s) {
        }
    });
    editText.requestFocus();
    return dialog;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.resource;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeTaskSplitAdapter;
import org.apache.ignite.lang.IgniteOutClosure;
import org.apache.ignite.lang.IgniteRunnable;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.GridTestKernalContext;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import org.springframework.util.Assert;
/**
* Unit tests for grid resource processor.
*/
@GridCommonTest(group = "Resource Self")
/**
 * Unit tests for grid resource processor.
 */
@GridCommonTest(group = "Resource Self")
public class GridResourceProcessorSelfTest extends GridCommonAbstractTest {
    /** Kernal context with a fresh resource processor, rebuilt before each test. */
    private GridTestKernalContext ctx;

    /** */
    public GridResourceProcessorSelfTest() {
        super(/*start grid*/false);
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        ctx = newContext();
        ctx.add(new GridResourceProcessor(ctx));
        ctx.start();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        ctx.stop(true);
    }

    /** */
    @Target({ElementType.METHOD, ElementType.FIELD})
    @Retention(RetentionPolicy.RUNTIME)
    private static @interface TestAnnotation {
        // No-op.
    }

    /** */
    @Target({ElementType.METHOD, ElementType.FIELD})
    @Retention(RetentionPolicy.RUNTIME)
    private static @interface TestAnnotation2 {
        // No-op.
    }

    /** */
    @Target({ElementType.METHOD, ElementType.FIELD})
    @Retention(RetentionPolicy.RUNTIME)
    private static @interface TestAnnotation3 {
        // No-op.
    }

    /** */
    private static class TestClassWithAnnotatedField {
        /** */
        @TestAnnotation
        private String str;

        /**
         * @return Value of the field.
         */
        public String getStr() {
            return str;
        }

        /**
         * @param str New value.
         */
        public void setStr(String str) {
            this.str = str;
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testInjectResourceToAnnotatedField() throws Exception {
        TestClassWithAnnotatedField target = new TestClassWithAnnotatedField();

        String testStr = Long.toString(System.currentTimeMillis());

        ctx.resource().injectBasicResource(target, TestAnnotation.class, testStr);

        assertEquals(testStr, target.str);

        ctx.resource().injectBasicResource(target, TestAnnotation2.class, "Some another string.");

        // Value should not be updated: the field carries TestAnnotation, not TestAnnotation2.
        assertEquals(testStr, target.str);
    }

    /** */
    private static class TestClassWithAnnotatedMethod {
        /** */
        private String str;

        /**
         * @param str New value of the field.
         */
        @TestAnnotation
        void setStr(String str) {
            this.str = str;
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testInjectResourceToAnnotatedMethod() throws Exception {
        TestClassWithAnnotatedMethod target = new TestClassWithAnnotatedMethod();

        String testStr = Long.toString(System.currentTimeMillis());

        ctx.resource().injectBasicResource(target, TestAnnotation.class, testStr);

        assertEquals(testStr, target.str);

        ctx.resource().injectBasicResource(target, TestAnnotation2.class, "Some another string.");

        // Value should not be updated: the setter carries TestAnnotation, not TestAnnotation2.
        assertEquals(testStr, target.str);
    }

    /** */
    private static class TestClassWithAnnotationsOuter {
        /** */
        @TestAnnotation
        private String str1;

        /** */
        @TestAnnotation
        private String str2;

        /** */
        @TestAnnotation3
        private String str7;

        /**
         * @param str1 New value.
         */
        @TestAnnotation2
        public void setValue1(String str1) {
            this.str1 = str1;
        }

        /**
         * @param str2 New value.
         */
        @TestAnnotation2
        public void setValue2(String str2) {
            this.str2 = str2;
        }

        /** */
        private class TestClassWithAnnotationsInner {
            /** */
            @TestAnnotation
            private String str3;

            /** */
            @TestAnnotation
            private String str4;

            /**
             * @param str3 New value.
             */
            @TestAnnotation2
            public void setValue3(String str3) { this.str3 = str3; }

            /**
             * @param str4 New value.
             */
            @TestAnnotation2
            public void setValue4(String str4) { this.str4 = str4; }

            /** */
            private class TestClassWithAnnotationsDeep {
                /** */
                @TestAnnotation
                private String str5;

                /** */
                @TestAnnotation
                private String str6;

                /** */
                private Callable<String> c = new Callable<String>() {
                    @TestAnnotation
                    private String cStr;

                    private Runnable r = new Runnable() {
                        @TestAnnotation
                        private String rStr;

                        @Override public void run() {
                            assert cStr != null;

                            assertEquals(cStr, rStr);
                        }
                    };

                    @Override public String call() throws Exception {
                        assert str5 != null;

                        assertEquals(str5, cStr);

                        r.run();

                        return cStr;
                    }
                };

                /**
                 * @param str5 New value.
                 */
                @TestAnnotation2
                public void setValue5(String str5) { this.str5 = str5; }

                /**
                 * @param str6 New value.
                 */
                @TestAnnotation2
                public void setValue6(String str6) { this.str6 = str6; }
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testInjectResourceInnerClasses() throws Exception {
        // Test fields.
        TestClassWithAnnotationsOuter outer = new TestClassWithAnnotationsOuter();

        TestClassWithAnnotationsOuter.TestClassWithAnnotationsInner inner = outer.new TestClassWithAnnotationsInner();

        TestClassWithAnnotationsOuter.TestClassWithAnnotationsInner.TestClassWithAnnotationsDeep deep =
            inner.new TestClassWithAnnotationsDeep();

        String testStr = Long.toString(System.currentTimeMillis());

        ctx.resource().injectBasicResource(deep, TestAnnotation.class, testStr);

        assertEquals(testStr, outer.str1);
        assertEquals(testStr, outer.str2);
        assertEquals(testStr, inner.str3);
        assertEquals(testStr, inner.str4);
        assertEquals(testStr, deep.str5);
        assertEquals(testStr, deep.str6);

        // Check if all resources have been injected to nested callable and runnable.
        deep.c.call();

        // Test methods.
        outer = new TestClassWithAnnotationsOuter();

        inner = outer.new TestClassWithAnnotationsInner();

        deep = inner.new TestClassWithAnnotationsDeep();

        ctx.resource().injectBasicResource(deep, TestAnnotation2.class, testStr);

        assertEquals(testStr, outer.str1);
        assertEquals(testStr, outer.str2);
        assertEquals(testStr, inner.str3);
        assertEquals(testStr, inner.str4);
        assertEquals(testStr, deep.str5);
        assertEquals(testStr, deep.str6);

        assertNull(outer.str7);

        ctx.resource().injectBasicResource(deep, TestAnnotation3.class, testStr);

        assertEquals(testStr, outer.str7);
    }

    /**
     * Test task.
     */
    @SuppressWarnings({"PublicInnerClass"})
    public static class TestTask extends ComputeTaskSplitAdapter<Object, Object> {
        /** */
        @LoggerResource
        private IgniteLogger taskLog;

        /**
         * @return Task resource.
         */
        public IgniteLogger getTaskLog() {
            return taskLog;
        }

        /**
         * Creates a single job.
         *
         * @param gridSize Grid size.
         * @param arg Task argument.
         */
        @Override protected Collection<? extends ComputeJob> split(int gridSize, Object arg) {
            assert taskLog != null;

            final IgniteOutClosure<Object> callable = new IgniteOutClosure<Object>() {
                /** Should be injected despite this is a {@link Callable} instance nested in a job. */
                @IgniteInstanceResource
                private Ignite grid;

                /** Runnable object nested inside callable. */
                private Runnable run = new IgniteRunnable() {
                    @IgniteInstanceResource
                    private Ignite ignite;

                    @Override public void run() {
                        assert ignite != null;
                        assert ignite.configuration() != null;
                        assert ignite.configuration().getIgniteHome() != null;
                    }
                };

                @Override public Object apply() {
                    assert grid != null;

                    run.run();

                    return new Object();
                }
            };

            return Collections.singleton(new ComputeJobAdapter() {
                @Nullable @Override public Object execute() {
                    return callable.apply();
                }
            });
        }

        /** {@inheritDoc} */
        @Override public Object reduce(List<ComputeJobResult> results) {
            assert results.size() == 1;

            return results.get(0).getData();
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testInjectResourceGridTaskAndJob() throws Exception {
        Ignite g = startGrid();

        try {
            // Should not be null if task has been completed successfully (meaning all resources have been injected).
            // Uses the JUnit assertion inherited from the base test class instead of Spring's
            // Assert.notNull(Object), whose single-argument form is deprecated since Spring 5.
            assertNotNull(g.compute().execute(TestTask.class, null));
        }
        finally {
            stopGrid();
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testInjectResourcePerformance() throws Exception {
        int injNum = 50000;

        long start = System.currentTimeMillis();

        TestClassWithAnnotatedField target = new TestClassWithAnnotatedField();

        for (int i = 0; i < injNum; i++)
            ctx.resource().injectBasicResource(target, TestAnnotation.class, "Test string.");

        long duration = System.currentTimeMillis() - start;

        info("Resource injection takes " + ((double)duration / injNum) + " msec per target object.");
    }

    /**
     * @throws Exception If failed.
     */
    @SuppressWarnings("TooBroadScope")
    @Test
    public void testInjectResourceMultiThreaded() throws Exception {
        final int threadsCnt = 100;
        final int iters = 2000000;

        ctx = newContext();

        ctx.add(new GridResourceProcessor(ctx));

        ctx.start();

        try {
            GridTestUtils.runMultiThreaded(new Runnable() {
                @Override public void run() {
                    try {
                        Test1 obj = new Test1();

                        long start = System.currentTimeMillis();

                        for (int i = 0; i < iters; i++)
                            ctx.resource().injectBasicResource(obj, TestAnnotation1.class, "value");

                        long duration = (System.currentTimeMillis() - start);

                        float avgInjectTime = Math.round(1000.0f * duration / iters) / 1000.0f;

                        info("Finished load test [avgInjectTime=" + avgInjectTime +
                            "ms, duration=" + duration + "ms, count=" + iters + ']');
                    }
                    catch (IgniteCheckedException e) {
                        fail("Failed to inject resources: " + e.getMessage());
                    }
                }
            }, threadsCnt, "grid-ioc-test");
        }
        finally {
            ctx.stop(true);
        }
    }

    /**
     *
     */
    @Documented
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ElementType.METHOD, ElementType.FIELD})
    private @interface TestAnnotation1 {
        // No-op.
    }

    /**
     *
     */
    private static final class Test1 {
        /** */
        @SuppressWarnings({"unused"})
        @TestAnnotation1
        private String val1;
    }
}
| |
package snisi.entities;
import java.util.Vector;
import snisi.entities.EntityHashTable;
/**
* List of static codes and names for Entities/Locations
* Automatically generated.
*/
/**
 * Static codes and names for the YELIMANE district (code DAG4) entity tree.
 * Originally generated as one flat constructor; the repeated node-building code
 * is factored into {@link #addArea}, which preserves the original child order
 * and tree shape exactly.
 */
public class EntityHashTableDAG4 extends EntityHashTable {

    public EntityHashTableDAG4() {
        this.code = "DAG4";
        this.name = "YELIMANE";
        this.children = new Vector();
        // District referral centre has no village children.
        addArea("XCE8", "YELIMANE CSREF", new String[][] {});
        addArea("KRY2", "BANDIOUGOULA", new String[][] {
            {"17029001", "BANDIOUGOULA"},
            {"17029148", "DJEMAEL"},
            {"17029540", "LAWORNATT"},
            {"17029665", "MADINA"}});
        addArea("DGD9", "BILADJIMI", new String[][] {
            {"17022001", "BILADJIMI"},
            {"17022694", "SABOUCIRE"},
            {"17022892", "TAKOUTALA"},
            {"17022033", "BIDADJI"},
            {"17022812", "GUEMOU KASSE"}});
        addArea("P5P2", "DIALAKA", new String[][] {
            {"17071250", "DIALAKA"}});
        addArea("SF50", "DIONCOULANE", new String[][] {
            {"17029197", "DIONCOULANE"},
            {"17029198", "FADJIGUILA"},
            {"17029589", "MADINA GORBEL"},
            {"17029071", "FONGOU"},
            {"17029532", "DARSALAM"}});
        addArea("TBF0", "DIONGAGA", new String[][] {
            {"17001249", "GUINANGOUROU"},
            {"17001373", "KARDIDI"},
            {"17001497", "NIAGNELA"},
            {"17001621", "SALAKA"},
            {"17001745", "SORFA BERELA"},
            {"17001869", "TAHANA A DIAM"},
            {"17001001", "DIONGAGA"},
            {"17001125", "GUEMOU"}});
        addArea("YS55", "DOGOFRI", new String[][] {
            {"17057001", "DIABOUGOU"},
            {"17057334", "DIAKADROMOU"},
            {"17057667", "DOGOFIRY"}});
        addArea("3Y28", "FANGA", new String[][] {
            {"17015001", "DIARIKA"},
            {"17015250", "DJENGUERE"},
            {"17015499", "FANGA"},
            {"17015748", "TANGO"}});
        addArea("GJK4", "GORY", new String[][] {
            {"17022397", "GORY"}});
        addArea("46H8", "HAMDALLAYE YELIMANE", new String[][] {
            {"17036199", "HAMDALAYE"},
            {"17036397", "KIDENDJI"},
            {"17036100", "FANCOURA"}});
        addArea("28D7", "KANGUESSANOU", new String[][] {
            {"17029295", "GAWA"},
            {"17029344", "GNINENGOMA"},
            {"17029442", "KANGUESSANOU"},
            {"17029311", "MANGORO"}});
        addArea("SCE9", "KERSIGNANE", new String[][] {
            {"17043001", "BEDIARA"},
            {"17043334", "KERSIGNANE"},
            {"17043667", "KOMODINDE"}});
        addArea("Z4H2", "KERSIGNANE KANIAGA", new String[][] {
            {"17050520", "KERSIGNANE"},
            {"17050766", "BOUGOUDERE"},
            {"17050950", "MANTHIA"}});
        addArea("93T7", "KIRANE", new String[][] {
            {"17036496", "KIRANE"},
            {"17036595", "KOROMPO"}});
        addArea("R9N8", "KODJE", new String[][] {
            {"17029491", "KODIE"}});
        addArea("4KB8", "KREMIS", new String[][] {
            {"17050001", "KAKOULOU"},
            {"17050200", "KREMIS"},
            {"17050399", "SENEWALY-DIALLOUBE"},
            {"17050598", "SENEWALY-TORDONABE"},
            {"17050797", "SENEWALY TASSARNABE"}});
        addArea("RPN5", "LAKANGUEMOU", new String[][] {
            {"17036694", "LAKANGUEMOU"}});
        addArea("5MJ4", "LAMBATRA", new String[][] {
            {"17071499", "LAMBATARA"}});
        addArea("7NW0", "MARENA TRINGA", new String[][] {
            {"17071001", "DIAKONE"},
            {"17071748", "MARENA"}});
        addArea("57T4", "NOGOMERA", new String[][] {
            {"17029687", "NIOGOMERA"},
            {"17029688", "KARAMA"}});
        addArea("JNF0", "OUOLOGUELA", new String[][] {
            {"17008001", "BANGASSI"},
            {"17008229", "GUIFFI"},
            {"17008609", "MOUNIA"},
            {"17008685", "OULEGUELA"},
            {"17008761", "SAKARADJI"}});
        addArea("SPF6", "SAMBAGA", new String[][] {
            {"17008837", "SAMBAGA"}});
        addArea("ZGY0", "TAKABA", new String[][] {
            {"17064001", "BOUGOUNDIA"},
            {"17064200", "NIAGNELA"},
            {"17064399", "NIAKATELA"},
            {"17064598", "TAGADONGA"},
            {"17064797", "TAKABA"}});
        addArea("AFW6", "TAMBACARA", new String[][] {
            {"17008077", "GHAKE FILY"},
            {"17008153", "GUIDEOURA"},
            {"17008305", "KOMEOULOU"},
            {"17008457", "LEE SARAKOLE"},
            {"17008913", "TAMBACARA"},
            {"17008409", "LEE AHMET DIALLO"}});
        addArea("ZAX6", "YAGUINE", new String[][] {
            {"17091748", "YAGUINE"},
            {"17091606", "SAMBACANOU"}});
        addArea("3MD6", "YELIMANE CENTRAL", new String[][] {
            {"17029999", "GUEMOU-NEUF"},
            {"17029050", "DIABAGUELA"},
            {"17029099", "DIADJIE"},
            {"17029638", "MAKANA"},
            {"17029736", "TOPOKANE"},
            {"17029785", "YAGUINE-BANDA"},
            {"17029834", "YARKA"},
            {"17029883", "YELIMANE"},
            {"17029932", "YELIMANE-SEBE"},
            {"17029450", "BOUBARA"},
            {"17029337", "GORY BANDA"}});
        addArea("2N97", "WAIKANOU", new String[][] {
            {"17036892", "WAIKANOU"},
            {"17036897", "SABOUCIRE"}});
        addArea("JZZ4", "MOUSSALA", new String[][] {
            {"17071252", "MOUSSALA"}});
    }

    /**
     * Creates a health-area node with the given village children (each a
     * {code, name} pair, appended in array order) and adds it to this
     * district's child list — exactly what the generated code did inline.
     */
    private void addArea(String areaCode, String areaName, String[][] villages) {
        EntityHashTable area = new EntityHashTable(areaCode, areaName);
        for (int i = 0; i < villages.length; i++) {
            area.children.addElement(new EntityHashTable(villages[i][0], villages[i][1]));
        }
        this.children.addElement(area);
    }
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.datastore;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import lombok.AllArgsConstructor;
import org.hisp.dhis.common.IllegalQueryException;
import org.hisp.dhis.datastore.DatastoreNamespaceProtection.ProtectionType;
import org.hisp.dhis.render.RenderService;
import org.hisp.dhis.security.acl.AclService;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Stian Sandvold (initial)
* @author Jan Bernitt (namespace protection)
*/
@AllArgsConstructor
@Service
public class DefaultDatastoreService
implements DatastoreService
{
/** Protection rules keyed by namespace; concurrent since rules can be (un)registered at runtime. */
private final Map<String, DatastoreNamespaceProtection> protectionByNamespace = new ConcurrentHashMap<>();

/** Persistence layer for datastore entries. */
private final DatastoreStore store;

/** Resolves the user on whose behalf protection checks are evaluated. */
private final CurrentUserService currentUserService;

/** Used for per-entry sharing checks when a protection respects sharing. */
private final AclService aclService;

private final RenderService renderService;
/**
 * Registers (or replaces) the protection rules for the protection's namespace.
 */
@Override
public void addProtection( DatastoreNamespaceProtection protection )
{
    protectionByNamespace.put( protection.getNamespace(), protection );
}
/**
 * Unregisters any protection rules for the namespace; a no-op when none exist.
 */
@Override
public void removeProtection( String namespace )
{
    protectionByNamespace.remove( namespace );
}
/**
 * Lists all existing namespaces, omitting those hidden from the current user.
 */
@Override
@Transactional( readOnly = true )
public List<String> getNamespaces()
{
    Stream<String> namespaces = store.getNamespaces().stream();

    return namespaces.filter( ns -> isNamespaceVisible( ns ) ).collect( toList() );
}
/**
 * Whether the namespace holds at least one key. Namespaces hidden from the
 * current user read as unused (false).
 */
@Override
@Transactional( readOnly = true )
public boolean isUsedNamespace( String namespace )
{
    Supplier<Boolean> hasAnyKey = () -> store.countKeysInNamespace( namespace ) > 0;

    return readProtectedIn( namespace, false, hasAnyKey );
}
/**
 * Lists keys in the namespace updated after {@code lastUpdated}; a namespace
 * hidden from the current user yields an empty list rather than an error.
 */
@Override
@Transactional( readOnly = true )
public List<String> getKeysInNamespace( String namespace, Date lastUpdated )
{
    Supplier<List<String>> keys = () -> store.getKeysInNamespace( namespace, lastUpdated );

    return readProtectedIn( namespace, emptyList(), keys );
}
/**
 * Runs a validated field query and hands the matching rows to {@code transform};
 * a hidden namespace yields {@code null}.
 */
@Override
@Transactional( readOnly = true )
public <T> T getFields( DatastoreQuery query, Function<Stream<DatastoreFields>, T> transform )
{
    DatastoreQueryValidator.validate( query );

    String namespace = query.getNamespace();

    return readProtectedIn( namespace, null, () -> store.getFields( query, transform ) );
}
/**
 * Validates the query; no rewriting or optimisation is currently performed,
 * so the same instance is returned.
 *
 * @throws IllegalQueryException when the query is not valid
 */
@Override
public DatastoreQuery plan( DatastoreQuery query )
    throws IllegalQueryException
{
    DatastoreQueryValidator.validate( query );

    return query;
}
/**
 * Fetches a single entry; {@code null} when the key does not exist or the
 * namespace is hidden from the current user.
 */
@Override
@Transactional( readOnly = true )
public DatastoreEntry getEntry( String namespace, String key )
{
    Supplier<DatastoreEntry> lookup = () -> store.getEntry( namespace, key );

    return readProtectedIn( namespace, null, lookup );
}
/**
 * Adds a brand-new entry; fails when the key already exists in the namespace.
 * NOTE(review): the check-then-save is not atomic — presumably a unique
 * constraint in the store backs this up; confirm.
 */
@Override
@Transactional
public void addEntry( DatastoreEntry entry )
{
    String namespace = entry.getNamespace();
    String key = entry.getKey();

    if ( getEntry( namespace, key ) != null )
    {
        throw new IllegalStateException( String.format(
            "Key '%s' already exists in namespace '%s'", key, namespace ) );
    }

    validateEntry( entry );

    writeProtectedIn( namespace, () -> singletonList( entry ), () -> store.save( entry ) );
}
/**
 * Updates an existing entry after validation, honouring write protection.
 */
@Override
@Transactional
public void updateEntry( DatastoreEntry entry )
{
    validateEntry( entry );

    String namespace = entry.getNamespace();

    writeProtectedIn( namespace, () -> singletonList( entry ), () -> store.update( entry ) );
}
/**
 * Upserts an entry: creates it when the key is new, otherwise copies the new
 * value onto the existing (managed) entry and updates that.
 */
@Override
@Transactional
public void saveOrUpdateEntry( DatastoreEntry entry )
{
    validateEntry( entry );

    DatastoreEntry existing = getEntry( entry.getNamespace(), entry.getKey() );

    if ( existing == null )
    {
        writeProtectedIn( entry.getNamespace(),
            () -> singletonList( entry ),
            () -> store.save( entry ) );

        return;
    }

    // Update path: mutate the managed entry so the store updates the right row.
    existing.setValue( entry.getValue() );

    writeProtectedIn( entry.getNamespace(),
        () -> singletonList( existing ),
        () -> store.update( existing ) );
}
/**
 * Deletes every entry in the namespace. When the namespace's protection
 * respects sharing, every contained entry is sharing-checked first.
 */
@Override
@Transactional
public void deleteNamespace( String namespace )
{
    Supplier<List<DatastoreEntry>> allEntries = () -> store.getEntryByNamespace( namespace );

    writeProtectedIn( namespace, allEntries, () -> store.deleteNamespace( namespace ) );
}
/**
 * Deletes a single entry, honouring the namespace's write protection.
 */
@Override
@Transactional
public void deleteEntry( DatastoreEntry entry )
{
    String namespace = entry.getNamespace();

    writeProtectedIn( namespace, () -> singletonList( entry ), () -> store.delete( entry ) );
}
/**
 * Runs a read operation guarded by the namespace's protection rules.
 * <p>
 * The read is performed when the namespace is unprotected, reads are
 * {@code NONE}, or the current user has one of the required authorities.
 * {@code RESTRICTED} reads without authority fail loudly; {@code HIDDEN}
 * reads without authority silently yield the fallback value.
 *
 * @param namespace the namespace being read
 * @param whenHidden fallback result returned when the namespace is hidden
 *        from the current user
 * @param read the actual read operation
 * @return the read result or the hidden fallback
 * @throws AccessDeniedException when reads are RESTRICTED and the user lacks
 *         the required authority, or entry-level sharing denies the read
 */
private <T> T readProtectedIn( String namespace, T whenHidden, Supplier<T> read )
{
    DatastoreNamespaceProtection protection = protectionByNamespace.get( namespace );
    if ( protection == null
        || protection.getReads() == ProtectionType.NONE
        || currentUserHasAuthority( protection.getAuthorities() ) )
    {
        T res = read.get();
        // when sharing is respected, a single-entry result is additionally
        // checked against the entry's sharing (ACL) settings
        if ( res instanceof DatastoreEntry && protection != null && protection.isSharingRespected() )
        {
            DatastoreEntry entry = (DatastoreEntry) res;
            if ( !aclService.canRead( currentUserService.getCurrentUser(), entry ) )
            {
                throw new AccessDeniedException( String.format(
                    "Access denied for key '%s' in namespace '%s'", entry.getKey(), namespace ) );
            }
        }
        return res;
    }
    else if ( protection.getReads() == ProtectionType.RESTRICTED )
    {
        throw accessDeniedTo( namespace );
    }
    // HIDDEN: pretend the namespace does not exist
    return whenHidden;
}
/**
 * Runs a write operation guarded by the namespace's protection rules.
 * <p>
 * The write is performed when the namespace is unprotected, writes are
 * {@code NONE}, or the current user has one of the required authorities
 * (and, when sharing is respected, write access to every affected entry).
 * {@code RESTRICTED} writes without authority fail loudly; {@code HIDDEN}
 * writes without authority are silently skipped.
 *
 * @param namespace the namespace being written to
 * @param whenSharing supplier of the entries affected by the write, only
 *        evaluated when entry-level sharing must be checked
 * @param write the actual write operation
 * @throws AccessDeniedException when writes are RESTRICTED and the user
 *         lacks the required authority, or sharing denies a write
 */
private void writeProtectedIn( String namespace, Supplier<List<DatastoreEntry>> whenSharing, Runnable write )
{
    DatastoreNamespaceProtection protection = protectionByNamespace.get( namespace );
    if ( protection == null || protection.getWrites() == ProtectionType.NONE )
    {
        write.run();
    }
    else if ( currentUserHasAuthority( protection.getAuthorities() ) )
    {
        // might also need to check sharing
        if ( protection.isSharingRespected() )
        {
            for ( DatastoreEntry entry : whenSharing.get() )
            {
                if ( !aclService.canWrite( currentUserService.getCurrentUser(), entry ) )
                {
                    throw accessDeniedTo( namespace, entry.getKey() );
                }
            }
        }
        write.run();
    }
    else if ( protection.getWrites() == ProtectionType.RESTRICTED )
    {
        throw accessDeniedTo( namespace );
    }
    // HIDDEN: the operation silently just isn't run
}
/**
 * Creates (but does not throw) an exception for denied access to an entire
 * namespace.
 *
 * @param namespace the protected namespace
 * @return the exception to throw
 */
private AccessDeniedException accessDeniedTo( String namespace )
{
    return new AccessDeniedException( String.format(
        "Namespace '%s' is protected, access denied", namespace ) );
}
/**
 * Creates (but does not throw) an exception for denied access to a single
 * entry within a namespace.
 *
 * @param namespace the namespace of the entry
 * @param key the key of the entry access was denied to
 * @return the exception to throw
 */
private AccessDeniedException accessDeniedTo( String namespace, String key )
{
    return new AccessDeniedException( String.format(
        "Access denied for key '%s' in namespace '%s'", key, namespace ) );
}
/**
 * Determines whether the given namespace is visible to the current user.
 * Unprotected namespaces are always visible; protected ones are visible
 * unless reads are HIDDEN and the user lacks the required authority.
 *
 * @param namespace the namespace to check
 * @return true when the current user may see the namespace
 */
private boolean isNamespaceVisible( String namespace )
{
    DatastoreNamespaceProtection rules = protectionByNamespace.get( namespace );
    if ( rules == null )
    {
        // no protection configured: always visible
        return true;
    }
    return rules.getReads() != ProtectionType.HIDDEN
        || currentUserHasAuthority( rules.getAuthorities() );
}
/**
 * Determines whether the current user holds at least one of the given
 * authorities. Superusers always qualify; with no current user or an empty
 * authority set the answer is false.
 *
 * @param authorities the authorities of which one is required
 * @return true when the current user is a superuser or has any of the
 *         given authorities
 */
private boolean currentUserHasAuthority( Set<String> authorities )
{
    User user = currentUserService.getCurrentUser();
    if ( user == null )
    {
        return false;
    }
    if ( user.isSuper() )
    {
        // superusers implicitly hold every authority
        return true;
    }
    return !authorities.isEmpty() && user.hasAnyAuthority( authorities );
}
/**
 * Validates that the entry's value, when present, is well-formed JSON.
 * A null value is considered valid.
 *
 * @param entry the entry whose value is validated
 * @throws IllegalArgumentException when the value is present but not valid JSON
 */
private void validateEntry( DatastoreEntry entry )
{
    String value = entry.getValue();
    if ( value == null )
    {
        // absent values are allowed; only present values must parse as JSON
        return;
    }
    try
    {
        if ( !renderService.isValidJson( value ) )
        {
            throw new IllegalArgumentException( String.format(
                "Invalid JSON value for key '%s'", entry.getKey() ) );
        }
    }
    catch ( IOException ex )
    {
        // a parse error while checking also counts as invalid JSON
        throw new IllegalArgumentException( String.format(
            "Invalid JSON value for key '%s'", entry.getKey() ), ex );
    }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/GetContextKeysForPrincipalPolicy"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Request object for the IAM {@code GetContextKeysForPrincipalPolicy} operation.
 * <p>
 * Carries the ARN of the policy source entity (a user, group, or role) whose
 * attached policies should be scanned for context keys, plus an optional list
 * of additional policy documents to include in the scan. ARN parameters must
 * be URL encoded when sent as part of a real request; see the
 * <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">AWS
 * General Reference</a> for details on ARN formats.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetContextKeysForPrincipalPolicyRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * ARN of the user, group, or role whose policies contain the context keys
     * to list. For a user, policies attached to the user's groups are scanned
     * as well.
     */
    private String policySourceArn;

    /**
     * Optional additional policy documents whose referenced context keys
     * should also be listed. Each document is a string of printable ASCII /
     * Latin-1 characters plus tab, line feed, and carriage return.
     */
    private com.amazonaws.internal.SdkInternalList<String> policyInputList;

    /**
     * Sets the ARN of the user, group, or role whose policies contain the
     * context keys that you want listed.
     *
     * @param policySourceArn
     *        the policy source ARN
     */
    public void setPolicySourceArn(String policySourceArn) {
        this.policySourceArn = policySourceArn;
    }

    /**
     * Returns the ARN of the user, group, or role whose policies contain the
     * context keys that you want listed.
     *
     * @return the policy source ARN, or null if not set
     */
    public String getPolicySourceArn() {
        return this.policySourceArn;
    }

    /**
     * Fluent variant of {@link #setPolicySourceArn(String)}.
     *
     * @param policySourceArn
     *        the policy source ARN
     * @return this request, for call chaining
     */
    public GetContextKeysForPrincipalPolicyRequest withPolicySourceArn(String policySourceArn) {
        setPolicySourceArn(policySourceArn);
        return this;
    }

    /**
     * Returns the optional list of additional policy documents, lazily
     * creating an empty list on first access so this method never returns
     * null.
     *
     * @return the (possibly empty) list of additional policy documents
     */
    public java.util.List<String> getPolicyInputList() {
        if (policyInputList == null) {
            policyInputList = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return policyInputList;
    }

    /**
     * Replaces the list of additional policy documents with a copy of the
     * given collection; passing null clears the list.
     *
     * @param policyInputList
     *        the additional policy documents, or null to clear
     */
    public void setPolicyInputList(java.util.Collection<String> policyInputList) {
        if (policyInputList == null) {
            this.policyInputList = null;
            return;
        }
        this.policyInputList = new com.amazonaws.internal.SdkInternalList<String>(policyInputList);
    }

    /**
     * Appends the given policy documents to the existing list (if any). Use
     * {@link #setPolicyInputList(java.util.Collection)} or
     * {@link #withPolicyInputList(java.util.Collection)} to replace the list
     * instead.
     *
     * @param policyInputList
     *        the additional policy documents to append
     * @return this request, for call chaining
     */
    public GetContextKeysForPrincipalPolicyRequest withPolicyInputList(String... policyInputList) {
        if (this.policyInputList == null) {
            this.policyInputList = new com.amazonaws.internal.SdkInternalList<String>(policyInputList.length);
        }
        java.util.Collections.addAll(this.policyInputList, policyInputList);
        return this;
    }

    /**
     * Fluent variant of {@link #setPolicyInputList(java.util.Collection)};
     * replaces (does not append to) the existing list.
     *
     * @param policyInputList
     *        the additional policy documents, or null to clear
     * @return this request, for call chaining
     */
    public GetContextKeysForPrincipalPolicyRequest withPolicyInputList(java.util.Collection<String> policyInputList) {
        setPolicyInputList(policyInputList);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for
     * testing and debugging. Sensitive data will be redacted from this string
     * using a placeholder value.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPolicySourceArn() != null) {
            sb.append("PolicySourceArn: ").append(getPolicySourceArn()).append(",");
        }
        if (getPolicyInputList() != null) {
            sb.append("PolicyInputList: ").append(getPolicyInputList());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetContextKeysForPrincipalPolicyRequest)) {
            return false;
        }
        GetContextKeysForPrincipalPolicyRequest that = (GetContextKeysForPrincipalPolicyRequest) obj;
        // accessors are used deliberately: getPolicyInputList() lazily
        // initializes, so both sides compare non-null lists
        return java.util.Objects.equals(getPolicySourceArn(), that.getPolicySourceArn())
                && java.util.Objects.equals(getPolicyInputList(), that.getPolicyInputList());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based element-wise combination as
        // the conventional prime-accumulator loop
        return java.util.Objects.hash(getPolicySourceArn(), getPolicyInputList());
    }

    @Override
    public GetContextKeysForPrincipalPolicyRequest clone() {
        return (GetContextKeysForPrincipalPolicyRequest) super.clone();
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DeleteSnapshotResponseType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* DeleteSnapshotResponseType bean class
*/
public class DeleteSnapshotResponseType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = DeleteSnapshotResponseType
Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
Namespace Prefix = ns1
*/
    /**
     * Returns the XML prefix to use for the given namespace: the fixed
     * prefix "ns1" for the EC2 2009-10-31 schema namespace, otherwise a
     * freshly generated, collision-free prefix.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }
    /**
     * Field for RequestId. Required by {@link #serialize}: serialization
     * throws when this is null.
     */
    protected java.lang.String localRequestId ;

    /**
     * Auto generated getter method.
     * @return java.lang.String the request id, or null if never set
     */
    public java.lang.String getRequestId(){
        return localRequestId;
    }

    /**
     * Auto generated setter method.
     * @param param RequestId
     */
    public void setRequestId(java.lang.String param){
        this.localRequestId=param;
    }
    /**
     * Field for the "return" element. Primitive boolean, defaults to false,
     * so it can never be "unset" and always serializes.
     */
    protected boolean local_return ;

    /**
     * Auto generated getter method.
     * @return boolean the value of the "return" element
     */
    public boolean get_return(){
        return local_return;
    }

    /**
     * Auto generated setter method.
     * @param param _return
     */
    public void set_return(boolean param){
        this.local_return=param;
    }
    /**
     * Determines whether the given reader is MTOM (data handler) aware.
     *
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            // some parsers throw for unknown property names; treat as not aware
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }
    /**
     * Wraps this bean in a lazily-serialized OMElement. The returned element
     * invokes {@link #serialize} only when its content is actually demanded.
     *
     * @param parentQName qualified name for the wrapping element
     * @param factory factory used to build the element
     * @return org.apache.axiom.om.OMElement backed by this bean
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                DeleteSnapshotResponseType.this.serialize(parentQName,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }
    /**
     * Serializes this bean as an XML element named by parentQName, without
     * writing an xsi:type attribute.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
if ((namespace != null) && (namespace.trim().length() > 0)) {
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
} else {
if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
} else {
xmlWriter.writeStartElement(parentQName.getLocalPart());
}
if (serializeType){
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":DeleteSnapshotResponseType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"DeleteSnapshotResponseType",
xmlWriter);
}
}
namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"requestId", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"requestId");
}
} else {
xmlWriter.writeStartElement("requestId");
}
if (localRequestId==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
}else{
xmlWriter.writeCharacters(localRequestId);
}
xmlWriter.writeEndElement();
namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"return", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"return");
}
} else {
xmlWriter.writeStartElement("return");
}
if (false) {
throw new org.apache.axis2.databinding.ADBException("return cannot be null!!");
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));
}
xmlWriter.writeEndElement();
xmlWriter.writeEndElement();
}
    /**
     * Util method to write an attribute with the given ns prefix, declaring
     * the prefix on the writer first if the namespace is not yet bound.
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }
    /**
     * Util method to write an attribute without an explicit ns prefix; a
     * prefix is registered on demand for non-empty namespaces.
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }
    /**
     * Util method to write an attribute whose value is a QName, rendering
     * the value as "prefix:localPart" (or just the local part when the
     * QName's namespace maps to the empty prefix).
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }
    /**
     * Method to handle QNames: writes the QName as character data in
     * "prefix:value" form, declaring a prefix for its namespace on demand.
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e. this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }
    /**
     * Writes an array of QNames as a single space-separated character run,
     * declaring prefixes for their namespaces on demand.
     */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until the last moment since it is not
            // possible to write any namespace data after writing the
            // character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }
    /**
     * Register a namespace prefix: returns the prefix already bound to the
     * namespace, or binds and declares a new unique one.
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // keep generating until the prefix does not collide with an
            // existing binding in the writer's namespace context
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }
    /**
     * Databinding method to get an XML pull-parser representation of this
     * object: alternating element QNames and string values for requestId and
     * return, fed to an ADB stream reader.
     *
     * @throws org.apache.axis2.databinding.ADBException when requestId is null
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                "requestId"));
        if (localRequestId != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRequestId));
        } else {
            // required element: refuse to expose a nil value
            throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
        }
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                "return"));
        elementList.add(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }
/**
 * Factory class that keeps the parse method
 */
public static class Factory{
    /**
     * static method to create the object
     * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     * If this object is a complex type, the reader is positioned at the end element of its outer element
     */
    public static DeleteSnapshotResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
        DeleteSnapshotResponseType object =
            new DeleteSnapshotResponseType();
        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix ="";
        java.lang.String namespaceuri ="";
        try {
            // Skip whitespace/comments until the first element event.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            // Honor an explicit xsi:type: delegate to the mapped subtype's parser.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                    "type");
                if (fullTypeName!=null){
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1){
                        nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix==null?"":nsPrefix;
                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                    if (!"DeleteSnapshotResponseType".equals(type)){
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (DeleteSnapshotResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                            nsUri,type,reader);
                    }
                }
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            java.util.Vector handledAttributes = new java.util.Vector();
            reader.next();
            // --- required element: requestId ---
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","requestId").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setRequestId(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            // --- required element: return (boolean) ---
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","return").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.set_return(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToBoolean(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            // Any further start element means trailing, unexpected content.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            if (reader.isStartElement())
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
        } catch (javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }
        return object;
    }
}//end of factory class
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.introduceParameter;
import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.generation.GenerateMembersUtil;
import com.intellij.lang.findUsages.DescriptiveNameUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.IntroduceParameterRefactoring;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.changeSignature.ChangeSignatureProcessor;
import com.intellij.refactoring.introduceVariable.IntroduceVariableBase;
import com.intellij.refactoring.listeners.RefactoringEventData;
import com.intellij.refactoring.util.*;
import com.intellij.refactoring.util.duplicates.MethodDuplicatesHandler;
import com.intellij.refactoring.util.occurrences.ExpressionOccurrenceManager;
import com.intellij.refactoring.util.occurrences.LocalVariableOccurrenceManager;
import com.intellij.refactoring.util.occurrences.OccurrenceManager;
import com.intellij.refactoring.util.usageInfo.DefaultConstructorImplicitUsageInfo;
import com.intellij.refactoring.util.usageInfo.NoConstructorClassUsageInfo;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import java.util.HashSet;
import com.intellij.util.containers.MultiMap;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Set;
/**
* @author dsl
* @since 07.05.2002
*/
public class IntroduceParameterProcessor extends BaseRefactoringProcessor implements IntroduceParameterData {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.introduceParameter.IntroduceParameterProcessor");

  private final PsiMethod myMethodToReplaceIn;       // method whose signature/body receives the new parameter
  private final PsiMethod myMethodToSearchFor;       // method whose usages are searched (may differ, e.g. a super method)
  private PsiExpression myParameterInitializer;      // expression passed as the argument at call sites
  private final PsiExpression myExpressionToSearch;  // expression whose occurrences are replaced; null => use myLocalVariable
  private final PsiLocalVariable myLocalVariable;    // local variable being promoted to a parameter (nullable)
  private final boolean myRemoveLocalVariable;       // delete the local's declaration after the refactoring
  private final String myParameterName;
  private final boolean myReplaceAllOccurrences;
  private int myReplaceFieldsWithGetters;            // one of IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_*
  private final boolean myDeclareFinal;
  private final boolean myGenerateDelegate;          // keep old signature as a delegating overload
  private PsiType myForcedType;                      // user-forced parameter type; recomputed in performRefactoring if null
  private final TIntArrayList myParametersToRemove;  // indices of parameters made obsolete by the new one
  private final PsiManager myManager;
  private JavaExpressionWrapper myInitializerWrapper;
  private boolean myHasConflicts;                    // set by preprocessUsages()
/**
 * if expressionToSearch is null, search for localVariable
 *
 * @param methodToReplaceIn    method that receives the new parameter
 * @param methodToSearchFor    method whose call sites are updated (may be a super method)
 * @param parameterInitializer expression used as the argument at call sites (nullable when a local variable is given)
 * @param parametersToRemove   indices of existing parameters to drop from the signature
 */
public IntroduceParameterProcessor(@NotNull Project project,
                                   PsiMethod methodToReplaceIn,
                                   @NotNull PsiMethod methodToSearchFor,
                                   PsiExpression parameterInitializer,
                                   PsiExpression expressionToSearch,
                                   PsiLocalVariable localVariable,
                                   boolean removeLocalVariable,
                                   String parameterName,
                                   boolean replaceAllOccurrences,
                                   int replaceFieldsWithGetters,
                                   boolean declareFinal,
                                   boolean generateDelegate,
                                   PsiType forcedType,
                                   @NotNull TIntArrayList parametersToRemove) {
  super(project);
  myMethodToReplaceIn = methodToReplaceIn;
  myMethodToSearchFor = methodToSearchFor;
  myParameterInitializer = parameterInitializer;
  myExpressionToSearch = expressionToSearch;
  myLocalVariable = localVariable;
  myRemoveLocalVariable = removeLocalVariable;
  myParameterName = parameterName;
  myReplaceAllOccurrences = replaceAllOccurrences;
  myReplaceFieldsWithGetters = replaceFieldsWithGetters;
  myDeclareFinal = declareFinal;
  myGenerateDelegate = generateDelegate;
  myForcedType = forcedType;
  myManager = PsiManager.getInstance(project);
  myParametersToRemove = parametersToRemove;
  // Wrapper is created eagerly only when an expression is given; it is rebuilt in performRefactoring().
  myInitializerWrapper = expressionToSearch == null ? null : new JavaExpressionWrapper(expressionToSearch);
}
/** Replaces the initializer expression chosen at construction time. */
public void setParameterInitializer(PsiExpression parameterInitializer) {
  myParameterInitializer = parameterInitializer;
}

@NotNull
protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
  return new IntroduceParameterViewDescriptor(myMethodToSearchFor);
}

/** Type of the introduced parameter (recomputed from the initializer if it was not forced). */
@NotNull
public PsiType getForcedType() {
  return myForcedType;
}

public void setForcedType(PsiType forcedType) {
  myForcedType = forcedType;
}

/** One of the IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_* constants. */
public int getReplaceFieldsWithGetters() {
  return myReplaceFieldsWithGetters;
}

public void setReplaceFieldsWithGetters(int replaceFieldsWithGetters) {
  myReplaceFieldsWithGetters = replaceFieldsWithGetters;
}
/**
 * Collects every usage affected by this refactoring: overriding methods,
 * call sites and implicit constructor usages (skipped when a delegate is
 * generated, since existing callers then stay untouched), plus the in-method
 * occurrences of the expression being extracted. Duplicates are removed.
 */
@NotNull
protected UsageInfo[] findUsages() {
  final ArrayList<UsageInfo> usages = new ArrayList<>();
  for (PsiMethod overrider : OverridingMethodsSearch.search(myMethodToSearchFor).toArray(PsiMethod.EMPTY_ARRAY)) {
    usages.add(new UsageInfo(overrider));
  }
  if (!myGenerateDelegate) {
    final PsiReference[] references =
      MethodReferencesSearch.search(myMethodToSearchFor, GlobalSearchScope.projectScope(myProject), true).toArray(PsiReference.EMPTY_ARRAY);
    for (PsiReference reference : references) {
      final PsiElement element = reference.getElement();
      if (element instanceof PsiMethod && ((PsiMethod)element).isConstructor()) {
        // Implicit super() call made by a default constructor.
        usages.add(new DefaultConstructorImplicitUsageInfo((PsiMethod)element, ((PsiMethod)element).getContainingClass(), myMethodToSearchFor));
      }
      else if (element instanceof PsiClass) {
        usages.add(new NoConstructorClassUsageInfo((PsiClass)element));
      }
      else if (IntroduceParameterUtil.insideMethodToBeReplaced(element, myMethodToReplaceIn)) {
        usages.add(new ChangedMethodCallInfo(element));
      }
      else {
        usages.add(new ExternalUsageInfo(element));
      }
    }
  }
  if (myReplaceAllOccurrences) {
    for (PsiElement occurrence : getOccurrences()) {
      usages.add(new InternalUsageInfo(occurrence));
    }
  }
  else if (myExpressionToSearch != null && myExpressionToSearch.isValid()) {
    usages.add(new InternalUsageInfo(myExpressionToSearch));
  }
  return UsageViewUtil.removeDuplicatedUsages(usages.toArray(UsageInfo.EMPTY_ARRAY));
}
/**
 * Finds the occurrences to replace: occurrences of the expression when one was
 * given, otherwise references to the local variable being promoted.
 */
protected PsiElement[] getOccurrences() {
  final OccurrenceManager manager = myLocalVariable != null
                                    ? new LocalVariableOccurrenceManager(myLocalVariable, null)
                                    : new ExpressionOccurrenceManager(myExpressionToSearch, myMethodToReplaceIn, null);
  return manager.getOccurrences();
}
/** True when preprocessUsages() detected conflicts (accessibility, name clashes, super-expressions). */
public boolean hasConflicts() {
  return myHasConflicts;
}
/** Walks an expression and records every element its references resolve to (fields, methods, classes, ...). */
private static class ReferencedElementsCollector extends JavaRecursiveElementWalkingVisitor {
  private final Set<PsiElement> myResult = new HashSet<>();

  // Route reference expressions through the generic reference handling below.
  @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
    visitReferenceElement(expression);
  }

  @Override public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
    super.visitReferenceElement(reference);
    final PsiElement element = reference.resolve();
    if (element != null) {
      myResult.add(element);
    }
  }
}
/**
 * Detects conflicts before the refactoring runs: name clashes with existing
 * variables, accessibility problems of the initializer's referents at external
 * call sites, and {@code super} expressions in the initializer that would be
 * wrong outside the declaring class. Also lets extension-provided processors
 * contribute conflicts. Returns the result of showing them to the user.
 */
protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
  UsageInfo[] usagesIn = refUsages.get();
  MultiMap<PsiElement, String> conflicts = new MultiMap<>();

  // A variable with the same name in the target method would shadow the new parameter.
  AnySameNameVariables anySameNameVariables = new AnySameNameVariables();
  myMethodToReplaceIn.accept(anySameNameVariables);
  final Pair<PsiElement, String> conflictPair = anySameNameVariables.getConflict();
  if (conflictPair != null) {
    conflicts.putValue(conflictPair.first, conflictPair.second);
  }

  // With a delegate the initializer stays in the original class, so accessibility is not an issue.
  if (!myGenerateDelegate) {
    detectAccessibilityConflicts(usagesIn, conflicts);
  }

  // "super.x" in the initializer is only meaningful inside the declaring class hierarchy.
  if (myParameterInitializer != null && !myMethodToReplaceIn.hasModifierProperty(PsiModifier.PRIVATE)) {
    final AnySupers anySupers = new AnySupers();
    myParameterInitializer.accept(anySupers);
    if (anySupers.isResult()) {
      for (UsageInfo usageInfo : usagesIn) {
        if (!(usageInfo.getElement() instanceof PsiMethod) && !(usageInfo instanceof InternalUsageInfo)) {
          if (!PsiTreeUtil.isAncestor(myMethodToReplaceIn.getContainingClass(), usageInfo.getElement(), false)) {
            String message = RefactoringBundle.message("parameter.initializer.contains.0.but.not.all.calls.to.method.are.in.its.class",
                                                       CommonRefactoringUtil.htmlEmphasize(PsiKeyword.SUPER));
            conflicts.putValue(myParameterInitializer, message);
            break;
          }
        }
      }
    }
  }

  // Language/framework-specific conflict checks contributed via the extension point.
  for (IntroduceParameterMethodUsagesProcessor processor : IntroduceParameterMethodUsagesProcessor.EP_NAME.getExtensions()) {
    processor.findConflicts(this, refUsages.get(), conflicts);
  }
  myHasConflicts = !conflicts.isEmpty();
  return showConflicts(conflicts, usagesIn);
}
/**
 * For every external call site, verifies that each member referenced by the
 * initializer expression is accessible there; inaccessible members are
 * reported as conflicts. When fields are being replaced with getters, the
 * getter's accessibility is checked instead of the field's.
 */
private void detectAccessibilityConflicts(final UsageInfo[] usageArray, MultiMap<PsiElement, String> conflicts) {
  if (myParameterInitializer != null) {
    final ReferencedElementsCollector collector = new ReferencedElementsCollector();
    myParameterInitializer.accept(collector);
    final Set<PsiElement> result = collector.myResult;
    if (!result.isEmpty()) {
      for (final UsageInfo usageInfo : usageArray) {
        if (usageInfo instanceof ExternalUsageInfo && IntroduceParameterUtil.isMethodUsage(usageInfo)) {
          final PsiElement place = usageInfo.getElement();
          for (PsiElement element : result) {
            if (element instanceof PsiField && myReplaceFieldsWithGetters != IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_NONE) {
              //check getter access instead
              final PsiClass psiClass = ((PsiField)element).getContainingClass();
              LOG.assertTrue(psiClass != null);
              final PsiMethod method = psiClass.findMethodBySignature(GenerateMembersUtil.generateGetterPrototype((PsiField)element), true);
              if (method != null){
                // NOTE: loop variable is intentionally rebound to the getter for the check below.
                element = method;
              }
            }
            if (element instanceof PsiMember &&
                !JavaPsiFacade.getInstance(myProject).getResolveHelper().isAccessible((PsiMember)element, place, null)) {
              String message =
                RefactoringBundle.message(
                  "0.is.not.accessible.from.1.value.for.introduced.parameter.in.that.method.call.will.be.incorrect",
                  RefactoringUIUtil.getDescription(element, true),
                  RefactoringUIUtil.getDescription(ConflictsUtil.getContainer(place), true));
              conflicts.putValue(element, message);
            }
          }
        }
      }
    }
  }
}
/** Visitor that reports whether an expression tree contains a {@code super} expression. */
public static class AnySupers extends JavaRecursiveElementWalkingVisitor {
  private boolean myResult;

  @Override public void visitSuperExpression(PsiSuperExpression expression) {
    myResult = true;
  }

  /** True when a {@code super} expression was encountered during the walk. */
  public boolean isResult() {
    return myResult;
  }

  // Treat reference expressions as plain elements so their children (possible supers) are still walked.
  @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
    visitElement(expression);
  }
}
/**
 * Visitor that looks for an existing variable in the target method whose name
 * clashes with the parameter being introduced. Stops at the first conflict.
 */
public class AnySameNameVariables extends JavaRecursiveElementWalkingVisitor {
  private Pair<PsiElement, String> conflict;

  /** The first detected clash (variable + user-readable message), or null. */
  public Pair<PsiElement, String> getConflict() {
    return conflict;
  }

  @Override public void visitVariable(PsiVariable variable) {
    // The promoted local itself is allowed to share the name.
    if (variable == myLocalVariable) return;
    // Parameters scheduled for removal cannot clash either.
    if (variable instanceof PsiParameter && ((PsiParameter)variable).getDeclarationScope() == myMethodToReplaceIn) {
      if (getParametersToRemove().contains(myMethodToReplaceIn.getParameterList().getParameterIndex((PsiParameter)variable))){
        return;
      }
    }
    if (myParameterName.equals(variable.getName())) {
      String descr = RefactoringBundle.message("there.is.already.a.0.it.will.conflict.with.an.introduced.parameter",
                                               RefactoringUIUtil.getDescription(variable, true));
      conflict = Pair.create(variable, CommonRefactoringUtil.capitalize(descr));
    }
  }

  // References cannot declare variables; skip their subtrees.
  @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
  }

  // Short-circuit the walk once a conflict has been found.
  @Override public void visitElement(PsiElement element) {
    if(conflict != null) return;
    super.visitElement(element);
  }
}
@Nullable
@Override
protected String getRefactoringId() {
  return "refactoring.introduceParameter";
}

/** "Before" event payload: the local variable and/or expression being promoted. */
@Nullable
@Override
protected RefactoringEventData getBeforeData() {
  RefactoringEventData data = new RefactoringEventData();
  data.addElements(new PsiElement[] {myLocalVariable, myExpressionToSearch});
  return data;
}

/** "After" event payload: the parameter that was just added to the target method. */
@Nullable
@Override
protected RefactoringEventData getAfterData(@NotNull UsageInfo[] usages) {
  final PsiParameter parameter = JavaIntroduceParameterMethodUsagesProcessor.getAnchorParameter(myMethodToReplaceIn);
  final RefactoringEventData afterData = new RefactoringEventData();
  afterData.addElement(parameter);
  return afterData;
}
/**
 * Performs the actual refactoring over the collected usages:
 * resolves the parameter type, normalizes the initializer, updates external
 * call sites, optionally generates delegating overloads, changes the method
 * signature(s), replaces in-method occurrences with the new parameter, and
 * finally removes the promoted local variable if requested.
 */
protected void performRefactoring(@NotNull UsageInfo[] usages) {
  try {
    PsiElementFactory factory = JavaPsiFacade.getInstance(myManager.getProject()).getElementFactory();
    PsiType initializerType = getInitializerType(myForcedType, myParameterInitializer, myLocalVariable);
    setForcedType(initializerType);

    // Converting myParameterInitializer
    if (myParameterInitializer == null) {
      // No explicit expression: the local variable's name becomes the argument at call sites.
      LOG.assertTrue(myLocalVariable != null);
      myParameterInitializer = factory.createExpressionFromText(myLocalVariable.getName(), myLocalVariable);
    }
    else if (myParameterInitializer instanceof PsiArrayInitializerExpression){
      // A bare "{...}" is not a valid standalone expression; wrap it in "new T[]{...}".
      final PsiExpression newExprArrayInitializer =
        RefactoringUtil.createNewExpressionFromArrayInitializer((PsiArrayInitializerExpression)myParameterInitializer, initializerType);
      myParameterInitializer = (PsiExpression)myParameterInitializer.replace(newExprArrayInitializer);
    }

    myInitializerWrapper = new JavaExpressionWrapper(myParameterInitializer);

    // Changing external occurrences (the tricky part)
    IntroduceParameterUtil.processUsages(usages, this);

    if (myGenerateDelegate) {
      generateDelegate(myMethodToReplaceIn);
      if (myMethodToReplaceIn != myMethodToSearchFor) {
        final PsiMethod method = generateDelegate(myMethodToSearchFor);
        // An interface delegate cannot carry a body.
        if (method.getContainingClass().isInterface()) {
          final PsiCodeBlock block = method.getBody();
          if (block != null) {
            block.delete();
          }
        }
      }
    }

    // Changing signature of initial method
    // (signature of myMethodToReplaceIn will be either changed now or have already been changed)
    LOG.assertTrue(initializerType.isValid());
    final FieldConflictsResolver fieldConflictsResolver = new FieldConflictsResolver(myParameterName, myMethodToReplaceIn.getBody());
    IntroduceParameterUtil.changeMethodSignatureAndResolveFieldConflicts(new UsageInfo(myMethodToReplaceIn), usages, this);
    if (myMethodToSearchFor != myMethodToReplaceIn) {
      IntroduceParameterUtil.changeMethodSignatureAndResolveFieldConflicts(new UsageInfo(myMethodToSearchFor), usages, this);
    }
    else if (myGenerateDelegate && myMethodToReplaceIn.findSuperMethods().length == 0) {
      // The delegate keeps the old signature, so the modified method no longer overrides anything.
      final PsiAnnotation annotation = AnnotationUtil.findAnnotation(myMethodToReplaceIn, true, Override.class.getName());
      if (annotation != null) {
        annotation.delete();
      }
    }
    ChangeContextUtil.clearContextInfo(myParameterInitializer);

    // Replacing expression occurrences
    for (UsageInfo usage : usages) {
      if (usage instanceof ChangedMethodCallInfo) {
        PsiElement element = usage.getElement();
        processChangedMethodCall(element);
      }
      else if (usage instanceof InternalUsageInfo) {
        PsiElement element = usage.getElement();
        if (element instanceof PsiExpression) {
          element = RefactoringUtil.outermostParenthesizedExpression((PsiExpression)element);
        }
        if (element != null) {
          if (element.getParent() instanceof PsiExpressionStatement) {
            // A value used as a bare statement becomes useless once extracted; drop it.
            element.getParent().delete();
          }
          else {
            PsiExpression newExpr = factory.createExpressionFromText(myParameterName, element);
            IntroduceVariableBase.replace((PsiExpression)element, newExpr, myProject);
          }
        }
      }
    }

    if(myLocalVariable != null && myRemoveLocalVariable) {
      myLocalVariable.normalizeDeclaration();
      myLocalVariable.getParent().delete();
    }
    fieldConflictsResolver.fix();
  }
  catch (IncorrectOperationException ex) {
    LOG.error(ex);
  }

  if (isReplaceDuplicates()) {
    ApplicationManager.getApplication().invokeLater(() -> processMethodsDuplicates(), myProject.getDisposed());
  }
}
/** Hook for subclasses: whether to search for duplicated method bodies afterwards. */
protected boolean isReplaceDuplicates() {
  return true;
}

/**
 * Searches the containing file for duplicates of the changed method under a
 * modal progress; the search itself runs inside a read action.
 */
private void processMethodsDuplicates() {
  final Runnable runnable = () -> {
    if (!myMethodToReplaceIn.isValid()) return;
    MethodDuplicatesHandler.invokeOnScope(myProject, Collections.singleton(myMethodToReplaceIn),
                                          new AnalysisScope(myMethodToReplaceIn.getContainingFile()), true);
  };
  ProgressManager.getInstance().runProcessWithProgressSynchronously(() -> ApplicationManager.getApplication().runReadAction(runnable), "Search method duplicates...", true, myProject);
}
/**
 * Creates a delegating overload with the original signature: a copy of the
 * method whose body just calls the new signature, forwarding the surviving
 * parameters and inserting the initializer expression at the new parameter's
 * position (after the anchor parameter, or first/alone when there is none).
 * The delegate is inserted before the original method.
 */
private PsiMethod generateDelegate(final PsiMethod methodToReplaceIn) throws IncorrectOperationException {
  final PsiMethod delegate = (PsiMethod)methodToReplaceIn.copy();
  final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(myManager.getProject()).getElementFactory();
  ChangeSignatureProcessor.makeEmptyBody(elementFactory, delegate);
  final PsiCallExpression callExpression = ChangeSignatureProcessor.addDelegatingCallTemplate(delegate, delegate.getName());
  final PsiExpressionList argumentList = callExpression.getArgumentList();
  assert argumentList != null;
  final PsiParameter[] psiParameters = methodToReplaceIn.getParameterList().getParameters();

  final PsiParameter anchorParameter = getAnchorParameter(methodToReplaceIn);
  if (psiParameters.length == 0) {
    argumentList.add(myParameterInitializer);
  }
  else {
    if (anchorParameter == null) {
      // New parameter goes first (vararg-only method).
      argumentList.add(myParameterInitializer);
    }
    for (int i = 0; i < psiParameters.length; i++) {
      PsiParameter psiParameter = psiParameters[i];
      // Forward only parameters that are not being removed.
      if (!myParametersToRemove.contains(i)) {
        final PsiExpression expression = elementFactory.createExpressionFromText(psiParameter.getName(), delegate);
        argumentList.add(expression);
      }
      if (psiParameter == anchorParameter) {
        argumentList.add(myParameterInitializer);
      }
    }
  }

  return (PsiMethod)methodToReplaceIn.getContainingClass().addBefore(delegate, methodToReplaceIn);
}
/**
 * Determines the type of the introduced parameter: the explicitly forced type
 * when one was supplied, otherwise the promoted local variable's declared
 * type, otherwise the type inferred from the initializer expression.
 * Asserts (and returns null) when no source of a type is available.
 */
static PsiType getInitializerType(PsiType forcedType, PsiExpression parameterInitializer, PsiLocalVariable localVariable) {
  if (forcedType != null) {
    return forcedType;
  }
  if (localVariable != null) {
    return localVariable.getType();
  }
  if (parameterInitializer != null) {
    return RefactoringUtil.getTypeByExpressionWithExpectedType(parameterInitializer);
  }
  // No forced type, no local variable, no initializer: caller violated the contract.
  LOG.assertTrue(false);
  return null;
}
/**
 * Inserts the new argument (the parameter name) into a call to the method
 * inside the method's own body, then removes the arguments for deleted
 * parameters. For vararg methods the argument is placed just before the
 * vararg tail; otherwise it is appended after the last argument.
 */
private void processChangedMethodCall(PsiElement element) throws IncorrectOperationException {
  if (element.getParent() instanceof PsiMethodCallExpression) {
    PsiMethodCallExpression methodCall = (PsiMethodCallExpression)element.getParent();

    // A recursive call inside the initializer itself must not be rewritten.
    if (myMethodToReplaceIn == myMethodToSearchFor && PsiTreeUtil.isAncestor(methodCall, myParameterInitializer, false)) return;

    PsiElementFactory factory = JavaPsiFacade.getInstance(methodCall.getProject()).getElementFactory();
    PsiExpression expression = factory.createExpressionFromText(myParameterName, null);
    final PsiExpressionList argList = methodCall.getArgumentList();
    final PsiExpression[] exprs = argList.getExpressions();

    boolean first = false;
    PsiElement anchor = null;
    if (myMethodToSearchFor.isVarArgs()) {
      // Index of the vararg parameter in the old signature.
      final int oldParamCount = myMethodToSearchFor.getParameterList().getParametersCount() - 1;
      if (exprs.length >= oldParamCount) {
        // Insert before the vararg arguments.
        if (oldParamCount > 1) {
          anchor = exprs[oldParamCount - 2];
        }
        else {
          first = true;
          anchor = null;
        }
      } else {
        anchor = exprs[exprs.length -1];
      }
    } else if (exprs.length > 0) {
      anchor = exprs[exprs.length - 1];
    }

    if (anchor != null) {
      argList.addAfter(expression, anchor);
    }
    else {
      if (first && exprs.length > 0) {
        argList.addBefore(expression, exprs[0]);
      } else {
        argList.add(expression);
      }
    }

    removeParametersFromCall(argList);
  }
  else {
    LOG.error(element.getParent());
  }
}
/**
 * Deletes the arguments corresponding to removed parameters from a call.
 * Iterates indices in descending order so earlier deletions do not shift
 * the positions still to be removed.
 */
private void removeParametersFromCall(final PsiExpressionList argList) {
  final PsiExpression[] exprs = argList.getExpressions();
  myParametersToRemove.forEachDescending(paramNum -> {
    // A call may pass fewer arguments than the signature declares (e.g. varargs).
    if (paramNum < exprs.length) {
      try {
        exprs[paramNum].delete();
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }
    }
    return true;
  });
}
@NotNull
protected String getCommandName() {
  return RefactoringBundle.message("introduce.parameter.command", DescriptiveNameUtil.getDescriptiveName(myMethodToReplaceIn));
}

/**
 * Returns the parameter after which the new one is inserted: the last
 * parameter for normal methods, or the one before the vararg parameter for
 * vararg methods. Null means the new parameter becomes the first.
 */
@Nullable
private static PsiParameter getAnchorParameter(PsiMethod methodToReplaceIn) {
  PsiParameterList parameterList = methodToReplaceIn.getParameterList();
  final PsiParameter anchorParameter;
  final PsiParameter[] parameters = parameterList.getParameters();
  final int length = parameters.length;
  if (!methodToReplaceIn.isVarArgs()) {
    anchorParameter = length > 0 ? parameters[length-1] : null;
  }
  else {
    // A vararg method always has at least one parameter, and the last one is the vararg.
    LOG.assertTrue(length > 0);
    LOG.assertTrue(parameters[length-1].isVarArgs());
    anchorParameter = length > 1 ? parameters[length-2] : null;
  }
  return anchorParameter;
}
// --- IntroduceParameterData implementation: plain accessors ---

public PsiMethod getMethodToReplaceIn() {
  return myMethodToReplaceIn;
}

@NotNull
public PsiMethod getMethodToSearchFor() {
  return myMethodToSearchFor;
}

/** Wrapper around the initializer expression, rebuilt in performRefactoring(). */
public JavaExpressionWrapper getParameterInitializer() {
  return myInitializerWrapper;
}

@NotNull
public String getParameterName() {
  return myParameterName;
}

public boolean isDeclareFinal() {
  return myDeclareFinal;
}

public boolean isGenerateDelegate() {
  return myGenerateDelegate;
}

@NotNull
public TIntArrayList getParametersToRemove() {
  return myParametersToRemove;
}

@NotNull
public Project getProject() {
  return myProject;
}
}
| |
package fr.adrienbrault.idea.symfony2plugin.config.yaml;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.Annotator;
import com.intellij.openapi.project.Project;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.PsiElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.php.lang.psi.elements.Method;
import com.jetbrains.php.lang.psi.elements.Parameter;
import com.jetbrains.php.lang.psi.elements.PhpClass;
import fr.adrienbrault.idea.symfony2plugin.Settings;
import fr.adrienbrault.idea.symfony2plugin.Symfony2InterfacesUtil;
import fr.adrienbrault.idea.symfony2plugin.Symfony2ProjectComponent;
import fr.adrienbrault.idea.symfony2plugin.stubs.ContainerCollectionResolver;
import fr.adrienbrault.idea.symfony2plugin.util.PhpElementsUtil;
import fr.adrienbrault.idea.symfony2plugin.util.PsiElementUtils;
import fr.adrienbrault.idea.symfony2plugin.util.dict.ServiceUtil;
import fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.yaml.YAMLTokenTypes;
import org.jetbrains.yaml.psi.YAMLArray;
import org.jetbrains.yaml.psi.YAMLCompoundValue;
import org.jetbrains.yaml.psi.YAMLKeyValue;
import org.jetbrains.yaml.psi.YAMLSequence;
import java.util.List;
import java.util.Locale;
public class YamlAnnotator implements Annotator {
private ContainerCollectionResolver.LazyServiceCollector lazyServiceCollector;
@Override
public void annotate(@NotNull final PsiElement psiElement, @NotNull AnnotationHolder holder) {
    // Skip entirely when the plugin or the YAML service annotator setting is disabled.
    if(!Symfony2ProjectComponent.isEnabled(psiElement.getProject()) || !Settings.getInstance(psiElement.getProject()).yamlAnnotateServiceConfig) {
        return;
    }

    this.annotateParameter(psiElement, holder);
    this.annotateClass(psiElement, holder);
    this.annotateService(psiElement, holder);

    // only match inside service definitions
    if(!YamlElementPatternHelper.getInsideKeyValue("services").accepts(psiElement)) {
        return;
    }

    this.annotateConstructorSequenceArguments(psiElement, holder);
    this.annotateConstructorArguments(psiElement, holder);
    this.annotateCallsArguments(psiElement, holder);

    // Drop the per-pass service cache so later annotation runs see fresh data.
    this.lazyServiceCollector = null;
}
/**
 * Warns about "%parameter%" references inside service definitions that are
 * not known to the container.
 */
private void annotateParameter(@NotNull final PsiElement psiElement, @NotNull AnnotationHolder holder) {

    if(!YamlElementPatternHelper.getServiceParameterDefinition().accepts(psiElement) || !YamlElementPatternHelper.getInsideServiceKeyPattern().accepts(psiElement)) {
        return;
    }

    // at least %a%
    // and not this one: %kernel.root_dir%/../web/
    // %kernel.root_dir%/../web/%webpath_modelmasks%
    String parameterName = PsiElementUtils.getText(psiElement);
    if(!YamlHelper.isValidParameterName(parameterName)) {
        return;
    }

    // strip "%"
    parameterName = parameterName.substring(1, parameterName.length() - 1);

    // parameter a always lowercase see #179
    // use ROOT so the result does not depend on the IDE's default locale
    // (e.g. the Turkish dotless-i would break lookups of names containing 'I')
    parameterName = parameterName.toLowerCase(Locale.ROOT);
    if (!ContainerCollectionResolver.getParameterNames(psiElement.getProject()).contains(parameterName)) {
        holder.createWarningAnnotation(psiElement, "Missing Parameter");
    }
}
/**
 * Warns about "@service" references inside service definitions that are not
 * known to the container. Bare "@", "@@" escapes and "@=" expressions are skipped.
 */
private void annotateService(@NotNull final PsiElement psiElement, @NotNull AnnotationHolder holder) {

    boolean insideServiceKey = YamlElementPatternHelper.getServiceDefinition().accepts(psiElement)
        && YamlElementPatternHelper.getInsideServiceKeyPattern().accepts(psiElement);
    if(!insideServiceKey) {
        return;
    }

    String serviceName = getServiceName(psiElement);

    // dont mark "@", "@?", "@@" escaping and expressions
    boolean skip = serviceName.length() < 2 || serviceName.startsWith("=") || serviceName.startsWith("@");
    if(!skip && !ContainerCollectionResolver.hasServiceNames(psiElement.getProject(), serviceName)) {
        holder.createWarningAnnotation(psiElement, "Missing Service");
    }
}
/**
 * Warns about "class"/"factory_class" values (or class-like parameters) that
 * resolve neither directly nor via a "%parameter%" indirection to a known class.
 */
private void annotateClass(@NotNull final PsiElement element, @NotNull AnnotationHolder holder) {

    if(!((YamlElementPatternHelper.getSingleLineScalarKey("class", "factory_class").accepts(element) || YamlElementPatternHelper.getParameterClassPattern().accepts(element)) && YamlElementPatternHelper.getInsideServiceKeyPattern().accepts(element))) {
        return;
    }

    String className = PsiElementUtils.getText(element);

    // "%my.class%" style: resolve the parameter first and accept it when it points to a class.
    if(YamlHelper.isValidParameterName(className)) {
        String resolvedParameter = ContainerCollectionResolver.resolveParameter(element.getProject(), className);
        if(resolvedParameter != null && PhpElementsUtil.getClassInterfacePsiElements(element.getProject(), resolvedParameter) != null) {
            return ;
        }
    }

    if(PhpElementsUtil.getClassInterface(element.getProject(), className) == null) {
        holder.createWarningAnnotation(element, "Missing Class");
    }
}
/**
 * arguments:
 *  - @twig
 *  - @twig
 *
 * Type-checks constructor arguments written in block-sequence form: walks up
 * from the scalar to its "arguments" key, finds the service class, and checks
 * the argument (positioned by counting preceding sequence items) against the
 * constructor's parameter.
 */
private void annotateConstructorSequenceArguments(@NotNull final PsiElement psiElement, @NotNull AnnotationHolder holder) {

    IElementType elementType = psiElement.getNode().getElementType();
    if(elementType == YAMLTokenTypes.TEXT || elementType == YAMLTokenTypes.SCALAR_DSTRING || elementType == YAMLTokenTypes.SCALAR_STRING) {
        // Expected PSI shape: scalar -> YAMLSequence -> YAMLCompoundValue -> "arguments" YAMLKeyValue.
        PsiElement yamlSequence = psiElement.getContext();
        if(yamlSequence instanceof YAMLSequence) {
            PsiElement yamlCompoundValue = yamlSequence.getContext();
            if(yamlCompoundValue instanceof YAMLCompoundValue) {
                PsiElement yamlKeyValue = yamlCompoundValue.getContext();
                if(yamlKeyValue instanceof YAMLKeyValue) {
                    String keyText = ((YAMLKeyValue) yamlKeyValue).getKeyText();
                    if("arguments".equals(keyText)) {
                        // Argument index = number of sequence items preceding this one.
                        List<YAMLSequence> test = PsiElementUtils.getPrevSiblingsOfType(yamlSequence, PlatformPatterns.psiElement(YAMLSequence.class));
                        PsiElement yamlCompoundValueService = yamlKeyValue.getParent();
                        if(yamlCompoundValueService instanceof YAMLCompoundValue) {
                            String className = YamlHelper.getYamlKeyValueAsString((YAMLCompoundValue) yamlCompoundValueService, "class", false);
                            if(className != null) {
                                PhpClass serviceClass = ServiceUtil.getResolvedClassDefinition(psiElement.getProject(), className, this.getLazyServiceCollector(psiElement.getProject()));
                                if(serviceClass != null) {
                                    Method constructor = serviceClass.getConstructor();
                                    if(constructor != null) {
                                        attachInstanceAnnotation(psiElement, holder, test.size(), constructor);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Type-checks constructor arguments written in inline-array form
 * ("arguments: [@a, @b]"): resolves the service class from the sibling
 * "class" key and checks each array element against the constructor.
 */
private void annotateConstructorArguments(@NotNull final PsiElement psiElement, @NotNull AnnotationHolder holder) {

    if(!PlatformPatterns.psiElement(YAMLTokenTypes.TEXT).accepts(psiElement)
        && !PlatformPatterns.psiElement(YAMLTokenTypes.SCALAR_DSTRING).accepts(psiElement)
        && !PlatformPatterns.psiElement(YAMLTokenTypes.SCALAR_STRING).accepts(psiElement))
    {
        return;
    }

    // @TODO: simplify code checks
    // Expected PSI shape: scalar -> YAMLArray -> YAMLCompoundValue -> "arguments" YAMLKeyValue.
    if(!(psiElement.getContext() instanceof YAMLArray)) {
        return;
    }

    YAMLArray yamlArray = (YAMLArray) psiElement.getContext();
    if(!(yamlArray.getContext() instanceof YAMLCompoundValue)) {
        return;
    }

    YAMLCompoundValue yamlCompoundValue = (YAMLCompoundValue) yamlArray.getContext();
    if(!(yamlCompoundValue.getContext() instanceof YAMLKeyValue)) {
        return;
    }

    // NOTE(review): the null check is redundant here — the instanceof/cast above
    // already guarantees non-null; kept as-is since this block only adds docs.
    YAMLKeyValue yamlKeyValue = (YAMLKeyValue) yamlCompoundValue.getContext();
    if(yamlKeyValue == null || !yamlKeyValue.getKeyText().equals("arguments")) {
        return;
    }

    YAMLKeyValue classKeyValue = YamlHelper.getYamlKeyValue(yamlKeyValue.getContext(), "class");
    if(classKeyValue == null) {
        return;
    }

    PhpClass serviceClass = ServiceUtil.getResolvedClassDefinition(psiElement.getProject(), classKeyValue.getValueText(), this.getLazyServiceCollector(psiElement.getProject()));
    if(serviceClass == null) {
        return;
    }

    Method constructor = serviceClass.getConstructor();
    if(constructor == null) {
        return;
    }

    attachInstanceAnnotation(psiElement, holder, yamlArray, constructor);
}
/**
 * Annotates arguments of a "calls" entry of a YAML service definition:
 * resolves the service class, looks up the called method by name and validates
 * each service argument against the declared type of the matching parameter.
 *
 * @param psiElement YAML scalar element possibly holding a service reference
 * @param holder     annotation holder warnings are reported to
 */
private void annotateCallsArguments(@NotNull final PsiElement psiElement, @NotNull AnnotationHolder holder) {
    // only plain or double quoted scalars are considered here
    if(!PlatformPatterns.psiElement(YAMLTokenTypes.TEXT).accepts(psiElement)
        && !PlatformPatterns.psiElement(YAMLTokenTypes.SCALAR_DSTRING).accepts(psiElement))
    {
        return;
    }

    // @TODO: simplify code checks
    // expected PSI shape: scalar -> parameter array -> call array -> calls sequence
    if(!(psiElement.getContext() instanceof YAMLArray)) {
        return;
    }

    YAMLArray yamlCallParameterArray = (YAMLArray) psiElement.getContext();
    if(!(yamlCallParameterArray.getContext() instanceof YAMLArray)) {
        return;
    }

    YAMLArray yamlCallArray = (YAMLArray) yamlCallParameterArray.getContext();
    if(!(yamlCallArray.getContext() instanceof YAMLSequence)) {
        return;
    }

    // a call entry needs at least [ methodName, [arguments...] ]
    List<PsiElement> methodParameter = YamlHelper.getYamlArrayValues(yamlCallArray);
    if(methodParameter.size() < 2) {
        return;
    }

    String methodName = PsiElementUtils.getText(methodParameter.get(0));

    YAMLSequence yamlSequence = (YAMLSequence) yamlCallArray.getContext();
    if(!(yamlSequence.getContext() instanceof YAMLCompoundValue)) {
        return;
    }

    YAMLCompoundValue yamlCompoundValue = (YAMLCompoundValue) yamlSequence.getContext();
    if(!(yamlCompoundValue.getContext() instanceof YAMLKeyValue)) {
        return;
    }

    // NOTE(review): the parent compound value is assumed to be the whole service
    // definition carrying the "class" key - confirm for deeply nested definitions
    YAMLCompoundValue serviceDefinition = PsiTreeUtil.getParentOfType(yamlCompoundValue, YAMLCompoundValue.class);
    YAMLKeyValue classKeyValue = YamlHelper.getYamlKeyValue(serviceDefinition, "class");
    if(classKeyValue == null) {
        return;
    }

    PhpClass serviceClass = ServiceUtil.getResolvedClassDefinition(psiElement.getProject(), classKeyValue.getValueText(), this.getLazyServiceCollector(psiElement.getProject()));
    if(serviceClass == null) {
        return;
    }

    Method method = PhpElementsUtil.getClassMethod(serviceClass, methodName);
    if(method == null) {
        return;
    }

    attachInstanceAnnotation(psiElement, holder, yamlCallParameterArray, method);
}
/**
 * Checks that the service referenced at the given parameter position is an instance
 * of the declared type of the matching method/constructor parameter, and attaches a
 * weak warning otherwise.
 *
 * @param psiElement     YAML scalar element holding the service name
 * @param holder         annotation holder warnings are reported to
 * @param parameterIndex zero-based index of the argument inside the YAML sequence
 * @param constructor    resolved constructor or method the arguments are bound to
 */
private void attachInstanceAnnotation(PsiElement psiElement, AnnotationHolder holder, int parameterIndex, Method constructor) {
    String serviceName = getServiceName(psiElement);
    if(StringUtils.isBlank(serviceName)) {
        return;
    }

    // reuse the already extracted name instead of recomputing it
    PhpClass serviceParameterClass = ServiceUtil.getResolvedClassDefinition(psiElement.getProject(), serviceName, this.getLazyServiceCollector(psiElement.getProject()));
    if(serviceParameterClass == null) {
        return;
    }

    Parameter[] constructorParameter = constructor.getParameters();
    if(parameterIndex >= constructorParameter.length) {
        return;
    }

    // no resolvable declared type hint -> nothing to validate
    PhpClass expectedClass = PhpElementsUtil.getClassInterface(psiElement.getProject(), constructorParameter[parameterIndex].getDeclaredType().toString());
    if(expectedClass == null) {
        return;
    }

    if(!new Symfony2InterfacesUtil().isInstanceOf(serviceParameterClass, expectedClass)) {
        holder.createWeakWarningAnnotation(psiElement, "Expect instance of: " + expectedClass.getPresentableFQN());
    }
}
/**
 * Resolves the position of the given element inside the YAML sequence and delegates
 * the actual type check to the index-based overload. Elements that are not part of
 * the sequence are ignored.
 */
private void attachInstanceAnnotation(PsiElement psiElement, AnnotationHolder holder, YAMLArray yamlArray, Method constructor) {
    if(psiElement == null) {
        return;
    }

    int argumentIndex = YamlHelper.getYamlParameter(yamlArray, psiElement);
    if(argumentIndex >= 0) {
        attachInstanceAnnotation(psiElement, holder, argumentIndex, constructor);
    }
}
/**
 * Extracts the plain service name from a YAML scalar, stripping the special service
 * reference syntax from the raw element text (see YamlHelper.trimSpecialSyntaxServiceName).
 */
private String getServiceName(PsiElement psiElement) {
    String rawText = PsiElementUtils.getText(psiElement);
    return YamlHelper.trimSpecialSyntaxServiceName(rawText);
}
/**
 * Lazily creates and caches the service collector for the given project so repeated
 * annotation passes share one collector instance.
 */
private ContainerCollectionResolver.LazyServiceCollector getLazyServiceCollector(Project project) {
    if(this.lazyServiceCollector == null) {
        this.lazyServiceCollector = new ContainerCollectionResolver.LazyServiceCollector(project);
    }
    return this.lazyServiceCollector;
}
}
| |
package com.roughike.bottombar;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.test.InstrumentationRegistry;
import android.support.test.annotation.UiThreadTest;
import android.support.test.filters.LargeTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.mockito.Mockito;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
 * Instrumentation tests for {@link BottomBar}: tab selection and reselection
 * listeners, saved-state handling and programmatic appearance updates.
 *
 * Created by iiro on 13.8.2016.
 */
@RunWith(AndroidJUnit4.class)
@LargeTest
public class BottomBarTest {
    private OnTabSelectListener selectListener;
    private OnTabReselectListener reselectListener;
    private BottomBar bottomBar;

    @Before
    public void setUp() {
        selectListener = Mockito.mock(OnTabSelectListener.class);
        reselectListener = Mockito.mock(OnTabReselectListener.class);

        bottomBar = new BottomBar(InstrumentationRegistry.getContext());
        bottomBar.setItems(com.roughike.bottombar.test.R.xml.dummy_tabs_three);
        bottomBar.setOnTabSelectListener(selectListener);
        bottomBar.setOnTabReselectListener(reselectListener);
    }

    @Test(expected = RuntimeException.class)
    public void setItems_ThrowsExceptionWithNoResource() {
        BottomBar secondBar = new BottomBar(InstrumentationRegistry.getContext());
        secondBar.setItems(0);
    }

    @Test
    public void setItemsWithCustomConfig_OverridesPreviousValues() {
        float inActiveTabAlpha = 0.69f;
        float activeTabAlpha = 0.96f;
        int inActiveTabColor = Color.BLUE;
        int activeTabColor = Color.GREEN;
        int defaultBackgroundColor = Color.CYAN;
        int defaultBadgeBackgroundColor = Color.MAGENTA;
        int titleTextAppearance = com.roughike.bottombar.test.R.style.dummy_text_appearance;

        BottomBarTab.Config config = new BottomBarTab.Config.Builder()
                .inActiveTabAlpha(inActiveTabAlpha)
                .activeTabAlpha(activeTabAlpha)
                .inActiveTabColor(inActiveTabColor)
                .activeTabColor(activeTabColor)
                .barColorWhenSelected(defaultBackgroundColor)
                .badgeBackgroundColor(defaultBadgeBackgroundColor)
                .titleTextAppearance(titleTextAppearance)
                .build();

        BottomBar newBar = new BottomBar(InstrumentationRegistry.getContext());
        newBar.setItems(com.roughike.bottombar.test.R.xml.dummy_tabs_three, config);

        BottomBarTab first = newBar.getTabAtPosition(0);
        assertEquals(inActiveTabAlpha, first.getInActiveAlpha(), 0);
        assertEquals(activeTabAlpha, first.getActiveAlpha(), 0);
        assertEquals(inActiveTabColor, first.getInActiveColor());
        assertEquals(activeTabColor, first.getActiveColor());
        assertEquals(defaultBackgroundColor, first.getBarColorWhenSelected());
        assertEquals(defaultBadgeBackgroundColor, first.getBadgeBackgroundColor());
        assertEquals(titleTextAppearance, first.getTitleTextAppearance());
    }

    @Test
    @UiThreadTest
    public void tabCount_IsCorrect() {
        assertEquals(3, bottomBar.getTabCount());
    }

    @Test
    @UiThreadTest
    public void findingPositionForTabs_ReturnsCorrectPositions() {
        assertEquals(0, bottomBar.findPositionForTabWithId(com.roughike.bottombar.test.R.id.tab_favorites));
        assertEquals(1, bottomBar.findPositionForTabWithId(com.roughike.bottombar.test.R.id.tab_nearby));
        assertEquals(2, bottomBar.findPositionForTabWithId(com.roughike.bottombar.test.R.id.tab_friends));
    }

    @Test
    @UiThreadTest
    public void whenTabIsSelected_SelectionListenerIsFired() {
        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_friends);
        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_nearby);
        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_favorites);

        // selection events must arrive in the exact order the tabs were selected
        InOrder inOrder = inOrder(selectListener);
        inOrder.verify(selectListener, times(1)).onTabSelected(com.roughike.bottombar.test.R.id.tab_friends);
        inOrder.verify(selectListener, times(1)).onTabSelected(com.roughike.bottombar.test.R.id.tab_nearby);
        inOrder.verify(selectListener, times(1)).onTabSelected(com.roughike.bottombar.test.R.id.tab_favorites);
        inOrder.verifyNoMoreInteractions();
    }

    @Test
    @UiThreadTest
    public void afterConfigurationChanged_SavedStateRestored_AndSelectedTabPersists() {
        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_favorites);
        Bundle savedState = bottomBar.saveState();

        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_nearby);
        bottomBar.restoreState(savedState);

        assertEquals(com.roughike.bottombar.test.R.id.tab_favorites, bottomBar.getCurrentTabId());
    }

    @Test
    @UiThreadTest
    public void whenTabIsReselected_ReselectionListenerIsFired() {
        // the first tab is selected by default, so selecting it once is already a reselect
        int firstTabId = com.roughike.bottombar.test.R.id.tab_favorites;
        bottomBar.selectTabWithId(firstTabId);
        verify(reselectListener, times(1)).onTabReSelected(firstTabId);

        int secondTabId = com.roughike.bottombar.test.R.id.tab_nearby;
        bottomBar.selectTabWithId(secondTabId);
        bottomBar.selectTabWithId(secondTabId);
        verify(reselectListener, times(1)).onTabReSelected(secondTabId);

        int thirdTabId = com.roughike.bottombar.test.R.id.tab_friends;
        bottomBar.selectTabWithId(thirdTabId);
        bottomBar.selectTabWithId(thirdTabId);
        verify(reselectListener, times(1)).onTabReSelected(thirdTabId);
    }

    @Test
    @UiThreadTest
    public void whenDefaultTabIsSet_ItsSelectedAtFirst() {
        int defaultTabId = com.roughike.bottombar.test.R.id.tab_friends;
        bottomBar.setDefaultTab(defaultTabId);

        verify(selectListener).onTabSelected(defaultTabId);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void settingTooLowDefaultPosition_Throws() {
        bottomBar.setDefaultTabPosition(-1);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void settingTooHighDefaultPosition_Throws() {
        bottomBar.setDefaultTabPosition(bottomBar.getTabCount());
    }

    @Test
    @UiThreadTest
    public void afterConfigurationChanged_UserSelectedTabPersistsWhenResettingDefaultTab() {
        int defaultTabId = com.roughike.bottombar.test.R.id.tab_friends;
        bottomBar.setDefaultTab(defaultTabId);

        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_nearby);
        Bundle savedState = bottomBar.saveState();
        bottomBar.restoreState(savedState);
        bottomBar.setDefaultTab(defaultTabId);

        // assertNotSame on autoboxed ints compared object identity of distinct Integer
        // boxes and therefore always passed; assertNotEquals checks the actual values.
        assertNotEquals(defaultTabId, bottomBar.getCurrentTabId());
        assertEquals(com.roughike.bottombar.test.R.id.tab_nearby, bottomBar.getCurrentTabId());
    }

    @Test
    @UiThreadTest
    public void whenGettingCurrentTab_ReturnsCorrectOne() {
        int firstTabId = com.roughike.bottombar.test.R.id.tab_favorites;
        bottomBar.selectTabWithId(firstTabId);

        assertEquals(firstTabId, bottomBar.getCurrentTabId());
        assertEquals(bottomBar.findPositionForTabWithId(firstTabId), bottomBar.getCurrentTabPosition());
        assertEquals(bottomBar.getTabWithId(firstTabId), bottomBar.getCurrentTab());

        int secondTabId = com.roughike.bottombar.test.R.id.tab_nearby;
        bottomBar.selectTabWithId(secondTabId);

        assertEquals(secondTabId, bottomBar.getCurrentTabId());
        assertEquals(bottomBar.findPositionForTabWithId(secondTabId), bottomBar.getCurrentTabPosition());
        assertEquals(bottomBar.getTabWithId(secondTabId), bottomBar.getCurrentTab());

        int thirdTabId = com.roughike.bottombar.test.R.id.tab_friends;
        bottomBar.selectTabWithId(thirdTabId);

        assertEquals(thirdTabId, bottomBar.getCurrentTabId());
        assertEquals(bottomBar.findPositionForTabWithId(thirdTabId), bottomBar.getCurrentTabPosition());
        assertEquals(bottomBar.getTabWithId(thirdTabId), bottomBar.getCurrentTab());
    }

    @Test
    @UiThreadTest
    public void whenSelectionChanges_AndHasNoListeners_onlyOneTabIsSelectedAtATime() {
        bottomBar.setOnTabSelectListener(null);
        bottomBar.setOnTabReselectListener(null);

        int firstTabId = com.roughike.bottombar.test.R.id.tab_favorites;
        int secondTabId = com.roughike.bottombar.test.R.id.tab_nearby;
        int thirdTabId = com.roughike.bottombar.test.R.id.tab_friends;

        bottomBar.selectTabWithId(secondTabId);
        assertOnlyTabWithIdIsSelected(secondTabId);

        bottomBar.selectTabWithId(thirdTabId);
        assertOnlyTabWithIdIsSelected(thirdTabId);

        bottomBar.selectTabWithId(firstTabId);
        assertOnlyTabWithIdIsSelected(firstTabId);
    }

    /** Asserts that exactly the tab with the given id is active and all others are not. */
    private void assertOnlyTabWithIdIsSelected(int tabId) {
        for (int i = 0; i < bottomBar.getTabCount(); i++) {
            BottomBarTab tab = bottomBar.getTabAtPosition(i);

            if (tab.getId() == tabId) {
                assertTrue(tab.isActive());
            } else {
                assertFalse(tab.isActive());
            }
        }
    }

    @Test
    @UiThreadTest
    public void whenTabIsSelectedOnce_AndNoSelectionListenerSet_ReselectionListenerIsNotFired() {
        bottomBar.setOnTabSelectListener(null);

        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_friends);
        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_nearby);
        bottomBar.selectTabWithId(com.roughike.bottombar.test.R.id.tab_favorites);

        verifyZeroInteractions(reselectListener);
    }

    @Test
    @UiThreadTest
    public void whenInActiveAlphaSetProgrammatically_AlphaIsUpdated() {
        BottomBarTab inActiveTab = bottomBar.getTabAtPosition(1);
        assertNotEquals(bottomBar.getCurrentTab(), inActiveTab);

        float previousAlpha = inActiveTab.getInActiveAlpha();
        float testAlpha = 0.69f;
        assertNotEquals(testAlpha, previousAlpha);
        assertNotEquals(testAlpha, inActiveTab.getIconView().getAlpha());
        assertNotEquals(testAlpha, inActiveTab.getTitleView().getAlpha());

        bottomBar.setInActiveTabAlpha(testAlpha);
        assertEquals(testAlpha, inActiveTab.getInActiveAlpha(), 0);
        assertEquals(testAlpha, inActiveTab.getIconView().getAlpha(), 0);
        assertEquals(testAlpha, inActiveTab.getTitleView().getAlpha(), 0);
    }

    @Test
    @UiThreadTest
    public void whenActiveAlphaSetProgrammatically_AlphaIsUpdated() {
        BottomBarTab activeTab = bottomBar.getCurrentTab();

        float previousAlpha = activeTab.getActiveAlpha();
        float testAlpha = 0.69f;
        assertNotEquals(testAlpha, previousAlpha);
        assertNotEquals(testAlpha, activeTab.getIconView().getAlpha());
        assertNotEquals(testAlpha, activeTab.getTitleView().getAlpha());

        bottomBar.setActiveTabAlpha(testAlpha);
        assertEquals(testAlpha, activeTab.getActiveAlpha(), 0);
        assertEquals(testAlpha, activeTab.getIconView().getAlpha(), 0);
        assertEquals(testAlpha, activeTab.getTitleView().getAlpha(), 0);
    }

    @Test
    @UiThreadTest
    public void whenInActiveColorSetProgrammatically_ColorIsUpdated() {
        BottomBarTab inActiveTab = bottomBar.getTabAtPosition(1);
        assertNotEquals(bottomBar.getCurrentTab(), inActiveTab);

        int previousInActiveColor = inActiveTab.getInActiveColor();
        int previousIconColor = inActiveTab.getCurrentDisplayedIconColor();
        int previousTitleColor = inActiveTab.getCurrentDisplayedTitleColor();

        int testColor = Color.GREEN;
        assertNotEquals(testColor, previousInActiveColor);
        assertNotEquals(testColor, previousIconColor);
        assertNotEquals(testColor, previousTitleColor);

        bottomBar.setInActiveTabColor(testColor);
        assertEquals(testColor, inActiveTab.getInActiveColor());
        assertEquals(testColor, inActiveTab.getCurrentDisplayedIconColor());
        assertEquals(testColor, inActiveTab.getCurrentDisplayedTitleColor());
    }

    @Test
    @UiThreadTest
    public void whenActiveColorSetProgrammatically_ColorIsUpdated() {
        BottomBarTab activeTab = bottomBar.getCurrentTab();

        int previousActiveColor = activeTab.getActiveColor();
        int previousIconColor = activeTab.getCurrentDisplayedIconColor();
        int previousTitleColor = activeTab.getCurrentDisplayedTitleColor();

        int testColor = Color.GREEN;
        assertNotEquals(testColor, previousActiveColor);
        assertNotEquals(testColor, previousIconColor);
        assertNotEquals(testColor, previousTitleColor);

        bottomBar.setActiveTabColor(testColor);
        assertEquals(testColor, activeTab.getActiveColor());
        assertEquals(testColor, activeTab.getCurrentDisplayedIconColor());
        assertEquals(testColor, activeTab.getCurrentDisplayedTitleColor());
    }

    @Test
    @UiThreadTest
    public void whenBadgeBackgroundColorSetProgrammatically_ColorIsUpdated() {
        BottomBarTab inActiveTab = bottomBar.getTabAtPosition(1);
        inActiveTab.setBadgeCount(3);

        int previousBadgeColor = inActiveTab.getBadgeBackgroundColor();
        int testColor = Color.GREEN;
        assertNotEquals(testColor, previousBadgeColor);

        bottomBar.setBadgeBackgroundColor(testColor);
        assertEquals(testColor, inActiveTab.getBadgeBackgroundColor());
    }

    @Test
    @UiThreadTest
    public void whenTitleTextAppearanceSetProgrammatically_AppearanceUpdated() {
        BottomBarTab tab = bottomBar.getCurrentTab();

        int testTextAppearance = -666;
        assertNotEquals(testTextAppearance, tab.getTitleTextAppearance());
        assertNotEquals(testTextAppearance, tab.getCurrentDisplayedTextAppearance());

        bottomBar.setTabTitleTextAppearance(testTextAppearance);
        assertEquals(testTextAppearance, tab.getTitleTextAppearance());
        assertEquals(testTextAppearance, tab.getCurrentDisplayedTextAppearance());
    }

    @Test
    @UiThreadTest
    public void whenTitleTypeFaceSetProgrammatically_TypefaceUpdated() {
        BottomBarTab tab = bottomBar.getCurrentTab();

        Typeface testTypeFace = Typeface.createFromAsset(
                bottomBar.getContext().getAssets(), "fonts/GreatVibes-Regular.otf");
        assertNotEquals(testTypeFace, tab.getTitleTypeFace());
        assertNotEquals(testTypeFace, tab.getTitleView().getTypeface());

        bottomBar.setTabTitleTypeface(testTypeFace);
        assertEquals(testTypeFace, tab.getTitleTypeFace());
        assertEquals(testTypeFace, tab.getTitleView().getTypeface());
    }
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.bpel.core.ode.integration;
import com.hazelcast.core.*;
import org.apache.commons.httpclient.HttpConnectionManager;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
import org.apache.commons.httpclient.util.IdleConnectionTimeoutThread;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.evt.DebugBpelEventListener;
import org.apache.ode.bpel.dao.BpelDAOConnectionFactory;
import org.apache.ode.bpel.engine.BpelServerImpl;
import org.apache.ode.bpel.engine.CountLRUDehydrationPolicy;
import org.apache.ode.bpel.engine.cron.CronScheduler;
import org.apache.ode.bpel.extension.ExtensionBundleRuntime;
import org.apache.ode.bpel.extension.ExtensionCorrelationFilter;
import org.apache.ode.bpel.iapi.*;
import org.apache.ode.bpel.intercept.MessageExchangeInterceptor;
import org.apache.ode.bpel.memdao.BpelDAOConnectionFactoryImpl;
import org.apache.ode.il.dbutil.Database;
import org.apache.ode.scheduler.simple.JdbcDelegate;
import org.apache.ode.scheduler.simple.ODECluster;
import org.apache.ode.scheduler.simple.SimpleScheduler;
import org.wso2.carbon.bpel.core.BPELConstants;
import org.wso2.carbon.bpel.core.internal.BPELServerHolder;
import org.wso2.carbon.bpel.core.internal.BPELServiceComponent;
import org.wso2.carbon.bpel.core.ode.integration.config.BPELServerConfiguration;
import org.wso2.carbon.bpel.core.ode.integration.jmx.Instance;
import org.wso2.carbon.bpel.core.ode.integration.jmx.InstanceStatusMonitor;
import org.wso2.carbon.bpel.core.ode.integration.jmx.Processes;
import org.wso2.carbon.bpel.core.ode.integration.store.*;
import org.wso2.carbon.bpel.core.ode.integration.utils.BPELDatabaseCreator;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.MBeanRegistrar;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.NotCompliantMBeanException;
import javax.sql.DataSource;
import javax.transaction.TransactionManager;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
/**
* BPELServer implementation. All the ODE BPEL Engine initialization is handled here.
*/
public final class BPELServerImpl implements BPELServer , Observer{
/* Shared logger; the class is a singleton, so static is fine. */
private static Log log = LogFactory.getLog(BPELServerImpl.class);

/* ODE BPEL Server instance*/
private BpelServerImpl odeBpelServer;

/* Multi-tenant process store */
private ProcessStoreImpl processStore;

/* JTA transaction manager, obtained via the configured transaction factory class */
private TransactionManager transactionManager;

/* For the moment it looks like we don't want multi-threaded http connection manager*/
private MultiThreadedHttpConnectionManager httpConnectionManager;

/* BPEL DAO Connection Factory*/
private BpelDAOConnectionFactory daoConnectionFactory;

/* ODE Database manager */
private Database db;

/* ODE Scheduler */
private Scheduler scheduler;

/* ODE Configuration properties */
private ODEConfigurationProperties odeConfigurationProperties;

/* Executor service shared by the job scheduler, cron scheduler and polled runnables */
private ExecutorService executorService;

/* Scheduler for system level cron jobs */
private CronScheduler cronScheduler;

/* Reaper thread that closes idle http connections */
private IdleConnectionTimeoutThread idleConnectionTimeoutThread;

/* BPEL Server Configuration */
private BPELServerConfiguration bpelServerConfiguration;

/* Eagerly created singleton instance */
private static BPELServerImpl ourInstance = new BPELServerImpl();

/**
 * @return the singleton BPEL server instance
 */
public static BPELServerImpl getInstance() {
    return ourInstance;
}

/* Private constructor: instances are obtained via getInstance() only. */
private BPELServerImpl() {
}
/**
* Initialize the ODE BPEL engine.
*
* @throws Exception if failed to start the BPEL engine.
*/
public void init() throws Exception {
    // Load server configuration first; every step below depends on it.
    bpelServerConfiguration = new BPELServerConfiguration();
    odeConfigurationProperties = new ODEConfigurationProperties(bpelServerConfiguration);
    if (log.isDebugEnabled()) {
        log.debug("Initializing transaction manager");
    }
    initTransactionManager();
    if (log.isDebugEnabled()) {
        log.debug("Creating data source");
    }
    initDataSource();
    if (log.isDebugEnabled()) {
        log.debug("Starting DAO");
    }
    initDAO();
    BPELEndpointReferenceContextImpl eprContext =
            new BPELEndpointReferenceContextImpl();
    if (log.isDebugEnabled()) {
        log.debug("Initializing BPEL process store");
    }
    initProcessStore(eprContext);
    if (log.isDebugEnabled()) {
        log.debug("Initializing BPEL server");
    }
    initBPELServer(eprContext);
    if (log.isDebugEnabled()) {
        log.debug("Initializing multithreaded connection manager");
    }
    initHttpConnectionManager();
    /* Register event listeners configured in ode-axis2.properties file*/
    registerEventListeners();
    /* Register message exchange interceptors configured in ode-axis.properties file*/
    registerMexInterceptors();
    registerExtensionActivityBundles();
    registerExtensionCorrelationFilters();
    //registerExtensionActivityBundles();
    //registerExternalVariableModules();
    try {
        odeBpelServer.start();
    } catch (Exception e) {
        // Release everything initialized so far before propagating the failure.
        shutdown();
        String errMsg = "BPEL Server failed to start.";
        log.error(errMsg, e);
        throw new Exception(errMsg, e);
    }
    if(bpelServerConfiguration.getUseDistributedLock() && isAxis2ClusteringEnabled()) {
        // Observe the holder to be notified when the Hazelcast service becomes available.
        BPELServerHolder.getInstance().addObserver(this);
        if(log.isDebugEnabled()) {
            log.debug("Clustering Enabled, Registering Observer for HazelCast service");
        }
    }
    registerMBeans();
}
/**
* Shutdown ODE BPEL Server, schedulers, process store, database connections and
* http connection pools.
*
* @throws Exception if error occurred while shutting down BPEL Server.
*/
public void shutdown() throws Exception {
    // Each component is shut down independently so a failure in one does not
    // prevent the others from being released.
    if (scheduler != null) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Shutting down quartz scheduler.");
            }
            scheduler.shutdown();
        } catch (Exception e) {
            log.warn("Scheduler couldn't be shut down.", e);
        } finally {
            scheduler = null;
        }
    }

    if (odeBpelServer != null) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Shutting down BPEL server.");
            }
            odeBpelServer.shutdown();
        } catch (Exception e) {
            log.warn("Error stopping services.", e);
        } finally {
            odeBpelServer = null;
        }
    }

    if (cronScheduler != null) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Shutting down cron scheduler.");
            }
            cronScheduler.shutdown();
        } catch (Exception e) {
            log.warn("Cron scheduler couldn't be shutdown.", e);
        } finally {
            cronScheduler = null;
        }
    }

    if (processStore != null) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Shutting down process store.");
            }
            processStore.shutdown();
        } catch (Exception e) {
            log.warn("Process store could not be shutdown.", e);
        } finally {
            processStore = null;
        }
    }

    if (daoConnectionFactory != null) {
        try {
            daoConnectionFactory.shutdown();
        } catch (Exception e) {
            log.warn("DAO shutdown failed.", e);
        } finally {
            daoConnectionFactory = null;
        }
    }

    if (db != null) {
        try {
            db.shutdown();
        } catch (Exception e) {
            log.warn("DB shutdown failed.", e);
        } finally {
            db = null;
        }
    }

    if (transactionManager != null) {
        transactionManager = null;
    }

    if (httpConnectionManager != null) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Shutting down HTTP Connection Manager.");
            }
            httpConnectionManager.shutdown();
        } catch (Exception e) {
            log.warn("HTTP Connection Manager shutdown failed.");
        }
    }

    if (idleConnectionTimeoutThread != null) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Shutting down Idle Connection Timeout Thread.");
            }
            idleConnectionTimeoutThread.shutdown();
        } catch (Exception e) {
            log.warn("Idle connection timeout thread shutdown failed.");
        }
    }

    // Guard against NPE: shutdown() is also invoked from init()'s failure path,
    // where the executor service may not have been created yet.
    if (executorService != null) {
        executorService.shutdown();
    }

    log.info("BPEL Server shutdown completed.");
}
/**
* Register BPEL Event listener.
*
* @param eventListenerClass Fully qualified class name of BpelEventListener implementation.
*/
public void registerEventListener(final String eventListenerClass) {
    try {
        // Instantiate via the default constructor; failures of any kind are
        // logged and swallowed so one bad listener does not abort startup.
        odeBpelServer.registerBpelEventListener(
                (BpelEventListener) Class.forName(eventListenerClass).newInstance());
        log.info("Registered custom BPEL event listener: " + eventListenerClass);
    } catch (Exception e) {
        log.warn("Couldn't register the event listener " + eventListenerClass
                + ", the class couldn't be loaded properly: ", e);
    }
}
/**
 * Register ODE message exchange interceptor.
 *
 * @param mexInterceptorClass Fully qualified class name of ODE MessageExchangeInterceptor implementation
 */
public void registerMessageExchangeInterceptor(final String mexInterceptorClass) {
    // NOTE(review): this is a no-op - the given interceptor class is never
    // registered. Confirm whether this stub is intentional or unfinished.
}
/**
* Get the multi-tenant process store instance of BPEL Server.
*
* @return MultiTenant Process store instance
*/
public MultiTenantProcessStore getMultiTenantProcessStore() {
    return processStore;
}

/**
 * Get the multi threaded http connection manager to use with external service invocations.
 *
 * @return HttpConnectionManager instance (multi-threaded implementation).
 */
public HttpConnectionManager getHttpConnectionManager() {
    return httpConnectionManager;
}
/**
 * @return the underlying ODE BPEL server, or null before init() / after shutdown()
 */
public BpelServerImpl getODEBPELServer() {
    return odeBpelServer;
}

/**
 * @return the JTA transaction manager created during initialization
 */
public TransactionManager getTransactionManager() {
    return transactionManager;
}

/**
 * @return the ODE configuration properties derived from the BPEL server configuration
 */
public ODEConfigurationProperties getOdeConfigurationProperties() {
    return odeConfigurationProperties;
}
/**
* Initialize the transaction manager.
*
* @throws BPELEngineException If error occured while initializing transaction manager
*/
private void initTransactionManager() throws BPELEngineException {
    String txFactoryName = bpelServerConfiguration.getTransactionFactoryClass();
    if (log.isDebugEnabled()) {
        log.debug("Initializing transaction manager using " + txFactoryName);
    }

    try {
        // Class<?> instead of the raw type avoids an unchecked/raw-type warning.
        Class<?> txFactoryClass = this.getClass().getClassLoader().loadClass(txFactoryName);
        Object txFactory = txFactoryClass.newInstance();
        // The factory is expected to expose a no-arg getTransactionManager() method.
        transactionManager = (TransactionManager) txFactoryClass.
                getMethod("getTransactionManager", (Class[]) null).invoke(txFactory);
        // Didn't use Debug Transaction manager which used in ODE.
        // TODO: Look for the place we use this axis parameter.
        //axisConfiguration.addParameter("ode.transaction.manager", transactionManager);
    } catch (Exception e) {
        log.fatal("Couldn't initialize a transaction manager with factory: "
                + txFactoryName, e);
        throw new BPELEngineException("Couldn't initialize a transaction manager with factory: "
                + txFactoryName, e);
    }
}
/**
* Initialize the data source.
*
* @throws BPELEngineException If error occured while initializing datasource
*/
private void initDataSource() throws BPELEngineException {
    db = new Database(odeConfigurationProperties);
    db.setTransactionManager(transactionManager);

    // When the server is started with -Dsetup, create the BPEL schema on first run.
    if (System.getProperty("setup") != null) {
        BPELDatabaseCreator bpelDBCreator;
        try {
            bpelDBCreator = new BPELDatabaseCreator(
                    db.<DataSource>lookupInJndi(odeConfigurationProperties.getDbDataSource()));
        } catch (Exception e) {
            String errMsg = "Error creating BPELDatabaseCreator";
            log.error(errMsg, e);
            throw new BPELEngineException(errMsg, e);
        }
        // Probe query: if ODE_SCHEMA_VERSION is selectable, the schema already exists.
        if (!bpelDBCreator.isDatabaseStructureCreated("SELECT * FROM ODE_SCHEMA_VERSION")) {
            try {
                //TODO rename following method
                bpelDBCreator.createRegistryDatabase();
            } catch (Exception e) {
                String errMsg = "Error creating BPEL database";
                log.error(errMsg, e);
                throw new BPELEngineException(errMsg, e);
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("BPEL database already exists. Using the old database.");
            }
        }
    }

    // In carbon, embedded H2 database for ODE is located at CARBON_HOME/repository/database
    String dbRoot = CarbonUtils.getCarbonHome() + File.separator + "repository" + File.separator
            + "database";
    File dbRootDir = new File(dbRoot);
    if (dbRootDir.exists() && dbRootDir.isDirectory()) {
        db.setWorkRoot(dbRootDir);
    } else {
        // directory missing: let ODE fall back to its default work root
        db.setWorkRoot(null);
    }

    try {
        db.start();
    } catch (Exception e) {
        String errMsg =
                "Error starting database connections, check the database configuration!";
        log.error(errMsg, e);
        throw new BPELEngineException(errMsg, e);
    }
}
/**
* Initialize ODE DAO connection factory.
*
* @throws BPELEngineException if DAO connection factory creation fails
*/
/**
 * Creates the ODE DAO connection factory from the already-started database.
 *
 * @throws BPELEngineException if DAO connection factory creation fails
 */
private void initDAO() throws BPELEngineException {
    String factoryClassName = odeConfigurationProperties.getDAOConnectionFactory();
    log.info("Using DAO Connection Factory class: " + factoryClassName);

    try {
        daoConnectionFactory = db.createDaoCF();
    } catch (Exception e) {
        String errMsg = "Error instantiating DAO Connection Factory class " + factoryClassName;
        log.error(errMsg, e);
        throw new BPELEngineException(errMsg, e);
    }
}
/**
* Initialize process store/
*
* @param eprContext Endpoint reference context
* @throws Exception if process store initialization failed
*/
/**
 * Creates the process store, points it at the local deployment unit repository
 * (CARBON_HOME/repository/bpel) and attaches the process store listener.
 *
 * @param eprContext Endpoint reference context
 * @throws Exception if process store initialization failed
 */
private void initProcessStore(EndpointReferenceContext eprContext) throws Exception {
    processStore = new ProcessStoreImpl(eprContext, db.getDataSource(), odeConfigurationProperties);

    File localDeploymentRepo = new File(CarbonUtils.getCarbonHome() +
            File.separator + "repository" + File.separator + "bpel");
    processStore.setLocalBPELDeploymentUnitRepo(localDeploymentRepo);

    processStore.registerListener(new ProcessStoreListenerImpl());
}
/**
* Init ODE BpelServer.
*
* @param eprContext endpoint reference context.
*/
private void initBPELServer(EndpointReferenceContext eprContext) {
    // The shared executor service must exist before the schedulers below are built.
    initExecutorService(createThreadFactory());
    odeBpelServer = new BpelServerImpl();
    setupJobScheduler();
    setupCronScheduler();
    odeBpelServer.setDaoConnectionFactory(daoConnectionFactory);
    odeBpelServer.setInMemDaoConnectionFactory(
            new BpelDAOConnectionFactoryImpl(scheduler, odeConfigurationProperties.getInMemMexTtl()));
    odeBpelServer.setEndpointReferenceContext(eprContext);
    odeBpelServer.setMessageExchangeContext(new BPELMessageExchangeContextImpl());
    odeBpelServer.setBindingContext(new BPELBindingContextImpl(this));
    odeBpelServer.setScheduler(scheduler);
    // TODO: Analyze a way of integrating with lazy loading
    activateDehydration();
    odeBpelServer.setMigrationTransactionTimeout(
            odeConfigurationProperties.getMigrationTransactionTimeout());
    odeBpelServer.setConfigProperties(
            odeConfigurationProperties.getProperties());
    odeBpelServer.init();
    // Throttling and hydration tuning taken from the ODE configuration.
    odeBpelServer.setInstanceThrottledMaximumCount(
            odeConfigurationProperties.getInstanceThrottledMaximumCount());
    odeBpelServer.setProcessThrottledMaximumCount(
            odeConfigurationProperties.getProcessThrottledMaximumCount());
    odeBpelServer.setProcessThrottledMaximumSize(
            odeConfigurationProperties.getProcessThrottledMaximumSize());
    odeBpelServer.setHydrationLazy(odeConfigurationProperties.isHydrationLazy());
    odeBpelServer.setHydrationLazyMinimumSize(
            odeConfigurationProperties.getHydrationLazyMinimumSize());
}
/**
 * Installs a count/age based LRU dehydration policy on the BPEL server,
 * but only when process dehydration is enabled in the server configuration.
 * Limits of zero or less are left at the policy's defaults.
 */
private void activateDehydration() {
    if (!bpelServerConfiguration.isProcessDehydrationEnabled()) {
        return;
    }
    CountLRUDehydrationPolicy policy = new CountLRUDehydrationPolicy();
    if (bpelServerConfiguration.getProcessDehydrationMaxAge() > 0) {
        policy.setProcessMaxAge(bpelServerConfiguration.getProcessDehydrationMaxAge());
        if (log.isDebugEnabled()) {
            log.debug("Process Max Age: "
                    + bpelServerConfiguration.getProcessDehydrationMaxAge());
        }
    }
    // Note: "Dehydraion" is the (misspelled) name of the configuration getter.
    if (bpelServerConfiguration.getProcessDehydraionMaxCount() > 0) {
        policy.setProcessMaxCount(bpelServerConfiguration.getProcessDehydraionMaxCount());
        if (log.isDebugEnabled()) {
            log.debug("Process Max Count: "
                    + bpelServerConfiguration.getProcessDehydraionMaxCount());
        }
    }
    odeBpelServer.setDehydrationPolicy(policy);
    log.info("Process Dehydration is activated...");
}
/**
 * Sets up the cron scheduler, wires it to the shared executor service and the
 * engine contexts, and schedules the configured system cleanup cron jobs.
 */
private void setupCronScheduler() {
    cronScheduler = new CronScheduler();
    cronScheduler.setScheduledTaskExec(executorService);
    cronScheduler.setContexts(odeBpelServer.getContexts());
    odeBpelServer.setCronScheduler(cronScheduler);
    cronScheduler.scheduleSystemCronJobs(bpelServerConfiguration.getSystemCleanupCronJobs());
}
/**
 * Creates the job scheduler, registers the BPEL server as its job processor,
 * and installs a polled-runnable processor backed by the shared executor.
 */
private void setupJobScheduler() {
    scheduler = createScheduler();
    scheduler.setJobProcessor(odeBpelServer);
    BpelServerImpl.PolledRunnableProcessor polledRunnableProcessor =
            new BpelServerImpl.PolledRunnableProcessor();
    polledRunnableProcessor.setPolledRunnableExecutorService(executorService);
    polledRunnableProcessor.setContexts(odeBpelServer.getContexts());
    // Note: "Processer" is the (misspelled) name of the ODE setter.
    scheduler.setPolledRunnableProcesser(polledRunnableProcessor);
}
/**
 * Builds the JDBC-backed simple scheduler for this node using the shared
 * data source, executor service and transaction manager.
 *
 * @return configured scheduler instance
 */
private Scheduler createScheduler() {
    SimpleScheduler simpleScheduler = new SimpleScheduler(bpelServerConfiguration.getNodeId(),
            new JdbcDelegate(db.getDataSource()),
            odeConfigurationProperties.getProperties());
    simpleScheduler.setExecutorService(executorService);
    simpleScheduler.setTransactionManager(transactionManager);
    return simpleScheduler;
}
/**
 * Creates the thread factory for the BPEL server's executor service.
 * Threads are daemons named "BPELServer-&lt;n&gt;".
 *
 * Fix: the original incremented a plain int field from newThread(), which the
 * executor may call from multiple threads; an AtomicInteger guarantees unique,
 * monotonically increasing thread numbers.
 */
private ThreadFactory createThreadFactory() {
    return new ThreadFactory() {
        private final java.util.concurrent.atomic.AtomicInteger threadNumber =
                new java.util.concurrent.atomic.AtomicInteger(0);

        public Thread newThread(Runnable r) {
            Thread t = new Thread(r, "BPELServer-" + threadNumber.incrementAndGet());
            t.setDaemon(true);
            return t;
        }
    };
}
/**
 * Creates the shared executor service: an unbounded cached pool when the
 * configured thread-pool maximum size is zero, otherwise a fixed pool of
 * exactly that many threads.
 *
 * @param threadFactory factory producing the pool's (daemon) threads
 */
private void initExecutorService(ThreadFactory threadFactory) {
    int maxPoolSize = odeConfigurationProperties.getThreadPoolMaxSize();
    executorService = (maxPoolSize == 0)
            ? Executors.newCachedThreadPool(threadFactory)
            : Executors.newFixedThreadPool(maxPoolSize, threadFactory);
}
/**
 * Initializes the multi-threaded HTTP connection manager used for outgoing
 * calls, validates the per-host/total connection limits from configuration,
 * and starts a background thread that closes idle connections.
 *
 * @throws Exception if either connection limit is configured as less than 1
 */
private void initHttpConnectionManager() throws Exception {
    httpConnectionManager = new MultiThreadedHttpConnectionManager();
    int maxConnectionsPerHost = bpelServerConfiguration.getMaxConnectionsPerHost();
    int maxTotalConnections = bpelServerConfiguration.getMaxTotalConnections();
    if (log.isDebugEnabled()) {
        log.debug(HttpConnectionManagerParams.MAX_HOST_CONNECTIONS + "=" + maxConnectionsPerHost);
        log.debug(HttpConnectionManagerParams.MAX_TOTAL_CONNECTIONS + "=" + maxTotalConnections);
    }
    if (maxConnectionsPerHost < 1 || maxTotalConnections < 1) {
        String errmsg = HttpConnectionManagerParams.MAX_HOST_CONNECTIONS + " and " +
                HttpConnectionManagerParams.MAX_TOTAL_CONNECTIONS
                + " must be positive integers!";
        log.error(errmsg);
        throw new Exception(errmsg);
    }
    httpConnectionManager.getParams().setDefaultMaxConnectionsPerHost(maxConnectionsPerHost);
    httpConnectionManager.getParams().setMaxTotalConnections(maxTotalConnections);
    // TODO: Modify this and move configuration to bps.xml
    // Register the connection manager to a idle check thread
    idleConnectionTimeoutThread = new IdleConnectionTimeoutThread();
    idleConnectionTimeoutThread.setName("Http_Idle_Connection_Timeout_Thread");
    // Timeout / check-interval both default to 30 seconds.
    long idleConnectionTimeout = Long.parseLong(
            odeConfigurationProperties
                    .getProperty("http.idle.connection.timeout", "30000"));
    long idleConnectionCheckInterval = Long.parseLong(
            odeConfigurationProperties
                    .getProperty("http.idle.connection.check.interval", "30000"));
    if (log.isDebugEnabled()) {
        log.debug("http.idle.connection.timeout=" + idleConnectionTimeout);
        log.debug("http.idle.connection.check.interval=" + idleConnectionCheckInterval);
    }
    idleConnectionTimeoutThread.setConnectionTimeout(idleConnectionTimeout);
    idleConnectionTimeoutThread.setTimeoutInterval(idleConnectionCheckInterval);
    idleConnectionTimeoutThread.addConnectionManager(httpConnectionManager);
    idleConnectionTimeoutThread.start();
}
/**
 * Registers BPEL event listeners on the engine: the built-in debugging
 * listener plus any custom listener classes named in the server configuration.
 * A listener class that cannot be instantiated is logged and skipped.
 */
private void registerEventListeners() {
    /* let's always register the debugging listener */
    odeBpelServer.registerBpelEventListener(new DebugBpelEventListener());
    // Iterating an empty list is a no-op, so no isEmpty() guard is needed.
    for (String listenerCN : bpelServerConfiguration.getEventListeners()) {
        try {
            odeBpelServer.registerBpelEventListener(
                    (BpelEventListener) Class.forName(listenerCN).newInstance());
            log.info("Registered custom BPEL event listener: " + listenerCN);
        } catch (Exception e) {
            // Fixed log message grammar: "couldn't loaded" -> "couldn't be loaded".
            log.warn("Couldn't register the event listener " + listenerCN
                    + ", the class couldn't be loaded properly: ", e);
        }
    }
}
/**
 * Registers the message-exchange interceptor classes named in the server
 * configuration. A class that cannot be instantiated is logged and skipped.
 */
private void registerMexInterceptors() {
    for (String interceptorCN : bpelServerConfiguration.getMexInterceptors()) {
        try {
            MessageExchangeInterceptor interceptor =
                    (MessageExchangeInterceptor) Class.forName(interceptorCN).newInstance();
            odeBpelServer.registerMessageExchangeInterceptor(interceptor);
            log.info("Registered message exchange interceptor: " + interceptorCN);
        } catch (Exception e) {
            log.warn("Couldn't register the message exchange interceptor " + interceptorCN
                    + ", the class couldn't be " + "loaded properly.", e);
        }
    }
}
/**
 * Registers extension activity bundle runtimes with the engine: the built-in
 * E4X and B4P bundles (loaded reflectively, each failure logged independently)
 * plus any bundle classes named in the server configuration.
 */
private void registerExtensionActivityBundles() {
    try {
        log.info("Registering E4X Extension...");
        odeBpelServer.registerExtensionBundle((ExtensionBundleRuntime) Class.
                forName("org.apache.ode.extension.e4x.JSExtensionBundle").newInstance());
    } catch (Exception e) {
        log.error("Couldn't register e4x extension bundles runtime.", e);
    }
    try {
        log.info("Registering B4P Extension...");
        odeBpelServer.registerExtensionBundle((ExtensionBundleRuntime) Class.
                forName("org.wso2.carbon.bpel.b4p.extension.BPEL4PeopleExtensionBundle").newInstance());
    } catch (Exception e) {
        log.error("Couldn't register B4P extension bundles runtime.", e);
    }
    List<String> extensionBundleRuntimes = bpelServerConfiguration.getExtensionBundleRuntimes();
    if (extensionBundleRuntimes != null) {
        for (String extension : extensionBundleRuntimes) {
            try {
                // instantiate bundle
                ExtensionBundleRuntime bundleRT =
                        (ExtensionBundleRuntime) Class.forName(extension).newInstance();
                // register extension bundle (BPEL server)
                odeBpelServer.registerExtensionBundle(bundleRT);
            } catch (Exception e) {
                // Fix: include the exception so the root cause is not lost.
                log.warn("Couldn't register the extension bundle runtime " + extension +
                        ", the class couldn't be " + "loaded properly.", e);
            }
        }
    }
    // TODO register extension bundle validators once the store supports them.
    /*
    if (extensionBundleValidators != null) {
        Map<QName, ExtensionValidator> validators = new HashMap<QName, ExtensionValidator>();
        for (String validator : extensionBundleValidators) {
            try {
                // instantiate bundle
                ExtensionBundleValidation bundleVal =
                        (ExtensionBundleValidation) Class.forName(validator).newInstance();
                //add validators
                validators.putAll(bundleVal.getExtensionValidators());
            } catch (Exception e) {
                log.warn("Couldn't register the extension bundle validator " + validator +
                        ", the class couldn't be " + "loaded properly.");
            }
        }
        // register extension bundle (BPEL store)
        store.setExtensionValidators(validators);
    }
    */
}
/**
 * Registers extension correlation filters with the engine: the built-in B4P
 * filter (loaded reflectively) plus any filter classes named in the server
 * configuration. A class that cannot be instantiated is logged and skipped.
 */
private void registerExtensionCorrelationFilters() {
    try {
        log.info("Registering B4P Filter...");
        odeBpelServer.registerExtensionCorrelationFilter((ExtensionCorrelationFilter) Class.
                forName("org.wso2.carbon.bpel.b4p.extension.BPEL4PeopleCorrelationFilter").newInstance());
    } catch (Exception e) {
        log.error("Couldn't register B4P extension filter.", e);
    }
    List<String> extensionFilters = bpelServerConfiguration.getExtensionCorrelationFilters();
    if (extensionFilters != null) {
        for (String filter : extensionFilters) {
            try {
                // instantiate and register the configured correlation filter
                ExtensionCorrelationFilter filterRT =
                        (ExtensionCorrelationFilter) Class.forName(filter).newInstance();
                odeBpelServer.registerExtensionCorrelationFilter(filterRT);
            } catch (Exception e) {
                // Fix: include the exception so the failure cause is not lost.
                log.warn("Couldn't register the extension correlation filter " + filter + ", the class couldn't be " +
                        "loaded properly.", e);
            }
        }
    }
}
/**
 * Bridges process store lifecycle events (deploy/activate/retire/undeploy)
 * to the ODE BPEL server: registering/unregistering processes, cleaning up
 * old process state, persisting deployment failures, and (re)scheduling
 * process cron jobs.
 */
private class ProcessStoreListenerImpl implements ProcessStoreListener {
    public void onProcessStoreEvent(ProcessStoreEvent processStoreEvent) {
        if (log.isDebugEnabled()) {
            log.debug("Process store event: " + processStoreEvent);
        }
        // May be null when the event refers to a process no longer in the store.
        ProcessConf pConf = processStore.getProcessConfiguration(processStoreEvent.pid);
        switch (processStoreEvent.type) {
            case DEPLOYED:
                if (pConf != null) {
                    /*
                     * If and only if an old process exists with the same pid,
                     * the old process is cleaned up. The following line is IMPORTANT and
                     * used for the case when the deployment and store do not have the
                     * process while the process itself exists in the BPEL_PROCESS table.
                     * Notice that the new process is actually created on the 'ACTIVATED'
                     * event.
                     */
                    odeBpelServer.cleanupProcess(pConf);
                }
                break;
            case ACTIVATED:
                // bounce the process: unregister, then re-register the fresh config
                odeBpelServer.unregister(processStoreEvent.pid);
                if (pConf != null) {
                    try {
                        odeBpelServer.register(pConf);
                    } catch (BpelEngineException ex) {
                        String failureCause = "Process registration failed for:" +
                                pConf.getProcessId() + ". " + ex.getMessage();
                        // create DeploymentContext in order to persist the error
                        int tenantID = processStore.getTenantId(pConf.getProcessId());
                        String bpelRepoRoot = processStore.getLocalDeploymentUnitRepo().getAbsolutePath();
                        ProcessConfigurationImpl pConfImpl = (ProcessConfigurationImpl) pConf;
                        File bpelArchive = new File(pConfImpl.getAbsolutePathForBpelArchive());
                        BPELDeploymentContext deploymentContext =
                                new BPELDeploymentContext(tenantID,
                                        bpelRepoRoot, bpelArchive, pConf.getVersion());
                        deploymentContext.setDeploymentFailureCause(failureCause);
                        deploymentContext.setStackTrace(ex);
                        deploymentContext.setFailed(true);
                        TenantProcessStoreImpl store =
                                (TenantProcessStoreImpl) processStore.getTenantsProcessStore(tenantID);
                        try {
                            // Best effort: persist the failure in the tenant's package repository.
                            store.getBPELPackageRepository().handleBPELPackageDeploymentError(deploymentContext);
                        } catch (Exception e) {
                            log.error("Unable to persist the failure cause. Failure: " + failureCause, e);
                        }
                        // Re-throw so the caller still sees the registration failure.
                        throw ex;
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("slightly odd:received event " +
                                processStoreEvent + " for process not in store!");
                    }
                }
                break;
            case RETIRED:
                // are there are instances of this process running?
                boolean hasInstances = odeBpelServer.hasActiveInstances(
                        processStoreEvent.pid);
                // Remove the process
                odeBpelServer.unregister(processStoreEvent.pid);
                // bounce the process if necessary: a retired process with live
                // instances must stay registered so those instances can finish.
                if (hasInstances) {
                    if (pConf != null) {
                        odeBpelServer.register(pConf);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("slightly odd:received event " +
                                    processStoreEvent + " for process not in store!");
                        }
                    }
                } else {
                    // we may have potentially created a lot of garbage, so,
                    // let's hope the garbage collector is configured properly.
                    if (pConf != null) {
                        odeBpelServer.cleanupProcess(pConf);
                    }
                }
                break;
            case DISABLED:
            case UNDEPLOYED:
                odeBpelServer.unregister(processStoreEvent.pid);
                if (pConf != null) {
                    odeBpelServer.cleanupProcess(pConf);
                }
                break;
            default:
                if (log.isDebugEnabled()) {
                    log.debug("Ignoring store event: " + processStoreEvent);
                }
        }
        // Cron maintenance: cancel jobs on undeploy, (re)schedule otherwise.
        if (pConf != null) {
            if (processStoreEvent.type == ProcessStoreEvent.Type.UNDEPLOYED) {
                if (log.isDebugEnabled()) {
                    log.debug("Cancelling all cron scheduled jobs on store event: "
                            + processStoreEvent);
                }
                odeBpelServer.getContexts().cronScheduler.cancelProcessCronJobs(
                        processStoreEvent.pid, true);
            }
            // Except for undeploy event, we need to re-schedule process dependent jobs
            if (log.isDebugEnabled()) {
                log.debug("(Re)scheduling cron scheduled jobs on store event: "
                        + processStoreEvent);
            }
            if (processStoreEvent.type != ProcessStoreEvent.Type.UNDEPLOYED) {
                odeBpelServer.getContexts().cronScheduler.scheduleProcessCronJobs(
                        processStoreEvent.pid, pConf);
            }
        }
    }
}
/**
 * @return the active BPEL server configuration
 */
public BPELServerConfiguration getBpelServerConfiguration() {
    return this.bpelServerConfiguration;
}
/**
 * Checked exception used for BPEL engine initialization and runtime
 * failures raised within this integration layer. Mirrors the standard
 * four Exception constructors.
 */
static class BPELEngineException extends Exception {
    public BPELEngineException() {
        super();
    }
    public BPELEngineException(String message) {
        super(message);
    }
    public BPELEngineException(String message, Throwable cause) {
        super(message, cause);
    }
    public BPELEngineException(Throwable cause) {
        super(cause);
    }
}
/**
 * Registers the Process, Instance and InstanceStatusMonitor MBeans under the
 * org.wso2.carbon.bpel.core.ode.integration.jmx domain.
 *
 * @throws Exception if MBean registration fails (the more specific JMX
 *                   exception types in the throws clause are subsumed by it)
 */
public void registerMBeans() throws Exception, MBeanRegistrationException, InstanceAlreadyExistsException, NotCompliantMBeanException {
    log.info("Registering MBeans");
    Processes processMBean= new Processes();
    Instance instanceMBean= new Instance();
    // The status monitor is a singleton shared with the engine.
    InstanceStatusMonitor statusMonitorMBean= InstanceStatusMonitor.getInstanceStatusMonitor();
    MBeanRegistrar.registerMBean(processMBean,"org.wso2.carbon.bpel.core.ode.integration.jmx:type=Process");
    MBeanRegistrar.registerMBean(instanceMBean, "org.wso2.carbon.bpel.core.ode.integration.jmx:type=Instance");
    MBeanRegistrar.registerMBean(statusMonitorMBean, "org.wso2.carbon.bpel.core.ode.integration.jmx:type=InstanceStatusMonitor");
}
/**
 * @return the job scheduler created during BPEL server initialization
 */
public Scheduler getScheduler() {
    return this.scheduler;
}
/**
 * Whether Axis2 clustering is available.
 * NOTE(review): the real clustering-agent check is commented out below, so
 * this currently always returns true — confirm this is intentional.
 */
private boolean isAxis2ClusteringEnabled() {
    // return BPELServerHolder.getInstance().getConfigCtxService().
    // getServerConfigContext().getAxisConfiguration().getClusteringAgent() != null;
    return true;
}
/**
 * Observer callback: once a Hazelcast instance is available, exposes its name
 * via a system property, wires it into the BPEL server, registers this node in
 * the BPS cluster node map (BPS-675) and installs a membership listener; then
 * installs the cluster abstraction on the scheduler.
 *
 * @param o   observable that triggered the update (unused)
 * @param arg notification argument (unused)
 */
public void update(Observable o, Object arg) {
    HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
    if(hazelcastInstance != null) {
        String name = hazelcastInstance.getName();
        // Set hazelcast instance name as system property
        System.setProperty("WSO2_HZ_INSTANCE_NAME", name);
        if(bpelServerConfiguration.getUseInstanceStateCache()) {
            // set use instance state cache property
            System.setProperty("WSO2_USE_STATE_CACHE", "true");
        }
        odeBpelServer.setHazelcastInstance(hazelcastInstance);
        if(log.isInfoEnabled()) {
            log.info("Configured HazelCast instance for BPS cluster");
        }
        // Registering this node in BPS cluster BPS-675.
        hazelcastInstance.getCluster().addMembershipListener(new MemberShipListener());
        Member localMember = hazelcastInstance.getCluster().getLocalMember();
        String localMemberID = getHazelCastNodeID(localMember);
        log.info("Registering HZ localMember ID " + localMemberID
                + " as ODE Node ID " + bpelServerConfiguration.getNodeId());
        // Map HZ member id (host:port) -> ODE node id; read back by getKnownNodes().
        hazelcastInstance.getMap(BPELConstants.BPS_CLUSTER_NODE_MAP)
                .put(localMemberID, bpelServerConfiguration.getNodeId());
    }
    // NOTE(review): the cluster impl is installed even when no Hazelcast
    // instance exists yet — confirm that is the intended behavior.
    ((SimpleScheduler) scheduler).setCluster(new ODEClusterImpl());
    //scheduler.start();
}
/**
 * Builds the cluster node identifier for a Hazelcast member from its socket
 * address, in "host:port" form. Added to fix BPS-675.
 *
 * @param member Hazelcast cluster member
 * @return node id in host:port form
 */
protected static String getHazelCastNodeID(Member member) {
    return member.getSocketAddress().getHostName()
            + ":" + member.getSocketAddress().getPort();
}
/**
 * ODECluster implementation backed by Hazelcast; added to fix BPS-675.
 */
class ODEClusterImpl implements ODECluster {
    /**
     * Cluster support is on only when distributed locking is configured and
     * Axis2 clustering is available.
     */
    @Override
    public boolean isClusterEnabled() {
        return bpelServerConfiguration.getUseDistributedLock() && isAxis2ClusteringEnabled();
    }

    /**
     * Check whether current node is the leader or not. The leader is the first
     * member in Hazelcast's membership iteration order.
     *
     * @return boolean
     */
    @Override
    public boolean isLeader() {
        HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
        // Idiom fix: return the condition directly instead of if/return true/false.
        return hazelcastInstance.getCluster().getMembers().iterator().next().localMember();
    }

    /**
     * returns Current BPS Nodes in the cluster.
     *
     * @return ODE Node list
     */
    @Override
    public List<String> getKnownNodes() {
        HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
        // Perf fix: resolve the distributed map once instead of twice per key.
        java.util.Map<Object, Object> nodeMap =
                hazelcastInstance.getMap(BPELConstants.BPS_CLUSTER_NODE_MAP);
        List<String> nodeList = new ArrayList<String>();
        for (Object key : nodeMap.keySet()) {
            nodeList.add((String) nodeMap.get(key));
        }
        return nodeList;
    }
}
/**
 * MemberShipListener class is added to fix BPS-675: keeps the BPS cluster
 * node map in sync with Hazelcast membership changes.
 */
class MemberShipListener implements MembershipListener{
    @Override
    public void memberAdded(MembershipEvent membershipEvent) {
        // Nothing to do here; joining members register themselves in update().
    }
    @Override
    public void memberRemoved(MembershipEvent membershipEvent) {
        HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
        Member leader = hazelcastInstance.getCluster().getMembers().iterator().next();
        // Allow Leader to update distributed map.
        if (leader.localMember()) {
            String leftMemberID = getHazelCastNodeID(membershipEvent.getMember());
            hazelcastInstance.getMap(BPELConstants.BPS_CLUSTER_NODE_MAP).remove(leftMemberID);
        }
    }
    @Override
    public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
        // Nothing to do here.
    }
}
}
| |
package net.fastfourier.something;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.FragmentManager;
import android.app.ProgressDialog;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.text.Editable;
import android.text.Html;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.Log;
import android.view.ActionMode;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.webkit.WebView;
import android.widget.EditText;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.salvadordalvik.fastlibrary.data.FastQueryTask;
import com.salvadordalvik.fastlibrary.util.FastDateUtils;
import net.fastfourier.something.data.SomeDatabase;
import net.fastfourier.something.request.PMReplyDataRequest;
import net.fastfourier.something.request.PMSendRequest;
import net.fastfourier.something.request.PreviewRequest;
import net.fastfourier.something.request.ReplyDataRequest;
import net.fastfourier.something.request.ReplyPostRequest;
import net.fastfourier.something.request.SomeError;
import java.util.List;
/**
* Created by matthewshepard on 2/10/14.
*/
public class ReplyFragment extends SomeFragment implements DialogInterface.OnCancelListener, TextWatcher, ActionMode.Callback {
    private static final int DRAFT_PREVIEW_LENGTH = 100;
    // BBCode tags that can be inserted into the reply text.
    private enum BBCODE {BOLD, ITALICS, UNDERLINE, STRIKEOUT, URL, VIDEO, IMAGE, TIMAGE, QUOTE, SPOILER, CODE}
    // Reply modes, passed in via the "reply_type" intent extra.
    public static final int TYPE_REPLY = 2;
    public static final int TYPE_QUOTE = 3;
    public static final int TYPE_EDIT = 4;
    public static final int TYPE_PM = 5;
    public static final int NEW_PM = -1;
    // Modal progress dialog shown while loading/posting; cleared via dismissDialog().
    private ProgressDialog dialog = null;
    private EditText replyContent, replyTitle, replyUsername;
    // Active text-selection action mode, if any (see showBBCodeMenu()).
    private ActionMode selectionMode;
    // Target ids from the launching intent; which are set depends on replyType.
    private int threadId, postId, pmId, replyType;
    private String pmUsername;
    // True once the reply/PM was sent; suppresses draft saving in onPause().
    private boolean sentReply = false;
    // Loaded form data for a thread reply/edit and for a PM, respectively.
    private ReplyDataRequest.ReplyDataResponse replyData = null;
    private PMReplyDataRequest.PMReplyData pmReplyData = null;
/** Inflates the reply layout together with the post-reply options menu. */
public ReplyFragment() {
    super(R.layout.reply_fragment, R.menu.post_reply);
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Pull the reply target out of the launching intent; absent extras default to 0/null.
    Intent intent = getActivity().getIntent();
    threadId = intent.getIntExtra("thread_id", 0);
    postId = intent.getIntExtra("post_id", 0);
    pmId = intent.getIntExtra("pm_id", 0);
    replyType = intent.getIntExtra("reply_type", 0);
    pmUsername = intent.getStringExtra("pm_username");
    // Each reply type requires exactly one combination of ids; anything else is a caller bug.
    switch (replyType){
        case TYPE_REPLY:
            // Thread reply: thread id only.
            if(threadId == 0 || postId != 0 || pmId != 0){
                throw new IllegalArgumentException("ID MISMATCH");
            }
            setTitle(getSafeString(R.string.reply_title_reply));
            break;
        case TYPE_QUOTE:
            // Quote: thread id plus the quoted post's id.
            if(threadId == 0 || postId == 0 || pmId != 0){
                throw new IllegalArgumentException("ID MISMATCH");
            }
            setTitle(getSafeString(R.string.reply_title_reply));
            break;
        case TYPE_EDIT:
            // Edit: post id only.
            if(threadId != 0 || postId == 0 || pmId != 0){
                throw new IllegalArgumentException("ID MISMATCH");
            }
            setTitle(getSafeString(R.string.reply_title_edit));
            break;
        case TYPE_PM:
            // PM: pm id only (NEW_PM (-1) also passes this check — presumably a new message).
            if(threadId != 0 || postId != 0 || pmId == 0){
                throw new IllegalArgumentException("ID MISMATCH");
            }
            setTitle(getSafeString(R.string.reply_title_pm));
            break;
        default:
            throw new IllegalArgumentException("INVALID REPLY TYPE");
    }
}
/**
 * Binds the input fields, shows the title/username fields only for PMs,
 * hooks up text-change and selection-mode callbacks, then kicks off the
 * initial data load.
 */
@Override
public void viewCreated(View frag, Bundle savedInstanceState) {
    replyContent = (EditText) frag.findViewById(R.id.reply_content);
    replyTitle = (EditText) frag.findViewById(R.id.reply_title);
    replyUsername = (EditText) frag.findViewById(R.id.reply_username);
    replyContent.addTextChangedListener(this);
    boolean isPrivateMessage = replyType == TYPE_PM;
    if (isPrivateMessage) {
        replyUsername.setVisibility(View.VISIBLE);
        replyTitle.setVisibility(View.VISIBLE);
        replyUsername.addTextChangedListener(this);
        replyTitle.addTextChangedListener(this);
    } else {
        replyUsername.setVisibility(View.GONE);
        replyTitle.setVisibility(View.GONE);
    }
    replyContent.setCustomSelectionActionModeCallback(this);
    startRefresh();
}
/**
 * Persists the in-progress reply/PM as a draft when the fragment is paused
 * and the content is worth saving (see shouldSaveDraft()).
 */
@Override
public void onPause() {
    super.onPause();
    if(shouldSaveDraft()){
        if(replyType == TYPE_PM){
            if(preparePMData()){
                // Fix: leftover debug logging was at ERROR level; demote to DEBUG.
                Log.d("ReplyFragment", "save draft "+pmId);
                SomeDatabase.getDatabase().insertRows(SomeDatabase.TABLE_SAVED_DRAFT, SQLiteDatabase.CONFLICT_REPLACE, pmReplyData.toContentValues());
            }
        }else{
            if(prepareReplyData()){
                SomeDatabase.getDatabase().insertRows(SomeDatabase.TABLE_SAVED_DRAFT, SQLiteDatabase.CONFLICT_REPLACE, replyData.toContentValues());
            }
        }
    }
}
/**
 * Decides whether the current text should be saved as a draft: only when
 * nothing was sent, the form data is loaded, the content field is non-empty,
 * and the text differs from the originally loaded content.
 *
 * @return true when onPause() should write a draft row
 */
private boolean shouldSaveDraft() {
    if(sentReply){
        return false;
    }
    // Fix: leftover debug logging was at ERROR level; demote to DEBUG.
    Log.d("ReplyFragment", "shouldSaveDraft "+pmId);
    if(replyType == TYPE_PM){
        return pmReplyData != null && replyContent.length() > 0 && !replyContent.getText().toString().trim().equalsIgnoreCase(pmReplyData.replyContent.trim());
    }else{
        return replyData != null && replyContent.length() > 0 && !replyContent.getText().toString().trim().equalsIgnoreCase(replyData.originalContent.trim());
    }
}
/**
 * Deletes any saved draft rows for the current reply target.
 * NOTE(review): PM drafts appear to reuse the reply_post_id column keyed by
 * pmId — confirm against the saved-draft table schema.
 */
private void discardDraft(){
    switch (replyType){
        case TYPE_EDIT:
            SomeDatabase.getDatabase().deleteRows(SomeDatabase.TABLE_SAVED_DRAFT, "reply_post_id=? AND reply_type=?", Long.toString(postId), Long.toString(TYPE_EDIT));
            break;
        case TYPE_REPLY:
        case TYPE_QUOTE:
            // Replies and quotes share one thread-scoped draft slot (any non-edit type).
            SomeDatabase.getDatabase().deleteRows(SomeDatabase.TABLE_SAVED_DRAFT, "reply_thread_id=? AND reply_type!=?", Long.toString(threadId), Long.toString(TYPE_EDIT));
            break;
        case TYPE_PM:
            SomeDatabase.getDatabase().deleteRows(SomeDatabase.TABLE_SAVED_DRAFT, "reply_post_id=? AND reply_type=?", Long.toString(pmId), Long.toString(TYPE_PM));
            break;
    }
}
/**
 * Enables the send action only when the loaded form data exists and all
 * required fields are non-blank; hides the preview action for PMs.
 */
@Override
public void onPrepareOptionsMenu(Menu menu) {
    super.onPrepareOptionsMenu(menu);
    MenuItem reply = menu.findItem(R.id.menu_post_reply);
    if (reply != null) {
        boolean replyEnabled;
        if (replyType == TYPE_PM) {
            // PMs additionally need a title and a recipient username.
            replyEnabled = pmReplyData != null
                    && hasTrimmedText(replyContent)
                    && hasTrimmedText(replyTitle)
                    && hasTrimmedText(replyUsername);
        } else {
            replyEnabled = replyData != null && hasTrimmedText(replyContent);
        }
        reply.setEnabled(replyEnabled);
    }
    MenuItem preview = menu.findItem(R.id.menu_preview);
    if (preview != null && replyType == TYPE_PM) {
        preview.setEnabled(false);
        preview.setVisible(false);
    }
}

/** True when the field's text is non-null and non-empty after trimming. */
private static boolean hasTrimmedText(EditText field) {
    return field.getText() != null && field.getText().toString().trim().length() > 0;
}
/**
 * Dispatches toolbar actions: preview, send, and the BBCode insertion
 * shortcuts; anything else falls through to the superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()){
        case R.id.menu_preview:
            // Preview is server-rendered; see postPreview() for why it's a POST.
            postPreview();
            return true;
        case R.id.menu_post_reply:
            confirmReply();
            return true;
        case R.id.bbcode_bold:
            insertBBCode(BBCODE.BOLD);
            return true;
        case R.id.bbcode_italics:
            insertBBCode(BBCODE.ITALICS);
            return true;
        case R.id.bbcode_underline:
            insertBBCode(BBCODE.UNDERLINE);
            return true;
        case R.id.bbcode_strikeout:
            insertBBCode(BBCODE.STRIKEOUT);
            return true;
        case R.id.bbcode_url:
            insertBBCode(BBCODE.URL);
            return true;
        case R.id.bbcode_video:
            insertBBCode(BBCODE.VIDEO);
            return true;
        case R.id.bbcode_image:
            insertBBCode(BBCODE.IMAGE);
            return true;
        case R.id.bbcode_timage:
            insertBBCode(BBCODE.TIMAGE);
            return true;
        case R.id.bbcode_quote:
            insertBBCode(BBCODE.QUOTE);
            return true;
        case R.id.bbcode_spoiler:
            insertBBCode(BBCODE.SPOILER);
            return true;
        case R.id.bbcode_code:
            insertBBCode(BBCODE.CODE);
            return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Inserts the given BBCode tag pair at the content field's current cursor
 * position (or wraps the current selection).
 */
public void insertBBCode(BBCODE code){
    // -1/-1 means "use the EditText's current selection".
    insertBBCode(code, -1, -1);
}
/**
 * Inserts the tag pair for {@code code} into the reply content: wrapping the
 * given selection when start != end, otherwise inserting start+end tags at the
 * cursor and placing the caret between them. URL/IMAGE/TIMAGE pre-fill the tag
 * from a suitable http(s) URL on the clipboard, if present.
 *
 * Fix: the clipboard-probing logic was duplicated three times (URL, IMAGE,
 * TIMAGE); it is now extracted into two helpers with identical behavior —
 * including the original's lowercasing of image URLs before both the
 * extension check and insertion.
 *
 * @param code           BBCode tag to insert
 * @param selectionStart selection start, or negative to use the field's selection
 * @param selectionEnd   selection end (ignored when selectionStart is negative)
 */
public void insertBBCode(BBCODE code, int selectionStart, int selectionEnd){
    ClipboardManager cb = (ClipboardManager) getActivity().getSystemService(Context.CLIPBOARD_SERVICE);
    if(selectionStart < 0){
        //update selection values
        selectionStart = replyContent.getSelectionStart();
        selectionEnd = replyContent.getSelectionEnd();
    }
    boolean highlighted = selectionStart != selectionEnd;
    String startTag = null;
    String endTag = null;
    switch(code){
        case BOLD:
            startTag = "[b]";
            endTag = "[/b]";
            break;
        case ITALICS:
            startTag = "[i]";
            endTag = "[/i]";
            break;
        case UNDERLINE:
            startTag = "[u]";
            endTag = "[/u]";
            break;
        case STRIKEOUT:
            startTag = "[s]";
            endTag = "[/s]";
            break;
        case URL:
            String link = clipboardHttpUrl(cb);
            startTag = (link != null) ? "[url="+link+"]" : "[url]";
            endTag = "[/url]";
            break;
        case QUOTE:
            startTag = "[quote]";
            endTag = "[/quote]";
            break;
        case IMAGE:
            String imageUrl = clipboardImageUrl(cb);
            startTag = (imageUrl != null) ? "[img]"+imageUrl : "[img]";
            endTag = "[/img]";
            break;
        case TIMAGE:
            String timageUrl = clipboardImageUrl(cb);
            startTag = (timageUrl != null) ? "[timg]"+timageUrl : "[timg]";
            endTag = "[/timg]";
            break;
        case VIDEO:
            startTag = "[video]";
            endTag = "[/video]";
            break;
        case SPOILER:
            startTag = "[spoiler]";
            endTag = "[/spoiler]";
            break;
        case CODE:
            startTag = "[code]";
            endTag = "[/code]";
            break;
    }
    if(replyContent.getEditableText() != null){
        if(highlighted){
            replyContent.getEditableText().insert(selectionStart, startTag);
            replyContent.getEditableText().insert(selectionEnd+startTag.length(), endTag);
            replyContent.setSelection(selectionStart+startTag.length());
        }else{
            replyContent.getEditableText().insert(selectionStart, startTag+endTag);
            replyContent.setSelection(selectionStart+startTag.length());
        }
    }
}

/** Returns the clipboard's text when it is an http(s) URL, else null. */
private static String clipboardHttpUrl(ClipboardManager cb){
    ClipData clip = cb.getPrimaryClip();
    if(clip != null && clip.getItemCount() > 0){
        CharSequence clipText = clip.getItemAt(0).getText();
        if(clipText != null){
            String text = clipText.toString();
            if(text.startsWith("http://") || text.startsWith("https://")){
                return text;
            }
        }
    }
    return null;
}

/**
 * Returns the clipboard's text, lowercased, when it is an http(s) URL ending
 * in .jpg/.gif/.png, else null. Lowercasing matches the original behavior.
 */
private static String clipboardImageUrl(ClipboardManager cb){
    ClipData clip = cb.getPrimaryClip();
    if(clip != null && clip.getItemCount() > 0){
        CharSequence clipText = clip.getItemAt(0).getText();
        if(clipText != null){
            String text = clipText.toString().toLowerCase();
            if((text.startsWith("http://") || text.startsWith("https://")) && (text.endsWith(".jpg") || text.endsWith(".gif") || text.endsWith(".png"))){
                return text;
            }
        }
    }
    return null;
}
@Override
public boolean onCreateActionMode(ActionMode mode, Menu menu) {
    // Remember the text-selection action mode so showBBCodeMenu() can dismiss it.
    selectionMode = mode;
    return true;
}
@Override
public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
    // Add the BBCode entry to the text-selection context actionbar.
    getActivity().getMenuInflater().inflate(R.menu.bbcode_block, menu);
    return true;
}
/**
 * Handles the custom BBCode entry in the selection action mode; every other
 * item is left to the default handling.
 */
@Override
public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
    if(item.getItemId() != R.id.bbcode){
        return false;
    }
    showBBCodeMenu();
    return true;
}
@Override
public void onDestroyActionMode(ActionMode mode) {
    // The selection action mode is gone; drop the reference.
    selectionMode = null;
}
/**
 * Shows the BBCode picker dialog for the current text selection.
 *
 * We have to dismiss the contextual actionbar first: a submenu can't live in
 * the context actionbar without auto-closing
 * (https://code.google.com/p/android/issues/detail?id=23381), and dismissing
 * it kills the selection — so the range is saved and reselected.
 */
private void showBBCodeMenu() {
    final int selectionStart = replyContent.getSelectionStart();
    final int selectionEnd = replyContent.getSelectionEnd();
    if (selectionMode != null) {
        selectionMode.finish();
        selectionMode = null;
    }
    replyContent.setSelection(selectionStart, selectionEnd);
    // Dialog item positions map 1:1 onto R.array.bbcode_items order.
    final BBCODE[] codes = {
            BBCODE.BOLD, BBCODE.ITALICS, BBCODE.UNDERLINE, BBCODE.STRIKEOUT,
            BBCODE.IMAGE, BBCODE.TIMAGE, BBCODE.URL, BBCODE.VIDEO,
            BBCODE.QUOTE, BBCODE.SPOILER, BBCODE.CODE};
    new AlertDialog.Builder(getActivity()).setTitle(R.string.bbcode).setItems(R.array.bbcode_items, new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int which) {
            // Out-of-range positions are ignored, matching the original switch's default.
            if (which >= 0 && which < codes.length) {
                insertBBCode(codes[which], selectionStart, selectionEnd);
            }
        }
    }).show();
}
/**
 * Loads the data needed to compose the reply or PM behind a modal progress
 * dialog; cancelling the dialog triggers onCancel() on this fragment.
 */
@Override
public void refreshData(boolean pullToRefresh, boolean staleRefresh) {
    dismissDialog();
    dialog = ProgressDialog.show(getActivity(), getString(R.string.post_loading), getString(R.string.please_wait), true, false, this);
    if(replyType == TYPE_PM){
        queueRequest(new PMReplyDataRequest(pmId, pmReplyListener, loadingErrorListener));
    }else{
        queueRequest(new ReplyDataRequest(threadId, postId, replyType, replyListener, loadingErrorListener));
    }
}
/**
 * Shows a type-specific confirmation dialog (reply/quote/edit/PM) and posts
 * the reply when confirmed. Assumes replyData (or replyUsername for PMs) is
 * populated — the send action is only enabled then (see onPrepareOptionsMenu).
 */
private void confirmReply(){
    if(getActivity() != null){
        int title, confirm;
        String message;
        switch (replyType){
            case TYPE_REPLY:
                confirm = R.string.confirm_reply;
                title = R.string.reply_confirm_title_reply;
                message = getSafeString(R.string.reply_confirm_message_reply) +"\n"+ replyData.threadTitle;
                break;
            case TYPE_QUOTE:
                confirm = R.string.confirm_quote;
                title = R.string.reply_confirm_title_quote;
                message = getSafeString(R.string.reply_confirm_message_quote) +"\n"+ replyData.threadTitle;
                break;
            case TYPE_EDIT:
                confirm = R.string.confirm_edit;
                title = R.string.reply_confirm_title_edit;
                message = getSafeString(R.string.reply_confirm_message_edit) +"\n"+ replyData.threadTitle;
                break;
            case TYPE_PM:
                confirm = R.string.confirm_pm;
                title = R.string.reply_confirm_title_pm;
                message = getSafeString(R.string.reply_confirm_message_pm) +"\n"+replyUsername.getText();
                break;
            default:
                throw new IllegalArgumentException("INVALID REPLY TYPE");
        }
        new AlertDialog.Builder(getActivity())
                .setTitle(title)
                .setMessage(message)
                .setPositiveButton(confirm,
                        new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                postReply();
                            }
                        }
                )
                .setNegativeButton(R.string.cancel, null)
                .show();
    }
}
// Technically it's "getPreview", but we have to do an HTML post to get a preview...
private void postPreview() {
    dismissDialog();
    dialog = ProgressDialog.show(getActivity(), getString(R.string.preview_loading), getString(R.string.please_wait), true, false, this);
    if (!prepareReplyData()) {
        // This shouldn't happen; throw and log via bugsense.
        throw new IllegalArgumentException("MISSING REPLY DATA");
    }
    queueRequest(new PreviewRequest(replyData, previewResult, postingErrorListener));
}
private void postReply(){
switch (replyType){
case TYPE_REPLY:
case TYPE_QUOTE:
case TYPE_EDIT:
if(prepareReplyData()){
queueRequest(new ReplyPostRequest(replyData, postingResult, postingErrorListener));
dialog = ProgressDialog.show(getActivity(), getSafeString(R.string.posting_title), getSafeString(R.string.posting_message), true, false, this);
}else{
//this shouldn't happen, throw and log via bugsense
throw new IllegalArgumentException("MISSING REPLY DATA");
}
break;
case TYPE_PM:
if(preparePMData()){
queueRequest(new PMSendRequest(pmReplyData, pmSendResult, postingErrorListener));
dialog = ProgressDialog.show(getActivity(), getSafeString(R.string.sending_title), getSafeString(R.string.posting_message), true, false, this);
}else{
//this shouldn't happen, throw and log via bugsense
throw new IllegalArgumentException("MISSING PM REPLY DATA");
}
break;
}
}
private boolean prepareReplyData(){
Editable content = replyContent.getText();
if(replyData != null && content != null && content.length() > 0) {
replyData.replyMessage = content.toString().trim();
return true;
}
return false;
}
private boolean preparePMData(){
Editable content = replyContent.getText();
Editable title = replyTitle.getText();
Editable username = replyUsername.getText();
if(pmReplyData != null && title != null && username != null && content != null && content.length() > 0) {
pmReplyData.replyMessage = content.toString().trim();
pmReplyData.replyUsername = username.toString().trim();
pmReplyData.replyTitle = title.toString().trim();
return true;
}
return false;
}
    /** Success callback for a sent PM: marks the reply as sent, discards any
     *  saved draft, and finishes the host activity with TYPE_PM as result code. */
    private Response.Listener<PMSendRequest.PMSendResult> pmSendResult = new Response.Listener<PMSendRequest.PMSendResult>() {
        @Override
        public void onResponse(PMSendRequest.PMSendResult response) {
            sentReply = true;
            discardDraft();
            dismissDialog();
            Activity activity = getActivity();
            if(activity != null){
                activity.setResult(TYPE_PM);
                activity.finish();
            }
        }
    };
    /** Success callback for a preview request: dismisses the progress dialog
     *  and opens PreviewActivity with the rendered HTML payload. */
    private Response.Listener<PreviewRequest.PreviewData> previewResult = new Response.Listener<PreviewRequest.PreviewData>() {
        @Override
        public void onResponse(PreviewRequest.PreviewData response) {
            dismissDialog();
            startActivityForResult(
                    new Intent(getActivity(), PreviewActivity.class)
                            .putExtra("threadHtml", response.htmlData),
                    0
            );
        }
    };
    /** Success callback for a posted reply/edit: discards the draft and finishes
     *  the activity, passing back the thread/post ids to jump to. */
    private Response.Listener<ReplyPostRequest.ReplyPostResult> postingResult = new Response.Listener<ReplyPostRequest.ReplyPostResult>() {
        @Override
        public void onResponse(ReplyPostRequest.ReplyPostResult response) {
            sentReply = true;
            discardDraft();
            dismissDialog();
            Activity activity = getActivity();
            if(activity != null){
                // result code doubles as the post id to jump to — TODO confirm callers expect this
                activity.setResult(response.jumpPostId, new Intent().putExtra("thread_id", response.jumpThreadId).putExtra("post_id", response.jumpPostId));
                activity.finish();
            }
        }
    };
    /** Callback for PM prefill data: populates content/recipient/title fields,
     *  focuses the first field still needing input, then checks for a saved draft. */
    private Response.Listener<PMReplyDataRequest.PMReplyData> pmReplyListener = new Response.Listener<PMReplyDataRequest.PMReplyData>() {
        @Override
        public void onResponse(PMReplyDataRequest.PMReplyData replyDataResponse) {
            pmReplyData = replyDataResponse;
            if(replyDataResponse.replyContent != null && replyDataResponse.replyContent.length() > 0){
                // quoted content goes below two blank lines; caret starts at the top
                replyContent.setText("\n\n"+replyDataResponse.replyContent);
                replyContent.setSelection(0);
            }
            if(!TextUtils.isEmpty(replyDataResponse.replyUsername)){
                replyUsername.setText(replyDataResponse.replyUsername);
            }else if(!TextUtils.isEmpty(pmUsername)){
                // fall back to the recipient passed in via arguments
                replyUsername.setText(pmUsername);
            }
            if(replyDataResponse.replyTitle != null){
                replyTitle.setText(replyDataResponse.replyTitle);
            }
            // focus the first empty field: content if both filled, else title
            if(replyUsername.length() > 0 && replyTitle.length() > 0){
                replyContent.requestFocusFromTouch();
            }else if(replyUsername.length() > 0){
                replyTitle.requestFocusFromTouch();
            }
            dismissDialog();
            invalidateOptionsMenu();
            querySavedPM();
        }
    };
private void dismissDialog(){
if(dialog != null){
dialog.dismiss();
dialog = null;
}
}
    /** Callback for reply/quote/edit prefill data: seeds the editor with any
     *  original content, sets the action bar title, then checks for drafts. */
    private Response.Listener<ReplyDataRequest.ReplyDataResponse> replyListener = new Response.Listener<ReplyDataRequest.ReplyDataResponse>() {
        @Override
        public void onResponse(ReplyDataRequest.ReplyDataResponse replyDataResponse) {
            replyData = replyDataResponse;
            switch (replyType){
                case TYPE_REPLY:
                    setTitle("Reply: "+replyDataResponse.threadTitle);
                    break;
                case TYPE_QUOTE:
                    // quoted text first, caret placed after it (+2 for the two newlines)
                    replyContent.setText(replyDataResponse.originalContent +"\n\n");
                    replyContent.setSelection(replyDataResponse.originalContent.length() + 2);
                    setTitle("Reply: "+replyDataResponse.threadTitle);
                    break;
                case TYPE_EDIT:
                    replyContent.setText(replyDataResponse.originalContent +"\n\n");
                    replyContent.setSelection(replyDataResponse.originalContent.length() + 2);
                    setTitle("Edit: "+replyDataResponse.threadTitle);
                    break;
            }
            dismissDialog();
            invalidateOptionsMenu();
            // edits have their own per-post draft store; replies/quotes share one
            if(replyType == TYPE_EDIT){
                querySavedEdit();
            }else{
                querySavedDrafts();
            }
        }
    };
    /** Error callback while loading prefill data. Known server errors show the
     *  server's message and close the screen; anything else offers retry/cancel. */
    private Response.ErrorListener loadingErrorListener = new Response.ErrorListener() {
        @Override
        public void onErrorResponse(VolleyError volleyError) {
            dismissDialog();
            if(getActivity() != null){
                if(volleyError instanceof SomeError){
                    // server-reported error: message is user-presentable, no point retrying
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.post_loading_failed)
                            .setMessage(volleyError.getMessage())
                            .setPositiveButton(R.string.button_ok, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    if (getActivity() != null) {
                                        getActivity().finish();
                                    }
                                }
                            })
                            .show();
                }else{
                    // transient/network failure: allow retry, cancel closes the screen
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.post_loading_failed)
                            .setMessage(R.string.posting_failed_message)
                            .setPositiveButton(R.string.retry, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    startRefresh();
                                }
                            })
                            .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    if (getActivity() != null) {
                                        getActivity().finish();
                                    }
                                }
                            })
                            .show();
                }
            }
        }
    };
    /** Error callback while posting/sending/previewing. Unlike loading errors,
     *  the screen stays open so the user's composed text is not lost. */
    private Response.ErrorListener postingErrorListener = new Response.ErrorListener() {
        @Override
        public void onErrorResponse(VolleyError volleyError) {
            dismissDialog();
            if(getActivity() != null){
                if(volleyError instanceof SomeError){
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.posting_failed_title)
                            .setMessage(volleyError.getMessage())
                            .setPositiveButton(R.string.button_ok, null)
                            .show();
                }else{
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.posting_failed_title)
                            .setMessage(R.string.posting_failed_message)
                            .setPositiveButton(R.string.retry, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    postReply();
                                }
                            })
                            .setNegativeButton(R.string.cancel, null)
                            .show();
                }
            }
        }
    };
    /**
     * Looks for the newest non-edit draft saved for this thread and, if one
     * exists, offers to restore ("Keep"/"Multiquote") or discard it.
     */
    private void querySavedDrafts(){
        new FastQueryTask<ReplyDataRequest.ReplyDataResponse>(SomeDatabase.getDatabase(),
            new FastQueryTask.QueryResultCallback<ReplyDataRequest.ReplyDataResponse>() {
                @Override
                public int[] findColumns(Cursor data) {
                    return FastQueryTask.findColumnIndicies(data, ReplyDataRequest.ReplyDataResponse.COLUMNS);
                }
                @Override
                public void queryResult(List<ReplyDataRequest.ReplyDataResponse> results) {
                    if(results.size() > 0 && getActivity() != null){
                        // newest first per the ORDER BY below; only offer the latest draft
                        final ReplyDataRequest.ReplyDataResponse draft = results.get(0);
                        StringBuilder message = new StringBuilder("You have a saved reply:<br/><br/><i>");
                        // truncated, HTML-ified preview of the draft body
                        if(draft.replyMessage.length() > DRAFT_PREVIEW_LENGTH){
                            message.append(draft.replyMessage.substring(0, DRAFT_PREVIEW_LENGTH).replaceAll("\\n","<br/>"));
                            message.append("...");
                        }else{
                            message.append(draft.replyMessage.replaceAll("\\n","<br/>"));
                        }
                        message.append("</i>");
                        if(!TextUtils.isEmpty(draft.savedTimestamp)){
                            message.append("<br/><br/>Saved ");
                            message.append(FastDateUtils.shortRecentDate(draft.savedTimestamp));
                            message.append(" ago");
                        }
                        new AlertDialog.Builder(getActivity())
                                .setTitle(getString(R.string.reply_draft_title_reply))
                                .setMessage(Html.fromHtml(message.toString()))
                                .setPositiveButton(replyType == TYPE_QUOTE ? "Multiquote" : "Keep", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        // quote: prepend draft above the freshly quoted content;
                                        // plain reply: restore the draft alone
                                        if (replyType == TYPE_QUOTE) {
                                            replyContent.setText(draft.replyMessage+"\n"+replyData.originalContent+"\n\n");
                                        } else if (replyType == TYPE_REPLY) {
                                            replyContent.setText(draft.replyMessage.trim()+"\n\n");
                                        }
                                        replyContent.setSelection(replyContent.length());
                                    }
                                })
                                .setNegativeButton("Discard", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        discardDraft();
                                    }
                                })
                                .show();
                    }
                }
                @Override
                public ReplyDataRequest.ReplyDataResponse createItem(Cursor data, int[] columns) {
                    return new ReplyDataRequest.ReplyDataResponse(data);
                }
            })
            .query(SomeDatabase.TABLE_SAVED_DRAFT, "reply_saved_timestamp DESC", "reply_thread_id=? AND reply_type!=?", Long.toString(threadId), Long.toString(TYPE_EDIT));
    }
    /**
     * Looks for a saved edit draft for this specific post and, if one exists,
     * offers to restore or discard it.
     */
    private void querySavedEdit(){
        new FastQueryTask<ReplyDataRequest.ReplyDataResponse>(SomeDatabase.getDatabase(),
            new FastQueryTask.QueryResultCallback<ReplyDataRequest.ReplyDataResponse>() {
                @Override
                public int[] findColumns(Cursor data) {
                    return FastQueryTask.findColumnIndicies(data, ReplyDataRequest.ReplyDataResponse.COLUMNS);
                }
                @Override
                public void queryResult(List<ReplyDataRequest.ReplyDataResponse> results) {
                    if(results.size() > 0 && getActivity() != null){
                        final ReplyDataRequest.ReplyDataResponse draft = results.get(0);
                        StringBuilder message = new StringBuilder("You have a saved edit:<br/><br/><i>");
                        if(draft.replyMessage.length() > DRAFT_PREVIEW_LENGTH){
                            message.append(draft.replyMessage.substring(0, DRAFT_PREVIEW_LENGTH).replaceAll("\\n","<br/>"));
                            message.append("...");
                        }else{
                            message.append(draft.replyMessage.replaceAll("\\n","<br/>"));
                        }
                        message.append("</i>");
                        if(!TextUtils.isEmpty(draft.savedTimestamp)){
                            message.append("<br/><br/>Saved ");
                            message.append(FastDateUtils.shortRecentDate(draft.savedTimestamp));
                            message.append(" ago");
                        }
                        new AlertDialog.Builder(getActivity())
                                .setTitle(getString(R.string.reply_draft_title_edit))
                                .setMessage(Html.fromHtml(message.toString()))
                                .setPositiveButton("Keep", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        // restoring an edit replaces the editor contents entirely
                                        replyContent.setText(draft.replyMessage);
                                        replyContent.setSelection(draft.replyMessage.length());
                                    }
                                })
                                .setNegativeButton("Discard", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        discardDraft();
                                    }
                                })
                                .show();
                    }
                }
                @Override
                public ReplyDataRequest.ReplyDataResponse createItem(Cursor data, int[] columns) {
                    return new ReplyDataRequest.ReplyDataResponse(data);
                }
            })
            .query(SomeDatabase.TABLE_SAVED_DRAFT, "reply_saved_timestamp DESC", "reply_post_id=? AND reply_type=?", Long.toString(postId), Long.toString(replyType));
    }
    /**
     * Looks for a saved PM draft for this conversation and, if one exists,
     * offers to restore (message, title, recipient) or discard it.
     */
    private void querySavedPM(){
        // NOTE(review): Log.e for routine tracing — presumably leftover debugging; consider Log.d
        Log.e("ReplyFragment", "querySavedPM "+pmId);
        new FastQueryTask<PMReplyDataRequest.PMReplyData>(SomeDatabase.getDatabase(),
            new FastQueryTask.QueryResultCallback<PMReplyDataRequest.PMReplyData>() {
                @Override
                public int[] findColumns(Cursor data) {
                    // unlike the other two queries this returns no indices; createItem
                    // below reads the Cursor directly — TODO confirm this is intentional
                    return new int[0];
                }
                @Override
                public void queryResult(List<PMReplyDataRequest.PMReplyData> results) {
                    if(results.size() > 0 && getActivity() != null){
                        final PMReplyDataRequest.PMReplyData draft = results.get(0);
                        StringBuilder message = new StringBuilder("You have a saved message:<br/><br/><i>");
                        if(draft.replyMessage.length() > DRAFT_PREVIEW_LENGTH){
                            message.append(draft.replyMessage.substring(0, DRAFT_PREVIEW_LENGTH).replaceAll("\\n","<br/>"));
                            message.append("...");
                        }else{
                            message.append(draft.replyMessage.replaceAll("\\n","<br/>"));
                        }
                        message.append("</i>");
                        if(!TextUtils.isEmpty(draft.savedTimestamp)){
                            message.append("<br/><br/>Saved ");
                            message.append(FastDateUtils.shortRecentDate(draft.savedTimestamp));
                            message.append(" ago");
                        }
                        new AlertDialog.Builder(getActivity())
                                .setTitle(getString(R.string.reply_draft_title_pm))
                                .setMessage(Html.fromHtml(message.toString()))
                                .setPositiveButton("Keep", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        replyContent.setText(draft.replyMessage);
                                        replyContent.setSelection(draft.replyMessage.length());
                                        replyTitle.setText(draft.replyTitle);
                                        replyUsername.setText(draft.replyUsername);
                                    }
                                })
                                .setNegativeButton("Discard", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        discardDraft();
                                    }
                                })
                                .show();
                    }
                }
                @Override
                public PMReplyDataRequest.PMReplyData createItem(Cursor data, int[] columns) {
                    Log.e("ReplyFragment", "createItem "+pmId);
                    return new PMReplyDataRequest.PMReplyData(data);
                }
            })
            .query(SomeDatabase.TABLE_SAVED_DRAFT, "reply_saved_timestamp DESC", "reply_post_id=? AND reply_type=?", Long.toString(pmId), Long.toString(TYPE_PM));
    }
@Override
public void onCancel(DialogInterface dialog) {
if(this.dialog == dialog){
this.dialog = null;
}
}
    /** TextWatcher: unused; only afterTextChanged matters here. */
    @Override
    public void beforeTextChanged(CharSequence s, int start, int count, int after) {
    }
    /** TextWatcher: unused; only afterTextChanged matters here. */
    @Override
    public void onTextChanged(CharSequence s, int start, int before, int count) {
    }
    /** Re-evaluates the options menu as the user types — presumably to toggle
     *  send/preview availability; confirm in onPrepareOptionsMenu. */
    @Override
    public void afterTextChanged(Editable s) {
        invalidateOptionsMenu();
    }
    /** Static screen title for this fragment. */
    @Override
    public CharSequence getTitle() {
        return "Reply";
    }
}
| |
/*
* Copyright (C) 2014 iWedia S.A. Licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.iwedia.activities;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.SharedPreferences;
import android.graphics.PixelFormat;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
import com.iwedia.dtv.DVBManager;
import com.iwedia.dtv.DVBManager.DVBStatus;
import com.iwedia.dtv.IPService;
import com.iwedia.dtv.types.InternalException;
import com.iwedia.epg.R;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
/**
 * Parent class off all activities. This class contains connection to dtv
 * service through dtv manager object, applies the shared full-screen window
 * setup, and seeds the IP channel configuration file into app storage.
 */
public abstract class DTVActivity extends FragmentActivity {
    public static final String TAG = "DTVExample-Channel Zapp";
    public static final String FINISH_ACTIVITIES_MESSAGE = "activity_finish";
    public static final String EXTERNAL_MEDIA_PATH = "/mnt/media/";
    public static final String IP_CHANNELS = "ip_service_list.txt";
    protected static DTVActivity sInstance = null;
    /** List of IP channels. */
    public static ArrayList<IPService> sIpChannels = null;
    /** DVB manager instance; stays null if service initialization failed. */
    protected DVBManager mDVBManager = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        sInstance = this;
        /** Set Full Screen Application. */
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().clearFlags(
                WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
        getWindow().setFormat(PixelFormat.RGBA_8888);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_DITHER);
        getWindow().getDecorView().getBackground().setDither(true);
        /** Creates DTV manager object and connects it to service. */
        try {
            mDVBManager = DVBManager.getInstance();
        } catch (InternalException e) {
            Log.e(TAG, "There was an error in initializing DVB Manager", e);
            // finish() only schedules destruction — return explicitly so we
            // don't NullPointerException on the null mDVBManager below.
            finish();
            return;
        }
        mDVBManager.setDVBStatus(mDvbStatusCallBack);
        initializeIpChannels();
    }

    /** Shows a connection-error toast and closes this activity. */
    public void finishActivity() {
        Toast.makeText(this,
                "Error with connection happened, closing application...",
                Toast.LENGTH_LONG).show();
        super.finish();
    }

    /**
     * Initialize IP: make sure the bundled channel list file exists in the
     * application data directory.
     */
    private void initializeIpChannels() {
        copyFile(IP_CHANNELS);
    }

    /**
     * Copy configuration file from assets into app storage if it is not
     * already present.
     *
     * @param filename name of the asset (and destination) file
     */
    private void copyFile(String filename) {
        ContextWrapper contextWrapper = new ContextWrapper(this);
        String file = contextWrapper.getFilesDir().getPath() + "/" + filename;
        File fl = new File(file);
        if (!fl.exists()) {
            copyAssetToData(fl);
        }
    }

    /**
     * Copy configuration file from assets to data folder. Streams are closed
     * in finally so a failed copy cannot leak file descriptors.
     *
     * @param file destination file; its name doubles as the asset name
     */
    private void copyAssetToData(File file) {
        InputStream myInput = null;
        OutputStream myOutput = null;
        try {
            /** Open the asset as the input stream. */
            myInput = getAssets().open(file.getName());
            /** Open the empty destination file as the output stream. */
            myOutput = new FileOutputStream(file.getPath());
            /** Transfer bytes from the inputfile to the outputfile. */
            byte[] buffer = new byte[1024];
            int length;
            while ((length = myInput.read(buffer)) > 0) {
                myOutput.write(buffer, 0, length);
            }
            myOutput.flush();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (myOutput != null) {
                try {
                    myOutput.close();
                } catch (IOException ignored) {
                    // best effort close
                }
            }
            if (myInput != null) {
                try {
                    myInput.close();
                } catch (IOException ignored) {
                    // best effort close
                }
            }
        }
    }

    /**
     * Read the configuration file with built-in application which will be
     * displayed in Content list. Each line has the form "name#url"; malformed
     * lines are skipped instead of crashing, and the reader is always closed.
     *
     * @param ctx unused; kept for signature compatibility with callers
     * @param filePath absolute path of the file to read
     * @param arrayList destination list for the parsed services
     */
    public static void readFile(Context ctx, String filePath,
            ArrayList<IPService> arrayList) {
        File file = new File(filePath);
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            String line;
            while ((line = br.readLine()) != null) {
                String[] separated = line.split("#");
                if (separated.length >= 2) {
                    arrayList.add(new IPService(separated[0], separated[1]));
                } else {
                    // previously this would throw ArrayIndexOutOfBoundsException
                    Log.w(TAG, "Skipping malformed IP service line: " + line);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // close even if readLine() failed midway (old code leaked here)
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignored) {
                }
            }
        }
    }

    /**
     * Scan every storage mounted under EXTERNAL_MEDIA_PATH for files named
     * IP_CHANNELS and load all of them into the given list; warns via toast
     * when none are found.
     *
     * @param ipChannels destination list for the parsed services
     */
    public void loadIPChannelsFromExternalStorage(
            ArrayList<IPService> ipChannels) {
        ArrayList<File> ipServiceListFiles = new ArrayList<File>();
        File[] storages = new File(EXTERNAL_MEDIA_PATH).listFiles();
        if (storages != null) {
            /** Loop through storages. */
            for (File storage : storages) {
                File[] foundIpFiles = storage.listFiles(new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                        return pathname.getName().equalsIgnoreCase(IP_CHANNELS);
                    }
                });
                /** Files with given name are found in this array. */
                if (foundIpFiles != null) {
                    for (File ip : foundIpFiles) {
                        ipServiceListFiles.add(ip);
                    }
                }
            }
            /** Loop through found files and add it to IP service list. */
            for (File ipFile : ipServiceListFiles) {
                readFile(this, ipFile.getPath(), ipChannels);
            }
            /** No files found. */
            if (ipServiceListFiles.size() == 0) {
                Toast.makeText(this,
                        "No files found with name: " + IP_CHANNELS,
                        Toast.LENGTH_LONG).show();
            }
        }
    }

    /**
     * DVB CallBack: notifies the user when the current channel is scrambled.
     */
    private DVBStatus mDvbStatusCallBack = new DVBStatus() {
        @Override
        public void channelIsScrambled() {
            Toast.makeText(getApplicationContext(), R.string.scrambled,
                    Toast.LENGTH_SHORT).show();
        }
    };
}
| |
package com.planet_ink.coffee_web.server;
import java.util.*;
import java.util.concurrent.*;
import java.util.logging.Level;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.*;
import java.nio.channels.*;
import javax.net.ssl.SSLContext;
import com.planet_ink.coffee_web.interfaces.HTTPIOHandler;
import com.planet_ink.coffee_web.interfaces.MimeConverterManager;
import com.planet_ink.coffee_web.http.HTTPHeader;
import com.planet_ink.coffee_web.http.HTTPReader;
import com.planet_ink.coffee_web.http.HTTPReqProcessor;
import com.planet_ink.coffee_web.http.HTTPSReader;
import com.planet_ink.coffee_web.http.FileCache;
import com.planet_ink.coffee_web.http.MimeConverter;
import com.planet_ink.coffee_web.http.ServletManager;
import com.planet_ink.coffee_web.http.SessionManager;
import com.planet_ink.coffee_mud.core.Log;
import com.planet_ink.coffee_web.util.RunWrap;
import com.planet_ink.coffee_web.util.CWThread;
import com.planet_ink.coffee_web.util.CWThreadExecutor;
import com.planet_ink.coffee_web.util.CWConfig;
/*
Copyright 2012-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* Both the main() kickoff for the coffeewebserver AND the main thread class for the same.
* This class handles all the socket listening and request management/timeout.
* When requests are received, they are passed off to an HTTPReader for processing,
* though a separate thread may timeout those readers at its discretion and shut
* them down.
* @author Bo Zimmerman
*
*/
public class WebServer extends Thread
{
	public static final String NAME = "CoffeeWebServer";
	public static final String POMVERSION = "2.4";
	public static double VERSION;
	// parse the numeric version once at class load; fall back to 0.0 on failure
	static
	{
		try
		{
			VERSION = Double.parseDouble(POMVERSION);
		}
		catch (final Exception e)
		{
			VERSION = 0.0;
		}
	}
	private volatile boolean shutdownRequested = false;// notice of external shutdown request
	private volatile String lastErrorMsg = ""; // spam prevention for error reporting
	private Selector servSelector = null; // server io selector
	private final CWThreadExecutor executor; // request handler thread pool
	private Thread timeoutThread = null; // thread to timeout connected but idle channels
	private final CWConfig config; // server configuration (ports, pools, managers)
	private SSLContext sslContext; // shared context for all https listeners; null when no https ports
	private final String serverName; // a friendly name for this server instance
	private final LinkedList<HTTPIOHandler> handlers; // list of connected channels.. managed by timeoutthread
	private final Map<ServerSocketChannel, Boolean> servChannels; // map of socket channels to an SSL boolean
	private final LinkedList<Runnable> registerOps; // deferred selector-registration ops, run on the selector thread
	/**
	 * Constructs a web server with the given friendly name and configuration.
	 * Builds the request-handler thread pool and, when https ports are
	 * configured, a shared SSLContext. No sockets are opened until run().
	 * @param serverName a friendly name for this server instance
	 * @param config the loaded server configuration
	 */
	public WebServer(final String serverName, final CWConfig config)
	{
		super("cweb-"+serverName);
		this.config=config;
		this.serverName=serverName;
		// setup the thread pool
		handlers = new LinkedList<HTTPIOHandler>();
		executor = new CWThreadExecutor(serverName,
										config,
										config.getCoreThreadPoolSize(), config.getMaxThreadPoolSize(), config.getMaxThreadIdleMs(),
										TimeUnit.MILLISECONDS, config.getMaxThreadTimeoutSecs(), config.getMaxThreadQueueSize());
		servChannels = new HashMap<ServerSocketChannel, Boolean>();
		registerOps = new LinkedList<Runnable>();
		setDaemon(true);
		// if we are going to be listening on ssl, generate a global ssl context to use
		if((config.getHttpsListenPorts()==null)
		||(config.getHttpsListenPorts().length==0))
			sslContext=null;
		else
			sslContext=HTTPSReader.generateNewContext(config);
	}
/**
* Returns the version of this web server
* @return the version
*/
public final String getVersion()
{
return Double.toString(VERSION);
}
/**
* Return list of threads that have timed out according to server settings.
* @return a collection of runnables
*/
public List<Runnable> getOverdueThreads()
{
final Collection<RunWrap> wraps=executor.getTimeoutOutRuns(Integer.MAX_VALUE);
final Vector<Runnable> overDueV=new Vector<Runnable>(wraps.size());
for(final RunWrap wrap : wraps)
overDueV.add(wrap.getRunnable());
return overDueV;
}
	/**
	 * Open a single web server listening socket on the given port and
	 * register it with the accept selector. The channel is remembered in
	 * servChannels along with whether it serves ssl.
	 * @param listenPort the port to listen on
	 * @param isSSL TRUE when this listener should speak https
	 * @throws IOException if the port cannot be bound
	 */
	private void openChannel(final int listenPort, final Boolean isSSL) throws IOException
	{
		final ServerSocketChannel servChan = ServerSocketChannel.open();
		final ServerSocket serverSocket = servChan.socket();
		serverSocket.bind (new InetSocketAddress (listenPort));
		servChan.configureBlocking (false);
		servChan.register (servSelector, SelectionKey.OP_ACCEPT);
		servChannels.put(servChan, isSSL);
		config.getLogger().info("Started "+(isSSL.booleanValue()?"ssl ":"http ")+serverName+" on port "+listenPort);
	}
	/**
	 * Open the main web server listening sockets (http, then https if an ssl
	 * context exists) and register a selector for accepting connections.
	 * Individual port failures are logged and tolerated; only if NO port
	 * opened at all is the last failure rethrown.
	 * @throws IOException if no port could be opened
	 */
	private void openChannels() throws IOException
	{
		servSelector = Selector.open();
		boolean portOpen=false;
		IOException lastException=null;
		for(final int listenPort : config.getHttpListenPorts())
		{
			try
			{
				openChannel(listenPort, Boolean.FALSE);
				portOpen=true;
			}
			catch(final IOException e)
			{
				lastException=e;
				config.getLogger().severe("Port "+listenPort+": "+e.getMessage());
			}
		}
		if(sslContext != null)
		{
			for(final int listenPort : config.getHttpsListenPorts())
			{
				try
				{
					openChannel(listenPort, Boolean.TRUE);
					portOpen=true;
				}
				catch(final IOException e)
				{
					lastException=e;
					config.getLogger().severe("Port "+listenPort+": "+e.getMessage());
				}
			}
		}
		// fatal only when every single port failed
		if((!portOpen)&&(lastException!=null))
			throw lastException;
	}
/**
* Initialize the timeoutthread. It wakes up every second to look for timed out connections
* It does this by calling a local final method to scan the open connections.
* TODO: an executor thread pool that allows both scheduled and timeoutable entries would
* have allowed us to run this process on the pool. Since that's not supported atm...
*/
private void startTimeoutThread()
{
timeoutThread=new Thread(Thread.currentThread().getThreadGroup(),getName()+"Timeout")
{
@Override
public void run()
{
try
{
config.getLogger().finer("Timeout Thread started");
while(!shutdownRequested)
{
Thread.sleep(1000);
try
{
timeOutStrayHandlers();
config.getSessions().cleanUpSessions();
}
catch(final Exception e)
{
if(!lastErrorMsg.equals(e.toString()) && (e.toString()!=null))
{
config.getLogger().log(Level.SEVERE, e.toString(), e);
lastErrorMsg = e.toString();
}
else
config.getLogger().severe(e.toString());
}
}
}
catch (final InterruptedException e)
{
}
finally
{
config.getLogger().info( "Timeout Thread shutdown");
}
}
};
timeoutThread.start();
}
	/**
	 * Handles a particular channel event from its given selectionkey.
	 * So far, only accepted connections and readable/writable keys are managed here:
	 * accepts create a new (ssl or plain) reader and register it for reads;
	 * read/write-ready keys dispatch their handler onto the executor pool.
	 * @param key the channel event key
	 * @throws IOException
	 */
	private void handleSelectionKey(final SelectionKey key) throws IOException
	{
		if (key.isAcceptable()) // a connection was made, so add the handler
		{
			final ServerSocketChannel server = (ServerSocketChannel) key.channel();
			final SocketChannel channel = server.accept();
			if (channel != null)
			{
				HTTPIOHandler handler;
				// the listening channel's map entry says whether this port speaks ssl
				if(servChannels.get(server).booleanValue())
					handler=new HTTPSReader(this, channel, sslContext);
				else
					handler=new HTTPReader(this, channel);
				channel.configureBlocking (false);
				channel.register (servSelector, SelectionKey.OP_READ, handler);
				synchronized(handlers) // synched because you can't iterate and modify, and because its a linkedlist
				{
					handlers.add(handler);
				}
			}
		}
		if(key.isReadable() // bytes were received on one of the channel .. so read!
		|| (((key.interestOps() & SelectionKey.OP_WRITE)==SelectionKey.OP_WRITE) && key.isWritable()))
		{
			final HTTPIOHandler handler = (HTTPIOHandler)key.attachment();
			//config.getLogger().finer("Read/Write: "+handler.getName());
			try
			{
				if(!handler.isCloseable())
				{
					if(key.isValid())
					{
						try
						{
							// clear write interest before handing off; the handler re-arms it if needed
							key.interestOps(key.interestOps() & ~SelectionKey.OP_WRITE);
							executor.execute(handler);
						}
						catch(final CancelledKeyException x)
						{
							// key died between the checks above; drop the handler
							synchronized(handlers) // synched because you can't iterate and modify, and because its a linkedlist
							{
								handlers.remove(handler);
							}
						}
					}
				}
				else
				{
					// handler says it's done; deregister and forget it
					key.cancel();
					synchronized(handlers) // synched because you can't iterate and modify, and because its a linkedlist
					{
						handlers.remove(handler);
					}
				}
			}
			catch(final Exception e)
			{
				config.getLogger().log(Level.SEVERE, e.getMessage(), e);
			}
		}
		else
		if(key.attachment() instanceof HTTPIOHandler)
		{
			final HTTPIOHandler handler = (HTTPIOHandler)key.attachment();
			config.getLogger().finer("Rejected handler key for "+handler.getName());
		}
	}
	/**
	 * Scan the list of active channel connections for any that are timed out,
	 * or otherwise need to be removed from this list. If found, do so, and
	 * queue up for actual closing (if necc). After the handlers are all
	 * scanned, then close any that need closing — the actual closeAndWait()
	 * calls happen OUTSIDE the handlers lock to avoid blocking the selector.
	 */
	private final void timeOutStrayHandlers()
	{
		List<HTTPIOHandler> handlersToShutDown = null;
		synchronized(handlers)
		{
			// remove any stray handlers from time to time
			if(handlers.size() == 0)
				return;
			final Iterator<HTTPIOHandler> i;
			try
			{
				i=handlers.iterator();
			}
			catch(final java.lang.IndexOutOfBoundsException x)
			{
				// defensive: if the list is corrupted, reset it and surface the error
				handlers.clear();
				throw x;
			}
			for(; i.hasNext(); )
			{
				try
				{
					final HTTPIOHandler handler=i.next();
					if(handler.isCloseable())
					{
						if(handlersToShutDown == null)
						{
							handlersToShutDown = new LinkedList<HTTPIOHandler>();
						}
						handlersToShutDown.add(handler);
						i.remove();
					}
				}
				catch(final NullPointerException e)
				{
					// a null slot slipped in; best-effort removal and keep scanning
					try
					{
						i.remove();
					}
					catch (final Exception xe)
					{
					}
				}
			}
		}
		if(handlersToShutDown != null)
		{
			for(final HTTPIOHandler handler : handlersToShutDown)
			{
				handler.closeAndWait();
			}
		}
	}
	/**
	 * The main web server loop.
	 * It blocks on its selector waiting for either accepted connections,
	 * or data to be read, which it then farms out to another thread.
	 * Deferred registration ops (queued by other threads) are drained after
	 * every wakeup. This repeats until something external requests shutdown.
	 */
	@Override
	public void run()
	{
		try
		{
			openChannels(); // open the socket channel
			startTimeoutThread(); // start the channel timeout thread
		}
		catch(final IOException e)
		{
			config.getLogger().throwing("", "", e); // this is also fatal
			close();
			return;
		}
		while (!shutdownRequested)
		{
			try
			{
				final int n = servSelector.select();
				// run registrations queued by registerNewHandler/registerChannelInterest;
				// they must execute on this (the selector's) thread
				synchronized(registerOps)
				{
					while(!registerOps.isEmpty())
					{
						final Runnable registerOp=registerOps.removeFirst();
						registerOp.run();
					}
				}
				if (n == 0)
				{
					continue;
				}
				final Iterator<SelectionKey> it = servSelector.selectedKeys().iterator();
				while (it.hasNext())
				{
					final SelectionKey key = it.next();
					try
					{
						handleSelectionKey(key);
					}
					finally
					{
						// always clear the key from the selected set, even on error
						it.remove();
					}
				}
			}
			catch(final CancelledKeyException t)
			{
				// ignore
			}
			catch(final IOException e)
			{
				config.getLogger().severe(e.getMessage());
			}
			catch(final Exception e)
			{
				config.getLogger().throwing("","",e);
			}
		}
		close();
		config.getLogger().info("Shutdown complete");
	}
	/**
	 * Called either internally, or can be called externally to shutdown this
	 * server instance. Closes the selector, all listening channels, then the
	 * thread pool, the timeout thread, and finally every connected handler.
	 */
	public void close()
	{
		shutdownRequested=true;
		executor.shutdown();
		try
		{
			servSelector.close();
		}
		catch (final Exception e)
		{
		} // ugh, why can't there be an "i don't care" exception syntax in java
		for(final ServerSocketChannel servChan : servChannels.keySet())
		{
			try
			{
				servChan.close();
			}
			catch (final Exception e)
			{
			}
		}
		// give in-flight requests up to 10s to drain before forcing it
		if(!executor.isShutdown())
		{
			try
			{
				executor.awaitTermination(10, TimeUnit.SECONDS);
			}
			catch (final InterruptedException e)
			{
				executor.shutdownNow();
			}
		}
		if(timeoutThread != null)
		{
			timeoutThread.interrupt();
		}
		synchronized(handlers)
		{
			for(final HTTPIOHandler handler : handlers)
			{
				try
				{
					handler.closeAndWait();
				}
				catch (final Exception e)
				{
				}
			}
			handlers.clear();
		}
	}
	/**
	 * Enqueue a new socket channel to be registered for read notifications.
	 * Does not do the action at once, but will, soon — the op is queued and
	 * executed on the selector thread (see run()), since registering against
	 * a selector from another thread can block indefinitely.
	 * @param channel the socket channel to register
	 * @param handler the handler to handle it.
	 */
	public void registerNewHandler(final SocketChannel channel, final HTTPIOHandler handler)
	{
		synchronized(this.registerOps)
		{
			final Selector servSelector=this.servSelector;
			this.registerOps.add(new Runnable()
			{
				@Override
				public void run()
				{
					try
					{
						channel.configureBlocking (false);
						channel.register (servSelector, SelectionKey.OP_READ, handler);
						synchronized(handlers) // synched because you can't iterate and modify, and because its a linkedlist
						{
							handlers.add(handler);
						}
					}
					catch (final Exception e)
					{
						config.getLogger().throwing("", "", e);
					}
				}
			});
			// kick the selector out of select() so it drains the op queue
			servSelector.wakeup();
		}
	}
	/**
	 * Enqueue a change of interest-ops for an already-registered channel.
	 * Does not do the action at once, but will, soon — the op runs on the
	 * selector thread; channels with no key for this selector are ignored.
	 * @param channel the socket channel to register
	 * @param newOp the new operations for this channel
	 */
	public void registerChannelInterest(final SocketChannel channel, final int newOp)
	{
		synchronized(this.registerOps)
		{
			final Selector servSelector=this.servSelector;
			this.registerOps.add(new Runnable()
			{
				@Override
				public void run()
				{
					final SelectionKey key = channel.keyFor(servSelector);
					if(key != null)
					{
						key.interestOps(newOp);
					}
				}
			});
			// wake the selector so the queued op is processed promptly
			servSelector.wakeup();
		}
	}
/**
* Return the configuration for this web server instance
* @return the config
*/
public CWConfig getConfig()
{
return config;
}
/**
* Create, Initialize, load, and create a web server configuration based around the given
* ini filename and the given java logger.
* @param log the java logger to use
* @param iniInputStream the ini data to load further settings from
* @return a populated configuration object to create a server from
*/
public static CWConfig createConfig(final java.util.logging.Logger log, final InputStream iniInputStream) throws IOException
{
final CWConfig config=new CWConfig();
return initConfig(config,log,iniInputStream);
}
/**
* Initialize, load, and create a web server configuration based around the given
* ini filename and the given java logger.
* @param log the java logger to use
* @param iniInputStream the ini data to load further settings from
* @return a populated configuration object to create a server from
*/
public static CWConfig initConfig(final CWConfig config, final java.util.logging.Logger log, final InputStream iniInputStream) throws IOException
{
config.setLogger(log);
final Properties props=new Properties();
props.load(iniInputStream);
config.load(props);
final ServletManager servletsManager = new ServletManager(config);
final SessionManager sessionsManager = new SessionManager(config);
final FileCache fileCacheManager = new FileCache(config,config.getFileManager());
final MimeConverterManager mimeConverterManager = new MimeConverter(config);
final HTTPReqProcessor fileGetter = new HTTPReqProcessor(config);
config.setSessions(sessionsManager);
config.setServletMan(servletsManager);
config.setFileCache(fileCacheManager);
config.setConverters(mimeConverterManager);
config.setFileGetter(fileGetter);
HTTPHeader.Common.setKeepAliveHeader(HTTPHeader.Common.KEEP_ALIVE.makeLine(
String.format(HTTPHeader.Common.KEEP_ALIVE_FMT,
Integer.valueOf((int)(config.getRequestMaxIdleMs()/1000)),
Integer.valueOf(config.getRequestMaxPerConn()))));
return config;
}
/**
* Good olde main. It does nothing but initialize logging, spawn a new web server
* and then join its thread until it is gone. I suppose I could just create the
* web server and call run() on it, but somehow this feels better.
* @param args As no external configuration is permitted, no args are accepted
*/
public static void main(final String[] args)
{
Log.instance().configureLogFile("web", 2);
String debug="OFF";
String iniFilename="coffeeweb.ini";
for(final String arg : args)
{
if(arg.startsWith("BOOT="))
iniFilename=arg.substring(5);
}
CWConfig config;
try
{
config=WebServer.createConfig(Log.instance(), new FileInputStream(iniFilename));
}
catch (final Exception e)
{
e.printStackTrace();
System.exit(-1);
return; // an unhit operation, but my ide is argueing with me over it.
}
debug=config.getDebugFlag();
for(final String arg : args)
{
if(arg.equalsIgnoreCase("DEBUG"))
{
debug="BOTH";
config.setDebugFlag(debug);
}
}
Log.instance().configureLog(Log.Type.info, "BOTH");
Log.instance().configureLog(Log.Type.error, "BOTH");
Log.instance().configureLog(Log.Type.warning, "BOTH");
Log.instance().configureLog(Log.Type.debug, debug);
Log.instance().configureLog(Log.Type.access, config.getAccessLogFlag());
config.getLogger().info("Starting "+NAME+" "+VERSION);
final WebServer server = new WebServer("server", config);
config.setCoffeeWebServer(server);
final Thread t = new CWThread(config, server, NAME);
t.start();
try
{
t.join();
}
catch(final InterruptedException e)
{
e.printStackTrace(System.err);
}
}
}
| |
/**
* The MIT License
* Copyright (c) 2003 David G Jones
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package info.dgjones.abora.white.collection.tables;
import info.dgjones.abora.white.collection.steppers.ArrayAccumulator;
import info.dgjones.abora.white.collection.steppers.Stepper;
import info.dgjones.abora.white.collection.steppers.TableAccumulator;
import info.dgjones.abora.white.collection.steppers.TableStepper;
import info.dgjones.abora.white.exception.AboraRuntimeException;
import info.dgjones.abora.white.rcvr.Rcvr;
import info.dgjones.abora.white.spaces.basic.CoordinateSpace;
import info.dgjones.abora.white.spaces.basic.Dsp;
import info.dgjones.abora.white.spaces.basic.OrderSpec;
import info.dgjones.abora.white.spaces.basic.Position;
import info.dgjones.abora.white.spaces.basic.XnRegion;
import info.dgjones.abora.white.spaces.integers.IntegerPos;
import info.dgjones.abora.white.spaces.integers.IntegerSpace;
import info.dgjones.abora.white.value.IntegerValue;
import info.dgjones.abora.white.xpp.basic.Heaper;
/**
* The class XuArray is intended to model zero-based arrays with integer keys (indices).
* This makes them like the array primitive in C and C++. There is an additional constraint,
* which is they are to have simple domains. Therefore they should not be constructed with
* non-contiguous sections. This is not currently enforced. Given that it is enforced, an
* XuArray with count N would have as its domain exactly the integers from 0 to N-1.
* <p>
 * There is some controversy over whether XuArray should be a type and enforce this constraint
* (by BLASTing if an attempt is made to violate the constraint), or whether XuArray is just
* a specialized implementation for when an IntegerTable happens to meet this constraint; in
* which case it should "become" a more general implementation when an attempt is made to
* violate the constraint (see "Type Safe Become"). In the latter case, XuArray will
* probably be made a private class as well. Please give us your opinion.
* XuArray provides no additional protocol.
*/
public abstract class MuArray extends IntegerTable {
	// Smalltalk heritage: signal for violations of the contiguous-domain
	// constraint; not carried over / not enforced in this Java translation.
	// protected static Signal MustBeContiguousDomainSignal;
	/*
	udanax-top.st:49108:
	IntegerTable subclass: #MuArray
		instanceVariableNames: ''
		classVariableNames: 'MustBeContiguousDomainSignal {Signal smalltalk} '
		poolDictionaries: ''
		category: 'Xanadu-Collection-Tables'!
	*/
	/*
	udanax-top.st:49112:
	MuArray comment:
	'The class XuArray is intended to model zero-based arrays with integer keys (indices).
	This makes them like the array primitive in C and C++.  There is an additional constraint, which is they are to have simple domains.  Therefore they should not be constructed with non-contiguous sections.  This is not currently enforced.  Given that it is enforced, an XuArray with count N would have as its domain exactly the integers from 0 to N-1.
	There is some controversy over whether XuArray should be a type and enforce this contraint (by BLASTing if an attempt is made to violate the constraint), or whether XuArray is just a specialized implementation for when an IntegerTable happens to meet this constraint; in which case it should "become" a more general implementation when an attempt is made to violate the constraint (see "Type Safe Become").  In the latter case, XuArray will probably be made a private class as well.  Please give us your opinion.
	XuArray provides no additional protocol.'!
	*/
	/*
	udanax-top.st:49120:
	(MuArray getOrMakeCxxClassDescription)
		attributes: ((Set new) add: #DEFERRED; yourself)!
	*/
	/*
	udanax-top.st:49243:
	MuArray class
		instanceVariableNames: ''!
	*/
	/*
	udanax-top.st:49246:
	(MuArray getOrMakeCxxClassDescription)
		attributes: ((Set new) add: #DEFERRED; yourself)!
	*/

	/////////////////////////////////////////////
	// Constructors

	protected MuArray() {
		super();
	}

	// Reconstruction from a receiver stream (translation of Smalltalk rcvr protocol).
	protected MuArray(Rcvr rcvr) {
		super(rcvr);
	}

	/////////////////////////////////////////////
	// Static Factory Methods

	/**
	 * A new empty XnArray
	 */
	public static MuArray array() {
		// the argument to make() is only a size *hint* for the backing store
		return (MuArray) MuArray.make(IntegerValue.one());
		/*
		udanax-top.st:49251:MuArray class methodsFor: 'creation'!
		{MuArray INLINE} array
			"A new empty XnArray"
			^MuArray make.IntegerVar: 1!
		*/
	}

	/**
	 * A new XnArray initialized with a single element, 'obj0', stored at index 0.
	 */
	public static MuArray array(Heaper obj0) {
		MuArray table = (MuArray) MuArray.make(IntegerValue.one());
		table.atIntStore(IntegerValue.zero(), obj0);
		return table;
		/*
		udanax-top.st:49256:MuArray class methodsFor: 'creation'!
		{MuArray} array: obj0 {Heaper}
			"A new XnArray initialized with a single element, 'obj0', stored at index 0."
			| table {MuArray} |
			table _ MuArray make.IntegerVar: 1.
			table atInt: IntegerVar0 store: obj0.
			^table!
		*/
	}

	/**
	 * A new XnArray initialized with a two elements stored at indicies 0 and 1.
	 */
	public static MuArray array(Heaper obj0, Heaper obj1) {
		MuArray table = (MuArray) MuArray.make(IntegerValue.make(2));
		table.atIntStore(IntegerValue.zero(), obj0);
		table.atIntStore(IntegerValue.one(), obj1);
		return table;
		/*
		udanax-top.st:49264:MuArray class methodsFor: 'creation'!
		{MuArray} array: obj0 {Heaper} with: obj1 {Heaper}
			"A new XnArray initialized with a two elements stored at indicies 0 and 1."
			| table {MuArray} |
			table _ MuArray make.IntegerVar: 2.
			table atInt: IntegerVar0 store: obj0.
			table atInt: 1 store: obj1.
			^table!
		*/
	}

	/**
	 * A new XuArray initialized with a three elements stored at indicies 0, 1, and 2.
	 */
	public static MuArray array(Heaper obj0, Heaper obj1, Heaper obj2) {
		MuArray table = (MuArray) MuArray.make(IntegerValue.make(3));
		table.atIntStore(IntegerValue.zero(), obj0);
		table.atIntStore(IntegerValue.one(), obj1);
		table.atIntStore(IntegerValue.make(2), obj2);
		return table;
		/*
		udanax-top.st:49273:MuArray class methodsFor: 'creation'!
		{MuArray} array: obj0 {Heaper} with: obj1 {Heaper} with: obj2 {Heaper}
			"A new XuArray initialized with a three elements stored at indicies 0, 1, and 2."
			| table {MuArray} |
			table _ MuArray make.IntegerVar: 3.
			table atInt: IntegerVar0 store: obj0.
			table atInt: 1 store: obj1.
			table atInt: 2 store: obj2.
			^table!
		*/
	}

	/**
	 * A new XuArray initialized with a four elements stored at indicies 0 through 3.
	 */
	public static MuArray array(Heaper obj0, Heaper obj1, Heaper obj2, Heaper obj3) {
		MuArray table = (MuArray) MuArray.make(IntegerValue.make(4));
		table.atIntStore(IntegerValue.zero(), obj0);
		table.atIntStore(IntegerValue.one(), obj1);
		table.atIntStore(IntegerValue.make(2), obj2);
		table.atIntStore(IntegerValue.make(3), obj3);
		return table;
		/*
		udanax-top.st:49283:MuArray class methodsFor: 'creation'!
		{MuArray} array: obj0 {Heaper}
			with: obj1 {Heaper}
			with: obj2 {Heaper}
			with: obj3 {Heaper}
			"A new XuArray initialized with a four elements stored at indicies 0 through 3."
			| table {MuArray} |
			table _ MuArray make.IntegerVar: 4.
			table atInt: IntegerVar0 store: obj0.
			table atInt: 1 store: obj1.
			table atInt: 2 store: obj2.
			table atInt: 3 store: obj3.
			^table!
		*/
	}

	/**
	 * Returns an Accumulator which will produce an XuArray of the elements
	 * accumulated into it in order of accumulation. See XuArray. Equivalent to
	 * 'tableAccumulator()'. Eventually either he or I should be declared obsolete.
	 */
	public static TableAccumulator arrayAccumulator() {
		return ArrayAccumulator.make(MuArray.array());
		/*
		udanax-top.st:49297:MuArray class methodsFor: 'creation'!
		{TableAccumulator} arrayAccumulator
			"Returns an Accumulator which will produce an XuArray of the elements
			accumulated into it in order of accumulation.  See XuArray.  Equivalent to
			'tableAccumulator()'.  Eventually either he or I should be declared obsolete."
			^ ArrayAccumulator make: MuArray array!
		*/
	}

	/**
	 * An accumulator which will accumulate by appending elements onto the end of
	 * 'onArray'. It is an error for anyone else to modify 'onArray' between creating
	 * this accumulator and accumulating into it. acc->value() will return 'onArray'
	 * itself.
	 */
	public static TableAccumulator arrayAccumulator(MuArray onArray) {
		return ArrayAccumulator.make(onArray);
		/*
		udanax-top.st:49304:MuArray class methodsFor: 'creation'!
		{TableAccumulator} arrayAccumulator: onArray {MuArray}
			"An accumulator which will accumulate by appending elements onto the end of
			'onArray'.  It is an error for anyone else to modify 'onArray' between creating
			this accumulator and accumulating into it.  acc->value() will return 'onArray'
			itself."
			^ArrayAccumulator make: onArray!
		*/
	}

	/**
	 * 'someSize' is a hint about how big we should expect the array to need to grow.
	 */
	public static IntegerTable make(IntegerValue someSize) {
		// ActualArray is the sole concrete implementation used here
		return new ActualArray(someSize);
		/*
		udanax-top.st:49312:MuArray class methodsFor: 'creation'!
		make.IntegerVar: someSize {IntegerVar}
			"'someSize' is a hint about how big we should expect the array to need to grow."
			^ActualArray create.IntegerVar: someSize!
		*/
	}

	/**
	 * The resulting ScruTable is a view onto 'array'. It is a view in which each key
	 * is offset by 'dsp' from where it is in 'array'. By saying it is a view, we mean
	 * that as 'array' is modified, the view tracks the changes.
	 */
	public static ScruTable offsetScruArray(MuArray array, Dsp dsp) {
		return OffsetScruArray.make(array, dsp);
		/*
		udanax-top.st:49317:MuArray class methodsFor: 'creation'!
		{ScruTable} offsetScruArray: array {MuArray} with: dsp {Dsp}
			"The resulting ScruTable is a view onto 'array'.  It is a view in which each key
			is offset by 'dsp' from where it is in 'array'.  By saying it is a view, we mean
			that as 'array' is modified, the view tracks the changes."
			^OffsetScruArray make: array with: dsp!
		*/
	}

	/////////////////////////////////////////////
	// Accessing

	// Store 'value' at integer index 'key'; subclass responsibility.
	public abstract Heaper atIntStore(IntegerValue key, Heaper value);
	/*
	udanax-top.st:49125:MuArray methodsFor: 'accessing'!
	{Heaper} atInt: key {IntegerVar} store: value {Heaper}
		self subclassResponsibility!
	*/

	public abstract CoordinateSpace coordinateSpace();
	/*
	udanax-top.st:49129:MuArray methodsFor: 'accessing'!
	{CoordinateSpace} coordinateSpace
		^ IntegerSpace make!
	*/

	public abstract IntegerValue count();
	/*
	udanax-top.st:49133:MuArray methodsFor: 'accessing'!
	{IntegerVar} count
		self subclassResponsibility.!
	*/

	public abstract XnRegion domain();
	/*
	udanax-top.st:49137:MuArray methodsFor: 'accessing'!
	{XnRegion} domain
		self subclassResponsibility.!
	*/

	public abstract IntegerValue highestIndex();
	/*
	udanax-top.st:49141:MuArray methodsFor: 'accessing'!
	{IntegerVar} highestIndex
		self subclassResponsibility!
	*/

	// Fetch the element at 'key', or null when absent; subclass responsibility.
	public abstract Heaper intFetch(IntegerValue key);
	/*
	udanax-top.st:49144:MuArray methodsFor: 'accessing'!
	{Heaper} intFetch: key {IntegerVar}
		self subclassResponsibility!
	*/

	public abstract boolean intWipe(IntegerValue anIdx);
	/*
	udanax-top.st:49147:MuArray methodsFor: 'accessing'!
	{BooleanVar} intWipe: anIdx {IntegerVar}
		self subclassResponsibility!
	*/

	public abstract IntegerValue lowestIndex();
	/*
	udanax-top.st:49150:MuArray methodsFor: 'accessing'!
	{IntegerVar} lowestIndex
		self subclassResponsibility!
	*/

	/**
	 * Return a table which contains the elements from start to stop, starting at firstIndex.
	 * Zero-based subclasses will blast if firstIndex is non-zero
	 */
	public ScruTable offsetSubTableBetween(IntegerValue startIndex, IntegerValue stopIndex, IntegerValue firstIndex) {
		// firstIndex is deliberately unused here (see the Smalltalk 'unused' tag)
		return subTableBetween(startIndex, stopIndex);
		/*
		udanax-top.st:49153:MuArray methodsFor: 'accessing'!
		{ScruTable} offsetSubTableBetween: startIndex {IntegerVar}
			with: stopIndex {IntegerVar}
			with: firstIndex {IntegerVar unused}
			"Return a table which contains the elements from start to stop, starting at firstIndex.
			Zero-based subclasses will blast if firstIndex is non-zero"
			^ self subTableBetween: startIndex with: stopIndex!
		*/
	}

	public abstract ScruTable subTable(XnRegion region);
	/*
	udanax-top.st:49161:MuArray methodsFor: 'accessing'!
	{ScruTable} subTable: region {XnRegion}
		self subclassResponsibility!
	*/

	public abstract ScruTable subTableBetween(IntegerValue startLoc, IntegerValue endLoc);
	/*
	udanax-top.st:49165:MuArray methodsFor: 'accessing'!
	{ScruTable} subTableBetween: startLoc {IntegerVar} with: endLoc {IntegerVar}
		self subclassResponsibility!
	*/

	public ScruTable transformedBy(Dsp dsp) {
		// a dsp equal to its own inverse leaves keys unchanged, so no view is needed
		if (dsp.inverse().isEqual(dsp)) {
			return this;
		} else {
			return MuArray.offsetScruArray(this, dsp);
		}
		/*
		udanax-top.st:49168:MuArray methodsFor: 'accessing'!
		{ScruTable} transformedBy: dsp {Dsp}
			(dsp inverse isEqual: dsp)
				ifTrue: [^self]
				ifFalse: [^MuArray offsetScruArray: self with: dsp]!
		*/
	}

	/////////////////////////////////////////////
	// Creation

	public abstract ScruTable copy();
	/*
	udanax-top.st:49176:MuArray methodsFor: 'creation'!
	{ScruTable} copy
		self subclassResponsibility!
	*/

	public abstract ScruTable emptySize(IntegerValue size);
	/*
	udanax-top.st:49179:MuArray methodsFor: 'creation'!
	{ScruTable} emptySize: size {IntegerVar}
		self subclassResponsibility!
	*/

	/////////////////////////////////////////////
	// Testing

	// Contiguous zero-based domain: a key is present iff 0 <= key < count().
	public boolean includesIntKey(IntegerValue aKey) {
		return aKey.isGE(IntegerValue.zero()) && (aKey.isLT(count()));
		/*
		udanax-top.st:49185:MuArray methodsFor: 'testing'!
		{BooleanVar} includesIntKey: aKey {IntegerVar}
			^aKey >= IntegerVar0 and: [aKey < self count]!
		*/
	}

	public boolean isEmpty() {
		return count().isEqual(IntegerValue.zero());
		/*
		udanax-top.st:49188:MuArray methodsFor: 'testing'!
		{BooleanVar} isEmpty
			^self count = IntegerVar0!
		*/
	}

	/////////////////////////////////////////////
	// Runs

	public abstract XnRegion runAtInt(IntegerValue key);
	/*
	udanax-top.st:49193:MuArray methodsFor: 'runs'!
	{XnRegion} runAtInt: key {IntegerVar}
		self subclassResponsibility!
	*/

	/////////////////////////////////////////////
	// Enumerating

	/**
	 * Return a stepper on this table.
	 */
	public abstract TableStepper stepper(OrderSpec order);
	/*
	udanax-top.st:49198:MuArray methodsFor: 'enumerating'!
	{TableStepper} stepper: order {OrderSpec default: NULL}
		"Return a stepper on this table."
		self subclassResponsibility!
	*/

	// Return the single element of a one-element array; blasts otherwise.
	// NOTE(review): uses equals() here where sibling methods use isEqual() —
	// presumably equivalent for IntegerValue, but worth confirming.
	public Heaper theOne() {
		if (!count().equals(IntegerValue.one())) {
			throw new AboraRuntimeException(AboraRuntimeException.NOT_ONE_ELEMENT);
		}
		return intFetch(IntegerValue.zero());
		/*
		udanax-top.st:49203:MuArray methodsFor: 'enumerating'!
		{Heaper} theOne
			self count ~~ 1 ifTrue:
				[ Heaper BLAST: #NotOneElement ].
			^ self intFetch: IntegerVar0!
		*/
	}

	/////////////////////////////////////////////
	// Bulk Operations

	/**
	 * I 'wipe' from myself all associations whose key
	 * is in 'region'. See MuTable::wipe
	 */
	public void wipeAll(XnRegion region) {
		if (!(region.coordinateSpace().isEqual(coordinateSpace()))) {
			throw new AboraRuntimeException(AboraRuntimeException.WRONG_COORD_SPACE);
		}
		if (isEmpty()) {
			return;
		}
		if (!region.isSimple()) {
			throw new AboraRuntimeException(AboraRuntimeException.NOT_SIMPLE);
		}
		// Step descending so earlier removals cannot disturb positions
		// not yet visited; stepper is destroyed even if intWipe blasts.
		Stepper stepper = ((region.intersect(domain())).stepper((IntegerSpace.make().getDescending())));
		try {
			IntegerPos p;
			while ((p = (IntegerPos) stepper.fetch()) != null) {
				intWipe(p.asIntegerVar());
				stepper.step();
			}
		} finally {
			stepper.destroy();
		}
		/*
		udanax-top.st:49210:MuArray methodsFor: 'bulk operations'!
		{void} wipeAll: region {XnRegion}
			"I 'wipe' from myself all associations whose key
			is in 'region'.  See MuTable::wipe"
			(region coordinateSpace isEqual: self coordinateSpace)
				ifFalse: [Heaper BLAST: #WrongCoordSpace].
			self isEmpty ifTrue: [^VOID].
			region isSimple ifFalse: [Heaper BLAST: #NotSimple].
			((region intersect: self domain)
				stepper: (IntegerSpace make getDescending))
				forEach: [:p {IntegerPos} | self intWipe: p asIntegerVar]!
		*/
	}

	// Position-keyed overloads: narrow the Position to IntegerPos and
	// delegate to the integer-keyed primitives above.

	public Heaper atStore(Position key, Heaper value) {
		return atIntStore(((IntegerPos) key).asIntegerVar(), value);
		/*
		udanax-top.st:49224:MuArray methodsFor: 'overload junk'!
		{Heaper} at: key {Position} store: value {Heaper}
			^ self atInt: (key cast: IntegerPos) asIntegerVar store: value!
		*/
	}

	public Heaper fetch(Position key) {
		return intFetch((((IntegerPos) key).asIntegerVar()));
		/*
		udanax-top.st:49228:MuArray methodsFor: 'overload junk'!
		{Heaper} fetch: key {Position}
			^ self intFetch: ((key cast: IntegerPos) asIntegerVar)!
		*/
	}

	public boolean includesKey(Position aKey) {
		return includesIntKey((((IntegerPos) aKey).asIntegerVar()));
		/*
		udanax-top.st:49232:MuArray methodsFor: 'overload junk'!
		{BooleanVar} includesKey: aKey {Position}
			^self includesIntKey: ((aKey cast: IntegerPos) asIntegerVar)!
		*/
	}

	public XnRegion runAt(Position key) {
		return runAtInt((((IntegerPos) key).asIntegerVar()));
		/*
		udanax-top.st:49235:MuArray methodsFor: 'overload junk'!
		{XnRegion} runAt: key {Position}
			^self runAtInt: ((key quickCast: IntegerPos) asIntegerVar)!
		*/
	}

	public boolean wipe(Position key) {
		return intWipe((((IntegerPos) key).asIntegerVar()));
		/*
		udanax-top.st:49239:MuArray methodsFor: 'overload junk'!
		{BooleanVar} wipe: key {Position}
			^ self intWipe: ((key cast: IntegerPos) asIntegerVar)!
		*/
	}
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.jmx;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.Set;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanConstructorInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanNotificationInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanParameterInfo;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.modelmbean.ModelMBean;
import org.eclipse.jetty.util.LazyList;
import org.eclipse.jetty.util.Loader;
import org.eclipse.jetty.util.TypeUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
/* ------------------------------------------------------------ */
/** ObjectMBean.
 * A dynamic MBean that can wrap an arbitrary Object instance.
* the attributes and methods exposed by this bean are controlled by
* the merge of property bundles discovered by names related to all
* superclasses and all superinterfaces.
*
* Attributes and methods exported may be "Object" and must exist on the
 * wrapped object, or "MBean" and must exist on a subclass of ObjectMBean
* or "MObject" which exists on the wrapped object, but whose values are
* converted to MBean object names.
*
*/
public class ObjectMBean implements DynamicMBean
{
    private static final Logger LOG = Log.getLogger(ObjectMBean.class);

    // single-element signature array used to look up MBean(Object) constructors
    private static Class[] OBJ_ARG = new Class[]{Object.class};

    // the wrapped (managed) object this MBean exposes
    protected Object _managed;
    // lazily built by getMBeanInfo(); null until first requested
    private MBeanInfo _info;
    // attribute name -> getter Method (declared on _managed or on this MBean)
    private Map _getters=new HashMap();
    // attribute name -> setter Method
    private Map _setters=new HashMap();
    // "name(sigType,sigType,...)" key -> operation Method, see invoke()
    private Map _methods=new HashMap();
    // names of attributes whose values are converted to/from ObjectNames
    private Set _convert=new HashSet();
    // context class loader captured at construction; restored around invoke()
    private ClassLoader _loader;
    private MBeanContainer _mbeanContainer;

    private static String OBJECT_NAME_CLASS = ObjectName.class.getName();
    private static String OBJECT_NAME_ARRAY_CLASS = ObjectName[].class.getName();
    /* ------------------------------------------------------------ */
    /**
     * Create MBean for Object. Attempts to create an MBean for the object by searching the package
     * and class name space. For example an object of the type
     *
     * <PRE>
     * class com.acme.MyClass extends com.acme.util.BaseClass implements com.acme.Iface
     * </PRE>
     *
     * Then this method would look for the following classes:
     * <UL>
     * <LI>com.acme.jmx.MyClassMBean
     * <LI>com.acme.util.jmx.BaseClassMBean
     * <LI>org.eclipse.jetty.jmx.ObjectMBean
     * </UL>
     *
     * @param o The object
     * @return A new instance of an MBean for the object or null.
     */
    public static Object mbeanFor(Object o)
    {
        try
        {
            Class oClass = o.getClass();
            Object mbean = null;

            // Walk up the superclass chain until an MBean class is found.
            while (mbean == null && oClass != null)
            {
                // candidate MBean class name: <package>.jmx.<Class>MBean
                String pName = oClass.getPackage().getName();
                String cName = oClass.getName().substring(pName.length() + 1);
                String mName = pName + ".jmx." + cName + "MBean";

                try
                {
                    // Once the search reaches Object, fall back to ObjectMBean itself.
                    // NOTE: the ternary also reassigns oClass, which terminates the loop.
                    Class mClass = (Object.class.equals(oClass))?oClass=ObjectMBean.class:Loader.loadClass(oClass,mName,true);
                    if (LOG.isDebugEnabled())
                        LOG.debug("mbeanFor " + o + " mClass=" + mClass);

                    try
                    {
                        // Preferred path: a one-arg constructor taking the managed object.
                        Constructor constructor = mClass.getConstructor(OBJ_ARG);
                        mbean=constructor.newInstance(new Object[]{o});
                    }
                    catch(Exception e)
                    {
                        LOG.ignore(e);
                        // Fallback: a ModelMBean with a default constructor.
                        if (ModelMBean.class.isAssignableFrom(mClass))
                        {
                            mbean=mClass.newInstance();
                            ((ModelMBean)mbean).setManagedResource(o, "objectReference");
                        }
                    }

                    if (LOG.isDebugEnabled())
                        LOG.debug("mbeanFor " + o + " is " + mbean);
                    return mbean;
                }
                catch (ClassNotFoundException e)
                {
                    // The code below was modified to fix bugs 332200 and JETTY-1416
                    // The issue was caused by additional information added to the
                    // message after the class name when running in Apache Felix,
                    // as well as before the class name when running in JBoss.
                    if (e.getMessage().contains(mName))
                        LOG.ignore(e);
                    else
                        LOG.warn(e);
                }
                catch (Error e)
                {
                    LOG.warn(e);
                    mbean = null;
                }
                catch (Exception e)
                {
                    LOG.warn(e);
                    mbean = null;
                }

                // try the superclass next
                oClass = oClass.getSuperclass();
            }
        }
        catch (Exception e)
        {
            LOG.ignore(e);
        }
        return null;
    }
public ObjectMBean(Object managedObject)
{
_managed = managedObject;
_loader = Thread.currentThread().getContextClassLoader();
}
public Object getManagedObject()
{
return _managed;
}
    /**
     * @return a forced ObjectName for this MBean, or null to let the
     *         container derive one; presumably an override hook for
     *         subclasses — base implementation always returns null
     */
    public ObjectName getObjectName()
    {
        return null;
    }
    /**
     * @return a context string used in building the ObjectName, or null for
     *         none; presumably an override hook for subclasses — base
     *         implementation always returns null
     */
    public String getObjectContextBasis()
    {
        return null;
    }
    /**
     * @return a name string used in building the ObjectName, or null for
     *         none; presumably an override hook for subclasses — base
     *         implementation always returns null
     */
    public String getObjectNameBasis()
    {
        return null;
    }
protected void setMBeanContainer(MBeanContainer container)
{
this._mbeanContainer = container;
}
public MBeanContainer getMBeanContainer ()
{
return this._mbeanContainer;
}
    /**
     * Lazily build (and cache in _info) the MBeanInfo for the managed object
     * by merging the "-mbean" resource bundles of every influencing class
     * (the managed class, its superclasses, and interfaces — see
     * findInfluences).  Bundle keys name either the class itself (its
     * description), an operation (keys containing '('), or an attribute.
     */
    public MBeanInfo getMBeanInfo()
    {
        try
        {
            if (_info==null)
            {
                // Start with blank lazy lists attributes etc.
                String desc=null;
                Object attributes=null;
                Object constructors=null;
                Object operations=null;
                Object notifications=null;

                // Find list of classes that can influence the mbean
                Class o_class=_managed.getClass();
                Object influences = findInfluences(null, _managed.getClass());

                // Set to record defined items
                Set defined=new HashSet();

                // For each influence
                for (int i=0;i<LazyList.size(influences);i++)
                {
                    Class oClass = (Class)LazyList.get(influences, i);

                    // look for a bundle defining methods
                    if (Object.class.equals(oClass))
                        oClass=ObjectMBean.class;
                    // bundle resource name: <pkg-as-path>/jmx/<Class>-mbean
                    String pName = oClass.getPackage().getName();
                    String cName = oClass.getName().substring(pName.length() + 1);
                    String rName = pName.replace('.', '/') + "/jmx/" + cName+"-mbean";

                    try
                    {
                        LOG.debug(rName);
                        ResourceBundle bundle = Loader.getResourceBundle(o_class, rName,true,Locale.getDefault());

                        // Extract meta data from bundle
                        Enumeration e = bundle.getKeys();
                        while (e.hasMoreElements())
                        {
                            String key = (String)e.nextElement();
                            String value = bundle.getString(key);

                            // Determine if key is for mbean, attribute or for operation
                            if (key.equals(cName))
                            {
                                // set the mbean description
                                // (first influence wins; later bundles cannot overwrite)
                                if (desc==null)
                                    desc=value;
                            }
                            else if (key.indexOf('(')>0)
                            {
                                // define an operation
                                if (!defined.contains(key) && key.indexOf('[')<0)
                                {
                                    defined.add(key);
                                    operations=LazyList.add(operations,defineOperation(key, value, bundle));
                                }
                            }
                            else
                            {
                                // define an attribute
                                if (!defined.contains(key))
                                {
                                    defined.add(key);
                                    MBeanAttributeInfo info=defineAttribute(key, value);
                                    // info may be null if the attribute cannot be resolved
                                    if (info!=null)
                                        attributes=LazyList.add(attributes,info);
                                }
                            }
                        }
                    }
                    catch(MissingResourceException e)
                    {
                        // no bundle for this influence; that is expected and fine
                        LOG.ignore(e);
                    }
                }

                _info = new MBeanInfo(o_class.getName(),
                                desc,
                                (MBeanAttributeInfo[])LazyList.toArray(attributes, MBeanAttributeInfo.class),
                                (MBeanConstructorInfo[])LazyList.toArray(constructors, MBeanConstructorInfo.class),
                                (MBeanOperationInfo[])LazyList.toArray(operations, MBeanOperationInfo.class),
                                (MBeanNotificationInfo[])LazyList.toArray(notifications, MBeanNotificationInfo.class));
            }
        }
        catch(RuntimeException e)
        {
            LOG.warn(e);
            throw e;
        }
        return _info;
    }
    /* ------------------------------------------------------------ */
    /**
     * Get the value of a named attribute by invoking its registered getter
     * on the managed object (or on this MBean when the getter is declared
     * here).  Attributes registered in _convert have their values replaced
     * by the ObjectNames of the corresponding MBeans.
     *
     * @param name the attribute name
     * @return the attribute value, possibly converted to ObjectName(s)
     * @throws AttributeNotFoundException if no getter is registered or it is inaccessible
     * @throws ReflectionException if the getter itself throws
     */
    public Object getAttribute(String name) throws AttributeNotFoundException, MBeanException, ReflectionException
    {
        Method getter = (Method) _getters.get(name);
        if (getter == null)
            throw new AttributeNotFoundException(name);

        try
        {
            Object o = _managed;
            if (getter.getDeclaringClass().isInstance(this))
                o = this; // mbean method

            // get the attribute
            Object r=getter.invoke(o, (java.lang.Object[]) null);

            // convert to ObjectName if need be.
            if (r!=null && _convert.contains(name))
            {
                if (r.getClass().isArray())
                {
                    // element-wise conversion of an array result
                    ObjectName[] on = new ObjectName[Array.getLength(r)];
                    for (int i=0;i<on.length;i++)
                        on[i]=_mbeanContainer.findMBean(Array.get(r, i));
                    r=on;
                }
                else if (r instanceof Collection<?>)
                {
                    // element-wise conversion of a collection result
                    Collection<Object> c = (Collection<Object>)r;
                    ObjectName[] on = new ObjectName[c.size()];
                    int i=0;
                    for (Object obj :c)
                        on[i++]=_mbeanContainer.findMBean(obj);
                    r=on;
                }
                else
                {
                    // single value: null result here means no MBean is registered for it
                    ObjectName mbean = _mbeanContainer.findMBean(r);
                    if (mbean==null)
                        return null;
                    r=mbean;
                }
            }
            return r;
        }
        catch (IllegalAccessException e)
        {
            LOG.warn(Log.EXCEPTION, e);
            throw new AttributeNotFoundException(e.toString());
        }
        catch (InvocationTargetException e)
        {
            LOG.warn(Log.EXCEPTION, e);
            // wrap the underlying cause for the JMX caller
            throw new ReflectionException(new Exception(e.getCause()));
        }
    }
/* ------------------------------------------------------------ */
public AttributeList getAttributes(String[] names)
{
AttributeList results = new AttributeList(names.length);
for (int i = 0; i < names.length; i++)
{
try
{
results.add(new Attribute(names[i], getAttribute(names[i])));
}
catch (Exception e)
{
LOG.warn(Log.EXCEPTION, e);
}
}
return results;
}
    /* ------------------------------------------------------------ */
    /**
     * Set a named attribute by invoking its registered setter on the managed
     * object (or on this MBean when the setter is declared here).  Attributes
     * registered in _convert receive ObjectName values, which are resolved
     * back to the referenced beans before the setter is called.
     *
     * @param attr the attribute name/value pair; ignored if null
     * @throws AttributeNotFoundException if no setter is registered or it is inaccessible
     * @throws ReflectionException if the setter itself throws
     */
    public void setAttribute(Attribute attr) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException
    {
        if (attr == null)
            return;

        if (LOG.isDebugEnabled())
            LOG.debug("setAttribute " + _managed + ":" +attr.getName() + "=" + attr.getValue());
        Method setter = (Method) _setters.get(attr.getName());
        if (setter == null)
            throw new AttributeNotFoundException(attr.getName());
        try
        {
            Object o = _managed;
            if (setter.getDeclaringClass().isInstance(this))
                o = this;

            // get the value
            Object value = attr.getValue();

            // convert from ObjectName if need be
            if (value!=null && _convert.contains(attr.getName()))
            {
                if (value.getClass().isArray())
                {
                    // build an array of the setter's component type,
                    // resolving each ObjectName back to its bean
                    Class t=setter.getParameterTypes()[0].getComponentType();
                    Object na = Array.newInstance(t,Array.getLength(value));
                    for (int i=Array.getLength(value);i-->0;)
                        Array.set(na, i, _mbeanContainer.findBean((ObjectName)Array.get(value, i)));
                    value=na;
                }
                else
                    value=_mbeanContainer.findBean((ObjectName)value);
            }

            // do the setting
            setter.invoke(o, new Object[]{ value });
        }
        catch (IllegalAccessException e)
        {
            LOG.warn(Log.EXCEPTION, e);
            throw new AttributeNotFoundException(e.toString());
        }
        catch (InvocationTargetException e)
        {
            LOG.warn(Log.EXCEPTION, e);
            // wrap the underlying cause for the JMX caller
            throw new ReflectionException(new Exception(e.getCause()));
        }
    }
/* ------------------------------------------------------------ */
/**
 * Set several attributes in one call. Each attribute is set individually;
 * failures are logged and the corresponding entry is omitted from the
 * returned list, per the DynamicMBean contract.
 *
 * @param attrs the attributes to set
 * @return the attributes that were successfully set, with their re-read values
 */
public AttributeList setAttributes(AttributeList attrs)
{
    LOG.debug("setAttributes");

    AttributeList results = new AttributeList(attrs.size());
    for (Object element : attrs)
    {
        try
        {
            Attribute attribute = (Attribute) element;
            setAttribute(attribute);
            // Echo back the value actually held after the set.
            results.add(new Attribute(attribute.getName(), getAttribute(attribute.getName())));
        }
        catch (Exception e)
        {
            LOG.warn(Log.EXCEPTION, e);
        }
    }
    return results;
}
/* ------------------------------------------------------------ */
/**
 * Invoke a named operation on the managed object, or on this MBean proxy
 * when the method was declared here.
 *
 * @param name the operation name
 * @param params the invocation arguments
 * @param signature the parameter type names used to select the overload
 * @return the result of the reflective invocation
 * @throws MBeanException if the resolved method is not accessible
 * @throws ReflectionException if no matching operation was registered or the target method throws
 */
public Object invoke(String name, Object[] params, String[] signature) throws MBeanException, ReflectionException
{
    if (LOG.isDebugEnabled())
        LOG.debug("invoke " + name);

    // Build the lookup key "name(type1,type2,...)" in the same normalized form
    // used when the operation was registered by defineOperation. A StringBuilder
    // avoids the quadratic cost of repeated String concatenation in the loop.
    StringBuilder key = new StringBuilder(name).append('(');
    if (signature != null)
    {
        for (int i = 0; i < signature.length; i++)
        {
            if (i > 0)
                key.append(',');
            key.append(signature[i]);
        }
    }
    key.append(')');
    String methodKey = key.toString();

    // Run with the MBean's classloader as the context loader so the target
    // method resolves classes against the managed object's environment.
    ClassLoader old_loader = Thread.currentThread().getContextClassLoader();
    try
    {
        Thread.currentThread().setContextClassLoader(_loader);
        Method method = (Method) _methods.get(methodKey);
        if (method == null)
            throw new NoSuchMethodException(methodKey);

        Object o = _managed;
        if (method.getDeclaringClass().isInstance(this))
            o = this;
        return method.invoke(o, params);
    }
    catch (NoSuchMethodException e)
    {
        LOG.warn(Log.EXCEPTION, e);
        throw new ReflectionException(e);
    }
    catch (IllegalAccessException e)
    {
        LOG.warn(Log.EXCEPTION, e);
        throw new MBeanException(e);
    }
    catch (InvocationTargetException e)
    {
        LOG.warn(Log.EXCEPTION, e);
        // Wrap the underlying cause so the caller sees the real failure.
        throw new ReflectionException(new Exception(e.getCause()));
    }
    finally
    {
        // Always restore the caller's context classloader.
        Thread.currentThread().setContextClassLoader(old_loader);
    }
}
/**
 * Recursively collect the classes that influence the MBean meta data for
 * the given class: the class itself, its superclass chain, and every
 * implemented interface (transitively).
 *
 * @param influences the accumulator (a LazyList)
 * @param aClass the class to add, or null to stop recursion
 * @return the updated accumulator
 */
private static Object findInfluences(Object influences, Class aClass)
{
    if (aClass == null)
        return influences;

    // The class itself is an influence ...
    influences = LazyList.add(influences, aClass);
    // ... as is its superclass chain ...
    influences = findInfluences(influences, aClass.getSuperclass());
    // ... and every implemented interface.
    Class[] interfaces = aClass.getInterfaces();
    if (interfaces != null)
    {
        for (Class anInterface : interfaces)
            influences = findInfluences(influences, anInterface);
    }
    return influences;
}
/* ------------------------------------------------------------ */
/**
 * Define an attribute on the managed object. The meta data is defined by looking for standard
 * getter and setter methods. Descriptions are obtained with a call to findDescription with the
 * attribute name.
 *
 * @param name the attribute name, used to derive the getXxx/isXxx/setXxx accessor names
 * @param metaData "description" or "access:description" or "type:access:description" where type is
 * one of: <ul>
 * <li>"Object" The field/method is on the managed object.
 * <li>"MBean" The field/method is on the mbean proxy object
 * <li>"MObject" The field/method is on the managed object and value should be converted to MBean reference
 * <li>"MMBean" The field/method is on the mbean proxy object and value should be converted to MBean reference
 * </ul>
 * the access is either "RW" or "RO".
 * @return the attribute meta data, or null if no usable getter/setter was found or a
 *         convertible attribute has no suitable (non-primitive) type
 */
public MBeanAttributeInfo defineAttribute(String name, String metaData)
{
    String description = "";
    boolean writable = true;
    boolean onMBean = false;
    boolean convert = false;

    // Parse metaData: every token before the last is an access ("RO") or
    // type ("MBean"/"MMBean"/"MObject") flag; the last token is always the description.
    if (metaData!= null)
    {
        String[] tokens = metaData.split(":", 3);
        for (int t=0;t<tokens.length-1;t++)
        {
            tokens[t]=tokens[t].trim();
            if ("RO".equals(tokens[t]))
                writable=false;
            else
            {
                onMBean=("MMBean".equalsIgnoreCase(tokens[t]) || "MBean".equalsIgnoreCase(tokens[t]));
                convert=("MMBean".equalsIgnoreCase(tokens[t]) || "MObject".equalsIgnoreCase(tokens[t]));
            }
        }
        description=tokens[tokens.length-1];
    }

    // Capitalize the attribute name to derive the JavaBean accessor names.
    String uName = name.substring(0, 1).toUpperCase(Locale.ENGLISH) + name.substring(1);
    // MBean/MMBean attributes reflect against this proxy class, others against the managed object.
    Class oClass = onMBean ? this.getClass() : _managed.getClass();

    if (LOG.isDebugEnabled())
        LOG.debug("defineAttribute "+name+" "+onMBean+":"+writable+":"+oClass+":"+description);

    Class type = null;
    Method getter = null;
    Method setter = null;

    // Scan all public methods for matching accessors, warning on (and keeping
    // the first of) duplicate getters/setters and on conflicting types.
    Method[] methods = oClass.getMethods();
    for (int m = 0; m < methods.length; m++)
    {
        if ((methods[m].getModifiers() & Modifier.PUBLIC) == 0)
            continue;

        // Look for a getter
        if (methods[m].getName().equals("get" + uName) && methods[m].getParameterTypes().length == 0)
        {
            if (getter != null)
            {
                LOG.warn("Multiple mbean getters for attr " + name+ " in "+oClass);
                continue;
            }
            getter = methods[m];
            if (type != null && !type.equals(methods[m].getReturnType()))
            {
                LOG.warn("Type conflict for mbean attr " + name+ " in "+oClass);
                continue;
            }
            type = methods[m].getReturnType();
        }

        // Look for an is getter
        if (methods[m].getName().equals("is" + uName) && methods[m].getParameterTypes().length == 0)
        {
            if (getter != null)
            {
                // Also fires when both getXxx and isXxx exist on the class.
                LOG.warn("Multiple mbean getters for attr " + name+ " in "+oClass);
                continue;
            }
            getter = methods[m];
            if (type != null && !type.equals(methods[m].getReturnType()))
            {
                LOG.warn("Type conflict for mbean attr " + name+ " in "+oClass);
                continue;
            }
            type = methods[m].getReturnType();
        }

        // look for a setter
        if (writable && methods[m].getName().equals("set" + uName) && methods[m].getParameterTypes().length == 1)
        {
            if (setter != null)
            {
                LOG.warn("Multiple setters for mbean attr " + name+ " in "+oClass);
                continue;
            }
            setter = methods[m];
            if (type != null && !type.equals(methods[m].getParameterTypes()[0]))
            {
                LOG.warn("Type conflict for mbean attr " + name+ " in "+oClass);
                continue;
            }
            type = methods[m].getParameterTypes()[0];
        }
    }

    // Converted attributes are exposed as ObjectName(s); that requires a known,
    // non-primitive type to map back to an MBean reference.
    if (convert)
    {
        if (type==null)
        {
            LOG.warn("No mbean type for " + name+" on "+_managed.getClass());
            return null;
        }

        if (type.isPrimitive() && !type.isArray())
        {
            LOG.warn("Cannot convert mbean primative " + name);
            return null;
        }
    }

    if (getter == null && setter == null)
    {
        LOG.warn("No mbean getter or setters found for " + name+ " in "+oClass);
        return null;
    }

    try
    {
        // Remember the methods
        _getters.put(name, getter);
        _setters.put(name, setter);

        MBeanAttributeInfo info=null;
        if (convert)
        {
            _convert.add(name);
            // Converted attributes advertise ObjectName / ObjectName[] as their type.
            if (type.isArray())
                info= new MBeanAttributeInfo(name,OBJECT_NAME_ARRAY_CLASS,description,getter!=null,setter!=null,getter!=null&&getter.getName().startsWith("is"));
            else
                info= new MBeanAttributeInfo(name,OBJECT_NAME_CLASS,description,getter!=null,setter!=null,getter!=null&&getter.getName().startsWith("is"));
        }
        else
            info= new MBeanAttributeInfo(name,description,getter,setter);

        return info;
    }
    catch (Exception e)
    {
        LOG.warn(name+": "+metaData, e);
        throw new IllegalArgumentException(e.toString());
    }
}
/* ------------------------------------------------------------ */
/**
 * Define an operation on the managed object. Defines an operation with parameters. Refection is
 * used to determine find the method and it's return type. The description of the method is
 * found with a call to findDescription on "name(signature)". The name and description of each
 * parameter is found with a call to findDescription with "name(signature)[n]", the returned
 * description is for the last parameter of the partial signature and is assumed to start with
 * the parameter name, followed by a colon.
 *
 * @param signature the operation as "name(type0,type1,...)"
 * @param metaData "description" or "impact:description" or "type:impact:description", type is
 * the "Object","MBean", "MMBean" or "MObject" to indicate the method is on the object, the MBean or on the
 * object but converted to an MBean reference, and impact is either "ACTION","INFO","ACTION_INFO" or "UNKNOWN".
 * @param bundle resource bundle supplying the "name(sig)[n]" parameter descriptions
 * @return the operation meta data
 */
private MBeanOperationInfo defineOperation(String signature, String metaData, ResourceBundle bundle)
{
    // Consume metaData tokens from right to left: the last token is the
    // description, the one before it (if present) the impact, and an optional
    // leading token selects the target object / conversion mode. The i--
    // side effects below implement that right-to-left walk.
    String[] tokens=metaData.split(":",3);
    int i=tokens.length-1;
    String description=tokens[i--];
    String impact_name = i<0?"UNKNOWN":tokens[i--].trim();
    if (i==0)
        tokens[0]=tokens[0].trim();
    boolean onMBean= i==0 && ("MBean".equalsIgnoreCase(tokens[0])||"MMBean".equalsIgnoreCase(tokens[0]));
    boolean convert= i==0 && ("MObject".equalsIgnoreCase(tokens[0])||"MMBean".equalsIgnoreCase(tokens[0]));

    if (LOG.isDebugEnabled())
        LOG.debug("defineOperation "+signature+" "+onMBean+":"+impact_name+":"+description);

    // MBean/MMBean operations reflect against this proxy class, others against the managed object.
    Class oClass = onMBean ? this.getClass() : _managed.getClass();

    try
    {
        // Resolve the impact
        int impact=MBeanOperationInfo.UNKNOWN;
        if (impact_name==null || impact_name.equals("UNKNOWN"))
            impact=MBeanOperationInfo.UNKNOWN;
        else if (impact_name.equals("ACTION"))
            impact=MBeanOperationInfo.ACTION;
        else if (impact_name.equals("INFO"))
            impact=MBeanOperationInfo.INFO;
        else if (impact_name.equals("ACTION_INFO"))
            impact=MBeanOperationInfo.ACTION_INFO;
        else
            LOG.warn("Unknown impact '"+impact_name+"' for "+signature);

        // split the signature into method name and comma-separated argument types
        String[] parts=signature.split("[\\(\\)]");
        String method_name=parts[0];
        String arguments=parts.length==2?parts[1]:null;
        String[] args=arguments==null?new String[0]:arguments.split(" *, *");

        // Check types and normalize signature.
        Class[] types = new Class[args.length];
        MBeanParameterInfo[] pInfo = new MBeanParameterInfo[args.length];
        signature=method_name;
        for (i = 0; i < args.length; i++)
        {
            // Try primitive / well-known names first, then the context classloader.
            Class type = TypeUtil.fromName(args[i]);
            if (type == null)
                type = Thread.currentThread().getContextClassLoader().loadClass(args[i]);
            types[i] = type;
            args[i] = type.isPrimitive() ? TypeUtil.toName(type) : args[i];
            signature+=(i>0?",":"(")+args[i];
        }
        signature+=(i>0?")":"()");

        // Build param infos: each bundle entry is expected as "paramName: description".
        for (i = 0; i < args.length; i++)
        {
            String param_desc = bundle.getString(signature + "[" + i + "]");
            parts=param_desc.split(" *: *",2);
            if (LOG.isDebugEnabled())
                LOG.debug(parts[0]+": "+parts[1]);
            pInfo[i] = new MBeanParameterInfo(parts[0].trim(), args[i], parts[1].trim());
        }

        // build the operation info and register it under the normalized signature
        Method method = oClass.getMethod(method_name, types);
        Class returnClass = method.getReturnType();
        _methods.put(signature, method);
        if (convert)
            _convert.add(signature);

        return new MBeanOperationInfo(method_name, description, pInfo, returnClass.isPrimitive() ? TypeUtil.toName(returnClass) : (returnClass.getName()), impact);
    }
    catch (Exception e)
    {
        LOG.warn("Operation '"+signature+"'", e);
        throw new IllegalArgumentException(e.toString());
    }
}
}
| |
/**
* Copyright 2019 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.cloud.azure;
import com.github.ambry.config.CloudConfig;
import com.github.ambry.config.Config;
import com.github.ambry.config.Default;
import com.github.ambry.config.VerifiableProperties;
/**
 * The configs for cloud related configurations.
 */
public class AzureCloudConfig {

  public static final String AZURE_STORAGE_CONNECTION_STRING = "azure.storage.connection.string";
  public static final String COSMOS_ENDPOINT = "cosmos.endpoint";
  public static final String COSMOS_COLLECTION_LINK = "cosmos.collection.link";
  public static final String COSMOS_DELETED_CONTAINER_COLLECTION_LINK = "cosmos.deleted.container.collection.link";
  public static final String COSMOS_KEY = "cosmos.key";
  public static final String COSMOS_KEY_SECRET_NAME = "cosmos.key.secret.name";
  public static final String COSMOS_VAULT_URL = "cosmos.vault.url";
  public static final String COSMOS_DIRECT_HTTPS = "cosmos.direct.https";
  public static final String AZURE_STORAGE_AUTHORITY = "azure.storage.authority";
  public static final String AZURE_STORAGE_CLIENTID = "azure.storage.clientId";
  public static final String AZURE_STORAGE_SECRET = "azure.storage.secret";
  public static final String AZURE_STORAGE_SCOPE = "azure.storage.scope";
  public static final String AZURE_STORAGE_ENDPOINT = "azure.storage.endpoint";
  public static final String AZURE_IDENTITY_TENANT_ID = "azure.identity.tenant.id";
  public static final String AZURE_IDENTITY_CLIENT_ID = "azure.identity.client.id";
  public static final String AZURE_IDENTITY_SECRET = "azure.identity.secret";
  public static final String AZURE_IDENTITY_PROXY_HOST = "azure.identity.proxy.host";
  public static final String AZURE_IDENTITY_PROXY_PORT = "azure.identity.proxy.port";
  public static final String COSMOS_QUERY_BATCH_SIZE = "cosmos.query.batch.size";
  public static final String COSMOS_CONTAINER_DELETION_BATCH_SIZE = "cosmos.container.deletion.batch.size";
  public static final String COSMOS_REQUEST_CHARGE_THRESHOLD = "cosmos.request.charge.threshold";
  public static final String COSMOS_CONTINUATION_TOKEN_LIMIT_KB = "cosmos.continuation.token.limit.kb";
  public static final String AZURE_PURGE_BATCH_SIZE = "azure.purge.batch.size";
  public static final String COSMOS_PURGE_BATCH_SIZE = "cosmos.purge.batch.size";
  public static final String AZURE_NAME_SCHEME_VERSION = "azure.name.scheme.version";
  public static final String AZURE_BLOB_CONTAINER_STRATEGY = "azure.blob.container.strategy";
  public static final String AZURE_STORAGE_CLIENT_CLASS = "azure.storage.client.class";
  public static final String CONTAINER_COMPACTION_COSMOS_QUERY_LIMIT = "container.compaction.cosmos.query.limit";
  public static final String CONTAINER_COMPACTION_ABS_PURGE_LIMIT = "container.compaction.abs.purge.limit";
  public static final String AZURE_STORAGE_CLIENT_REFRESH_FACTOR = "azure.storage.client.refresh.factor";
  public static final String USE_ASYNC_AZURE_APIS = "use.async.azure.apis";

  // Per docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
  public static final int MAX_PURGE_BATCH_SIZE = 256;
  public static final int DEFAULT_PURGE_BATCH_SIZE = 100;
  public static final int DEFAULT_QUERY_BATCH_SIZE = 100;
  public static final int DEFAULT_COSMOS_CONTINUATION_TOKEN_LIMIT = 4;
  public static final int DEFAULT_COSMOS_REQUEST_CHARGE_THRESHOLD = 100;
  public static final int DEFAULT_COSMOS_CONTAINER_DELETION_BATCH_SIZE = 100;
  public static final int DEFAULT_CONTAINER_COMPACTION_COSMOS_QUERY_LIMIT = 100;
  public static final int DEFAULT_CONTAINER_COMPACTION_ABS_PURGE_LIMIT = 100;
  // Use a double literal here: the previous float literal (0.9F) widened to
  // 0.8999999761581421 when assigned to this double constant, so the effective
  // default was slightly below the intended 0.9.
  public static final double DEFAULT_AZURE_STORAGE_CLIENT_REFRESH_FACTOR = 0.9;
  public static final int DEFAULT_NAME_SCHEME_VERSION = 0;
  public static final String DEFAULT_CONTAINER_STRATEGY = "Partition";
  public static final String DEFAULT_AZURE_STORAGE_CLIENT_CLASS =
      "com.github.ambry.cloud.azure.ConnectionStringBasedStorageClient";

  /**
   * The Azure Blob Storage connection string.
   */
  @Config(AZURE_STORAGE_CONNECTION_STRING)
  @Default("")
  public final String azureStorageConnectionString;

  /**
   * The Cosmos DB endpoint.
   */
  @Config(COSMOS_ENDPOINT)
  public final String cosmosEndpoint;

  /**
   * The link (URL) for the Cosmos DB metadata collection.
   */
  @Config(COSMOS_COLLECTION_LINK)
  public final String cosmosCollectionLink;

  /**
   * The link (URL) for the Cosmos DB deleted container collection.
   */
  @Config(COSMOS_DELETED_CONTAINER_COLLECTION_LINK)
  public final String cosmosDeletedContainerCollectionLink;

  /**
   * The Cosmos DB connection key.
   */
  @Config(COSMOS_KEY)
  @Default("")
  public final String cosmosKey;

  /**
   * The name of the secret in an Azure KeyVault containing the key to connect to Cosmos DB.
   * Used as an alternative to configuring the key directly in {@link #COSMOS_KEY}.
   */
  @Config(COSMOS_KEY_SECRET_NAME)
  @Default("")
  public final String cosmosKeySecretName;

  /**
   * The URL for the Azure KeyVault containing the cosmos key.
   * Used as an alternative to configuring the key directly in {@link #COSMOS_KEY}.
   */
  @Config(COSMOS_VAULT_URL)
  @Default("")
  public final String cosmosVaultUrl;

  /**
   * Max number of blobs to purge from Azure Blob Storage in one batch (capped at
   * {@link #MAX_PURGE_BATCH_SIZE} per the blob-batch service limit).
   */
  @Config(AZURE_PURGE_BATCH_SIZE)
  @Default("100")
  public final int azurePurgeBatchSize;

  /**
   * Max number of metadata records to purge from Cosmos in one batch. Defaults to
   * {@link #azurePurgeBatchSize} when not configured.
   */
  @Config(COSMOS_PURGE_BATCH_SIZE)
  public final int cosmosPurgeBatchSize;

  /**
   * Version of the blob naming scheme to use.
   */
  @Config(AZURE_NAME_SCHEME_VERSION)
  @Default("0")
  public final int azureNameSchemeVersion;

  /**
   * Strategy used to map blobs to Azure blob containers.
   */
  @Config(AZURE_BLOB_CONTAINER_STRATEGY)
  @Default("Partition")
  public final String azureBlobContainerStrategy;

  /**
   * Max number of metadata records to fetch in a single Cosmos query.
   */
  @Config(COSMOS_QUERY_BATCH_SIZE)
  public final int cosmosQueryBatchSize;

  /**
   * Number of deleted-container records to process per Cosmos batch.
   */
  @Config(COSMOS_CONTAINER_DELETION_BATCH_SIZE)
  public final int cosmosContainerDeletionBatchSize;

  /**
   * The size limit in KB on Cosmos continuation token.
   */
  @Config(COSMOS_CONTINUATION_TOKEN_LIMIT_KB)
  public final int cosmosContinuationTokenLimitKb;

  /**
   * The Cosmos request charge threshold to log.
   */
  @Config(COSMOS_REQUEST_CHARGE_THRESHOLD)
  public final int cosmosRequestChargeThreshold;

  /**
   * Flag indicating whether to use DirectHttps CosmosDB connection mode.
   * Provides better performance but may not work with all firewall settings.
   */
  @Config(COSMOS_DIRECT_HTTPS)
  public final boolean cosmosDirectHttps;

  /**
   * Azure storage authority.
   */
  @Config(AZURE_STORAGE_AUTHORITY)
  @Default("")
  public final String azureStorageAuthority;

  /**
   * Azure storage client id.
   */
  @Config(AZURE_STORAGE_CLIENTID)
  @Default("")
  public final String azureStorageClientId;

  /**
   * Azure storage client secret.
   */
  @Config(AZURE_STORAGE_SECRET)
  @Default("")
  public final String azureStorageSecret;

  /**
   * OAuth scope requested when acquiring Azure storage tokens.
   */
  @Config(AZURE_STORAGE_SCOPE)
  @Default("")
  public final String azureStorageScope;

  /**
   * Azure storage endpoint.
   */
  @Config(AZURE_STORAGE_ENDPOINT)
  @Default("")
  public final String azureStorageEndpoint;

  /**
   * Azure AAD identity tenant id. For use with {@code ClientSecretCredential} auth.
   */
  @Config(AZURE_IDENTITY_TENANT_ID)
  @Default("")
  public final String azureIdentityTenantId;

  /**
   * Azure AAD identity client id. For use with {@code ClientSecretCredential} auth.
   */
  @Config(AZURE_IDENTITY_CLIENT_ID)
  @Default("")
  public final String azureIdentityClientId;

  /**
   * Azure AAD identity client secret. For use with {@code ClientSecretCredential} auth.
   */
  @Config(AZURE_IDENTITY_SECRET)
  @Default("")
  public final String azureIdentitySecret;

  /**
   * Azure AAD identity proxy host. This is a separate config from other services since there are cases where a proxy
   * is required only for AAD (since AAD doesn't support private endpoints).
   * For use with {@code ClientSecretCredential} auth.
   */
  @Config(AZURE_IDENTITY_PROXY_HOST)
  @Default("")
  public final String azureIdentityProxyHost;

  /**
   * Azure AAD identity proxy port. For use with {@code ClientSecretCredential} auth.
   */
  @Config(AZURE_IDENTITY_PROXY_PORT)
  @Default("3128")
  public final int azureIdentityProxyPort;

  /**
   * Factory class to instantiate azure storage client.
   */
  @Config(AZURE_STORAGE_CLIENT_CLASS)
  public final String azureStorageClientClass;

  /**
   * Number of blobs to fetch from Cosmos db for each container compaction query.
   * NOTE(review): unlike most fields in this class it is not final — confirm whether
   * anything (e.g. tests) mutates it before tightening.
   */
  @Config(CONTAINER_COMPACTION_COSMOS_QUERY_LIMIT)
  public int containerCompactionCosmosQueryLimit;

  /**
   * Number of blobs to purge from ABS in each container compaction purge request.
   */
  @Config(CONTAINER_COMPACTION_ABS_PURGE_LIMIT)
  public int containerCompactionAbsPurgeLimit;

  /**
   * Fraction of token expiry time after which storage client token refresh will be attempted.
   */
  @Config(AZURE_STORAGE_CLIENT_REFRESH_FACTOR)
  public double azureStorageClientRefreshFactor;

  /**
   * Flag indicating whether to use asynchronous Azure APIs for uploading and downloading of blobs. This is
   * temporary and can be removed once we move to use only asynchronous methods.
   */
  @Config(USE_ASYNC_AZURE_APIS)
  @Default("false")
  public final boolean useAsyncAzureAPIs;

  /**
   * Populate every config field from the supplied properties, falling back to the
   * DEFAULT_* constants above when a property is absent.
   *
   * @param verifiableProperties the properties to read the configuration from
   */
  public AzureCloudConfig(VerifiableProperties verifiableProperties) {
    azureStorageConnectionString = verifiableProperties.getString(AZURE_STORAGE_CONNECTION_STRING, "");
    cosmosEndpoint = verifiableProperties.getString(COSMOS_ENDPOINT);
    cosmosCollectionLink = verifiableProperties.getString(COSMOS_COLLECTION_LINK);
    cosmosDeletedContainerCollectionLink = verifiableProperties.getString(COSMOS_DELETED_CONTAINER_COLLECTION_LINK, "");
    cosmosKey = verifiableProperties.getString(COSMOS_KEY, "");
    cosmosKeySecretName = verifiableProperties.getString(COSMOS_KEY_SECRET_NAME, "");
    cosmosVaultUrl = verifiableProperties.getString(COSMOS_VAULT_URL, "");
    azureStorageAuthority = verifiableProperties.getString(AZURE_STORAGE_AUTHORITY, "");
    azureStorageClientId = verifiableProperties.getString(AZURE_STORAGE_CLIENTID, "");
    azureStorageSecret = verifiableProperties.getString(AZURE_STORAGE_SECRET, "");
    azureStorageScope = verifiableProperties.getString(AZURE_STORAGE_SCOPE, "");
    azureStorageEndpoint = verifiableProperties.getString(AZURE_STORAGE_ENDPOINT, "");
    azureIdentityTenantId = verifiableProperties.getString(AZURE_IDENTITY_TENANT_ID, "");
    azureIdentityClientId = verifiableProperties.getString(AZURE_IDENTITY_CLIENT_ID, "");
    azureIdentitySecret = verifiableProperties.getString(AZURE_IDENTITY_SECRET, "");
    azureIdentityProxyHost = verifiableProperties.getString(AZURE_IDENTITY_PROXY_HOST, "");
    azureIdentityProxyPort = verifiableProperties.getInt(AZURE_IDENTITY_PROXY_PORT, CloudConfig.DEFAULT_VCR_PROXY_PORT);
    cosmosQueryBatchSize = verifiableProperties.getInt(COSMOS_QUERY_BATCH_SIZE, DEFAULT_QUERY_BATCH_SIZE);
    cosmosContinuationTokenLimitKb =
        verifiableProperties.getInt(COSMOS_CONTINUATION_TOKEN_LIMIT_KB, DEFAULT_COSMOS_CONTINUATION_TOKEN_LIMIT);
    cosmosRequestChargeThreshold =
        verifiableProperties.getInt(COSMOS_REQUEST_CHARGE_THRESHOLD, DEFAULT_COSMOS_REQUEST_CHARGE_THRESHOLD);
    azurePurgeBatchSize =
        verifiableProperties.getIntInRange(AZURE_PURGE_BATCH_SIZE, DEFAULT_PURGE_BATCH_SIZE, 1, MAX_PURGE_BATCH_SIZE);
    cosmosPurgeBatchSize = verifiableProperties.getInt(COSMOS_PURGE_BATCH_SIZE, azurePurgeBatchSize);
    cosmosDirectHttps = verifiableProperties.getBoolean(COSMOS_DIRECT_HTTPS, false);
    azureBlobContainerStrategy =
        verifiableProperties.getString(AZURE_BLOB_CONTAINER_STRATEGY, DEFAULT_CONTAINER_STRATEGY);
    azureNameSchemeVersion = verifiableProperties.getInt(AZURE_NAME_SCHEME_VERSION, DEFAULT_NAME_SCHEME_VERSION);
    azureStorageClientClass =
        verifiableProperties.getString(AZURE_STORAGE_CLIENT_CLASS, DEFAULT_AZURE_STORAGE_CLIENT_CLASS);
    cosmosContainerDeletionBatchSize =
        verifiableProperties.getInt(COSMOS_CONTAINER_DELETION_BATCH_SIZE, DEFAULT_COSMOS_CONTAINER_DELETION_BATCH_SIZE);
    containerCompactionAbsPurgeLimit =
        verifiableProperties.getInt(CONTAINER_COMPACTION_ABS_PURGE_LIMIT, DEFAULT_CONTAINER_COMPACTION_ABS_PURGE_LIMIT);
    containerCompactionCosmosQueryLimit = verifiableProperties.getIntInRange(CONTAINER_COMPACTION_COSMOS_QUERY_LIMIT,
        DEFAULT_CONTAINER_COMPACTION_COSMOS_QUERY_LIMIT, 1, Integer.MAX_VALUE);
    azureStorageClientRefreshFactor = verifiableProperties.getDoubleInRange(AZURE_STORAGE_CLIENT_REFRESH_FACTOR,
        DEFAULT_AZURE_STORAGE_CLIENT_REFRESH_FACTOR, 0.0, 1.0);
    useAsyncAzureAPIs = verifiableProperties.getBoolean(USE_ASYNC_AZURE_APIS, false);
  }
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2018 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.model;
import org.hibernate.annotations.ForeignKey;
import org.hibernate.annotations.Index;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyColumn;
import javax.persistence.OneToMany;
import javax.persistence.PersistenceException;
import javax.persistence.PreRemove;
import javax.persistence.SequenceGenerator;
import javax.validation.constraints.NotNull;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * This class contains a summary of the build results of the execution of a build config set. This includes the start and end
 * time, links to the build records for the executed builds, and the overall status (success/failure) of the set execution.
 */
@Entity
public class BuildConfigSetRecord implements GenericEntity<Integer> {

    private static final long serialVersionUID = 1L;

    public static final String SEQUENCE_NAME = "build_config_set_record_id_seq";

    @Id
    @SequenceGenerator(name = SEQUENCE_NAME, sequenceName = SEQUENCE_NAME, initialValue = 100, allocationSize = 1)
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SEQUENCE_NAME)
    private Integer id;

    /**
     * The build configuration set which was executed
     */
    @NotNull
    @ManyToOne
    @ForeignKey(name = "fk_buildconfigsetrecord_buildconfigset")
    @Index(name = "idx_buildconfigsetrecord_buildconfigset")
    private BuildConfigurationSet buildConfigurationSet;

    /**
     * The time at which the first build in the set was started
     */
    @NotNull
    @Column(columnDefinition = "timestamp with time zone")
    private Date startTime;

    /**
     * The time at which the last build in the set was completed
     * Temporarily set to null while the set is executing
     */
    @Column(columnDefinition = "timestamp with time zone")
    private Date endTime;

    /**
     * The user who executed the set.
     */
    @NotNull
    @ManyToOne
    @ForeignKey(name = "fk_buildconfigsetrecord_user")
    @Index(name = "idx_buildconfigsetrecord_user")
    private User user;

    /**
     * The status (success/failure) of the overall set. If any builds in the set failed, the status of the set is failed.
     */
    @Enumerated(EnumType.STRING)
    private BuildStatus status;

    /**
     * The detailed records of the builds that were executed as part of the execution of this set
     */
    @OneToMany(mappedBy = "buildConfigSetRecord")
    private Set<BuildRecord> buildRecords;

    /**
     * The product version this set execution was run for, if any.
     */
    @ManyToOne
    @ForeignKey(name = "fk_buildconfigsetrecord_productversion")
    @Index(name = "idx_buildconfigsetrecord_productversion")
    private ProductVersion productVersion;

    /**
     * Whether this was a temporary build. Only temporary builds may be deleted (see {@link #preRemove()}).
     */
    @NotNull
    private boolean temporaryBuild;

    /**
     * Example attributes
     * POST_BUILD_REPO_VALIDATION: REPO_SYSTEM_ERROR
     */
    @ElementCollection(fetch = FetchType.EAGER)
    @CollectionTable(name = "build_config_set_record_attributes", joinColumns = @JoinColumn(name = "build_config_set_record_id"))
    @MapKeyColumn(name = "key")
    @Column(name = "value")
    private Map<String, String> attributes = new HashMap<>();

    /**
     * Instantiates a new project build result.
     */
    public BuildConfigSetRecord() {
        buildRecords = new HashSet<>();
    }

    /**
     * JPA lifecycle guard: only temporary builds may be removed from the database.
     *
     * @throws PersistenceException if this record is not a temporary build
     */
    @PreRemove
    public void preRemove() {
        if (!this.temporaryBuild) {
            throw new PersistenceException("The non-temporary builds cannot be deleted! Only deletion of temporary builds is supported");
        }
    }

    /**
     * Gets the id.
     *
     * @return the id
     */
    public Integer getId() {
        return id;
    }

    /**
     * Sets the id.
     *
     * @param id the new id
     */
    @Override
    public void setId(Integer id) {
        this.id = id;
    }

    /**
     * Gets the start time.
     *
     * @return the start time
     */
    public Date getStartTime() {
        return startTime;
    }

    /**
     * Sets the start time.
     *
     * @param startTime the new start time
     */
    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    /**
     * Gets the end time.
     *
     * @return the end time
     */
    public Date getEndTime() {
        return endTime;
    }

    /**
     * Sets the end time.
     *
     * @param endTime the new end time
     */
    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    /**
     * Gets the user.
     *
     * @return the user
     */
    public User getUser() {
        return user;
    }

    /**
     * Sets the user.
     *
     * @param user the new user
     */
    public void setUser(User user) {
        this.user = user;
    }

    public BuildConfigurationSet getBuildConfigurationSet() {
        return buildConfigurationSet;
    }

    public void setBuildConfigurationSet(BuildConfigurationSet buildConfigurationSet) {
        this.buildConfigurationSet = buildConfigurationSet;
    }

    /**
     * Gets the status.
     *
     * @return the status
     */
    public BuildStatus getStatus() {
        return status;
    }

    /**
     * Sets the status.
     *
     * @param status the new status
     */
    public void setStatus(BuildStatus status) {
        this.status = status;
    }

    public Set<BuildRecord> getBuildRecords() {
        return buildRecords;
    }

    public void setBuildRecords(Set<BuildRecord> buildRecords) {
        this.buildRecords = buildRecords;
    }

    public boolean addBuildRecord(BuildRecord buildRecord) {
        return buildRecords.add(buildRecord);
    }

    public boolean removeBuildRecord(BuildRecord buildRecord) {
        return buildRecords.remove(buildRecord);
    }

    public ProductVersion getProductVersion() {
        return productVersion;
    }

    /**
     * Sets the system image.
     *
     * @param productVersion the new system image
     */
    public void setProductVersion(ProductVersion productVersion) {
        this.productVersion = productVersion;
    }

    public boolean isTemporaryBuild() {
        return temporaryBuild;
    }

    public void setTemporaryBuild(boolean temporaryBuild) {
        this.temporaryBuild = temporaryBuild;
    }

    public Map<String, String> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    @Override
    public String toString() {
        return "BuildConfigSetRecord [id=" + id + ", buildConfigurationSet=" + buildConfigurationSet.getName() + ", status=" + status + "]";
    }

    /**
     * Fluent builder for {@link BuildConfigSetRecord}.
     */
    public static class Builder {

        private Integer id;

        private BuildConfigurationSet buildConfigurationSet;

        private Date startTime;

        private Date endTime;

        private BuildStatus status;

        private User user;

        private ProductVersion productVersion;

        private Set<BuildRecord> buildRecords;

        // Defaults to false so build() does not throw a NullPointerException on
        // unboxing when the caller never calls temporaryBuild(...).
        private Boolean temporaryBuild = false;

        public Builder() {
            buildRecords = new HashSet<>();
        }

        public static Builder newBuilder() {
            return new Builder();
        }

        public BuildConfigSetRecord build() {
            BuildConfigSetRecord buildConfigSetRecord = new BuildConfigSetRecord();
            buildConfigSetRecord.setId(id);
            buildConfigSetRecord.setBuildConfigurationSet(buildConfigurationSet);
            buildConfigSetRecord.setStartTime(startTime);
            buildConfigSetRecord.setEndTime(endTime);
            buildConfigSetRecord.setUser(user);
            buildConfigSetRecord.setStatus(status);
            buildConfigSetRecord.setTemporaryBuild(temporaryBuild);

            // Fall back to the configuration set's product version when none was given.
            if (productVersion == null && buildConfigurationSet != null) {
                productVersion = buildConfigurationSet.getProductVersion();
            }
            buildConfigSetRecord.setProductVersion(productVersion);

            // Set the bi-directional mapping
            for (BuildRecord buildRecord : buildRecords) {
                buildRecord.setBuildConfigSetRecord(buildConfigSetRecord);
            }
            buildConfigSetRecord.setBuildRecords(buildRecords);

            return buildConfigSetRecord;
        }

        public Builder id(Integer id) {
            this.id = id;
            return this;
        }

        public Builder buildConfigurationSet(BuildConfigurationSet buildConfigurationSet) {
            this.buildConfigurationSet = buildConfigurationSet;
            return this;
        }

        public Builder startTime(Date startTime) {
            this.startTime = startTime;
            return this;
        }

        public Builder endTime(Date endTime) {
            this.endTime = endTime;
            return this;
        }

        public Builder user(User user) {
            this.user = user;
            return this;
        }

        public Builder status(BuildStatus status) {
            this.status = status;
            return this;
        }

        public Builder productVersion(ProductVersion productVersion) {
            this.productVersion = productVersion;
            return this;
        }

        public Builder buildRecords(Set<BuildRecord> buildRecords) {
            this.buildRecords = buildRecords;
            return this;
        }

        public Builder temporaryBuild(boolean temporaryBuild) {
            this.temporaryBuild = temporaryBuild;
            return this;
        }
    }
}
| |
package com.tinkerpop.rexster.gremlin.converter;
import com.tinkerpop.blueprints.impls.tg.TinkerGraph;
import com.tinkerpop.blueprints.impls.tg.TinkerGraphFactory;
import com.tinkerpop.pipes.util.structures.Table;
import com.tinkerpop.rexster.protocol.serializer.msgpack.templates.ResultsConverter;
import org.junit.Assert;
import org.junit.Test;
import org.msgpack.MessagePack;
import org.msgpack.template.Template;
import org.msgpack.type.MapValue;
import org.msgpack.type.Value;
import org.msgpack.type.ValueFactory;
import org.msgpack.unpacker.BufferUnpacker;
import org.msgpack.unpacker.Converter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import static org.msgpack.template.Templates.TString;
import static org.msgpack.template.Templates.TValue;
import static org.msgpack.template.Templates.tMap;
/**
 * Tests for {@link MsgPackResultConverter}: verifies that null results, tables,
 * graph elements, iterables/iterators (including null elements) and maps are
 * serialized to msgpack in the expected shape.
 */
public class MsgPackResultConverterTest {

    private final MsgPackResultConverter converter = new MsgPackResultConverter();
    private final MessagePack msgpack = new MessagePack();

    @Test
    public void convertNullResultReturnsNull() throws Exception {
        // Note: despite the name, convert(null) yields a non-null byte array
        // that encodes the msgpack nil value.
        byte[] results = this.converter.convert(null);
        Assert.assertNotNull(results);
        Assert.assertTrue(msgpack.read(results).isNilValue());
    }

    @Test
    public void convertJSONObjectNullResultReturnsNull() throws Exception {
        // An explicit msgpack nil input round-trips to the nil encoding as well.
        byte[] results = this.converter.convert(ValueFactory.createNilValue());
        Assert.assertNotNull(results);
        Assert.assertTrue(msgpack.read(results).isNilValue());
    }

    @Test
    public void convertTable() throws Exception {
        // A pipes Table becomes a list of column-name -> cell-value maps, one per row.
        Table table = new Table("col1", "col2");
        table.addRow("x1", "x2");
        table.addRow("y1", "y2");
        byte[] results = this.converter.convert(table);
        Assert.assertNotNull(results);
        final Object unpackedObj = ResultsConverter.deserializeObject(this.msgpack.read(results));
        Assert.assertTrue(unpackedObj instanceof ArrayList);
        final ArrayList unpacked = (ArrayList) unpackedObj;
        Map<String, String> mapX = (Map<String, String>) unpacked.get(0);
        Assert.assertTrue(mapX.containsKey("col1"));
        Assert.assertTrue(mapX.containsKey("col2"));
        Assert.assertEquals("x1", mapX.get("col1"));
        Assert.assertEquals("x2", mapX.get("col2"));
        Map<String, String> mapY = (Map<String, String>) unpacked.get(1);
        Assert.assertTrue(mapY.containsKey("col1"));
        Assert.assertTrue(mapY.containsKey("col2"));
        Assert.assertEquals("y1", mapY.get("col1"));
        Assert.assertEquals("y2", mapY.get("col2"));
    }

    @Test
    public void convertElements() throws Exception {
        // The classic TinkerGraph has 6 vertices; the converted payload must
        // unpack to an iterable of the same size.
        TinkerGraph g = TinkerGraphFactory.createTinkerGraph();
        byte[] converted = this.converter.convert(g.getVertices());
        final BufferUnpacker unpacker = msgpack.createBufferUnpacker(converted);
        final Object unpacked = unpacker.readValue();
        Assert.assertTrue(unpacked instanceof Iterable);
        final Iterator unpackerItty = ((Iterable) unpacked).iterator();
        int counter = 0;
        while (unpackerItty.hasNext()) {
            unpackerItty.next();
            counter++;
        }
        Assert.assertEquals(6, counter);
    }

    @Test
    public void convertIterable() throws Exception {
        // Arbitrary objects in an Iterable are serialized via their toString().
        ArrayList<FunObject> funList = new ArrayList<FunObject>();
        funList.add(new FunObject("x"));
        funList.add(new FunObject("y"));
        Iterable<FunObject> iterable = funList;
        byte[] converted = this.converter.convert(iterable);
        Assert.assertNotNull(converted);
        final BufferUnpacker unpacker = msgpack.createBufferUnpacker(converted);
        final Object unpacked = unpacker.readValue();
        Assert.assertTrue(unpacked instanceof Iterable);
        final Iterator unpackerItty = ((Iterable) unpacked).iterator();
        int counter = 0;
        boolean matchX = false;
        boolean matchY = false;
        while (unpackerItty.hasNext()) {
            final Value v = (Value) unpackerItty.next();
            if (v.asRawValue().getString().equals("x")) {
                matchX = true;
            }
            if (v.asRawValue().getString().equals("y")) {
                matchY = true;
            }
            counter++;
        }
        Assert.assertEquals(2, counter);
        Assert.assertTrue(matchX && matchY);
    }

    @Test
    public void convertIterator() throws Exception {
        // Same expectation as convertIterable, but feeding a raw Iterator.
        ArrayList<FunObject> funList = new ArrayList<FunObject>();
        funList.add(new FunObject("x"));
        funList.add(new FunObject("y"));
        Iterator<FunObject> iterable = funList.iterator();
        byte[] converted = this.converter.convert(iterable);
        final BufferUnpacker unpacker = msgpack.createBufferUnpacker(converted);
        final Object unpacked = unpacker.readValue();
        Assert.assertTrue(unpacked instanceof Iterable);
        final Iterator unpackerItty = ((Iterable) unpacked).iterator();
        int counter = 0;
        boolean matchX = false;
        boolean matchY = false;
        while (unpackerItty.hasNext()) {
            final Value v = (Value) unpackerItty.next();
            if (v.asRawValue().getString().equals("x")) {
                matchX = true;
            }
            if (v.asRawValue().getString().equals("y")) {
                matchY = true;
            }
            counter++;
        }
        Assert.assertEquals(2, counter);
        Assert.assertTrue(matchX && matchY);
    }

    @Test
    public void convertIteratorNullElement() throws Exception {
        // Null elements must be preserved as msgpack nil values, not dropped.
        ArrayList<FunObject> funList = new ArrayList<FunObject>();
        funList.add(new FunObject("x"));
        funList.add(null);
        funList.add(new FunObject("y"));
        Iterator<FunObject> iterable = funList.iterator();
        byte[] converted = this.converter.convert(iterable);
        final BufferUnpacker unpacker = msgpack.createBufferUnpacker(converted);
        final Object unpacked = unpacker.readValue();
        final Iterator unpackerItty = ((Iterable) unpacked).iterator();
        int counter = 0;
        boolean matchX = false;
        boolean matchY = false;
        boolean matchNil = false;
        while (unpackerItty.hasNext()) {
            final Value v = (Value) unpackerItty.next();
            if (v.isRawValue() && v.asRawValue().getString().equals("x")) {
                matchX = true;
            }
            if (v.isRawValue() && v.asRawValue().getString().equals("y")) {
                matchY = true;
            }
            if (v.isNilValue()) {
                matchNil = true;
            }
            counter++;
        }
        Assert.assertEquals(3, counter);
        Assert.assertTrue(matchX && matchY && matchNil);
    }

    @Test
    public void convertMap() throws Exception {
        // Maps keep their keys; values are serialized recursively (objects via
        // toString(), nested maps as msgpack maps).
        Map<String, Object> map = new HashMap<String, Object>();
        Map<String, String> innerMap = new HashMap<String, String>();
        innerMap.put("a", "b");
        map.put("x", new FunObject("x"));
        map.put("y", "some");
        map.put("z", innerMap);
        byte[] converted = this.converter.convert(map);
        Assert.assertNotNull(converted);
        final BufferUnpacker unpacker = msgpack.createBufferUnpacker(converted);
        final Template<Map<String, Value>> mapTmpl = tMap(TString, TValue);
        Map<String, Value> unpackedMap = unpacker.read(mapTmpl);
        Assert.assertTrue(unpackedMap.containsKey("x"));
        Assert.assertTrue(unpackedMap.containsKey("y"));
        Assert.assertTrue(unpackedMap.containsKey("z"));
        Assert.assertEquals("x", unpackedMap.get("x").asRawValue().getString());
        Assert.assertEquals("some", unpackedMap.get("y").asRawValue().getString());
        MapValue mapValue = unpackedMap.get("z").asMapValue();
        Map innerMapValue = new Converter(mapValue).read(tMap(TString, TString));
        Assert.assertNotNull(innerMapValue);
        Assert.assertEquals("b", innerMapValue.get("a"));
    }

    /**
     * Simple fixture whose serialized form is its toString() value.
     * FIX: made static (a non-static inner class carries a useless hidden
     * reference to the enclosing test instance) and immutable.
     */
    private static final class FunObject {
        private final String val;

        FunObject(String val) {
            this.val = val;
        }

        @Override
        public String toString() {
            return this.val;
        }
    }
}
| |
package org.apache.streams.hdfs;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.streams.core.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Queue;
/**
 * Streams persist writer that appends datums as tab-separated lines to files on
 * HDFS via the webHDFS protocol. Files are rolled after a fixed number of lines
 * and the underlying stream is flushed after a byte threshold.
 *
 * Thread-safety: {@link #write(StreamsDatum)} synchronizes on this instance;
 * connection and file rollover are also synchronized.
 */
public class WebHdfsPersistWriter implements StreamsPersistWriter, Flushable, Closeable, DatumStatusCountable
{
    public final static String STREAMS_ID = "WebHdfsPersistWriter";

    private final static Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistWriter.class);

    /** Column separator for the emitted TSV lines. */
    private final static char DELIMITER = '\t';
    // NOTE(review): this constant is never applied; files actually roll at
    // 'linesPerFile' (1000) lines. Confirm which limit is intended.
    private final static int DEFAULT_LINES_PER_FILE = 50000;

    private FileSystem client;
    private Path path;
    private String filePart = "default";
    private int linesPerFile = 1000;
    private int totalRecordsWritten = 0;
    private final List<Path> writtenFiles = new ArrayList<Path>();
    private int fileLineCounter = 0;
    private OutputStreamWriter currentWriter = null;

    private static final int BYTES_IN_MB = 1024 * 1024;
    /** Flush the writer once roughly this many bytes have been buffered (64 MB). */
    private static final int BYTES_BEFORE_FLUSH = 64 * BYTES_IN_MB;
    private volatile int totalByteCount = 0;
    private volatile int byteCount = 0;

    public boolean terminate = false;
    protected volatile Queue<StreamsDatum> persistQueue;

    private ObjectMapper mapper = new ObjectMapper();
    private HdfsWriterConfiguration hdfsConfiguration;

    public WebHdfsPersistWriter(HdfsWriterConfiguration hdfsConfiguration) {
        this.hdfsConfiguration = hdfsConfiguration;
    }

    /**
     * Builds the webhdfs URI from the configured host and port.
     *
     * @throws URISyntaxException if the configured host/port do not form a valid URI
     */
    public URI getURI() throws URISyntaxException {
        return new URI(WebHdfsFileSystem.SCHEME + "://" + hdfsConfiguration.getHost() + ":" + hdfsConfiguration.getPort());
    }

    /** @return true once a FileSystem client has been obtained */
    public boolean isConnected() {
        return (client != null);
    }

    /** Returns the FileSystem client, connecting lazily on first use. */
    public final synchronized FileSystem getFileSystem()
    {
        // Check to see if we are connected.
        if (!isConnected())
            connectToWebHDFS();
        return this.client;
    }

    private synchronized void connectToWebHDFS()
    {
        try
        {
            LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
            UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
            ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                public Void run() throws Exception {
                    Configuration conf = new Configuration();
                    // NOTE(review): the UGI above is configured for SIMPLE auth while
                    // the Configuration requests "kerberos" -- confirm which is intended.
                    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                    LOGGER.info("WebURI : {}", getURI().toString());
                    client = FileSystem.get(getURI(), conf);
                    LOGGER.info("Connected to WebHDFS");
                    /*
                     * ************************************************************************************************
                     * This code is an example of how you would work with HDFS if you weren't going over
                     * the webHDFS protocol.
                     *
                     * Smashew: 2013-10-01
                     * ************************************************************************************************
                    conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
                    conf.set("namenode.host","0.0.0.0");
                    conf.set("hadoop.job.ugi", userName);
                    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
                    fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
                    FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
                    for(int i=0;i<status.length;i++)
                    {
                        LOGGER.info("Directory: {}", status[i].getPath());
                    }
                    */
                    return null;
                }
            });
        }
        catch (Exception e)
        {
            // Best-effort: log and leave 'client' null; callers will fail on use.
            LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", e);
        }
    }

    /**
     * Serializes the datum to a TSV line and appends it to the current file,
     * rolling files and flushing as thresholds are crossed. Datums with an
     * empty document are skipped.
     */
    @Override
    public void write(StreamsDatum streamsDatum) {
        synchronized (this)
        {
            // Roll to a fresh file when there is no writer yet or the current one is full.
            if (this.currentWriter == null || (this.fileLineCounter > this.linesPerFile)) {
                try {
                    resetFile();
                } catch (Exception e) {
                    LOGGER.error("Unable to roll over to a new output file", e);
                }
            }
            String line = convertResultToString(streamsDatum);
            // FIX: convertResultToString() returns null for datums whose document
            // serializes to null/empty; previously this caused an NPE below.
            if (line == null) {
                LOGGER.warn("Skipping datum with empty document");
                return;
            }
            try {
                this.currentWriter.write(line);
            } catch (IOException e) {
                LOGGER.error("Error writing line to HDFS", e);
            }
            int bytesInLine = line.getBytes().length;
            totalRecordsWritten++;
            totalByteCount += bytesInLine;
            byteCount += bytesInLine;
            if (byteCount > BYTES_BEFORE_FLUSH) {
                try {
                    flush();
                } catch (IOException e) {
                    LOGGER.error("Error flushing to HDFS", e);
                }
            }
            this.fileLineCounter++;
        }
    }

    /**
     * Flushes any buffered output to HDFS.
     * FIX: previously this was a no-op unless the 64 MB byte threshold had been
     * crossed, which violated the {@link Flushable} contract and made the
     * explicit flush from {@link #cleanUp()} ineffective. An explicit flush now
     * always flushes the underlying writer.
     */
    @Override
    public void flush() throws IOException
    {
        if (this.currentWriter != null)
        {
            this.currentWriter.flush();
            byteCount = 0;
        }
    }

    /**
     * Closes the current file (if any) and opens a new timestamped .tsv file
     * under the configured path.
     *
     * @throws Exception if a file of the generated name already exists or creation fails
     */
    private synchronized void resetFile() throws Exception
    {
        // This will keep it thread safe, so we don't create too many files.
        if (this.fileLineCounter == 0 && this.currentWriter != null)
            return;

        // If there is a current writer, we must close it first.
        if (this.currentWriter != null)
        {
            flush();
            close();
        }

        this.fileLineCounter = 0;

        // Create the path for where the file is going to live.
        Path filePath = this.path.suffix("/" + hdfsConfiguration.getWriterFilePrefix() + "-" + new Date().getTime() + ".tsv");

        try
        {
            // If a file of the same name exists we cannot proceed.
            if (client.exists(filePath))
                throw new RuntimeException("Unable to create file: " + filePath);

            this.currentWriter = new OutputStreamWriter(client.create(filePath));

            // Add another file to the list of written files.
            writtenFiles.add(filePath);

            LOGGER.info("File Created: {}", filePath);
        }
        catch (Exception e)
        {
            LOGGER.error("COULD NOT CreateFile: {}", filePath, e);
            throw e;
        }
    }

    /** Flushes and closes the current file; subsequent writes will open a new one. */
    @Override
    public synchronized void close() throws IOException
    {
        if (this.currentWriter != null)
        {
            this.currentWriter.flush();
            this.currentWriter.close();
            this.currentWriter = null;
            LOGGER.info("File Closed");
        }
    }

    /**
     * Renders a datum as one TSV line: sequenceId, timestamp, metadata JSON,
     * document JSON, newline.
     *
     * @return the line, or null when the document cannot be serialized or is empty
     */
    private String convertResultToString(StreamsDatum entry)
    {
        String metadata = null;
        try {
            metadata = mapper.writeValueAsString(entry.getMetadata());
        } catch (JsonProcessingException e) {
            LOGGER.warn("Unable to serialize datum metadata", e);
        }

        String documentJson = null;
        try {
            documentJson = mapper.writeValueAsString(entry.getDocument());
        } catch (JsonProcessingException e) {
            LOGGER.warn("Unable to serialize datum document", e);
        }

        if (Strings.isNullOrEmpty(documentJson))
            return null;
        else
            return new StringBuilder()
                    .append(entry.getSequenceid())
                    .append(DELIMITER)
                    .append(entry.getTimestamp())
                    .append(DELIMITER)
                    .append(metadata)
                    .append(DELIMITER)
                    .append(documentJson)
                    .append("\n")
                    .toString();
    }

    /** Connects to webHDFS and resolves the output directory path. */
    @Override
    public void prepare(Object configurationObject) {
        connectToWebHDFS();
        path = new Path(hdfsConfiguration.getPath() + "/" + hdfsConfiguration.getWriterPath());
    }

    /** Best-effort flush and close at shutdown; errors are logged, not thrown. */
    @Override
    public void cleanUp() {
        try {
            flush();
        } catch (IOException e) {
            LOGGER.error("Error flushing during cleanup", e);
        }
        try {
            close();
        } catch (IOException e) {
            LOGGER.error("Error closing during cleanup", e);
        }
    }

    /** Reports every written record as an attempted and successful datum. */
    @Override
    public DatumStatusCounter getDatumStatusCounter() {
        DatumStatusCounter counters = new DatumStatusCounter();
        counters.incrementAttempt(this.totalRecordsWritten);
        counters.incrementStatus(DatumStatus.SUCCESS, this.totalRecordsWritten);
        return counters;
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.internal.focus;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.wm.impl.FocusRequestInfo;
import com.intellij.ui.JBColor;
import com.intellij.ui.JBSplitter;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumnModel;
import java.awt.*;
import java.awt.datatransfer.StringSelection;
import java.awt.event.ActionEvent;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* @author Konstantin Bulenkov
*/
/**
 * Non-modal dialog listing recorded focus requests: a table of time / forced /
 * component rows on top, and the stack trace of the selected request in a
 * console below. While shown, a background thread paints a dotted red border
 * around the component of the selected request.
 *
 * @author Konstantin Bulenkov
 */
public class FocusTracesDialog extends DialogWrapper {
    private final JBTable myRequestsTable;
    private final List<FocusRequestInfo> myRequests;
    private static final String[] COLUMNS = {"Time", "Forced", "Component"};
    private final ConsoleView consoleView;

    public FocusTracesDialog(Project project, ArrayList<FocusRequestInfo> requests) {
        super(project);
        myRequests = requests;
        setTitle("Focus Traces");
        final String[][] data = new String[requests.size()][];
        SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss.SSS");
        for (int i = 0; i < data.length; i++) {
            final FocusRequestInfo r = requests.get(i);
            // Mark components whose weak reference has been collected.
            data[i] = new String[]{
                dateFormat.format(new Date(r.timestamp)),
                String.valueOf(r.forced),
                r.componentString + (!"null".equals(r.componentString) &&
                                     r.component.get() == null ? " <collected>" : "")};
        }
        setModal(false);
        myRequestsTable = new JBTable(new DefaultTableModel(data, COLUMNS) {
            @Override
            public boolean isCellEditable(int row, int column) {
                return false;
            }
        });
        final ListSelectionListener selectionListener = new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent e) {
                // May fire from changeSelection() below before consoleView is assigned.
                if (consoleView == null) return;
                final int index = myRequestsTable.getSelectedRow();
                consoleView.clear();
                if (-1 < index && index < myRequests.size()) {
                    consoleView.print(ExceptionUtil.getThrowableText(myRequests.get(index).trace), ConsoleViewContentType.NORMAL_OUTPUT);
                }
            }
        };
        myRequestsTable.getSelectionModel().addListSelectionListener(selectionListener);
        final TableColumnModel columnModel = myRequestsTable.getColumnModel();
        columnModel.getColumn(0).setMinWidth(120);
        columnModel.getColumn(0).setMaxWidth(120);
        columnModel.getColumn(0).setPreferredWidth(120);
        columnModel.getColumn(1).setMinWidth(60);
        columnModel.getColumn(1).setMaxWidth(60);
        columnModel.getColumn(1).setPreferredWidth(60);
        myRequestsTable.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        myRequestsTable.changeSelection(0, 0, false, true);
        consoleView = TextConsoleBuilderFactory.getInstance().createBuilder(ProjectManager.getInstance().getDefaultProject()).getConsole();
        init();
    }

    @Override
    protected String getDimensionServiceKey() {
        return "ide.internal.focus.trace.dialog";
    }

    @Override
    protected JComponent createCenterPanel() {
        JPanel panel = new JPanel(new BorderLayout());
        JBSplitter splitter = new JBSplitter(true, .5F, .2F, .8F);
        splitter.setFirstComponent(new JBScrollPane(myRequestsTable, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER));
        JComponent consoleComponent = new JPanel(new BorderLayout());
        consoleComponent.add(consoleView.getComponent(), BorderLayout.CENTER);
        // Show the trace for the pre-selected row (selection happened before
        // consoleView existed, so the listener could not print it).
        int row = myRequestsTable.getSelectedRow();
        if (row >= 0) {
            consoleView.print(ExceptionUtil.getThrowableText(myRequests.get(row).trace), ConsoleViewContentType.NORMAL_OUTPUT);
        }
        splitter.setSecondComponent(
            new JBScrollPane(consoleComponent, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER));
        panel.add(splitter, BorderLayout.CENTER);
        return panel;
    }

    @Override
    public JComponent getPreferredFocusedComponent() {
        return myRequestsTable;
    }

    @NotNull
    @Override
    protected Action[] createActions() {
        return new Action[] {getOKAction(), getCopyStackTraceAction()};
    }

    /** Action that copies the selected request's stack trace to the clipboard. */
    private Action getCopyStackTraceAction() {
        return new AbstractAction("&Copy stacktrace") {
            @Override
            public void actionPerformed(ActionEvent e) {
                String text = ExceptionUtil.getThrowableText(myRequests.get(myRequestsTable.getSelectedRow()).trace);
                CopyPasteManager.getInstance().setContents(new StringSelection(text));
            }
        };
    }

    @Override
    public void show() {
        // Run the border-drawing thread for the lifetime of the dialog.
        final BorderDrawer drawer = new BorderDrawer();
        drawer.start();
        super.show();
        drawer.setDone();
    }

    /**
     * Background thread that, every 100 ms, draws a dotted red border around
     * the component of the currently selected request and repaints the
     * previously highlighted component.
     */
    class BorderDrawer extends Thread {
        Component prev = null;
        private volatile boolean running = true;

        BorderDrawer() {
            super("Focus Border Drawer");
        }

        @Override
        public void run() {
            try {
                while (running) {
                    //noinspection BusyWait
                    sleep(100);
                    paintBorder();
                }
                // Clear the last highlight once we stop.
                if (prev != null) {
                    prev.repaint();
                }
            }
            catch (InterruptedException ignored) {
            }
        }

        private void paintBorder() {
            final int row = FocusTracesDialog.this.myRequestsTable.getSelectedRow();
            if (row != -1) {
                final FocusRequestInfo info = FocusTracesDialog.this.myRequests.get(row);
                if (prev != null && prev != info.component.get()) {
                    prev.repaint();
                }
                prev = info.component.get();
                if (prev != null && prev.isDisplayable()) {
                    // FIX: Component.getGraphics() may return null; also dispose
                    // the Graphics we obtained to avoid leaking a native context.
                    final Graphics g = prev.getGraphics();
                    if (g != null) {
                        try {
                            g.setColor(JBColor.RED);
                            final Dimension sz = prev.getSize();
                            UIUtil.drawDottedRectangle(g, 1, 1, sz.width - 2, sz.height - 2);
                        }
                        finally {
                            g.dispose();
                        }
                    }
                }
            }
        }

        public void setDone() {
            running = false;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query.continuous;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryEventFilter;
import javax.cache.event.CacheEntryUpdatedListener;
import javax.cache.event.EventType;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheEntryEventSerializableFilter;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.events.CacheQueryExecutedEvent;
import org.apache.ignite.events.CacheQueryReadEvent;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteDeploymentCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.managers.communication.GridIoPolicy;
import org.apache.ignite.internal.managers.deployment.GridDeployment;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfo;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfoBean;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheAffinityManager;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheDeploymentManager;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicAbstractUpdateFuture;
import org.apache.ignite.internal.processors.cache.query.CacheQueryType;
import org.apache.ignite.internal.processors.cache.query.continuous.CacheContinuousQueryManager.JCacheQueryLocalListener;
import org.apache.ignite.internal.processors.cache.query.continuous.CacheContinuousQueryManager.JCacheQueryRemoteFilter;
import org.apache.ignite.internal.processors.continuous.GridContinuousBatch;
import org.apache.ignite.internal.processors.continuous.GridContinuousHandler;
import org.apache.ignite.internal.processors.continuous.GridContinuousQueryBatch;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformContinuousQueryFilter;
import org.apache.ignite.internal.util.GridConcurrentSkipListSet;
import org.apache.ignite.internal.util.GridLongList;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteAsyncCallback;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.thread.IgniteStripedThreadPoolExecutor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentLinkedDeque8;
import static org.apache.ignite.events.EventType.EVT_CACHE_QUERY_EXECUTED;
import static org.apache.ignite.events.EventType.EVT_CACHE_QUERY_OBJECT_READ;
/**
* Continuous query handler.
*/
public class CacheContinuousQueryHandler<K, V> implements GridContinuousHandler {
/** */
private static final long serialVersionUID = 0L;
/** */
private static final int BACKUP_ACK_THRESHOLD = 100;
/** Cache name. */
private String cacheName;
/** Topic for ordered messages. */
private Object topic;
/** Local listener. */
private transient CacheEntryUpdatedListener<K, V> locLsnr;
/** Remote filter. */
private CacheEntryEventSerializableFilter<K, V> rmtFilter;
/** Deployable object for filter. */
private DeployableObject rmtFilterDep;
/** Internal flag. */
private boolean internal;
/** Notify existing flag. */
private boolean notifyExisting;
/** Old value required flag. */
private boolean oldValRequired;
/** Synchronous flag. */
private boolean sync;
/** Ignore expired events flag. */
private boolean ignoreExpired;
/** Task name hash code. */
private int taskHash;
/** Whether to skip primary check for REPLICATED cache. */
private transient boolean skipPrimaryCheck;
/** Backup queue. */
private transient volatile Collection<CacheContinuousQueryEntry> backupQueue;
/** */
private boolean locCache;
/** */
private boolean keepBinary;
/** */
private transient ConcurrentMap<Integer, PartitionRecovery> rcvs;
/** */
private transient ConcurrentMap<Integer, EntryBuffer> entryBufs;
/** */
private transient AcknowledgeBuffer ackBuf;
/** */
private transient int cacheId;
/** */
private transient volatile Map<Integer, Long> initUpdCntrs;
/** */
private transient volatile Map<UUID, Map<Integer, Long>> initUpdCntrsPerNode;
/** */
private transient volatile AffinityTopologyVersion initTopVer;
/** */
private transient boolean ignoreClsNotFound;
/** */
private transient boolean asyncCallback;
/** */
private transient UUID nodeId;
/** */
private transient UUID routineId;
/** */
private transient GridKernalContext ctx;
/** */
private transient IgniteLogger log;
/**
 * No-arg constructor required by {@link Externalizable}; all state is
 * populated later during deserialization.
 */
public CacheContinuousQueryHandler() {
// No-op.
}
/**
 * Constructor.
 *
 * @param cacheName Cache name (also used to derive {@code cacheId}).
 * @param topic Topic for ordered messages. Must not be {@code null}.
 * @param locLsnr Local listener. Must not be {@code null}.
 * @param rmtFilter Remote filter, or {@code null} if none.
 * @param oldValRequired Old value required flag.
 * @param sync Synchronous flag.
 * @param ignoreExpired Ignore expired events flag.
 * @param ignoreClsNotFound Ignore class-not-found flag.
 */
public CacheContinuousQueryHandler(
String cacheName,
Object topic,
CacheEntryUpdatedListener<K, V> locLsnr,
CacheEntryEventSerializableFilter<K, V> rmtFilter,
boolean oldValRequired,
boolean sync,
boolean ignoreExpired,
boolean ignoreClsNotFound) {
assert topic != null;
assert locLsnr != null;
this.cacheName = cacheName;
this.topic = topic;
this.locLsnr = locLsnr;
this.rmtFilter = rmtFilter;
this.oldValRequired = oldValRequired;
this.sync = sync;
this.ignoreExpired = ignoreExpired;
this.ignoreClsNotFound = ignoreClsNotFound;
cacheId = CU.cacheId(cacheName);
}
/**
 * Sets the internal-query flag.
 *
 * @param internal Internal query.
 */
public void internal(boolean internal) {
this.internal = internal;
}
/**
 * Sets the notify-existing flag.
 *
 * @param notifyExisting Notify existing.
 */
public void notifyExisting(boolean notifyExisting) {
this.notifyExisting = notifyExisting;
}
/**
 * Sets the local-cache flag.
 *
 * @param locCache Local cache.
 */
public void localCache(boolean locCache) {
this.locCache = locCache;
}
/**
 * Sets the task name hash code.
 *
 * @param taskHash Task hash.
 */
public void taskNameHash(int taskHash) {
this.taskHash = taskHash;
}
/**
 * Sets whether to skip the primary-node check. Only valid when listening
 * locally for REPLICATED cache events (asserted in register()).
 *
 * @param skipPrimaryCheck Whether to skip primary check for REPLICATED cache.
 */
public void skipPrimaryCheck(boolean skipPrimaryCheck) {
this.skipPrimaryCheck = skipPrimaryCheck;
}
/** {@inheritDoc} */
@Override public boolean isEvents() {
return false; // This handler serves cache queries, not events (see isQuery()).
}
/** {@inheritDoc} */
@Override public boolean isMessaging() {
return false; // This handler serves cache queries, not messaging (see isQuery()).
}
/** {@inheritDoc} */
@Override public boolean isQuery() {
return true; // Continuous query handler.
}
/** {@inheritDoc} */
@Override public boolean keepBinary() {
return keepBinary;
}
/**
 * Sets the keep-binary flag.
 *
 * @param keepBinary Keep binary flag.
 */
public void keepBinary(boolean keepBinary) {
this.keepBinary = keepBinary;
}
/** {@inheritDoc} */
@Override public String cacheName() {
return cacheName;
}
/**
 * {@inheritDoc}
 *
 * Records the initial partition update counters (per node and aggregated)
 * along with the topology version they belong to.
 */
@Override public void updateCounters(AffinityTopologyVersion topVer, Map<UUID, Map<Integer, Long>> cntrsPerNode,
Map<Integer, Long> cntrs) {
this.initUpdCntrsPerNode = cntrsPerNode;
this.initUpdCntrs = cntrs;
this.initTopVer = topVer;
}
/** {@inheritDoc} */
@Override public RegisterStatus register(final UUID nodeId, final UUID routineId, final GridKernalContext ctx)
throws IgniteCheckedException {
assert nodeId != null;
assert routineId != null;
assert ctx != null;
if (locLsnr != null) {
if (locLsnr instanceof JCacheQueryLocalListener) {
ctx.resource().injectGeneric(((JCacheQueryLocalListener)locLsnr).impl);
asyncCallback = ((JCacheQueryLocalListener)locLsnr).async();
}
else {
ctx.resource().injectGeneric(locLsnr);
asyncCallback = U.hasAnnotation(locLsnr, IgniteAsyncCallback.class);
}
}
final CacheEntryEventFilter filter = getEventFilter();
if (filter != null) {
if (filter instanceof JCacheQueryRemoteFilter) {
if (((JCacheQueryRemoteFilter)filter).impl != null)
ctx.resource().injectGeneric(((JCacheQueryRemoteFilter)filter).impl);
if (!asyncCallback)
asyncCallback = ((JCacheQueryRemoteFilter)filter).async();
}
else {
ctx.resource().injectGeneric(filter);
if (!asyncCallback)
asyncCallback = U.hasAnnotation(filter, IgniteAsyncCallback.class);
}
}
entryBufs = new ConcurrentHashMap<>();
backupQueue = new ConcurrentLinkedDeque8<>();
ackBuf = new AcknowledgeBuffer();
rcvs = new ConcurrentHashMap<>();
this.nodeId = nodeId;
this.routineId = routineId;
this.ctx = ctx;
final boolean loc = nodeId.equals(ctx.localNodeId());
assert !skipPrimaryCheck || loc;
log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);
CacheContinuousQueryListener<K, V> lsnr = new CacheContinuousQueryListener<K, V>() {
@Override public void onExecution() {
if (ctx.event().isRecordable(EVT_CACHE_QUERY_EXECUTED)) {
ctx.event().record(new CacheQueryExecutedEvent<>(
ctx.discovery().localNode(),
"Continuous query executed.",
EVT_CACHE_QUERY_EXECUTED,
CacheQueryType.CONTINUOUS.name(),
cacheName,
null,
null,
null,
filter instanceof CacheEntryEventSerializableFilter ?
(CacheEntryEventSerializableFilter)filter : null,
null,
nodeId,
taskName()
));
}
}
@Override public boolean keepBinary() {
return keepBinary;
}
@Override public void onEntryUpdated(final CacheContinuousQueryEvent<K, V> evt,
boolean primary,
final boolean recordIgniteEvt,
GridDhtAtomicAbstractUpdateFuture fut) {
if (ignoreExpired && evt.getEventType() == EventType.EXPIRED)
return;
if (log.isDebugEnabled())
log.debug("Entry updated on affinity node [evt=" + evt + ", primary=" + primary + ']');
final GridCacheContext<K, V> cctx = cacheContext(ctx);
// Check that cache stopped.
if (cctx == null)
return;
// skipPrimaryCheck is set only when listen locally for replicated cache events.
assert !skipPrimaryCheck || (cctx.isReplicated() && ctx.localNodeId().equals(nodeId));
if (asyncCallback) {
ContinuousQueryAsyncClosure clsr = new ContinuousQueryAsyncClosure(
primary,
evt,
recordIgniteEvt,
fut);
ctx.asyncCallbackPool().execute(clsr, evt.partitionId());
}
else {
final boolean notify = filter(evt, primary);
if (log.isDebugEnabled())
log.debug("Filter invoked for event [evt=" + evt + ", primary=" + primary
+ ", notify=" + notify + ']');
if (primary || skipPrimaryCheck) {
if (fut == null)
onEntryUpdate(evt, notify, loc, recordIgniteEvt);
else {
fut.addContinuousQueryClosure(new CI1<Boolean>() {
@Override public void apply(Boolean suc) {
if (!suc)
evt.entry().markFiltered();
onEntryUpdate(evt, notify, loc, recordIgniteEvt);
}
}, sync);
}
}
}
}
@Override public void onUnregister() {
if (filter instanceof PlatformContinuousQueryFilter)
((PlatformContinuousQueryFilter)filter).onQueryUnregister();
}
@Override public void cleanupBackupQueue(Map<Integer, Long> updateCntrs) {
Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
if (backupQueue0 != null) {
Iterator<CacheContinuousQueryEntry> it = backupQueue0.iterator();
while (it.hasNext()) {
CacheContinuousQueryEntry backupEntry = it.next();
Long updateCntr = updateCntrs.get(backupEntry.partition());
if (updateCntr != null && backupEntry.updateCounter() <= updateCntr)
it.remove();
}
}
}
@Override public void flushBackupQueue(GridKernalContext ctx, AffinityTopologyVersion topVer) {
Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
if (backupQueue0 == null)
return;
try {
ClusterNode nodeId0 = ctx.discovery().node(nodeId);
if (nodeId0 != null) {
GridCacheContext<K, V> cctx = cacheContext(ctx);
for (CacheContinuousQueryEntry e : backupQueue0) {
if (!e.isFiltered())
prepareEntry(cctx, nodeId, e);
e.topologyVersion(topVer);
}
ctx.continuous().addBackupNotification(nodeId, routineId, backupQueue0, topic);
}
else
// Node which start CQ leave topology. Not needed to put data to backup queue.
backupQueue = null;
backupQueue0.clear();
}
catch (IgniteCheckedException e) {
U.error(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY),
"Failed to send backup event notification to node: " + nodeId, e);
}
}
@Override public void acknowledgeBackupOnTimeout(GridKernalContext ctx) {
sendBackupAcknowledge(ackBuf.acknowledgeOnTimeout(), routineId, ctx);
}
@Override public void skipUpdateEvent(CacheContinuousQueryEvent<K, V> evt,
AffinityTopologyVersion topVer, boolean primary) {
assert evt != null;
CacheContinuousQueryEntry e = evt.entry();
e.markFiltered();
onEntryUpdated(evt, primary, false, null);
}
@Override public void onPartitionEvicted(int part) {
Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
if (backupQueue0 != null) {
for (Iterator<CacheContinuousQueryEntry> it = backupQueue0.iterator(); it.hasNext(); ) {
if (it.next().partition() == part)
it.remove();
}
}
}
@Override public boolean oldValueRequired() {
return oldValRequired;
}
@Override public boolean notifyExisting() {
return notifyExisting;
}
private String taskName() {
return ctx.security().enabled() ? ctx.task().resolveTaskName(taskHash) : null;
}
};
CacheContinuousQueryManager mgr = manager(ctx);
if (mgr == null)
return RegisterStatus.DELAYED;
return mgr.registerListener(routineId, lsnr, internal);
}
/**
 * @return Cache entry event filter applied on data nodes; {@code null} if no filter was configured.
 */
public CacheEntryEventFilter getEventFilter() {
    return rmtFilter;
}
/**
 * Marshals the entry and, when peer class loading is on and the subscriber
 * node is still alive, attaches deployment information to it.
 *
 * @param cctx Context.
 * @param nodeId ID of the node that started routine.
 * @param entry Entry.
 * @throws IgniteCheckedException In case of error.
 */
private void prepareEntry(GridCacheContext cctx, UUID nodeId, CacheContinuousQueryEntry entry)
    throws IgniteCheckedException {
    boolean p2p = cctx.kernalContext().config().isPeerClassLoadingEnabled()
        && cctx.discovery().node(nodeId) != null;

    entry.prepareMarshal(cctx);

    if (p2p)
        cctx.deploy().prepare(entry);
}
/**
 * Waits until affinity for the initial topology version is ready, then
 * pre-creates per-partition recovery state for every partition.
 *
 * @param ctx Kernal context.
 * @throws IgniteCheckedException If the affinity future completes with an error.
 */
public void waitTopologyFuture(GridKernalContext ctx) throws IgniteCheckedException {
    GridCacheContext<K, V> cctx = cacheContext(ctx);

    // Local caches have no distributed affinity to wait for.
    if (!cctx.isLocal()) {
        // Reuse the context computed above instead of re-resolving it
        // (original code called cacheContext(ctx) three times).
        cctx.affinity().affinityReadyFuture(initTopVer).get();

        for (int partId = 0; partId < cctx.affinity().partitions(); partId++)
            getOrCreatePartitionRecovery(ctx, partId);
    }
}
/** {@inheritDoc} */
@Override public void unregister(UUID routineId, GridKernalContext ctx) {
    assert routineId != null;
    assert ctx != null;

    // The cache may already be stopped on this node; nothing to unregister then.
    GridCacheAdapter<K, V> cache = ctx.cache().internalCache(cacheName);

    if (cache == null)
        return;

    cache.context().continuousQueries().unregisterListener(internal, routineId);
}
/**
 * @param ctx Kernal context.
 * @return Continuous query manager, or {@code null} when the cache context is gone.
 */
private CacheContinuousQueryManager manager(GridKernalContext ctx) {
    GridCacheContext<K, V> cacheCtx = cacheContext(ctx);

    if (cacheCtx == null)
        return null;

    return cacheCtx.continuousQueries();
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public void notifyCallback(final UUID nodeId,
    final UUID routineId,
    Collection<?> objs,
    final GridKernalContext ctx) {
    assert nodeId != null;
    assert routineId != null;
    assert objs != null;
    assert ctx != null;

    if (objs.isEmpty())
        return;

    if (asyncCallback) {
        final List<CacheContinuousQueryEntry> entries = objs instanceof List ? (List)objs : new ArrayList(objs);

        IgniteStripedThreadPoolExecutor asyncPool = ctx.asyncCallbackPool();

        // Split the batch into contiguous runs whose entries map to the same pool
        // thread (derived from partition), so per-partition ordering is preserved
        // while different partitions are processed concurrently.
        int threadId = asyncPool.threadId(entries.get(0).partition());

        int startIdx = 0;

        if (entries.size() != 1) {
            for (int i = 1; i < entries.size(); i++) {
                int curThreadId = asyncPool.threadId(entries.get(i).partition());

                // If all entries from one partition avoid creation new collections.
                if (curThreadId == threadId)
                    continue;

                final int i0 = i;
                final int startIdx0 = startIdx;

                asyncPool.execute(new Runnable() {
                    @Override public void run() {
                        notifyCallback0(nodeId, ctx, entries.subList(startIdx0, i0));
                    }
                }, threadId);

                startIdx = i0;
                threadId = curThreadId;
            }
        }

        final int startIdx0 = startIdx;

        // Submit the trailing run (or the whole batch if the thread never changed).
        asyncPool.execute(new Runnable() {
            @Override public void run() {
                notifyCallback0(nodeId, ctx,
                    startIdx0 == 0 ? entries : entries.subList(startIdx0, entries.size()));
            }
        }, threadId);
    }
    else
        notifyCallback0(nodeId, ctx, (Collection)objs);
}
/**
 * Unmarshals a batch of entries and delivers the resulting events to the local listener.
 *
 * @param nodeId Node id.
 * @param ctx Kernal context.
 * @param entries Entries.
 */
private void notifyCallback0(UUID nodeId,
    final GridKernalContext ctx,
    Collection<CacheContinuousQueryEntry> entries) {
    final GridCacheContext cctx = cacheContext(ctx);

    // Cache may already be stopped on this node.
    if (cctx == null) {
        IgniteLogger log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);

        if (log.isDebugEnabled())
            log.debug("Failed to notify callback, cache is not found: " + cacheId);

        return;
    }

    final Collection<CacheEntryEvent<? extends K, ? extends V>> entries0 = new ArrayList<>(entries.size());

    for (CacheContinuousQueryEntry e : entries) {
        GridCacheDeploymentManager depMgr = cctx.deploy();

        ClassLoader ldr = depMgr.globalLoader();

        if (ctx.config().isPeerClassLoadingEnabled()) {
            GridDeploymentInfo depInfo = e.deployInfo();

            if (depInfo != null) {
                // Register the sender's deployment context so peer-loaded classes resolve.
                depMgr.p2pContext(nodeId, depInfo.classLoaderId(), depInfo.userVersion(), depInfo.deployMode(),
                    depInfo.participants(), depInfo.localDeploymentOwner());
            }
        }

        try {
            e.unmarshal(cctx, ldr);

            // May buffer out-of-order entries or release several at once.
            Collection<CacheEntryEvent<? extends K, ? extends V>> evts = handleEvent(ctx, e);

            if (evts != null && !evts.isEmpty())
                entries0.addAll(evts);
        }
        catch (IgniteCheckedException ex) {
            // Unmarshal failures are tolerated silently only for internal queries.
            if (ignoreClsNotFound)
                assert internal;
            else
                U.error(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY), "Failed to unmarshal entry.", ex);
        }
    }

    if (!entries0.isEmpty())
        locLsnr.onUpdated(entries0);
}
/**
 * Converts a wire entry into zero or more listener events, restoring
 * per-partition order for regular (counter-bearing) updates.
 *
 * @param ctx Context.
 * @param e entry.
 * @return Entry collection.
 */
private Collection<CacheEntryEvent<? extends K, ? extends V>> handleEvent(GridKernalContext ctx,
    CacheContinuousQueryEntry e) {
    assert e != null;

    GridCacheContext<K, V> cctx = cacheContext(ctx);

    final IgniteCache cache = cctx.kernalContext().cache().jcache(cctx.name());

    // Internal queries bypass ordering recovery entirely.
    if (internal) {
        if (e.isFiltered())
            return Collections.emptyList();
        else
            return F.<CacheEntryEvent<? extends K, ? extends V>>
                asList(new CacheContinuousQueryEvent<K, V>(cache, cctx, e));
    }

    // Initial query entry or evicted entry. These events should be fired immediately.
    if (e.updateCounter() == -1L) {
        return !e.isFiltered() ? F.<CacheEntryEvent<? extends K, ? extends V>>asList(
            new CacheContinuousQueryEvent<K, V>(cache, cctx, e)) :
            Collections.<CacheEntryEvent<? extends K, ? extends V>>emptyList();
    }

    // Regular update: run through per-partition counter-based ordering recovery.
    PartitionRecovery rec = getOrCreatePartitionRecovery(ctx, e.partition());

    return rec.collectEntries(e, cctx, cache);
}
/**
 * Evaluates the remote filter for an event and, on backup nodes, queues the
 * entry for possible re-delivery after primary failure.
 *
 * @param primary Primary.
 * @param evt Query event.
 * @return {@code True} if event passed filter otherwise {@code false}.
 */
public boolean filter(CacheContinuousQueryEvent evt, boolean primary) {
    CacheContinuousQueryEntry entry = evt.entry();

    boolean notify = !entry.isFiltered();

    try {
        if (notify && getEventFilter() != null)
            notify = getEventFilter().evaluate(evt);
    }
    catch (Exception e) {
        // A failing user filter must not break the query: log and keep the event.
        U.error(log, "CacheEntryEventFilter failed: " + e);
    }

    if (!notify)
        entry.markFiltered();

    if (!primary && !internal && entry.updateCounter() != -1L /* Skip init query and expire entries */) {
        entry.markBackup();

        Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;

        if (backupQueue0 != null)
            backupQueue0.add(entry.forBackupQueue());
    }

    return notify;
}
/**
 * Delivers an event either to the local listener (when the subscriber runs on
 * this node) or to the subscriber node over the continuous routine channel,
 * and optionally records an {@code EVT_CACHE_QUERY_OBJECT_READ} event.
 *
 * @param evt Continuous query event.
 * @param notify Notify flag.
 * @param loc Listener deployed on this node.
 * @param recordIgniteEvt Record ignite event.
 */
private void onEntryUpdate(CacheContinuousQueryEvent evt, boolean notify, boolean loc, boolean recordIgniteEvt) {
    try {
        GridCacheContext<K, V> cctx = cacheContext(ctx);

        // Cache was stopped: nothing to deliver.
        if (cctx == null)
            return;

        final CacheContinuousQueryEntry entry = evt.entry();

        if (loc) {
            if (!locCache) {
                // Distributed cache: run through ordering recovery first.
                Collection<CacheEntryEvent<? extends K, ? extends V>> evts = handleEvent(ctx, entry);

                if (!evts.isEmpty())
                    locLsnr.onUpdated(evts);

                if (!internal && !skipPrimaryCheck)
                    sendBackupAcknowledge(ackBuf.onAcknowledged(entry), routineId, ctx);
            }
            else {
                // Local cache: no ordering recovery, deliver directly unless filtered.
                if (!entry.isFiltered())
                    locLsnr.onUpdated(F.<CacheEntryEvent<? extends K, ? extends V>>asList(evt));
            }
        }
        else {
            if (!entry.isFiltered())
                prepareEntry(cctx, nodeId, entry);

            CacheContinuousQueryEntry e = handleEntry(entry);

            // NOTE(review): handleEntry() returns either null or the very same 'entry'
            // instance (possibly with filtered counters attached), so passing 'entry'
            // below appears equivalent to passing 'e' — confirm before refactoring.
            if (e != null) {
                if (log.isDebugEnabled())
                    log.debug("Send the following event to listener: " + e);

                ctx.continuous().addNotification(nodeId, routineId, entry, topic, sync, true);
            }
        }
    }
    catch (ClusterTopologyCheckedException ex) {
        // Subscriber node left: drop the notification quietly.
        if (log.isDebugEnabled())
            log.debug("Failed to send event notification to node, node left cluster " +
                "[node=" + nodeId + ", err=" + ex + ']');
    }
    catch (IgniteCheckedException ex) {
        U.error(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY), "Failed to send event notification to node: " + nodeId, ex);
    }

    if (recordIgniteEvt && notify) {
        ctx.event().record(new CacheQueryReadEvent<>(
            ctx.discovery().localNode(),
            "Continuous query executed.",
            EVT_CACHE_QUERY_OBJECT_READ,
            CacheQueryType.CONTINUOUS.name(),
            cacheName,
            null,
            null,
            null,
            getEventFilter() instanceof CacheEntryEventSerializableFilter ?
                (CacheEntryEventSerializableFilter)getEventFilter() : null,
            null,
            nodeId,
            taskName(),
            evt.getKey(),
            evt.getValue(),
            evt.getOldValue(),
            null
        ));
    }
}
/**
 * @return Task name resolved from the task hash, or {@code null} when security is disabled.
 */
private String taskName() {
    if (!ctx.security().enabled())
        return null;

    return ctx.task().resolveTaskName(taskHash);
}
/** {@inheritDoc} */
@Override public void onClientDisconnected() {
    if (internal)
        return;

    // Forget cached topology versions so ordering state is re-initialized
    // from the first event received after reconnect.
    for (PartitionRecovery rec : rcvs.values())
        rec.resetTopologyCache();
}
/**
 * Returns the recovery state for a partition, lazily creating it and seeding
 * it with the update counter known at query start time.
 *
 * @param ctx Context.
 * @param partId Partition id.
 * @return Partition recovery.
 */
@NotNull private PartitionRecovery getOrCreatePartitionRecovery(GridKernalContext ctx, int partId) {
    PartitionRecovery rec = rcvs.get(partId);

    if (rec == null) {
        Long partCntr = null;

        AffinityTopologyVersion initTopVer0 = initTopVer;

        if (initTopVer0 != null) {
            GridCacheContext<K, V> cctx = cacheContext(ctx);

            GridCacheAffinityManager aff = cctx.affinity();

            // Take the initial counter from the first affinity node that
            // reported counters for this partition.
            if (initUpdCntrsPerNode != null) {
                for (ClusterNode node : aff.nodesByPartition(partId, initTopVer)) {
                    Map<Integer, Long> map = initUpdCntrsPerNode.get(node.id());

                    if (map != null) {
                        partCntr = map.get(partId);

                        break;
                    }
                }
            }
            else if (initUpdCntrs != null)
                partCntr = initUpdCntrs.get(partId);
        }

        rec = new PartitionRecovery(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY), initTopVer0, partCntr);

        // Concurrent creation resolved via putIfAbsent: first writer wins.
        PartitionRecovery oldRec = rcvs.putIfAbsent(partId, rec);

        if (oldRec != null)
            rec = oldRec;
    }

    return rec;
}
/**
 * Decides whether an outgoing entry should be sent now, delegating regular
 * updates to the per-partition {@link EntryBuffer} which tracks counters of
 * filtered events ("holes").
 *
 * @param e Entry.
 * @return Entry to send, or {@code null} if it should be withheld.
 */
private CacheContinuousQueryEntry handleEntry(CacheContinuousQueryEntry e) {
    assert e != null;
    assert entryBufs != null;

    // Internal queries: send unless filtered, no hole bookkeeping.
    if (internal) {
        if (e.isFiltered())
            return null;
        else
            return e;
    }

    // Initial query entry.
    // This events should be fired immediately.
    if (e.updateCounter() == -1)
        return e;

    EntryBuffer buf = entryBufs.get(e.partition());

    if (buf == null) {
        buf = new EntryBuffer();

        // Concurrent creation resolved via putIfAbsent: first writer wins.
        EntryBuffer oldRec = entryBufs.putIfAbsent(e.partition(), buf);

        if (oldRec != null)
            buf = oldRec;
    }

    return buf.handle(e);
}
/**
 * Per-partition receiver-side state that restores original event order from
 * update counters, buffering out-of-order events and tracking "holes" left by
 * filtered events. Mutable state is guarded by the {@link #pendingEvts} monitor.
 */
private static class PartitionRecovery {
    /** Event which means hole in sequence. */
    private static final CacheContinuousQueryEntry HOLE = new CacheContinuousQueryEntry();

    /** Maximum number of buffered pending events before a forced flush. */
    private final static int MAX_BUFF_SIZE = 100;

    /** Logger. */
    private IgniteLogger log;

    /** Update counter of the last event handed to the listener. */
    private long lastFiredEvt;

    /** Topology version of the last processed event; {@code NONE} until the first event arrives. */
    private AffinityTopologyVersion curTop = AffinityTopologyVersion.NONE;

    /** Out-of-order events keyed (and thus iterated) by update counter. */
    private final Map<Long, CacheContinuousQueryEntry> pendingEvts = new TreeMap<>();

    /**
     * @param log Logger.
     * @param topVer Topology version.
     * @param initCntr Update counters.
     */
    PartitionRecovery(IgniteLogger log, AffinityTopologyVersion topVer, @Nullable Long initCntr) {
        this.log = log;

        if (initCntr != null) {
            assert topVer.topologyVersion() > 0 : topVer;

            this.lastFiredEvt = initCntr;

            curTop = topVer;
        }
    }

    /**
     * Resets cached topology.
     */
    void resetTopologyCache() {
        curTop = AffinityTopologyVersion.NONE;
    }

    /**
     * Add continuous entry.
     *
     * @param cctx Cache context.
     * @param cache Cache.
     * @param entry Cache continuous query entry.
     * @return Collection entries which will be fired. This collection should contains only non-filtered events.
     */
    <K, V> Collection<CacheEntryEvent<? extends K, ? extends V>> collectEntries(
        CacheContinuousQueryEntry entry,
        GridCacheContext cctx,
        IgniteCache cache
    ) {
        assert entry != null;

        if (entry.topologyVersion() == null) { // Possible if entry is sent from old node.
            assert entry.updateCounter() == 0L : entry;

            return F.<CacheEntryEvent<? extends K, ? extends V>>
                asList(new CacheContinuousQueryEvent<K, V>(cache, cctx, entry));
        }

        List<CacheEntryEvent<? extends K, ? extends V>> entries;

        synchronized (pendingEvts) {
            if (log.isDebugEnabled()) {
                log.debug("Handling event [lastFiredEvt=" + lastFiredEvt +
                    ", curTop=" + curTop +
                    ", entUpdCnt=" + entry.updateCounter() +
                    ", partId=" + entry.partition() +
                    ", pendingEvts=" + pendingEvts + ']');
            }

            // Received first event.
            if (curTop == AffinityTopologyVersion.NONE) {
                lastFiredEvt = entry.updateCounter();

                curTop = entry.topologyVersion();

                if (log.isDebugEnabled()) {
                    log.debug("First event [lastFiredEvt=" + lastFiredEvt +
                        ", curTop=" + curTop +
                        ", entUpdCnt=" + entry.updateCounter() +
                        ", partId=" + entry.partition() + ']');
                }

                return !entry.isFiltered() ?
                    F.<CacheEntryEvent<? extends K, ? extends V>>
                        asList(new CacheContinuousQueryEvent<K, V>(cache, cctx, entry)) :
                    Collections.<CacheEntryEvent<? extends K, ? extends V>>emptyList();
            }

            if (curTop.compareTo(entry.topologyVersion()) < 0) {
                // Counter restarted at 1 on a primary after a topology change:
                // partition was lost; flush everything pending and start over.
                if (entry.updateCounter() == 1L && !entry.isBackup()) {
                    entries = new ArrayList<>(pendingEvts.size());

                    for (CacheContinuousQueryEntry evt : pendingEvts.values()) {
                        if (evt != HOLE && !evt.isFiltered())
                            entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, evt));
                    }

                    pendingEvts.clear();

                    curTop = entry.topologyVersion();

                    lastFiredEvt = entry.updateCounter();

                    if (!entry.isFiltered())
                        entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, entry));

                    if (log.isDebugEnabled())
                        log.debug("Partition was lost [lastFiredEvt=" + lastFiredEvt +
                            ", curTop=" + curTop +
                            ", entUpdCnt=" + entry.updateCounter() +
                            ", partId=" + entry.partition() +
                            ", pendingEvts=" + pendingEvts + ']');

                    return entries;
                }

                curTop = entry.topologyVersion();
            }

            // Check duplicate.
            if (entry.updateCounter() > lastFiredEvt) {
                pendingEvts.put(entry.updateCounter(), entry);

                // Put filtered events.
                if (entry.filteredEvents() != null) {
                    for (long cnrt : entry.filteredEvents()) {
                        if (cnrt > lastFiredEvt)
                            pendingEvts.put(cnrt, HOLE);
                    }
                }
            }
            else {
                if (log.isDebugEnabled())
                    log.debug("Skip duplicate continuous query message: " + entry);

                return Collections.emptyList();
            }

            if (pendingEvts.isEmpty()) {
                if (log.isDebugEnabled()) {
                    log.debug("Nothing sent to listener [lastFiredEvt=" + lastFiredEvt +
                        ", curTop=" + curTop +
                        ", entUpdCnt=" + entry.updateCounter() +
                        ", partId=" + entry.partition() + ']');
                }

                return Collections.emptyList();
            }

            Iterator<Map.Entry<Long, CacheContinuousQueryEntry>> iter = pendingEvts.entrySet().iterator();

            entries = new ArrayList<>();

            if (pendingEvts.size() >= MAX_BUFF_SIZE) {
                // Buffer overflow: force out the oldest 90 entries even though gaps remain.
                for (int i = 0; i < MAX_BUFF_SIZE - (MAX_BUFF_SIZE / 10); i++) {
                    Map.Entry<Long, CacheContinuousQueryEntry> e = iter.next();

                    if (e.getValue() != HOLE && !e.getValue().isFiltered())
                        entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, e.getValue()));

                    lastFiredEvt = e.getKey();

                    iter.remove();
                }

                if (log.isDebugEnabled()) {
                    log.debug("Pending events reached max of buffer size [lastFiredEvt=" + lastFiredEvt +
                        ", curTop=" + curTop +
                        ", entUpdCnt=" + entry.updateCounter() +
                        ", partId=" + entry.partition() +
                        ", pendingEvts=" + pendingEvts + ']');
                }
            }
            else {
                // Elements are consistently.
                // Release the maximal contiguous prefix of counters.
                while (iter.hasNext()) {
                    Map.Entry<Long, CacheContinuousQueryEntry> e = iter.next();

                    if (e.getKey() == lastFiredEvt + 1) {
                        ++lastFiredEvt;

                        if (e.getValue() != HOLE && !e.getValue().isFiltered())
                            entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, e.getValue()));

                        iter.remove();
                    }
                    else
                        break;
                }
            }
        }

        if (log.isDebugEnabled()) {
            log.debug("Will send to listener the following events [entries=" + entries +
                ", lastFiredEvt=" + lastFiredEvt +
                ", curTop=" + curTop +
                ", entUpdCnt=" + entry.updateCounter() +
                ", partId=" + entry.partition() +
                ", pendingEvts=" + pendingEvts + ']');
        }

        return entries;
    }
}
/**
 * Per-partition sender-side buffer: counters of filtered events are withheld
 * and attached to the next entry that is actually sent, so the receiver can
 * account for the holes in the counter sequence.
 */
private static class EntryBuffer {
    /** Maximum spread of buffered filtered counters before a forced flush. */
    private final static int MAX_BUFF_SIZE = 100;

    /** Counters of filtered (not yet reported) events. */
    private final GridConcurrentSkipListSet<Long> buf = new GridConcurrentSkipListSet<>();

    /** Highest update counter among events that were sent. */
    private AtomicLong lastFiredCntr = new AtomicLong();

    /**
     * @param newVal New value.
     * @return Old value if previous value less than new value otherwise {@code -1}.
     */
    private long updateFiredCounter(long newVal) {
        long prevVal = lastFiredCntr.get();

        // CAS loop: only ever advance the counter, never move it backwards.
        while (prevVal < newVal) {
            if (lastFiredCntr.compareAndSet(prevVal, newVal))
                return prevVal;
            else
                prevVal = lastFiredCntr.get();
        }

        return prevVal >= newVal ? -1 : prevVal;
    }

    /**
     * Add continuous entry.
     *
     * @param e Cache continuous query entry.
     * @return Entry to send (with accumulated filtered counters attached), or
     *      {@code null} if the filtered entry was buffered instead of sent.
     */
    public CacheContinuousQueryEntry handle(CacheContinuousQueryEntry e) {
        assert e != null;

        if (e.isFiltered()) {
            Long last = buf.lastx();
            Long first = buf.firstx();

            // Too many buffered holes: flush them all with this entry.
            if (last != null && first != null && last - first >= MAX_BUFF_SIZE) {
                NavigableSet<Long> prevHoles = buf.subSet(first, true, last, true);

                GridLongList filteredEvts = new GridLongList((int)(last - first));

                int size = 0;

                Long cntr;

                while ((cntr = prevHoles.pollFirst()) != null) {
                    filteredEvts.add(cntr);

                    ++size;
                }

                filteredEvts.truncate(size, true);

                e.filteredEvents(filteredEvts);

                return e;
            }

            // Already covered by a sent event, or the very first counter: send as-is.
            if (lastFiredCntr.get() > e.updateCounter() || e.updateCounter() == 1)
                return e;
            else {
                buf.add(e.updateCounter());

                // Double check. If another thread sent a event with counter higher than this event.
                if (lastFiredCntr.get() > e.updateCounter() && buf.contains(e.updateCounter())) {
                    buf.remove(e.updateCounter());

                    return e;
                }
                else
                    return null;
            }
        }
        else {
            long prevVal = updateFiredCounter(e.updateCounter());

            if (prevVal == -1)
                return e;
            else {
                // Attach counters of all holes between the previously fired
                // counter and this entry's counter.
                NavigableSet<Long> prevHoles = buf.subSet(prevVal, true, e.updateCounter(), true);

                GridLongList filteredEvts = new GridLongList((int)(e.updateCounter() - prevVal));

                int size = 0;

                Long cntr;

                while ((cntr = prevHoles.pollFirst()) != null) {
                    filteredEvts.add(cntr);

                    ++size;
                }

                filteredEvts.truncate(size, true);

                e.filteredEvents(filteredEvts);

                return e;
            }
        }
    }
}
/** {@inheritDoc} */
@Override public void onNodeLeft() {
    // Subscriber node is gone: drop the backup queue, nobody will consume it.
    Collection<CacheContinuousQueryEntry> queue = backupQueue;

    if (queue != null)
        backupQueue = null;
}
/** {@inheritDoc} */
@Override public void p2pMarshal(GridKernalContext ctx) throws IgniteCheckedException {
    assert ctx != null;
    assert ctx.config().isPeerClassLoadingEnabled();

    // Wrap the remote filter for peer deployment unless its class ships with Ignite itself.
    if (rmtFilter != null && !U.isGrid(rmtFilter.getClass()))
        rmtFilterDep = new DeployableObject(rmtFilter, ctx);
}
/** {@inheritDoc} */
@Override public void p2pUnmarshal(UUID nodeId, GridKernalContext ctx) throws IgniteCheckedException {
    assert nodeId != null;
    assert ctx != null;
    assert ctx.config().isPeerClassLoadingEnabled();

    // Restore the remote filter from its peer-deployable wrapper, if it was wrapped.
    if (rmtFilterDep != null)
        rmtFilter = rmtFilterDep.unmarshal(nodeId, ctx);
}
/** {@inheritDoc} */
@Override public GridContinuousBatch createBatch() {
    // Query-specific batch type (carries entry counts used by acknowledge logic).
    return new GridContinuousQueryBatch();
}
/** {@inheritDoc} */
@Override public void onBatchAcknowledged(final UUID routineId,
    GridContinuousBatch batch,
    final GridKernalContext ctx) {
    // Accumulate counters; a non-null result means the acknowledge threshold
    // was reached and backups must be notified.
    sendBackupAcknowledge(ackBuf.onAcknowledged(batch), routineId, ctx);
}
/**
 * Asynchronously sends collected acknowledge info to all non-local affinity
 * nodes of the cache for every topology version in the tuple.
 *
 * @param t Acknowledge information.
 * @param routineId Routine ID.
 * @param ctx Context.
 */
private void sendBackupAcknowledge(final IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>> t,
    final UUID routineId,
    final GridKernalContext ctx) {
    if (t != null) {
        ctx.closure().runLocalSafe(new Runnable() {
            @Override public void run() {
                GridCacheContext<K, V> cctx = cacheContext(ctx);

                CacheContinuousQueryBatchAck msg = new CacheContinuousQueryBatchAck(cctx.cacheId(),
                    routineId,
                    t.get1());

                for (AffinityTopologyVersion topVer : t.get2()) {
                    for (ClusterNode node : ctx.discovery().cacheAffinityNodes(cctx.name(), topVer)) {
                        if (!node.isLocal()) {
                            try {
                                cctx.io().send(node, msg, GridIoPolicy.SYSTEM_POOL);
                            }
                            catch (ClusterTopologyCheckedException ignored) {
                                // Target node left: acknowledge is pointless, just log.
                                IgniteLogger log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);

                                if (log.isDebugEnabled())
                                    log.debug("Failed to send acknowledge message, node left " +
                                        "[msg=" + msg + ", node=" + node + ']');
                            }
                            catch (IgniteCheckedException e) {
                                IgniteLogger log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);

                                U.error(log, "Failed to send acknowledge message " +
                                    "[msg=" + msg + ", node=" + node + ']', e);
                            }
                        }
                    }
                }
            }
        });
    }
}
/** {@inheritDoc} */
@Nullable @Override public Object orderedTopic() {
    // Topic used for ordered notification messages of this routine.
    return topic;
}
/** {@inheritDoc} */
@Override public GridContinuousHandler clone() {
    try {
        return (GridContinuousHandler)super.clone();
    }
    catch (CloneNotSupportedException e) {
        // Not expected: super.clone() is presumed supported here — rethrow as unchecked.
        throw new IllegalStateException(e);
    }
}
/** {@inheritDoc} */
@Override public String toString() {
    // Reflection-based string form via Ignite's standard helper.
    return S.toString(CacheContinuousQueryHandler.class, this);
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // Field order must stay in sync with readExternal().
    U.writeString(out, cacheName);
    out.writeObject(topic);

    // Either the peer-deployable wrapper or the raw filter is written, flagged by 'b'.
    boolean b = rmtFilterDep != null;

    out.writeBoolean(b);

    if (b)
        out.writeObject(rmtFilterDep);
    else
        out.writeObject(rmtFilter);

    out.writeBoolean(internal);
    out.writeBoolean(notifyExisting);
    out.writeBoolean(oldValRequired);
    out.writeBoolean(sync);
    out.writeBoolean(ignoreExpired);
    out.writeInt(taskHash);
    out.writeBoolean(keepBinary);
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Field order must stay in sync with writeExternal().
    cacheName = U.readString(in);
    topic = in.readObject();

    boolean b = in.readBoolean();

    if (b)
        rmtFilterDep = (DeployableObject)in.readObject();
    else
        rmtFilter = (CacheEntryEventSerializableFilter<K, V>)in.readObject();

    internal = in.readBoolean();
    notifyExisting = in.readBoolean();
    oldValRequired = in.readBoolean();
    sync = in.readBoolean();
    ignoreExpired = in.readBoolean();
    taskHash = in.readInt();
    keepBinary = in.readBoolean();

    // Cache ID is derived from the name, not serialized.
    cacheId = CU.cacheId(cacheName);
}
/**
 * @param ctx Kernal context.
 * @return Cache context for {@code cacheId}; may be {@code null} (callers
 *      treat that as "cache stopped").
 */
private GridCacheContext<K, V> cacheContext(GridKernalContext ctx) {
    assert ctx != null;

    return ctx.cache().<K, V>context().cacheContext(cacheId);
}
/**
 * Accumulates per-partition update counters and topology versions of delivered
 * events; once {@code BACKUP_ACK_THRESHOLD} events are collected (or on
 * timeout) the data is drained for sending to backups. Mutating methods are
 * {@code synchronized} on this instance.
 */
private static class AcknowledgeBuffer {
    /** Number of events accumulated since the last drain. */
    private int size;

    /** Highest acknowledged update counter per partition. */
    @GridToStringInclude
    private Map<Integer, Long> updateCntrs = new HashMap<>();

    /** Topology versions seen among acknowledged events. */
    @GridToStringInclude
    private Set<AffinityTopologyVersion> topVers = U.newHashSet(1);

    /**
     * @param batch Batch.
     * @return Non-null tuple if acknowledge should be sent to backups.
     */
    @SuppressWarnings("unchecked")
    @Nullable synchronized IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>>
        onAcknowledged(GridContinuousBatch batch) {
        assert batch instanceof GridContinuousQueryBatch;

        size += ((GridContinuousQueryBatch)batch).entriesCount();

        Collection<CacheContinuousQueryEntry> entries = (Collection)batch.collect();

        for (CacheContinuousQueryEntry e : entries)
            addEntry(e);

        return size >= BACKUP_ACK_THRESHOLD ? acknowledgeData() : null;
    }

    /**
     * @param e Entry.
     * @return Non-null tuple if acknowledge should be sent to backups.
     */
    @Nullable synchronized IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>>
        onAcknowledged(CacheContinuousQueryEntry e) {
        size++;

        addEntry(e);

        return size >= BACKUP_ACK_THRESHOLD ? acknowledgeData() : null;
    }

    /**
     * @param e Entry.
     */
    private void addEntry(CacheContinuousQueryEntry e) {
        topVers.add(e.topologyVersion());

        Long cntr0 = updateCntrs.get(e.partition());

        // Keep only the maximum acknowledged counter per partition.
        if (cntr0 == null || e.updateCounter() > cntr0)
            updateCntrs.put(e.partition(), e.updateCounter());
    }

    /**
     * @return Non-null tuple if acknowledge should be sent to backups.
     */
    @Nullable synchronized IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>>
        acknowledgeOnTimeout() {
        return size > 0 ? acknowledgeData() : null;
    }

    /**
     * Drains the buffered state into a tuple and resets the accumulator.
     *
     * @return Tuple with acknowledge information.
     */
    private IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>> acknowledgeData() {
        assert size > 0;

        Map<Integer, Long> cntrs = new HashMap<>(updateCntrs);

        IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>> res =
            new IgniteBiTuple<>(cntrs, topVers);

        topVers = U.newHashSet(1);

        size = 0;

        return res;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(AcknowledgeBuffer.class, this);
    }
}
/**
 * Closure that runs remote-filter evaluation and listener notification inside
 * the async callback pool for a single continuous query event.
 */
private class ContinuousQueryAsyncClosure implements Runnable {
    /** Event to process. */
    private final CacheContinuousQueryEvent<K, V> evt;

    /** {@code True} if the event fired on the primary node. */
    private final boolean primary;

    /** Whether to record a corresponding Ignite event. */
    private final boolean recordIgniteEvt;

    /** DHT update future to wait for before notifying; may be {@code null}. */
    private final IgniteInternalFuture<?> fut;

    /**
     * @param primary Primary flag.
     * @param evt Event.
     * @param recordIgniteEvt Fired event.
     * @param fut Dht future.
     */
    ContinuousQueryAsyncClosure(
        boolean primary,
        CacheContinuousQueryEvent<K, V> evt,
        boolean recordIgniteEvt,
        IgniteInternalFuture<?> fut) {
        this.primary = primary;
        this.evt = evt;
        this.recordIgniteEvt = recordIgniteEvt;
        this.fut = fut;
    }

    /** {@inheritDoc} */
    @Override public void run() {
        // Filter runs even on backups so backup-queue bookkeeping happens.
        final boolean notify = filter(evt, primary);

        if (!primary())
            return;

        if (fut == null) {
            onEntryUpdate(evt, notify, nodeId.equals(ctx.localNodeId()), recordIgniteEvt);

            return;
        }

        if (fut.isDone()) {
            if (fut.error() != null)
                evt.entry().markFiltered();

            onEntryUpdate(evt, notify, nodeId.equals(ctx.localNodeId()), recordIgniteEvt);
        }
        else {
            // Defer notification until the DHT update future completes; re-dispatch
            // into the callback pool keyed by partition to preserve ordering.
            fut.listen(new CI1<IgniteInternalFuture<?>>() {
                @Override public void apply(IgniteInternalFuture<?> f) {
                    if (f.error() != null)
                        evt.entry().markFiltered();

                    ctx.asyncCallbackPool().execute(new Runnable() {
                        @Override public void run() {
                            onEntryUpdate(evt, notify, nodeId.equals(ctx.localNodeId()), recordIgniteEvt);
                        }
                    }, evt.entry().partition());
                }
            });
        }
    }

    /**
     * @return {@code True} if event fired on this node.
     */
    private boolean primary() {
        return primary || skipPrimaryCheck;
    }

    /** {@inheritDoc} */
    @Override public String toString() { // Fixed: was missing @Override despite the {@inheritDoc} tag.
        return S.toString(ContinuousQueryAsyncClosure.class, this);
    }
}
/**
 * Deployable object.
 *
 * <p>Holds an object in marshalled form together with its deployment info so it
 * can be unmarshalled on a remote node via peer class loading.</p>
 */
protected static class DeployableObject implements Externalizable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Serialized object. */
    private byte[] bytes;

    /** Deployment class name. */
    private String clsName;

    /** Deployment info. */
    private GridDeploymentInfo depInfo;

    /**
     * Required by {@link Externalizable}.
     */
    public DeployableObject() {
        // No-op.
    }

    /**
     * @param obj Object.
     * @param ctx Kernal context.
     * @throws IgniteCheckedException In case of error.
     */
    protected DeployableObject(Object obj, GridKernalContext ctx) throws IgniteCheckedException {
        assert obj != null;
        assert ctx != null;

        Class cls = U.detectClass(obj);

        clsName = cls.getName();

        GridDeployment dep = ctx.deploy().deploy(cls, U.detectClassLoader(cls));

        if (dep == null)
            throw new IgniteDeploymentCheckedException("Failed to deploy object: " + obj);

        depInfo = new GridDeploymentInfoBean(dep);

        bytes = U.marshal(ctx, obj);
    }

    /**
     * @param nodeId Node ID.
     * @param ctx Kernal context.
     * @return Deserialized object.
     * @throws IgniteCheckedException In case of error.
     */
    <T> T unmarshal(UUID nodeId, GridKernalContext ctx) throws IgniteCheckedException {
        assert ctx != null;

        // Resolve the sender's class loader through the global deployment store.
        GridDeployment dep = ctx.deploy().getGlobalDeployment(depInfo.deployMode(), clsName, clsName,
            depInfo.userVersion(), nodeId, depInfo.classLoaderId(), depInfo.participants(), null);

        if (dep == null)
            throw new IgniteDeploymentCheckedException("Failed to obtain deployment for class: " + clsName);

        return U.unmarshal(ctx, bytes, U.resolveClassLoader(dep.classLoader(), ctx.config()));
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        U.writeByteArray(out, bytes);
        U.writeString(out, clsName);
        out.writeObject(depInfo);
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        bytes = U.readByteArray(in);
        clsName = U.readString(in);
        depInfo = (GridDeploymentInfo)in.readObject();
    }
}
}
| |
/*
Copyright 2011-2013 The Cassandra Consortium (cassandra-fp7.eu)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package eu.cassandra.training.utils;
import java.awt.Color;
import java.text.DecimalFormat;
import java.util.Arrays;
import org.apache.log4j.Logger;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.CategoryAxis;
import org.jfree.chart.axis.CategoryLabelPosition;
import org.jfree.chart.axis.CategoryLabelPositions;
import org.jfree.chart.axis.CategoryLabelWidthType;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.axis.NumberTickUnit;
import org.jfree.chart.labels.PieSectionLabelGenerator;
import org.jfree.chart.labels.StandardPieSectionLabelGenerator;
import org.jfree.chart.plot.CategoryPlot;
import org.jfree.chart.plot.PiePlot;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.XYPlot;
import org.jfree.data.category.CategoryDataset;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.data.general.DatasetUtilities;
import org.jfree.data.general.DefaultPieDataset;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;
import org.jfree.text.TextBlockAnchor;
import org.jfree.ui.RectangleAnchor;
import org.jfree.ui.RectangleInsets;
import org.jfree.ui.TextAnchor;
import eu.cassandra.training.entities.Person;
/**
* This class contains static functions that are used for the creation of
* visualization charts that are appearing in the Training Module GUI.
*
* @author Antonios Chrysopoulos
* @version 0.9, Date: 29.07.2013
*/
public class ChartUtils
{
static Logger log = Logger.getLogger(ChartUtils.class);
/**
 * Builds a comparative line chart overlaying the value curve under the basic
 * pricing scheme with the curve under the new pricing scheme.
 *
 * @param title
 *          The title of the chart.
 * @param x
 *          The unit on the X axis of the chart.
 * @param y
 *          The unit on the Y axis of the chart.
 * @param dataBefore
 *          The array of values before the response.
 * @param dataAfter
 *          The array of values after the response.
 * @return a chart panel with the graphical representation.
 */
public static ChartPanel createResponseHistogram (String title, String x,
                                                  String y,
                                                  double[] dataBefore,
                                                  double[] dataAfter)
{
  XYSeries before = new XYSeries("Basic Pricing Scheme");
  XYSeries after = new XYSeries("New Pricing Scheme");

  for (int i = 0; i < dataBefore.length; i++)
    before.add(i, dataBefore[i]);

  for (int i = 0; i < dataAfter.length; i++)
    after.add(i, dataAfter[i]);

  XYSeriesCollection collection = new XYSeriesCollection();
  collection.addSeries(before);
  collection.addSeries(after);

  // Legend on, tooltips and URLs off.
  JFreeChart chart =
    ChartFactory.createXYLineChart(title, x, y, collection,
                                   PlotOrientation.VERTICAL, true, false,
                                   false);

  XYPlot plot = (XYPlot) chart.getPlot();
  plot.setDomainPannable(true);
  plot.setRangePannable(true);
  plot.setForegroundAlpha(0.85F);

  // domainAxis.setRange(0.0, 1440.0);
  ((NumberAxis) plot.getDomainAxis()).setTickUnit(new NumberTickUnit(10));
  ((NumberAxis) plot.getRangeAxis()).setTickUnit(new NumberTickUnit(0.1));

  return new ChartPanel(chart);
}
/**
* This function is used for the visualization of a Comparative Response Model
* Histogram.
*
* @param title
* The title of the chart.
* @param x
* The unit on the X axis of the chart.
* @param y
* The unit on the Y axis of the chart.
* @param dataBefore
* The array of values before the response.
* @param dataAfter
* The array of values after the response.
* @return a chart panel with the graphical representation.
*/
public static ChartPanel createDailyResponseHistogram (String title,
String x, String y,
double[] dataBefore,
double[] dataAfter)
{
final DefaultCategoryDataset dataset = new DefaultCategoryDataset();
for (int i = 0; i < dataBefore.length; i++) {
dataset.addValue(dataBefore[i], "Basic Scheme", "" + i + "");
if (i < dataAfter.length)
dataset.addValue(dataAfter[i], "New Scheme", "" + i + "");
else
dataset.addValue(0, "New Scheme", "" + i + "");
}
JFreeChart chart = ChartFactory.createBarChart3D(title, // chart title
x, // domain axis label
y, // range axis label
dataset, // data
PlotOrientation.VERTICAL, // orientation
true, // include legend
true, // tooltips
false // urls
);
final CategoryPlot plot = chart.getCategoryPlot();
plot.setForegroundAlpha(1.0f);
// left align the category labels...
final CategoryAxis axis = plot.getDomainAxis();
final CategoryLabelPositions p = axis.getCategoryLabelPositions();
final CategoryLabelPosition left =
new CategoryLabelPosition(RectangleAnchor.LEFT,
TextBlockAnchor.CENTER_LEFT,
TextAnchor.CENTER_LEFT, 0.0,
CategoryLabelWidthType.RANGE, 0.30f);
axis.setCategoryLabelPositions(CategoryLabelPositions
.replaceLeftPosition(p, left));
return new ChartPanel(chart);
}
/**
* This function is used for the visualization of a Line Diagram.
*
* @param title
* The title of the chart.
* @param x
* The unit on the X axis of the chart.
* @param y
* The unit on the Y axis of the chart.
* @param data
* The array of values.
* @return a chart panel with the graphical representation.
*/
public static ChartPanel createLineDiagram (String title, String x, String y,
double[] data)
{
XYSeries series1 = new XYSeries("Active Power");
for (int i = 0; i < data.length; i++) {
series1.add(i, data[i]);
}
XYSeriesCollection dataset = new XYSeriesCollection();
dataset.addSeries(series1);
PlotOrientation orientation = PlotOrientation.VERTICAL;
boolean show = true;
boolean toolTips = false;
boolean urls = false;
JFreeChart chart =
ChartFactory.createXYLineChart(title, x, y, dataset, orientation, show,
toolTips, urls);
return new ChartPanel(chart);
}
public static ChartPanel createLineDiagram (String title, String x, String y,
double[] data, double[] data2)
{
XYSeries series1 = new XYSeries("Active Power");
for (int i = 0; i < data.length; i++) {
series1.add(i, data[i]);
}
XYSeries series2 = new XYSeries("Reactive Power");
for (int i = 0; i < data2.length; i++) {
series2.add(i, data2[i]);
}
XYSeriesCollection dataset = new XYSeriesCollection();
dataset.addSeries(series1);
dataset.addSeries(series2);
PlotOrientation orientation = PlotOrientation.VERTICAL;
boolean show = true;
boolean toolTips = false;
boolean urls = false;
JFreeChart chart =
ChartFactory.createXYLineChart(title, x, y, dataset, orientation, show,
toolTips, urls);
return new ChartPanel(chart);
}
/**
* This function is used for the visualization of a Histogram.
*
* @param title
* The title of the chart.
* @param x
* The unit on the X axis of the chart.
* @param y
* The unit on the Y axis of the chart.
* @param data
* The array of values.
* @return a chart panel with the graphical representation.
*/
public static ChartPanel createHistogram (String title, String x, String y,
double[] data)
{
DefaultCategoryDataset dataset = new DefaultCategoryDataset();
log.info(Arrays.toString(data));
for (int i = 0; i < data.length; i++) {
if (title.contains("Start")) {
log.info(i + " " + data[i]);
dataset.addValue(data[i], y, (Comparable) i);
}
else if (data[i] != 0) {
log.info(i + " " + data[i]);
dataset.addValue(data[i], y, (Comparable) i);
}
}
PlotOrientation orientation = PlotOrientation.VERTICAL;
boolean show = false;
boolean toolTips = false;
boolean urls = false;
JFreeChart chart =
ChartFactory.createBarChart(title, x, y, dataset, orientation, show,
toolTips, urls);
return new ChartPanel(chart);
}
/**
* This function is used for the visualization of two Area Diagrams.
*
* @param title
* The title of the chart.
* @param x
* The unit on the X axis of the chart.
* @param y
* The unit on the Y axis of the chart.
* @param doubles
* The array of values of the first array.
* @param doubles2
* The array of values of the second array.
* @return a chart panel with the graphical representation.
*/
public static ChartPanel createArea (String title, String x, String y,
Double[] doubles, Double[] doubles2)
{
JFreeChart chart = null;
if (doubles.length != doubles2.length) {
System.out.println("ERROR with lengths.");
}
else {
Double[][] data = new Double[2][doubles.length];
data[0] = doubles;
data[1] = doubles2;
final CategoryDataset dataset =
DatasetUtilities.createCategoryDataset("Power ", "", data);
chart =
ChartFactory.createAreaChart(title, x, y, dataset,
PlotOrientation.VERTICAL, true, true,
false);
chart.setBackgroundPaint(Color.white);
CategoryPlot plot = chart.getCategoryPlot();
plot.setForegroundAlpha(0.5f);
// plot.setAxisOffset(new Spacer(Spacer.ABSOLUTE, 5.0, 5.0, 5.0, 5.0));
plot.setBackgroundPaint(Color.lightGray);
CategoryAxis domainAxis = plot.getDomainAxis();
domainAxis.setCategoryLabelPositions(CategoryLabelPositions.UP_45);
// domainAxis.setTickUnit(new NumberTickUnit(10));
NumberAxis numberaxis = (NumberAxis) plot.getRangeAxis();
numberaxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits());
}
return new ChartPanel(chart);
}
/**
* This function is used for the visualization of two Area Diagrams.
*
* @param title
* The title of the chart.
* @param x
* The unit on the X axis of the chart.
* @param y
* The unit on the Y axis of the chart.
* @param doubles
* The array of values of the first array.
*
* @return a chart panel with the graphical representation.
*/
public static ChartPanel createExpectedPowerChart (String title, String x,
String y, double[] data)
{
JFreeChart chart = null;
XYSeries series1 = new XYSeries("Expected Power");
for (int i = 0; i < data.length; i++) {
series1.add(i, data[i]);
}
XYSeriesCollection dataset = new XYSeriesCollection();
dataset.addSeries(series1);
PlotOrientation orientation = PlotOrientation.VERTICAL;
boolean show = false;
boolean toolTips = false;
boolean urls = false;
chart =
ChartFactory.createXYLineChart(title, x, y, dataset, orientation, show,
toolTips, urls);
chart.setBackgroundPaint(Color.white);
XYPlot plot = (XYPlot) chart.getPlot();
plot.setBackgroundPaint(Color.lightGray);
plot.setDomainGridlinePaint(Color.white);
plot.setRangeGridlinePaint(Color.white);
plot.setAxisOffset(new RectangleInsets(5.0, 5.0, 5.0, 5.0));
plot.setDomainCrosshairVisible(true);
plot.setRangeCrosshairVisible(true);
NumberAxis domainAxis = (NumberAxis) plot.getDomainAxis();
domainAxis.setVerticalTickLabels(true);
domainAxis.setRange(0.0, 1440.0);
domainAxis.setTickUnit(new NumberTickUnit(100));
return new ChartPanel(chart);
}
/**
* This function is used for the visualization of a Gaussian Mixture
* Distribution.
*
* @param title
* The title of the chart.
* @param x
* The unit on the X axis of the chart.
* @param y
* The unit on the Y axis of the chart.
* @param data
* The array of values.
* @return a chart panel with the graphical representation.
*/
public static ChartPanel createMixtureDistribution (String title, String x,
String y, double[] data)
{
XYSeries series1 = new XYSeries("First");
for (int i = 0; i < data.length; i++) {
series1.add(i, data[i]);
}
final XYSeriesCollection dataset = new XYSeriesCollection();
dataset.addSeries(series1);
PlotOrientation orientation = PlotOrientation.VERTICAL;
boolean show = false;
boolean toolTips = false;
boolean urls = false;
JFreeChart chart =
ChartFactory.createXYLineChart(title, x, y, dataset, orientation, show,
toolTips, urls);
XYPlot xyplot = (XYPlot) chart.getPlot();
xyplot.setDomainPannable(true);
xyplot.setRangePannable(true);
xyplot.setForegroundAlpha(0.85F);
NumberAxis domainAxis = (NumberAxis) xyplot.getDomainAxis();
if (data.length != 1440)
domainAxis.setTickUnit(new NumberTickUnit(data.length / 10));
else
domainAxis.setTickUnit(new NumberTickUnit(100));
NumberAxis numberaxis = (NumberAxis) xyplot.getRangeAxis();
numberaxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits());
return new ChartPanel(chart);
}
/**
* This function is used for parsing and presenting the basic and the new
* pricing schema.
*
* @param basic
* The basic pricing schema
* @param after
* The new pricing schema
*
* @return a chart panel with the
* graphical representation.
*/
public static ChartPanel parsePricingScheme (String basic, String after)
{
double[] data = Utils.parseScheme(basic);
double[] data2 = Utils.parseScheme(after);
XYSeries series1 = new XYSeries("Basic Pricing Scheme");
for (int i = 0; i < data.length; i++) {
series1.add(i, data[i]);
}
XYSeries series2 = new XYSeries("New Pricing Scheme");
for (int i = 0; i < data2.length; i++) {
series2.add(i, data2[i]);
}
XYSeriesCollection dataset = new XYSeriesCollection();
dataset.addSeries(series1);
dataset.addSeries(series2);
PlotOrientation orientation = PlotOrientation.VERTICAL;
boolean show = true;
boolean toolTips = false;
boolean urls = false;
JFreeChart chart =
ChartFactory.createXYLineChart("Pricing Schemes", "Minute of Day",
"Euros/kWh", dataset, orientation, show,
toolTips, urls);
XYPlot xyplot = (XYPlot) chart.getPlot();
xyplot.setDomainPannable(true);
xyplot.setRangePannable(true);
xyplot.setForegroundAlpha(0.85F);
NumberAxis domainAxis = (NumberAxis) xyplot.getDomainAxis();
domainAxis.setTickUnit(new NumberTickUnit(100));
NumberAxis numberaxis = (NumberAxis) xyplot.getRangeAxis();
numberaxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits());
return new ChartPanel(chart);
}
/**
* This function is used for creating a pie chart of a Person Model's
* statistical attributes.
*
* @param title
* The chart's title
* @param person
* The person under consideration
*
* @return a chart panel with the statistical graphical representation.
*/
public static ChartPanel createPieChart (String title, Person person)
{
DefaultPieDataset dataset = new DefaultPieDataset();
dataset.setValue("Activity Models", person.getActivityModelsSize());
dataset.setValue("Response Models", person.getResponseModelsSize());
JFreeChart chart =
ChartFactory.createPieChart(title, dataset, true, true, true);
PiePlot plot = (PiePlot) chart.getPlot();
PieSectionLabelGenerator generator =
new StandardPieSectionLabelGenerator("{0} = {1}", new DecimalFormat("0"),
new DecimalFormat("0.00%"));
plot.setLabelGenerator(generator);
return new ChartPanel(chart);
}
}
| |
/*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import com.gemstone.gemfire.cache.CacheCallback;
import com.gemstone.gemfire.cache.CacheWriterException;
import com.gemstone.gemfire.cache.EntryNotFoundException;
import com.gemstone.gemfire.cache.Operation;
import com.gemstone.gemfire.cache.TimeoutException;
import com.gemstone.gemfire.cache.TransactionId;
import com.gemstone.gemfire.cache.query.internal.IndexUpdater;
import com.gemstone.gemfire.internal.cache.lru.LRUMapCallbacks;
import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.versions.RegionVersionVector;
import com.gemstone.gemfire.internal.cache.versions.VersionHolder;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
/**
* Internal interface used by {@link LocalRegion} to access the map that holds
* its entries. Note that the value of every entry in this map will
* implement {@link RegionEntry}.
*
* @since 3.5.1
*
* @author Darrel Schneider
*
*/
public interface RegionMap extends LRUMapCallbacks {

  /**
   * Parameter object used to facilitate construction of an EntriesMap.
   * Modification of fields after the map is constructed has no effect.
   */
  static class Attributes {

    /** The initial capacity. The implementation
     * performs internal sizing to accommodate this many elements. */
    int initialCapacity = 16;

    /** the load factor threshold, used to control resizing. */
    float loadFactor = 0.75f;

    /** the estimated number of concurrently
     * updating threads. The implementation performs internal sizing
     * to try to accommodate this many threads. */
    int concurrencyLevel = 16;

    /** whether "api" statistics are enabled */
    boolean statisticsEnabled = false;

    /** whether LRU stats are required */
    // boolean lru = false;
  }

  /**
   * Returns the factory used to create {@link RegionEntry} instances
   * for this map.
   */
  public RegionEntryFactory getEntryFactory();

  /**
   * This method should be called before region is initialized
   * to ensure there is no mix of region entries
   */
  public void setEntryFactory(RegionEntryFactory f);

  /**
   * Gets the attributes that this map was created with.
   */
  public Attributes getAttributes();

  /**
   * Tells this map what region owns it.
   */
  public void setOwner(Object r);

  /**
   * Changes the owning region of this map to the given region.
   */
  public void changeOwner(LocalRegion r);

  /**
   * Returns the number of entries in this map.
   */
  public int size();

  /**
   * Returns true if this map contains no entries.
   */
  public boolean isEmpty();

  /**
   * @return number of entries cached in the backing CHM
   */
  public int sizeInVM();

  /**
   * Returns the set of keys in this map.
   */
  public Set keySet();

  /** Returns a collection of RegionEntry instances.
   */
  public Collection<RegionEntry> regionEntries();

  /**
   * Returns true if this map contains an entry for the given key.
   */
  public boolean containsKey(Object key);

  /**
   * fetches the entry from the backing ConcurrentHashMap
   * @param key the key to look up
   * @return the RegionEntry from memory or disk
   */
  public RegionEntry getEntry(Object key);

  /**
   * fetches the entry from the backing ConcurrentHashMap.
   * @param key the key to look up
   * @return the RegionEntry from memory
   */
  public RegionEntry getEntryInVM(Object key);

  // /**
  // * Removes any entry associated with <code>key</code>.
  // * Do nothing if the map has no entry for key.
  // */
  // public void remove(Object key);

  // /**
  // * Removes the entry associated with <code>key</code>
  // * if it is <code>entry</code>.
  // * Otherwise do nothing.
  // */
  // public void remove(Object key, RegionEntry entry);

  /**
   * Clear the region and, if the parameter rvv is not null,
   * return a collection of the IDs of version sources that are
   * still in the map when the operation completes.
   */
  public Set<VersionSource> clear(RegionVersionVector rvv);

  /**
   * Used by disk regions when recovering data from backup.
   * Currently this "put" is done at a very low level to keep it from
   * generating events or pushing updates to others.
   * @return the created RegionEntry or null if entry already existed
   */
  public RegionEntry initRecoveredEntry(Object key, DiskEntry.RecoveredEntry value);

  /**
   * Used by disk regions when recovering data from backup and
   * initRecoveredEntry has already been called for the given key.
   * Currently this "put" is done at a very low level to keep it from
   * generating events or pushing updates to others.
   * @return the updated RegionEntry
   */
  public RegionEntry updateRecoveredEntry(Object key, DiskEntry.RecoveredEntry value);

  /**
   * Used to modify an existing RegionEntry or create a new one
   * when processing the values obtained during a getInitialImage.
   * @param wasRecovered true if the current entry in the cache was
   * recovered from disk.
   * @param entryVersion version information from InitialImageOperation or RegisterInterest
   * @param sender the sender of the initial image, if IIO. Not needed on clients
   * @param forceValue TODO: undocumented — confirm semantics with implementations
   */
  public boolean initialImagePut(Object key,
                                 long lastModified,
                                 Object newValue,
                                 boolean wasRecovered,
                                 boolean deferLRUCallback,
                                 VersionTag entryVersion, InternalDistributedMember sender, boolean forceValue);

  /**
   * Destroy an entry in the map.
   * @param event indicates entry to destroy as well as data for a <code>CacheCallback</code>
   * @param inTokenMode true if destroy is occurring during region initialization
   * @param duringRI true if destroy is occurring during register interest
   * @param cacheWrite true if a cacheWriter should be called
   * @param isEviction true if destroy was called in the context of an LRU Eviction
   * @param expectedOldValue if non-null, only destroy if key exists and value
   * is equal to expectedOldValue
   * @param removeRecoveredEntry NOTE(review): appears to allow removal of an
   * entry recovered from disk — confirm with implementations
   * @return true if the entry was destroyed, false otherwise
   *
   * @see LocalRegion
   * @see AbstractRegionMap
   * @see CacheCallback
   * @see AbstractLRURegionMap
   */
  public boolean destroy(EntryEventImpl event,
                         boolean inTokenMode,
                         boolean duringRI,
                         boolean cacheWrite,
                         boolean isEviction,
                         Object expectedOldValue,
                         boolean removeRecoveredEntry)
  throws CacheWriterException, EntryNotFoundException, TimeoutException;

  /**
   * Invalidate an entry in the map.
   * @param event the event describing the invalidation
   * @param invokeCallbacks true if callbacks should be invoked
   * @param forceNewEntry
   *          used during GII, this forces us to leave an invalid token in the
   *          cache, even if the entry doesn't exist
   * @param forceCallbacks
   *          using for PRs with eviction enabled, this forces invalidate
   *          callbacks and events even if the entry doesn't exist in the cache.
   *          This differs from the forceNewEntry mode in that it doesn't leave
   *          an Invalid token in the cache.
   * @return true if invalidate was done
   */
  public boolean invalidate(EntryEventImpl event,
                            boolean invokeCallbacks,
                            boolean forceNewEntry,
                            boolean forceCallbacks)
  throws EntryNotFoundException;

  /**
   * Evicts the value associated with the given key.
   */
  public void evictValue(Object key);

  /**
   * Put an entry into the map.
   * @param event the event object for this operation, with the exception that
   * the oldValue parameter is not yet filled in. The oldValue will be filled
   * in by this operation.
   *
   * @param lastModified the lastModified time to set with the value; if 0L,
   * then the lastModified time will be set to now.
   * @param ifNew true if this operation must not overwrite an existing key
   * @param ifOld true if this operation must not create a new entry
   * @param expectedOldValue
   *        only succeed if old value is equal to this value. If null,
   *        then doesn't matter what old value is. If INVALID token,
   *        must be INVALID.
   * @param requireOldValue if old value needs to be returned to caller in event
   * (e.g. failed putIfAbsent)
   * @param overwriteDestroyed true if okay to overwrite the DESTROYED token:
   * when this is true has the following effect:
   * even when ifNew is true will write over DESTROYED token
   * when overwriteDestroyed is false and ifNew or ifOld is true
   * then if the put doesn't occur because there is a DESTROYED token
   * present then the entry flag blockedDestroyed is set.
   * @return null if put was not done; otherwise reference to put entry
   */
  public RegionEntry basicPut(EntryEventImpl event,
                              long lastModified,
                              boolean ifNew,
                              boolean ifOld,
                              Object expectedOldValue,
                              boolean requireOldValue,
                              boolean overwriteDestroyed)
  throws CacheWriterException, TimeoutException;

  /**
   * Write synchronizes the given entry and invokes the {@link Runnable}
   * while holding the lock. Does nothing if the entry does not exist.
   */
  public void writeSyncIfPresent(Object key, Runnable runner);

  /**
   * Remove the entry with the given key if it has been marked as destroyed.
   * This is currently used in the cleanup phase of getInitialImage.
   */
  public void removeIfDestroyed(Object key);

  /**
   * Applies a transactional destroy to the entry with the given key.
   * @param key the key of the entry to destroy
   * @param rmtOrigin true if transaction being applied had a remote origin
   * @param event filled in if operation performed
   * @param inTokenMode true if caller has determined we are in destroy token
   * mode and will keep us in that mode while this call is executing.
   * @param inRI the region is performing registerInterest so we need a token
   * @param op the destroy operation to apply
   * @param eventId filled in if operation performed
   * @param aCallbackArgument callback argument passed by user
   * @param pendingCallbacks list that collects events for deferred callbacks
   * @param filterRoutingInfo routing information for client/filter notification
   * @param bridgeContext membership ID of the originating client proxy, if any
   * @param isOperationRemote whether the operation is remote or originated here
   * @param txEntryState when not null, txEntryState.versionTag is set (used on near-side to pass versionTag to TXCommitMessage)
   * @param versionTag when not null, it is the tag generated on near-side to be associated with the entry on far-side
   * @param tailKey when not -1, it is the tailKey generated on near-side to be associated with entry on far-side for WAN
   */
  public void txApplyDestroy(Object key,
                             TransactionId rmtOrigin,
                             TXRmtEvent event,
                             boolean inTokenMode, boolean inRI,
                             Operation op,
                             EventID eventId,
                             Object aCallbackArgument,
                             List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo,
                             ClientProxyMembershipID bridgeContext,
                             boolean isOperationRemote, TXEntryState txEntryState, VersionTag versionTag, long tailKey);

  /**
   * Applies a transactional invalidate to the entry with the given key.
   * @param key the key of the entry to invalidate
   * @param newValue the new value of the entry
   * @param didDestroy true if tx destroyed this entry at some point
   * @param rmtOrigin true if transaction being applied had a remote origin
   * @param event filled in if operation performed
   * @param localOp true for localInvalidates, false otherwise
   * @param eventId filled in if operation performed
   * @param aCallbackArgument callback argument passed by user
   * @param pendingCallbacks list that collects events for deferred callbacks
   * @param filterRoutingInfo routing information for client/filter notification
   * @param bridgeContext membership ID of the originating client proxy, if any
   * @param txEntryState when not null, txEntryState.versionTag is set (used on near-side to pass versionTag to TXCommitMessage)
   * @param versionTag when not null, it is the tag generated on near-side to be associated with the entry on far-side
   * @param tailKey when not -1, it is the tailKey generated on near-side to be associated with entry on far-side for WAN
   */
  public void txApplyInvalidate(Object key,
                                Object newValue,
                                boolean didDestroy,
                                TransactionId rmtOrigin,
                                TXRmtEvent event,
                                boolean localOp,
                                EventID eventId,
                                Object aCallbackArgument,
                                List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo,
                                ClientProxyMembershipID bridgeContext, TXEntryState txEntryState, VersionTag versionTag, long tailKey);

  /**
   * Applies a transactional put to the entry with the given key.
   * @param putOp describes the operation that did the put
   * @param key the key of the entry to put
   * @param newValue the new value of the entry
   * @param didDestroy true if tx destroyed this entry at some point
   * @param rmtOrigin true if transaction being applied had a remote origin
   * @param event filled in if operation performed
   * @param eventId filled in if operation performed
   * @param aCallbackArgument callback argument passed by user
   * @param pendingCallbacks list that collects events for deferred callbacks
   * @param filterRoutingInfo routing information for client/filter notification
   * @param bridgeContext membership ID of the originating client proxy, if any
   * @param txEntryState when not null, txEntryState.versionTag is set (used on near-side to pass versionTag to TXCommitMessage)
   * @param versionTag when not null, it is the tag generated on near-side to be associated with the entry on far-side
   * @param tailKey when not -1, it is the tailKey generated on near-side to be associated with entry on far-side for WAN
   */
  public void txApplyPut(Operation putOp,
                         Object key,
                         Object newValue,
                         boolean didDestroy,
                         TransactionId rmtOrigin,
                         TXRmtEvent event,
                         EventID eventId,
                         Object aCallbackArgument,
                         List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo,
                         ClientProxyMembershipID bridgeContext, TXEntryState txEntryState, VersionTag versionTag, long tailKey);

  /**
   * removes the given key if the enclosing RegionEntry is still in
   * this map
   */
  public void removeEntry(Object key, RegionEntry value, boolean updateStats);

  /**
   * Removes the given key if the enclosing RegionEntry is still in this map for
   * the given EntryEvent and updating the given {@link IndexUpdater} of the
   * region ({@link #getIndexUpdater()}) for the event.
   */
  public void removeEntry(Object key, RegionEntry re, boolean updateStat,
                          EntryEventImpl event, LocalRegion owner, IndexUpdater indexUpdater);

  /**
   * Copies the recovered entries from the given map into this map.
   * NOTE(review): presumably used when switching maps after disk recovery —
   * confirm with implementations.
   */
  public void copyRecoveredEntries(RegionMap rm);

  /**
   * Returns the {@link IndexUpdater} associated with this map, if any.
   */
  public IndexUpdater getIndexUpdater();

  /**
   * Removes an entry that was previously destroyed and made into a tombstone.
   *
   * @param re the entry that was destroyed
   * @param destroyedVersion the version that was destroyed
   * @param isEviction true if the tombstone is being evicted by LRU
   * @param isScheduledTombstone TODO: undocumented — confirm semantics with implementations
   * @return true if the tombstone entry was removed from the entry map
   */
  public boolean removeTombstone(RegionEntry re, VersionHolder destroyedVersion, boolean isEviction, boolean isScheduledTombstone);

  /**
   * Checks to see if the given version is still the version in the map
   * @param re the entry that was destroyed
   * @param destroyedVersion the version that was destroyed
   * @return true if the tombstone is no longer needed (entry was resurrected or evicted)
   */
  public boolean isTombstoneNotNeeded(RegionEntry re, int destroyedVersion);

  /**
   * a tombstone has been unscheduled - update LRU stats if necessary
   */
  public void unscheduleTombstone(RegionEntry re);

  /**
   * Updates the version stamp of the entry affected by the given event.
   */
  public void updateEntryVersion(EntryEventImpl event);

  /**
   * Decrements the transaction reference count.
   * Some features, like eviction and expiration, will
   * not modify an entry while it is referenced by a transaction.
   */
  public void decTxRefCount(RegionEntry e);
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.cluster.routing.allocation;
import org.elasticsearch.cluster.routing.allocation.AllocationDecision;
import org.elasticsearch.cluster.routing.allocation.MoveDecision;
import org.elasticsearch.cluster.routing.allocation.NodeAllocationResult;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.test.action.support.replication.ClusterStateCreationUtils;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.testframework.cluster.ESAllocationTestCase;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.Balancer;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.startsWith;
/**
* Tests for balancing a single shard, see {@link Balancer#decideRebalance(ShardRouting)}.
*/
public class BalancedSingleShardTests extends ESAllocationTestCase {
/**
 * A shard that is not STARTED must not be considered for rebalancing:
 * the allocator returns {@link MoveDecision#NOT_TAKEN}.
 */
public void testRebalanceNonStartedShardNotAllowed() {
    final ShardRoutingState nonStartedState =
        randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.UNASSIGNED, ShardRoutingState.RELOCATING);
    final ClusterState state = ClusterStateCreationUtils.state("idx", randomBoolean(), nonStartedState);
    final ShardRouting primary = state.routingTable().index("idx").shard(0).primaryShard();
    final BalancedShardsAllocator balancer = new BalancedShardsAllocator(Settings.EMPTY);
    final RoutingAllocation allocation =
        newRoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), state);
    final MoveDecision decision = balancer.decideShardAllocation(primary, allocation).getMoveDecision();
    assertSame(MoveDecision.NOT_TAKEN, decision);
}
/**
 * While an async shard fetch is still pending, the rebalance decision for a
 * started shard is AWAITING_INFO with no target node, and the assigned node
 * does not change.
 */
public void testRebalanceNotAllowedDuringPendingAsyncFetch() {
    final BalancedShardsAllocator balancer = new BalancedShardsAllocator(Settings.EMPTY);
    final ClusterState state = ClusterStateCreationUtils.state("idx", randomBoolean(), ShardRoutingState.STARTED);
    final ShardRouting primary = state.routingTable().index("idx").shard(0).primaryShard();
    final RoutingAllocation allocation =
        newRoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), state);
    allocation.setHasPendingAsyncFetch();
    final MoveDecision decision = balancer.decideShardAllocation(primary, allocation).getMoveDecision();
    assertNotNull(decision.getClusterRebalanceDecision());
    assertEquals(AllocationDecision.AWAITING_INFO, decision.getAllocationDecision());
    assertThat(decision.getExplanation(),
        startsWith("cannot rebalance as information about existing copies of this shard in the cluster is still being gathered"));
    assertEquals(state.nodes().getSize() - 1, decision.getNodeDecisions().size());
    assertNull(decision.getTargetNode());
    assertAssignedNodeRemainsSame(balancer, allocation, primary);
}
/**
 * When a decider vetoes canRebalance (NO or THROTTLE), the shard is not
 * rebalanced: the allocation decision mirrors the veto, the explanation
 * names the reason, and the shard stays on its node.
 */
public void testRebalancingNotAllowedDueToCanRebalance() {
    final Decision vetoDecision = randomFrom(Decision.NO, Decision.THROTTLE);
    // decider that always answers canRebalance with the randomly chosen veto
    final AllocationDecider vetoingDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
            return allocation.decision(vetoDecision, "TEST", "foobar");
        }
    };
    final BalancedShardsAllocator balancer = new BalancedShardsAllocator(Settings.EMPTY);
    final ClusterState state = ClusterStateCreationUtils.state("idx", randomBoolean(), ShardRoutingState.STARTED);
    final ShardRouting primary = state.routingTable().index("idx").shard(0).primaryShard();
    final RoutingAllocation allocation = newRoutingAllocation(
        new AllocationDeciders(Settings.EMPTY, Collections.singleton(vetoingDecider)), state);
    final MoveDecision decision = balancer.decideShardAllocation(primary, allocation).getMoveDecision();
    assertEquals(vetoDecision.type(), decision.getClusterRebalanceDecision().type());
    assertEquals(AllocationDecision.fromDecisionType(vetoDecision.type()), decision.getAllocationDecision());
    final String expectedFragment =
        vetoDecision.type() == Type.THROTTLE ? "rebalancing is throttled" : "rebalancing is not allowed";
    assertThat(decision.getExplanation(), containsString(expectedFragment));
    assertNotNull(decision.getNodeDecisions());
    assertNull(decision.getTargetNode());
    assertEquals(1, decision.getClusterRebalanceDecision().getDecisions().size());
    for (Decision subDecision : decision.getClusterRebalanceDecision().getDecisions()) {
        assertEquals("foobar", ((Decision.Single) subDecision).getExplanation());
    }
    assertAssignedNodeRemainsSame(balancer, allocation, primary);
}
/**
 * When every node can accept the shard and moving it improves the cluster balance, the
 * balancer must report a YES rebalance decision with an explanation and one node decision
 * per node other than the shard's current node.
 */
public void testRebalancePossible() {
    AllocationDecider alwaysYesAllocationDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            return Decision.YES;
        }
    };
    final Tuple<ClusterState, MoveDecision> stateAndDecision =
        setupStateAndRebalance(alwaysYesAllocationDecider, Settings.EMPTY, true);
    final ClusterState state = stateAndDecision.v1();
    final MoveDecision moveDecision = stateAndDecision.v2();
    assertEquals(Type.YES, moveDecision.getClusterRebalanceDecision().type());
    assertNotNull(moveDecision.getExplanation());
    // one node decision per node other than the one currently holding the shard
    assertEquals(state.nodes().getSize() - 1, moveDecision.getNodeDecisions().size());
}
/**
 * Even when cluster-level rebalancing is allowed (canRebalance == YES), the shard must stay
 * put if no node can accept it: the allocation decision is NO, the explanation says no target
 * node can both allocate the shard and improve the balance, no target is chosen, and the node
 * decisions are ordered by non-decreasing weight ranking.
 */
public void testRebalancingNotAllowedDueToCanAllocate() {
    // Decider that rejects allocation on every node, so no rebalance target can exist.
    AllocationDecider canAllocateDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            return Decision.NO;
        }
    };
    Tuple<ClusterState, MoveDecision> rebalance = setupStateAndRebalance(canAllocateDecider, Settings.EMPTY, false);
    ClusterState clusterState = rebalance.v1();
    MoveDecision rebalanceDecision = rebalance.v2();
    // Cluster-level rebalancing itself was allowed ...
    assertEquals(Type.YES, rebalanceDecision.getClusterRebalanceDecision().type());
    // ... but the overall decision is NO because no node can take the shard.
    assertEquals(AllocationDecision.NO, rebalanceDecision.getAllocationDecision());
    assertThat(rebalanceDecision.getExplanation(), startsWith(
        "cannot rebalance as no target node exists that can both allocate this shard and improve the cluster balance"));
    // One node decision per node other than the one currently holding the shard.
    assertEquals(clusterState.nodes().getSize() - 1, rebalanceDecision.getNodeDecisions().size());
    assertNull(rebalanceDecision.getTargetNode());
    // Node decisions must be sorted by non-decreasing weight ranking.
    int prevRanking = 0;
    for (NodeAllocationResult result : rebalanceDecision.getNodeDecisions()) {
        assertThat(result.getWeightRanking(), greaterThanOrEqualTo(prevRanking));
        prevRanking = result.getWeightRanking();
    }
}
/**
 * With an extremely high balancer threshold, no move improves the weight enough to cross it,
 * so even though every node could accept the shard the allocation decision must be NO, with
 * no target node and node decisions ordered by weight ranking.
 */
public void testDontBalanceShardWhenThresholdNotMet() {
    // Every node is willing to take the shard; only the threshold blocks the move.
    AllocationDecider canAllocateDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            return Decision.YES;
        }
    };
    // ridiculously high threshold setting so we won't rebalance
    Settings balancerSettings = Settings.builder().put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 1000f).build();
    Tuple<ClusterState, MoveDecision> rebalance = setupStateAndRebalance(canAllocateDecider, balancerSettings, false);
    ClusterState clusterState = rebalance.v1();
    MoveDecision rebalanceDecision = rebalance.v2();
    // Rebalancing is allowed in principle, but the threshold prevents any actual move.
    assertEquals(Type.YES, rebalanceDecision.getClusterRebalanceDecision().type());
    assertEquals(AllocationDecision.NO, rebalanceDecision.getAllocationDecision());
    assertNotNull(rebalanceDecision.getExplanation());
    // One node decision per node other than the one currently holding the shard.
    assertEquals(clusterState.nodes().getSize() - 1, rebalanceDecision.getNodeDecisions().size());
    assertNull(rebalanceDecision.getTargetNode());
    // Node decisions must be sorted by non-decreasing weight ranking.
    int prevRanking = 0;
    for (NodeAllocationResult result : rebalanceDecision.getNodeDecisions()) {
        assertThat(result.getWeightRanking(), greaterThanOrEqualTo(prevRanking));
        prevRanking = result.getWeightRanking();
    }
}
/**
 * Runs a full balancer pass, picks the shard it chose to relocate, then re-runs the
 * single-shard {@code decideShardAllocation} step for that same shard and asserts it picks
 * the same target node — i.e. the explain path agrees with the real allocation path.
 * Nodes with filter-excluded ids must all show a NO can-allocate decision.
 */
public void testSingleShardBalanceProducesSameResultsAsBalanceStep() {
    final String[] indices = { "idx1", "idx2" };
    // Create a cluster state with 2 indices, each with 1 started primary shard, and only
    // one node initially so that all primary shards get allocated to the same node. We are only
    // using 2 indices (i.e. 2 total primary shards) because if we have any more than 2 started shards
    // in the routing table, then we have no guarantees about the order in which the 3 or more shards
    // are selected to be rebalanced to the new node, and hence the node to which they are rebalanced
    // is not deterministic. Using only two shards guarantees that only one of those two shards will
    // be rebalanced, and so we pick the one that was chosen to be rebalanced and execute the single-shard
    // rebalance step on it to make sure it gets assigned to the same node.
    ClusterState clusterState = ClusterStateCreationUtils.state(1, indices, 1);
    // add new nodes so one of the primaries can be rebalanced
    DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(clusterState.nodes());
    int numAddedNodes = randomIntBetween(1, 5);
    // randomly select a subset of the newly added nodes to set filter allocation on (but not all)
    int excludeNodesSize = randomIntBetween(0, numAddedNodes - 1);
    final Set<String> excludeNodes = new HashSet<>();
    for (int i = 0; i < numAddedNodes; i++) {
        DiscoveryNode discoveryNode = newNode(randomAlphaOfLength(7));
        nodesBuilder.add(discoveryNode);
        if (i < excludeNodesSize) {
            excludeNodes.add(discoveryNode.getId());
        }
    }
    clusterState = ClusterState.builder(clusterState).nodes(nodesBuilder).build();
    // Simulates filter allocation: excluded nodes reject the shard, everyone else accepts.
    AllocationDecider allocationDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            if (excludeNodes.contains(node.nodeId())) {
                return Decision.NO;
            }
            return Decision.YES;
        }
    };
    AllocationDecider rebalanceDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
            return Decision.YES;
        }
    };
    List<AllocationDecider> allocationDeciders = Arrays.asList(rebalanceDecider, allocationDecider);
    RoutingAllocation routingAllocation = newRoutingAllocation(
        new AllocationDeciders(Settings.EMPTY, allocationDeciders), clusterState);
    // allocate and get the node that is now relocating
    BalancedShardsAllocator allocator = new BalancedShardsAllocator(Settings.EMPTY);
    allocator.allocate(routingAllocation);
    ShardRouting shardToRebalance = null;
    for (RoutingNode routingNode : routingAllocation.routingNodes()) {
        List<ShardRouting> relocatingShards = routingNode.shardsWithState(ShardRoutingState.RELOCATING);
        if (relocatingShards.size() > 0) {
            shardToRebalance = randomFrom(relocatingShards);
            break;
        }
    }
    // NOTE(review): this relies on the balancer always relocating at least one shard here;
    // if it ever did not, shardToRebalance would be null and the line below would NPE.
    routingAllocation = newRoutingAllocation(new AllocationDeciders(Settings.EMPTY, allocationDeciders), clusterState);
    routingAllocation.debugDecision(true);
    ShardRouting shard = clusterState.getRoutingNodes().activePrimary(shardToRebalance.shardId());
    MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, routingAllocation).getMoveDecision();
    // The single-shard explain step must pick the same target the full balance pass chose.
    assertEquals(shardToRebalance.relocatingNodeId(), rebalanceDecision.getTargetNode().getId());
    // make sure all excluded nodes returned a NO decision
    for (NodeAllocationResult nodeResult : rebalanceDecision.getNodeDecisions()) {
        if (excludeNodes.contains(nodeResult.getNode().getId())) {
            assertEquals(Type.NO, nodeResult.getCanAllocateDecision().type());
        }
    }
}
/**
 * Exercises the weight-ranking exposed in node decisions (lower ranking = better target):
 * (1) a lone shard can't improve balance anywhere, so every node shares rank 1;
 * (2) new empty nodes must all outrank an overloaded current node;
 * (3) with a 3/2/2 shard spread plus one fresh node, the fresh node ranks best, the two
 *     2-shard nodes tie with the current node, and the 3-shard node ranks worst.
 */
public void testNodeDecisionsRanking() {
    // only one shard, so moving it will not create a better balance anywhere, so all node decisions should
    // return the same ranking as the current node
    ClusterState clusterState = ClusterStateCreationUtils.state(randomIntBetween(1, 10), new String[] { "idx" }, 1);
    ShardRouting shardToRebalance = clusterState.routingTable().index("idx").shardsWithState(ShardRoutingState.STARTED).get(0);
    MoveDecision decision = executeRebalanceFor(shardToRebalance, clusterState, emptySet(), -1);
    int currentRanking = decision.getCurrentNodeRanking();
    assertEquals(1, currentRanking);
    for (NodeAllocationResult result : decision.getNodeDecisions()) {
        assertEquals(1, result.getWeightRanking());
    }
    // start off with one node and several shards assigned to that node, then add a few nodes to the cluster,
    // each of these new nodes should have a better ranking than the current, given a low enough threshold
    clusterState = ClusterStateCreationUtils.state(1, new String[] { "idx" }, randomIntBetween(2, 10));
    shardToRebalance = clusterState.routingTable().index("idx").shardsWithState(ShardRoutingState.STARTED).get(0);
    clusterState = addNodesToClusterState(clusterState, randomIntBetween(1, 10));
    decision = executeRebalanceFor(shardToRebalance, clusterState, emptySet(), 0.01f);
    for (NodeAllocationResult result : decision.getNodeDecisions()) {
        // lower ranking number means a better rebalance target
        assertThat(result.getWeightRanking(), lessThan(decision.getCurrentNodeRanking()));
    }
    // start off with 3 nodes and 7 shards, so that one of the 3 nodes will have 3 shards assigned, the remaining 2
    // nodes will have 2 shard each. then, add another node. pick a shard on one of the nodes that has only 2 shard
    // to rebalance. the new node should have the best ranking (because it has no shards), followed by the node currently
    // holding the shard as well as the other node with only 2 shards (they should have the same ranking), followed by the
    // node with 3 shards which will have the lowest ranking.
    clusterState = ClusterStateCreationUtils.state(3, new String[] { "idx" }, 7);
    shardToRebalance = null;
    Set<String> nodesWithTwoShards = new HashSet<>();
    String nodeWithThreeShards = null;
    for (RoutingNode node : clusterState.getRoutingNodes()) {
        if (node.numberOfShardsWithState(ShardRoutingState.STARTED) == 2) {
            nodesWithTwoShards.add(node.nodeId());
            if (shardToRebalance == null) {
                shardToRebalance = node.shardsWithState(ShardRoutingState.STARTED).get(0);
            }
        } else {
            assertEquals(3, node.numberOfShardsWithState(ShardRoutingState.STARTED));
            assertNull(nodeWithThreeShards); // should only have one of these
            nodeWithThreeShards = node.nodeId();
        }
    }
    clusterState = addNodesToClusterState(clusterState, 1);
    decision = executeRebalanceFor(shardToRebalance, clusterState, emptySet(), 0.01f);
    for (NodeAllocationResult result : decision.getNodeDecisions()) {
        if (result.getWeightRanking() < decision.getCurrentNodeRanking()) {
            // highest ranked node should not be any of the initial nodes
            assertFalse(nodesWithTwoShards.contains(result.getNode().getId()));
            assertNotEquals(nodeWithThreeShards, result.getNode().getId());
        } else if (result.getWeightRanking() > decision.getCurrentNodeRanking()) {
            // worst ranked should be the node with three shards
            assertEquals(nodeWithThreeShards, result.getNode().getId());
        } else {
            // nodes tied with the current node are the other 2-shard nodes
            assertTrue(nodesWithTwoShards.contains(result.getNode().getId()));
        }
    }
}
/**
 * Runs the single-shard rebalance decision for {@code shardRouting}: allocation is vetoed on
 * the given node ids, cluster-level rebalancing is always permitted, and the balancer
 * threshold is overridden unless the sentinel value {@code -1} is passed.
 *
 * @param shardRouting    the shard to decide a rebalance for
 * @param clusterState    the cluster state to evaluate against
 * @param noDecisionNodes node ids on which allocation is denied
 * @param threshold       balancer threshold, or -1 to keep the default
 * @return the balancer's move decision for the shard
 */
private MoveDecision executeRebalanceFor(final ShardRouting shardRouting, final ClusterState clusterState,
                                         final Set<String> noDecisionNodes, final float threshold) {
    final Settings balancerSettings;
    if (Float.compare(-1.0f, threshold) == 0) {
        balancerSettings = Settings.EMPTY;
    } else {
        balancerSettings = Settings.builder().put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), threshold).build();
    }
    // Deny allocation only on the explicitly listed nodes.
    AllocationDecider vetoListedNodesDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            return noDecisionNodes.contains(node.nodeId()) ? Decision.NO : Decision.YES;
        }
    };
    // Rebalancing itself is never the limiting factor in these scenarios.
    AllocationDecider alwaysRebalanceDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
            return Decision.YES;
        }
    };
    RoutingAllocation allocation = newRoutingAllocation(
        new AllocationDeciders(Settings.EMPTY, Arrays.asList(vetoListedNodesDecider, alwaysRebalanceDecider)),
        clusterState);
    return new BalancedShardsAllocator(balancerSettings).decideShardAllocation(shardRouting, allocation).getMoveDecision();
}
/**
 * Returns a copy of the given cluster state with {@code numNodesToAdd} freshly named
 * discovery nodes added to it.
 */
private ClusterState addNodesToClusterState(ClusterState clusterState, int numNodesToAdd) {
    final DiscoveryNodes.Builder builder = DiscoveryNodes.builder(clusterState.nodes());
    for (int added = 0; added < numNodesToAdd; added++) {
        builder.add(newNode(randomAlphaOfLength(7)));
    }
    return ClusterState.builder(clusterState).nodes(builder).build();
}
/**
 * Builds a 2-node cluster with index "idx" (8-13 shards), adds one extra node so a rebalance
 * target exists, and returns the cluster state together with the balancer's move decision for
 * the first primary shard. When {@code rebalanceExpected} is false, also verifies that a full
 * reroute leaves the shard on its current node.
 *
 * @param allocationDecider caller-supplied decider controlling which nodes may take the shard
 * @param balancerSettings  settings for the {@link BalancedShardsAllocator}
 * @param rebalanceExpected whether the scenario expects the shard to actually move
 */
private Tuple<ClusterState, MoveDecision> setupStateAndRebalance(AllocationDecider allocationDecider,
                                                                 Settings balancerSettings,
                                                                 boolean rebalanceExpected) {
    // Cluster-level rebalancing is always permitted; only the caller's decider restricts nodes.
    AllocationDecider rebalanceDecider = new AllocationDecider(Settings.EMPTY) {
        @Override
        public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
            return Decision.YES;
        }
    };
    List<AllocationDecider> allocationDeciders = Arrays.asList(rebalanceDecider, allocationDecider);
    final int numShards = randomIntBetween(8, 13);
    BalancedShardsAllocator allocator = new BalancedShardsAllocator(balancerSettings);
    ClusterState clusterState = ClusterStateCreationUtils.state("idx", 2, numShards);
    // add a new node so shards can be rebalanced there
    DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(clusterState.nodes());
    nodesBuilder.add(newNode(randomAlphaOfLength(7)));
    clusterState = ClusterState.builder(clusterState).nodes(nodesBuilder).build();
    ShardRouting shard = clusterState.routingTable().index("idx").shard(0).primaryShard();
    RoutingAllocation routingAllocation = newRoutingAllocation(
        new AllocationDeciders(Settings.EMPTY, allocationDeciders), clusterState);
    MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, routingAllocation).getMoveDecision();
    if (rebalanceExpected == false) {
        // No move expected: a reroute must keep the shard on its original node.
        assertAssignedNodeRemainsSame(allocator, routingAllocation, shard);
    }
    return Tuple.tuple(clusterState, rebalanceDecision);
}
/**
 * Creates a {@link RoutingAllocation} over the given state with debug decisions enabled, so
 * explanations are collected for every decider call.
 */
private RoutingAllocation newRoutingAllocation(AllocationDeciders deciders, ClusterState state) {
    final RoutingAllocation routingAllocation = new RoutingAllocation(
        deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime());
    routingAllocation.debugDecision(true);
    return routingAllocation;
}
/**
 * Runs a full allocation pass and asserts that the active primary for the given shard is
 * still assigned to the node it was on before the reroute.
 */
private void assertAssignedNodeRemainsSame(BalancedShardsAllocator allocator, RoutingAllocation routingAllocation,
                                           ShardRouting originalRouting) {
    allocator.allocate(routingAllocation);
    final RoutingNodes nodesAfterReroute = routingAllocation.routingNodes();
    final ShardRouting primaryAfterReroute = nodesAfterReroute.activePrimary(originalRouting.shardId());
    // make sure the previous node id is the same as the current one after rerouting
    assertEquals(originalRouting.currentNodeId(), primaryAfterReroute.currentNodeId());
}
}
| |
package com.codeplex.peerly.couchdbtest;
import Acme.Serve.SSLAcceptor;
import Acme.Serve.Serve;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.view.*;
import com.codeplex.peerly.couchdbserverandroid.R;
import com.msopentech.thali.utilities.universal.CouchDBDocumentKeyClassForTests;
import com.msopentech.thali.utilities.universal.ThaliCryptoUtilities;
import com.msopentech.thali.utilities.universal.ThaliPublicKeyComparer;
import com.msopentech.thali.utilities.universal.ThaliTestEktorpClient;
import com.couchbase.cblite.*;
import com.couchbase.cblite.listener.CBLListener;
import com.couchbase.cblite.router.CBLRequestAuthorization;
import com.couchbase.cblite.router.CBLRouter;
import com.couchbase.cblite.router.CBLURLConnection;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLSession;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.security.KeyStore;
import java.util.EnumSet;
import java.util.List;
import java.util.Properties;
/**
 * Test activity that hosts a CouchDB Lite server behind a TJWS listener configured for
 * mutual-auth SSL. The device identity lives in a locally generated PKCS12 key store, and
 * access to the test database is gated by comparing the client's SSL certificate against a
 * public key stored in the key database.
 */
public class MainActivity extends Activity {
    private CBLListener cblListener = null;
    // The embedded CouchDB Lite listener binds to loopback only, on this fixed port.
    private final int defaultCouchPort = 9898;
    private final String defaultCouchAddress = "127.0.0.1";
    // TJWS acceptor class (loaded by name) that performs self-signed mutual SSL auth.
    private final String tjwsSslAcceptor = "com.codeplex.peerly.couchdbtest.ThaliSelfSignedMutualAuthSSLAcceptor";
    private final String deviceKeyAlias = "com.codeplex.peerly.names.devicealias";
    private final String keystoreFileName = "com.codeplex.peerly.names.keystore";
    private final Logger Log = LoggerFactory.getLogger(MainActivity.class);

    /**
     * Authorization hook for the CouchDB Lite router: requests against the test database are
     * allowed only when the caller's SSL client certificate carries the public key recorded
     * in the key database. Everything else is answered with 403 FORBIDDEN.
     */
    public class Authorize implements CBLRequestAuthorization {
        @Override
        public boolean Authorize(CBLServer cblServer, CBLURLConnection cblurlConnection) {
            List<String> pathSegments = CBLRouter.splitPath(cblurlConnection.getURL());
            // For now all we really care about are attempts to access the data database.
            if (pathSegments.size() == 0 || pathSegments.get(0).equals(ThaliTestEktorpClient.TestDatabaseName) == false) {
                return true;
            }
            CBLDatabase keyDatabase = cblServer.getExistingDatabaseNamed(ThaliTestEktorpClient.KeyDatabaseName);
            // No database? Then no one is authorized.
            if (keyDatabase == null) {
                InsecureConnection(cblurlConnection);
                return false;
            }
            // Fetch the latest revision of the key document holding the expected public key.
            CBLRevisionList revisionList = keyDatabase.getAllRevisionsOfDocumentID(ThaliTestEktorpClient.KeyId, true);
            EnumSet<CBLDatabase.TDContentOptions> tdContentOptionses = EnumSet.noneOf(CBLDatabase.TDContentOptions.class);
            CBLRevision revision =
                keyDatabase.getDocumentWithIDAndRev(
                    ThaliTestEktorpClient.KeyId,
                    revisionList.getAllRevIds().get(revisionList.getAllRevIds().size() - 1),
                    tdContentOptionses);
            ObjectMapper mapper = new ObjectMapper();
            try {
                CouchDBDocumentKeyClassForTests keyClassForTests =
                    mapper.readValue(revision.getJson(), CouchDBDocumentKeyClassForTests.class);
                if (CouchDBDocumentKeyClassForTests.RSAKeyType.equals(keyClassForTests.getKeyType()) == false) {
                    // A 500 would be more appropriate but we are just testing
                    InsecureConnection(cblurlConnection);
                    return false;
                }
                SSLSession sslSession = cblurlConnection.getSSLSession();
                try {
                    // NOTE(review): getPeerCertificateChain() uses the deprecated
                    // javax.security.cert API; getPeerCertificates() with
                    // java.security.cert would be the modern replacement.
                    javax.security.cert.X509Certificate[] certChain = sslSession.getPeerCertificateChain();
                    // Compare the last certificate in the client's chain against the stored key.
                    if (new ThaliPublicKeyComparer(certChain[certChain.length - 1]
                            .getPublicKey())
                            .KeysEqual(keyClassForTests.generatePublicKey()) == false) {
                        InsecureConnection(cblurlConnection);
                        return false;
                    }
                    return true;
                } catch (Exception e) {
                    // A 500 would be better
                    InsecureConnection(cblurlConnection);
                    return false;
                }
            } catch (IOException e) {
                InsecureConnection(cblurlConnection);
                return false;
            }
        }

        /** Rejects the request with 403 FORBIDDEN and closes the (empty) response body. */
        private void InsecureConnection(CBLURLConnection cblurlConnection) {
            cblurlConnection.setResponseCode(CBLStatus.FORBIDDEN);
            try {
                cblurlConnection.getResponseOutputStream().close();
            } catch (IOException e) {
                android.util.Log.e("ThaliTestServer", "Error closing empty output stream");
            }
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        if (savedInstanceState == null) {
            getFragmentManager().beginTransaction()
                    .add(R.id.container, new PlaceholderFragment())
                    .commit();
        }
        MakeSureDeviceKeyStoreExists();
        String filesDir = getFilesDir().getAbsolutePath();
        try {
            // Start the CouchDB Lite server
            CBLServer server = new CBLServer(filesDir);
            Properties tjwsProperties = new Properties();
            tjwsProperties.setProperty(Serve.ARG_ACCEPTOR_CLASS, tjwsSslAcceptor);
            tjwsProperties.setProperty(SSLAcceptor.ARG_KEYSTORETYPE, ThaliCryptoUtilities.PrivateKeyHolderFormat);
            tjwsProperties.setProperty(SSLAcceptor.ARG_KEYSTOREFILE, GetKeyStoreAbsolutePath());
            tjwsProperties.setProperty(SSLAcceptor.ARG_KEYSTOREPASS, new String(ThaliCryptoUtilities.DefaultPassPhrase));
            // Require client certificates so the Authorize hook can check the peer's key.
            tjwsProperties.setProperty(SSLAcceptor.ARG_CLIENTAUTH, "true");
            tjwsProperties.setProperty(Serve.ARG_BINDADDRESS, defaultCouchAddress);
            Authorize authorize = new Authorize();
            cblListener = new CBLListener(server, defaultCouchPort, tjwsProperties, authorize);
            cblListener.start();
        } catch (IOException e) {
            Log.error("Error starting TDServer", e);
        }
        Log.debug("Got this far, woohoo!");
    }

    /**
     * Return the absolute path of the Peerly device key store.
     *
     * I had wanted this to just be a property of the class but getFilesDir() doesn't seem to be
     * initialized until onCreate is called so setting the value either as a property or via
     * the constructor won't work. Hence this method.
     * @return the absolute path of the device key store file
     */
    private String GetKeyStoreAbsolutePath() {
        return new File(getFilesDir(), keystoreFileName).getAbsolutePath();
    }

    /**
     * If no key store exists to hold the device's keying information then this method
     * will create one with a freshly generated public/private key pair.
     */
    private void MakeSureDeviceKeyStoreExists() {
        File keyStoreFile = new File(GetKeyStoreAbsolutePath());
        if (keyStoreFile.exists()) {
            // TODO: We need to check if the device's cert is expired and renew it, but this will probably require getting a new root chain, so let's wait until that's figured out
            return;
        }
        KeyStore keyStore =
            ThaliCryptoUtilities.CreatePKCS12KeyStoreWithPublicPrivateKeyPair(
                ThaliCryptoUtilities.GeneratePeerlyAcceptablePublicPrivateKeyPair(), deviceKeyAlias,
                ThaliCryptoUtilities.DefaultPassPhrase);
        // TODO: I really need to figure out if I can safely use Java 7 features like try with resources and Android, the fact that Android Studio defaults to not support Java 7 makes me very nervous
        FileOutputStream fileOutputStream = null;
        try {
            // Yes this can swallow exceptions (if you got an exception inside this try and then the finally has an exception), but given what I'm doing here I don't care.
            try {
                fileOutputStream = new FileOutputStream(keyStoreFile);
                keyStore.store(fileOutputStream, ThaliCryptoUtilities.DefaultPassPhrase);
            } finally {
                if (fileOutputStream != null) {
                    fileOutputStream.close();
                }
            }
        } catch (Exception e) {
            Log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    @Override
    protected void onDestroy() {
        if (cblListener != null) {
            cblListener.stop();
        }
        // BUGFIX: Activity lifecycle overrides must call through to the superclass;
        // omitting super.onDestroy() makes Android throw SuperNotCalledException.
        super.onDestroy();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        switch (item.getItemId()) {
            case R.id.action_settings:
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * A placeholder fragment containing a simple view.
     */
    public static class PlaceholderFragment extends Fragment {
        public PlaceholderFragment() {
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                                 Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_main, container, false);
            return rootView;
        }
    }
}
| |
package org.newdawn.slick.geom;
import org.newdawn.slick.util.FastTrig;
/**
 * A 2 dimensional affine transformation that can be applied to <code>Shape</code>
 * implementations.
 *
 * @author Mark
 */
public class Transform {
    /**
     * Value for each position in the matrix
     *
     * |0 1 2|
     * |3 4 5|
     * |6 7 8|
     */
    private float matrixPosition[];

    /**
     * Create an identity transform
     */
    public Transform() {
        matrixPosition = new float[]{1, 0, 0, 0, 1, 0, 0, 0, 1};
    }

    /**
     * Copy a transform
     *
     * @param other The other transform to copy
     */
    public Transform(Transform other) {
        matrixPosition = new float[9];
        for (int i = 0; i < 9; i++) {
            matrixPosition[i] = other.matrixPosition[i];
        }
    }

    /**
     * Concatenate two transforms into one
     *
     * @param t1 The first transform to join
     * @param t2 The second transform to join
     */
    public Transform(Transform t1, Transform t2) {
        this(t1);
        concatenate(t2);
    }

    /**
     * Create a transform for the given positions (the bottom matrix row is fixed to 0 0 1)
     *
     * @param matrixPosition An array of float[6] to set up a transform
     * @throws RuntimeException if the array is not of length 6
     */
    public Transform(float matrixPosition[]) {
        if (matrixPosition.length != 6) {
            throw new RuntimeException("The parameter must be a float array of length 6.");
        }
        this.matrixPosition = new float[]{matrixPosition[0], matrixPosition[1], matrixPosition[2],
                                          matrixPosition[3], matrixPosition[4], matrixPosition[5],
                                          0, 0, 1};
    }

    /**
     * Create a transform for the given positions (the bottom matrix row is fixed to 0 0 1)
     *
     * @param point00 float for the first position
     * @param point01 float for the second position
     * @param point02 float for the third position
     * @param point10 float for the fourth position
     * @param point11 float for the fifth position
     * @param point12 float for the sixth position
     */
    public Transform(float point00, float point01, float point02, float point10, float point11, float point12) {
        matrixPosition = new float[]{point00, point01, point02, point10, point11, point12, 0, 0, 1};
    }

    /**
     * Transform the point pairs in the source array and store them in the destination array.
     * All operations will be done before storing the results in the destination. This way the source
     * and destination array can be the same without worry of overwriting information before it is transformed.
     *
     * @param source Array of floats containing the points to be transformed
     * @param sourceOffset Where in the array to start processing
     * @param destination Array of floats to store the results.
     * @param destOffset Where in the array to start storing
     * @param numberOfPoints Number of points to be transformed
     * @throws ArrayIndexOutOfBoundsException if sourceOffset + numberOfPoints * 2 > source.length or the same operation on the destination array
     */
    public void transform(float source[], int sourceOffset, float destination[], int destOffset, int numberOfPoints) {
        //TODO performance can be improved by removing the safety to the destination array
        float result[] = new float[numberOfPoints * 2];
        for (int i = 0; i < numberOfPoints * 2; i += 2) {
            // Apply the top two matrix rows to (x, y, 1) to produce (x', y').
            for (int j = 0; j < 6; j += 3) {
                result[i + (j / 3)] = source[i + sourceOffset] * matrixPosition[j]
                                      + source[i + sourceOffset + 1] * matrixPosition[j + 1]
                                      + 1 * matrixPosition[j + 2];
            }
        }
        //for safety of the destination, the results are copied after the entire operation.
        for (int i = 0; i < numberOfPoints * 2; i += 2) {
            destination[i + destOffset] = result[i];
            destination[i + destOffset + 1] = result[i + 1];
        }
    }

    /**
     * Update this Transform by concatenating the given Transform to this one.
     *
     * @param tx The Transform to concatenate to this one.
     * @return The resulting Transform
     */
    public Transform concatenate(Transform tx) {
        float[] mp = new float[9];
        // Standard 2x3 affine multiply: this * tx (the bottom row stays 0 0 1).
        float n00 = matrixPosition[0] * tx.matrixPosition[0] + matrixPosition[1] * tx.matrixPosition[3];
        float n01 = matrixPosition[0] * tx.matrixPosition[1] + matrixPosition[1] * tx.matrixPosition[4];
        float n02 = matrixPosition[0] * tx.matrixPosition[2] + matrixPosition[1] * tx.matrixPosition[5] + matrixPosition[2];
        float n10 = matrixPosition[3] * tx.matrixPosition[0] + matrixPosition[4] * tx.matrixPosition[3];
        float n11 = matrixPosition[3] * tx.matrixPosition[1] + matrixPosition[4] * tx.matrixPosition[4];
        float n12 = matrixPosition[3] * tx.matrixPosition[2] + matrixPosition[4] * tx.matrixPosition[5] + matrixPosition[5];
        mp[0] = n00;
        mp[1] = n01;
        mp[2] = n02;
        mp[3] = n10;
        mp[4] = n11;
        mp[5] = n12;
        matrixPosition = mp;
        return this;
    }

    /**
     * Convert this Transform to a String.
     *
     * @return This Transform in human readable format.
     */
    public String toString() {
        String result = "Transform[[" + matrixPosition[0] + "," + matrixPosition[1] + "," + matrixPosition[2] +
                        "][" + matrixPosition[3] + "," + matrixPosition[4] + "," + matrixPosition[5] +
                        "][" + matrixPosition[6] + "," + matrixPosition[7] + "," + matrixPosition[8] + "]]";
        return result;
    }

    /**
     * Get an array representing this Transform.
     *
     * @return an array representing this Transform.
     */
    public float[] getMatrixPosition() {
        return matrixPosition;
    }

    /**
     * Create a new rotation Transform
     *
     * @param angle The angle in radians to set the transform.
     * @return The resulting Transform
     */
    public static Transform createRotateTransform(float angle) {
        return new Transform((float) FastTrig.cos(angle), -(float) FastTrig.sin(angle), 0,
                             (float) FastTrig.sin(angle), (float) FastTrig.cos(angle), 0);
    }

    /**
     * Create a new rotation Transform around the specified point
     *
     * @param angle The angle in radians to set the transform.
     * @param x The x coordinate around which to rotate.
     * @param y The y coordinate around which to rotate.
     * @return The resulting Transform
     */
    public static Transform createRotateTransform(float angle, float x, float y) {
        Transform temp = Transform.createRotateTransform(angle);
        float sinAngle = temp.matrixPosition[3];
        // BUGFIX: cos(angle) is stored at indices 0/4 of the rotation matrix; the previous
        // code read index 3 (sin(angle)), yielding a wrong translation component for
        // rotation about (x, y). Standard form: tx = x*(1-cos) + y*sin, ty = y*(1-cos) - x*sin.
        float oneMinusCosAngle = 1.0f - temp.matrixPosition[4];
        temp.matrixPosition[2] = x * oneMinusCosAngle + y * sinAngle;
        temp.matrixPosition[5] = y * oneMinusCosAngle - x * sinAngle;
        return temp;
    }

    /**
     * Create a new translation Transform
     *
     * @param xOffset The amount to move in the x direction
     * @param yOffset The amount to move in the y direction
     * @return The resulting Transform
     */
    public static Transform createTranslateTransform(float xOffset, float yOffset) {
        return new Transform(1, 0, xOffset, 0, 1, yOffset);
    }

    /**
     * Create a new scaling Transform
     *
     * @param xScale The amount to scale in the x coordinate
     * @param yScale The amount to scale in the y coordinate
     * @return The resulting Transform
     */
    public static Transform createScaleTransform(float xScale, float yScale) {
        return new Transform(xScale, 0, 0, 0, yScale, 0);
    }

    /**
     * Transform the vector2f based on the matrix defined in this transform
     *
     * @param pt The point to be transformed
     * @return The resulting point transformed by this matrix
     */
    public Vector2f transform(Vector2f pt) {
        float[] in = new float[] {pt.x, pt.y};
        float[] out = new float[2];
        transform(in, 0, out, 0, 1);
        return new Vector2f(out[0], out[1]);
    }
}
| |
package org.deeplearning4j.nn.layers.convolution;
import org.datavec.api.io.labels.PathLabelGenerator;
import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.split.FileSplit;
import org.datavec.api.util.ClassPathResource;
import org.datavec.image.recordreader.ImageRecordReader;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.layers.setup.ConvolutionLayerSetup;
import org.deeplearning4j.nn.conf.preprocessor.CnnToFeedForwardPreProcessor;
import org.deeplearning4j.nn.conf.preprocessor.FeedForwardToCnnPreProcessor;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.params.BatchNormalizationParamInitializer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.nd4j.linalg.util.FeatureUtil;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
/**
* @author Adam Gibson
*/
public class ConvolutionLayerSetupTest {
/**
 * Applying {@link ConvolutionLayerSetup} to the incomplete configuration must produce exactly
 * the manually completed configuration.
 */
@Test
public void testConvolutionLayerSetup() {
    MultiLayerConfiguration.Builder builder = inComplete();
    // Infer nIn values and preprocessors for 28x28 single-channel input.
    new ConvolutionLayerSetup(builder, 28, 28, 1);
    MultiLayerConfiguration completed = complete().build();
    MultiLayerConfiguration test = builder.build();
    assertEquals(completed, test);
}
/**
 * Builds a conv -> pool -> conv -> pool -> dense -> output network, lets
 * {@link ConvolutionLayerSetup} wire up the layer inputs for 76x76x3 images, and checks the
 * network can be initialized and fit on a random 10-example batch without failing.
 */
@Test
public void testDenseToOutputLayer() {
    final int numRows = 76;
    final int numColumns = 76;
    int nChannels = 3;
    int outputNum = 6;
    int iterations = 3;
    int seed = 123;
    //setup the network
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
        .regularization(true).l1(1e-1).l2(2e-4).useDropConnect(true).dropOut(0.5).miniBatch(true)
        .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
        .layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
            .activation(Activation.RELU).build())
        .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
            .build())
        .layer(2, new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
            .activation(Activation.RELU).build())
        .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
            .build())
        .layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
        .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
            .build())
        .backprop(true).pretrain(false);
    // Fill in the missing nIn/preprocessor wiring for the image input shape.
    new ConvolutionLayerSetup(builder, numRows, numColumns, nChannels);
    // Random features with all labels in class 1; only a smoke test — loss value is not checked.
    DataSet d = new DataSet(Nd4j.rand(12345, 10, nChannels, numRows, numColumns),
        FeatureUtil.toOutcomeMatrix(new int[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, 6));
    MultiLayerNetwork network = new MultiLayerNetwork(builder.build());
    network.init();
    network.fit(d);
}
@Test
public void testMnistLenet() throws Exception {
MultiLayerConfiguration.Builder incomplete = incompleteMnistLenet();
incomplete.setInputType(InputType.convolutionalFlat(28, 28, 1));
MultiLayerConfiguration testConf = incomplete.build();
assertEquals(800, ((FeedForwardLayer) testConf.getConf(4).getLayer()).getNIn());
assertEquals(500, ((FeedForwardLayer) testConf.getConf(5).getLayer()).getNIn());
//test instantiation
DataSetIterator iter = new MnistDataSetIterator(10, 10);
MultiLayerNetwork network = new MultiLayerNetwork(testConf);
network.init();
network.fit(iter.next());
}
@Test
public void testMultiChannel() throws Exception {
INDArray in = Nd4j.rand(new int[] {10, 3, 28, 28});
INDArray labels = Nd4j.rand(10, 2);
DataSet next = new DataSet(in, labels);
NeuralNetConfiguration.ListBuilder builder = (NeuralNetConfiguration.ListBuilder) incompleteLFW();
new ConvolutionLayerSetup(builder, 28, 28, 3);
MultiLayerConfiguration conf = builder.build();
ConvolutionLayer layer2 = (ConvolutionLayer) conf.getConf(2).getLayer();
assertEquals(6, layer2.getNIn());
MultiLayerNetwork network = new MultiLayerNetwork(conf);
network.init();
network.fit(next);
}
@Test
public void testLRN() throws Exception {
List<String> labels = new ArrayList<>(Arrays.asList("Zico", "Ziwang_Xu"));
String rootDir = new ClassPathResource("lfwtest").getFile().getAbsolutePath();
RecordReader reader = new ImageRecordReader(28, 28, 3);
reader.initialize(new FileSplit(new File(rootDir)));
DataSetIterator recordReader = new RecordReaderDataSetIterator(reader, 10, 1, labels.size());
labels.remove("lfwtest");
NeuralNetConfiguration.ListBuilder builder = (NeuralNetConfiguration.ListBuilder) incompleteLRN();
builder.setInputType(InputType.convolutional(28, 28, 3));
MultiLayerConfiguration conf = builder.build();
ConvolutionLayer layer2 = (ConvolutionLayer) conf.getConf(3).getLayer();
assertEquals(6, layer2.getNIn());
}
public MultiLayerConfiguration.Builder incompleteLRN() {
MultiLayerConfiguration.Builder builder =
new NeuralNetConfiguration.Builder().seed(3)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nOut(6).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}).build())
.layer(2, new LocalResponseNormalization.Builder().build())
.layer(3, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nOut(6).build())
.layer(4, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}).build())
.layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(2)
.build());
return builder;
}
public MultiLayerConfiguration.Builder incompleteLFW() {
MultiLayerConfiguration.Builder builder =
new NeuralNetConfiguration.Builder().seed(3)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nOut(6).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}).build())
.layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nOut(6).build())
.layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}).build())
.layer(4, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(2)
.build());
return builder;
}
public MultiLayerConfiguration.Builder incompleteMnistLenet() {
MultiLayerConfiguration.Builder builder =
new NeuralNetConfiguration.Builder().seed(3)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nIn(1).nOut(20).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}, new int[] {2, 2}).build())
.layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nIn(20).nOut(50).build())
.layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}, new int[] {2, 2}).build())
.layer(4, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(500)
.build())
.layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.activation(Activation.SOFTMAX).nOut(10)
.build());
return builder;
}
public MultiLayerConfiguration mnistLenet() {
MultiLayerConfiguration builder =
new NeuralNetConfiguration.Builder().seed(3)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nIn(1).nOut(6).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {5, 5}, new int[] {2, 2}).build())
.layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nIn(1).nOut(6).build())
.layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {5, 5}, new int[] {2, 2}).build())
.layer(4, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(150)
.nOut(10).build())
.build();
return builder;
}
public MultiLayerConfiguration.Builder inComplete() {
int nChannels = 1;
int outputNum = 10;
int iterations = 10;
int seed = 123;
MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(new int[] {10, 10},
new int[] {2, 2}).nIn(nChannels).nOut(6).build())
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
.weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
.build())
.backprop(true).pretrain(false);
return builder;
}
public MultiLayerConfiguration.Builder complete() {
final int numRows = 28;
final int numColumns = 28;
int nChannels = 1;
int outputNum = 10;
int iterations = 10;
int seed = 123;
MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(new int[] {10, 10},
new int[] {2, 2}).nIn(nChannels).nOut(6).build())
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
.weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nIn(5 * 5 * 1 * 6) //216
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
.build())
.inputPreProcessor(0, new FeedForwardToCnnPreProcessor(numRows, numColumns, nChannels))
.inputPreProcessor(2, new CnnToFeedForwardPreProcessor(5, 5, 6)).backprop(true).pretrain(false);
return builder;
}
@Test
public void testSubSamplingWithPadding() {
MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list()
.layer(0, new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build()) //(28-2+0)/2+1 = 14
.layer(1, new SubsamplingLayer.Builder().kernelSize(2, 2).padding(1, 1).stride(2, 2).build()) //(14-2+2)/2+1 = 8 -> 8x8x3
.layer(2, new OutputLayer.Builder().nOut(3).build());
new ConvolutionLayerSetup(builder, 28, 28, 1);
MultiLayerConfiguration conf = builder.build();
assertNotNull(conf.getInputPreProcess(2));
assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
CnnToFeedForwardPreProcessor proc = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
assertEquals(8, proc.getInputHeight());
assertEquals(8, proc.getInputWidth());
assertEquals(3, proc.getNumChannels());
assertEquals(8 * 8 * 3, ((FeedForwardLayer) conf.getConf(2).getLayer()).getNIn());
}
@Test
public void testCNNDBNMultiLayer() throws Exception {
DataSetIterator iter = new MnistDataSetIterator(2, 2);
DataSet next = iter.next();
// Run with separate activation layer
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(2).seed(123)
.weightInit(WeightInit.XAVIER).list()
.layer(0, new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}).nIn(1).nOut(6)
.activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().build())
.layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(3, new DenseLayer.Builder().nIn(28 * 28 * 6).nOut(10).activation(Activation.IDENTITY)
.build())
.layer(4, new BatchNormalization.Builder().nOut(10).build())
.layer(5, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(6, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(10).build())
.backprop(true).pretrain(false).cnnInputSize(28, 28, 1).build();
MultiLayerNetwork network = new MultiLayerNetwork(conf);
network.init();
network.setInput(next.getFeatureMatrix());
INDArray activationsActual = network.preOutput(next.getFeatureMatrix());
assertEquals(10, activationsActual.shape()[1], 1e-2);
network.fit(next);
INDArray actualGammaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA);
INDArray actualBetaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA);
assertTrue(actualGammaParam != null);
assertTrue(actualBetaParam != null);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.support.single.instance;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.EmptySystemIndices;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.core.IsEqual.equalTo;
/**
 * Unit tests for {@link TransportInstanceSingleOperationAction}: block handling,
 * retry-on-cluster-state-change behavior, transport-level error propagation, and
 * request-resolution failures, all against a {@link CapturingTransport} so no real
 * network is involved.
 */
public class TransportInstanceSingleOperationActionTests extends ESTestCase {

    private static ThreadPool THREAD_POOL;

    private ClusterService clusterService;
    private CapturingTransport transport;
    private TransportService transportService;
    private TestTransportInstanceSingleOperationAction action;

    /** Minimal concrete request; the stream constructor passes a null shard id on purpose. */
    public static class Request extends InstanceShardOperationRequest<Request> {
        public Request() {}

        public Request(StreamInput in) throws IOException {
            super(null, in);
        }
    }

    /** Empty response — the tests only care that a response arrives, not its contents. */
    public static class Response extends ActionResponse {
        public Response() {}

        public Response(StreamInput in) throws IOException {
            super(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {}
    }

    /** Test action that routes to the primary shard and refuses local shard operations. */
    class TestTransportInstanceSingleOperationAction extends TransportInstanceSingleOperationAction<Request, Response> {
        private final Map<ShardId, Object> shards = new HashMap<>();

        TestTransportInstanceSingleOperationAction(
            String actionName,
            TransportService transportService,
            ActionFilters actionFilters,
            IndexNameExpressionResolver indexNameExpressionResolver,
            Writeable.Reader<Request> request
        ) {
            super(
                actionName,
                THREAD_POOL,
                TransportInstanceSingleOperationActionTests.this.clusterService,
                transportService,
                actionFilters,
                indexNameExpressionResolver,
                request
            );
        }

        public Map<ShardId, Object> getResults() {
            return shards;
        }

        @Override
        protected String executor(ShardId shardId) {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected void shardOperation(Request request, ActionListener<Response> listener) {
            // These tests exercise routing/retry only; the shard operation itself must never run locally.
            throw new UnsupportedOperationException("Not implemented in test class");
        }

        @Override
        protected Response newResponse(StreamInput in) throws IOException {
            return new Response();
        }

        @Override
        protected void resolveRequest(ClusterState state, Request request) {}

        @Override
        protected ShardIterator shards(ClusterState clusterState, Request request) {
            return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId.getId()).primaryShardIt();
        }
    }

    /** Resolver that returns the requested indices verbatim, bypassing wildcard expansion. */
    static class MyResolver extends IndexNameExpressionResolver {
        MyResolver() {
            super(new ThreadContext(Settings.EMPTY), EmptySystemIndices.INSTANCE);
        }

        @Override
        public String[] concreteIndexNames(ClusterState state, IndicesRequest request) {
            return request.indices();
        }
    }

    @BeforeClass
    public static void startThreadPool() {
        THREAD_POOL = new TestThreadPool(TransportInstanceSingleOperationActionTests.class.getSimpleName());
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        transport = new CapturingTransport();
        clusterService = createClusterService(THREAD_POOL);
        transportService = transport.createTransportService(
            clusterService.getSettings(),
            THREAD_POOL,
            TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> clusterService.localNode(),
            null,
            Collections.emptySet()
        );
        transportService.start();
        transportService.acceptIncomingRequests();
        action = new TestTransportInstanceSingleOperationAction(
            "indices:admin/test",
            transportService,
            new ActionFilters(new HashSet<>()),
            new MyResolver(),
            Request::new
        );
    }

    @Override
    @After
    public void tearDown() throws Exception {
        super.tearDown();
        clusterService.close();
        transportService.close();
    }

    @AfterClass
    public static void destroyThreadPool() {
        ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS);
        // since static must set to null to be eligible for collection
        THREAD_POOL = null;
    }

    public void testGlobalBlock() {
        Request request = new Request();
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        // Install a retryable global block; the action must surface it as a ClusterBlockException.
        ClusterBlocks.Builder block = ClusterBlocks.builder()
            .addGlobalBlock(new ClusterBlock(1, "", false, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
        setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
        try {
            action.new AsyncSingleAction(request, listener).start();
            listener.get();
            fail("expected ClusterBlockException");
        } catch (Exception e) {
            if (ExceptionsHelper.unwrap(e, ClusterBlockException.class) == null) {
                logger.info("expected ClusterBlockException but got ", e);
                fail("expected ClusterBlockException");
            }
        }
    }

    public void testBasicRequestWorks() throws InterruptedException, ExecutionException, TimeoutException {
        Request request = new Request().index("test");
        request.shardId = new ShardId("test", "_na_", 0);
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED));
        action.new AsyncSingleAction(request, listener).start();
        assertThat(transport.capturedRequests().length, equalTo(1));
        transport.handleResponse(transport.capturedRequests()[0].requestId, new Response());
        listener.get();
    }

    public void testFailureWithoutRetry() throws Exception {
        Request request = new Request().index("test");
        request.shardId = new ShardId("test", "_na_", 0);
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED));

        action.new AsyncSingleAction(request, listener).start();
        assertThat(transport.capturedRequests().length, equalTo(1));
        long requestId = transport.capturedRequests()[0].requestId;
        transport.clear();
        // this should not trigger retry or anything and the listener should report exception immediately
        transport.handleRemoteError(
            requestId,
            new TransportException("a generic transport exception", new Exception("generic test exception"))
        );

        try {
            // result should return immediately
            assertTrue(listener.isDone());
            listener.get();
            fail("this should fail with a transport exception");
        } catch (ExecutionException t) {
            if (ExceptionsHelper.unwrap(t, TransportException.class) == null) {
                logger.info("expected TransportException but got ", t);
                fail("expected a TransportException");
            }
        }
    }

    public void testSuccessAfterRetryWithClusterStateUpdate() throws Exception {
        Request request = new Request().index("test");
        request.shardId = new ShardId("test", "_na_", 0);
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        boolean local = randomBoolean();
        setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.INITIALIZING));
        action.new AsyncSingleAction(request, listener).start();
        // this should fail because primary not initialized
        assertThat(transport.capturedRequests().length, equalTo(0));
        setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED));
        // this time it should work
        assertThat(transport.capturedRequests().length, equalTo(1));
        transport.handleResponse(transport.capturedRequests()[0].requestId, new Response());
        listener.get();
    }

    public void testSuccessAfterRetryWithExceptionFromTransport() throws Exception {
        Request request = new Request().index("test");
        request.shardId = new ShardId("test", "_na_", 0);
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        boolean local = randomBoolean();
        setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED));
        action.new AsyncSingleAction(request, listener).start();
        assertThat(transport.capturedRequests().length, equalTo(1));
        long requestId = transport.capturedRequests()[0].requestId;
        transport.clear();
        // A connection-level failure is retryable, unlike the generic TransportException above.
        DiscoveryNode node = clusterService.state().getNodes().getLocalNode();
        transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception"));
        // trigger cluster state observer
        setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED));
        assertThat(transport.capturedRequests().length, equalTo(1));
        transport.handleResponse(transport.capturedRequests()[0].requestId, new Response());
        listener.get();
    }

    public void testRetryOfAnAlreadyTimedOutRequest() throws Exception {
        // Zero timeout: the retry fires from the timeout path rather than a state change.
        Request request = new Request().index("test").timeout(new TimeValue(0, TimeUnit.MILLISECONDS));
        request.shardId = new ShardId("test", "_na_", 0);
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED));
        action.new AsyncSingleAction(request, listener).start();
        assertThat(transport.capturedRequests().length, equalTo(1));
        long requestId = transport.capturedRequests()[0].requestId;
        transport.clear();
        DiscoveryNode node = clusterService.state().getNodes().getLocalNode();
        transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception"));

        // wait until the timeout was triggered and we actually tried to send for the second time
        assertBusy(() -> assertThat(transport.capturedRequests().length, equalTo(1)));

        // let it fail the second time too
        requestId = transport.capturedRequests()[0].requestId;
        transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception"));
        try {
            // result should return immediately
            assertTrue(listener.isDone());
            listener.get();
            fail("this should fail with a transport exception");
        } catch (ExecutionException t) {
            if (ExceptionsHelper.unwrap(t, ConnectTransportException.class) == null) {
                logger.info("expected ConnectTransportException but got ", t);
                fail("expected a ConnectTransportException");
            }
        }
    }

    public void testUnresolvableRequestDoesNotHang() throws InterruptedException, ExecutionException, TimeoutException {
        // An exception thrown from resolveRequest must complete the listener rather than hang.
        action = new TestTransportInstanceSingleOperationAction(
            "indices:admin/test_unresolvable",
            transportService,
            new ActionFilters(new HashSet<>()),
            new MyResolver(),
            Request::new
        ) {
            @Override
            protected void resolveRequest(ClusterState state, Request request) {
                throw new IllegalStateException("request cannot be resolved");
            }
        };
        Request request = new Request().index("test");
        request.shardId = new ShardId("test", "_na_", 0);
        PlainActionFuture<Response> listener = new PlainActionFuture<>();
        setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED));
        action.new AsyncSingleAction(request, listener).start();
        assertThat(transport.capturedRequests().length, equalTo(0));
        try {
            listener.get();
        } catch (Exception e) {
            if (ExceptionsHelper.unwrap(e, IllegalStateException.class) == null) {
                logger.info("expected IllegalStateException but got ", e);
                fail("expected an IllegalStateException");
            }
        }
    }
}
| |
/*
************************************************************************************
* Copyright (C) 2001-2011 encuestame: system online surveys Copyright (C) 2011
* encuestame Development Team.
* Licensed under the Apache Software License version 2.0
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
************************************************************************************
*/
package org.encuestame.core.security;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import org.apache.log4j.Logger;
import org.encuestame.core.security.details.EnMeSocialUserAccount;
import org.encuestame.core.security.details.EnMeUserAccountDetails;
import org.encuestame.core.security.token.EnMeSecurityToken;
import org.encuestame.core.security.token.SocialAuthenticationToken;
import org.encuestame.core.util.ConvertDomainsToSecurityContext;
import org.encuestame.core.util.EnMeUtils;
import org.encuestame.persistence.domain.security.SocialAccount;
import org.encuestame.persistence.domain.security.UserAccount;
import org.encuestame.utils.enums.EnMePermission;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
/**
* Description Class.
* @author Picado, Juan juanATencuestame.org
* @since Dec 31, 2010 1:56:03 AM
* @version Id:
*/
public class SecurityUtils {
/*
* Log.
*/
private static Logger log = Logger.getLogger(SecurityUtils.class);
/**
*
* @param user
* @return
*/
public static EnMeSocialUserAccount convertUserAccountToUserDetails(final SocialAccount connection) {
final UserAccount user = connection.getUserOwner();
final Collection<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
authorities.addAll(ConvertDomainsToSecurityContext.convertEnMePermission(user.getSecUserPermissions()));
final EnMeSocialUserAccount enMeSocialUserAccount = new EnMeSocialUserAccount(user.getUsername(),
authorities,
user.isUserStatus() == null ? false : user.isUserStatus(),
true, // account not expired
true, // credentials not expired
true, // account not locked
user.getCompleteName() == null ? "" : user.getCompleteName(), // complete name
user.getUserEmail(), // user email
user, connection.getAccounType(),
connection.getSocialProfileId(),
connection.getProfilePictureUrl());
return enMeSocialUserAccount;
}
/**
* Convert User Account to {@link EnMeUserAccountDetails}.
* @param user
* @param roleUserAuth
* @return
*/
public static EnMeUserAccountDetails convertUserAccountToUserDetails(
final UserAccount user, final Boolean roleUserAuth) {
//log.debug("convertToUserDetails username "+user.getUsername());
final Collection<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
// search if authorities if the group are activated
/* if (this.roleGroupAuth) {
// search groups of the user
final Set<SecGroup> groups = user.getSecGroups();
for (final SecGroup secGroups : groups) {
authorities.addAll(ConvertDomainsToSecurityContext.convertEnMePermission(secGroups.getSecPermissions()));
}
}*/
// permissions
if (roleUserAuth) {
authorities.addAll(ConvertDomainsToSecurityContext.convertEnMePermission(user.getSecUserPermissions()));
// log.debug("EnMeUserDetails: Authorities size :"+authorities.size());
}
//creating user details
final EnMeUserAccountDetails userDetails = new EnMeUserAccountDetails(
user.getUsername(),
user.getPassword(),
authorities,
user.isUserStatus() == null ? false : user.isUserStatus(),
true, // account not expired
true, // credentials not expired
true, // account not locked
user.getCompleteName() == null ? "" : user.getCompleteName(), // complete name
user.getUserEmail(), // user email
user
);
userDetails.setSocialCredentials(false);
userDetails.setAccountNonExpired(true);
userDetails.setAccountNonLocked(true);
//log.debug("EnMeUserDetails : "+userDetails.toString());
return userDetails;
}
/**
*
* @param account
* @param password
* @param socialSignIn
*/
public static void socialAuthentication(final SocialAccount accountConnection) {
final UserAccount account = accountConnection.getUserOwner();
log.trace("Register SOCIAL LOGIN USER: " + account.getUsername());
// building granted authorities
final Collection<GrantedAuthority> authorities = ConvertDomainsToSecurityContext
.convertEnMePermission(account.getSecUserPermissions());
// create user detail based on user account.
final EnMeSocialUserAccount details = SecurityUtils.convertUserAccountToUserDetails(accountConnection);
// set the social credentials permission.
details.setSocialCredentials(true);
final SocialAuthenticationToken token = new SocialAuthenticationToken(details, authorities);
token.setProfileId(accountConnection.getSocialProfileId());
token.setProvider(accountConnection.getAccounType());
//clear the context.
SecurityContextHolder.clearContext();
//set new authentication.
SecurityContextHolder.getContext().setAuthentication(token);
if (log.isInfoEnabled()) {
log.info("Username " + account.getUsername() + " is logged at "
+ new Date());
log.debug("created EnMeSocialUserAccount" +details);
}
}
/**
* Authenticate {@link UserAccount}.
* @param account {@link UserAccount}.
*/
public static void authenticate(final UserAccount account){
final EnMeUserAccountDetails details = SecurityUtils.convertUserAccountToUserDetails(account, true);
//log.debug("+++++++++++ authenticate +++++++++++++");
//final Collection<GrantedAuthority> authorities = ConvertDomainsToSecurityContext
//.convertEnMePermission(account.getSecUserPermissions());
final EnMeSecurityToken securityToken = new EnMeSecurityToken(account);
//clear the context.
SecurityContextHolder.clearContext();
//set new authentication.
SecurityContextHolder.getContext().setAuthentication(securityToken);
if (log.isInfoEnabled()) {
log.info("{authenticate} Username " + account.getUsername() + " is logged at "
+ new Date());
log.debug("created EnMeUserAccountDetails" +details);
//log.debug("created EnMeUserAccountDetails" +details.getAuthorities());
}
//log.debug("+++++++++++ end authenticate +++++++++++++");
}
/**
* Check is Session is Expired.
* Iterate the existing permission stored in the {@link Authentication} and check if at least
* the ENCUESTAME_USER exist and return true if this condition exist.
* @param authentication
* @return
*/
public static boolean checkIsSessionIsExpired(final Authentication authentication){
boolean session = true;
if(authentication != null){
session = authentication.isAuthenticated();
for (GrantedAuthority authority : authentication.getAuthorities()) {
SimpleGrantedAuthority auth = (SimpleGrantedAuthority) authority;
if (auth.getAuthority().equals(EnMePermission.ENCUESTAME_USER.toString())) {
session = false;
break;
}
}
}
log.trace("checkIsSessionIsExpired->"+session);
return session;
}
/**
* Check is Session is Expired.
* @param authentication
* @return
*/
public static boolean checkIsSessionIsAnonymousUser(final Authentication authentication){
boolean anonymous = false;
if (authentication != null) {
if (EnMeUtils.ANONYMOUS_USER.equals(authentication.getName())) {
anonymous = true;
}
log.trace("checkIsSessionIsExpired->"+anonymous);
}
return anonymous;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.schema;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.Objects;
import com.google.common.base.MoreObjects;
import com.google.common.collect.*;
import org.apache.cassandra.auth.DataResource;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.utils.AbstractIterator;
import org.github.jamm.Unmetered;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static com.google.common.collect.Iterables.transform;
import static org.apache.cassandra.schema.IndexMetadata.isNameValid;
@Unmetered
public final class TableMetadata
{
public enum Flag
{
    SUPER, COUNTER, DENSE, COMPOUND;

    /**
     * Parses a set of flag names (case-insensitive) into the corresponding flags.
     * Uses {@link Locale#ROOT} so parsing does not depend on the JVM default locale
     * (e.g. the Turkish dotless-i casing rules).
     *
     * @param strings flag names in any case
     * @return the corresponding set of flags
     * @throws IllegalArgumentException if any name is not a valid flag
     */
    public static Set<Flag> fromStringSet(Set<String> strings)
    {
        return strings.stream().map(s -> s.toUpperCase(Locale.ROOT)).map(Flag::valueOf).collect(toSet());
    }

    /**
     * Converts a set of flags into their lower-case string names (locale-independent).
     *
     * @param flags the flags to convert
     * @return the lower-case names of the flags
     */
    public static Set<String> toStringSet(Set<Flag> flags)
    {
        return flags.stream().map(Flag::toString).map(s -> s.toLowerCase(Locale.ROOT)).collect(toSet());
    }
}
public final String keyspace;
public final String name;
public final TableId id;
public final IPartitioner partitioner;
public final TableParams params;
public final ImmutableSet<Flag> flags;
private final boolean isView;
private final String indexName; // derived from table name
/*
* All CQL3 columns definition are stored in the columns map.
* On top of that, we keep separated collection of each kind of definition, to
* 1) allow easy access to each kind and
* 2) for the partition key and clustering key ones, those list are ordered by the "component index" of the elements.
*/
public final ImmutableMap<ByteBuffer, DroppedColumn> droppedColumns;
final ImmutableMap<ByteBuffer, ColumnMetadata> columns;
private final ImmutableList<ColumnMetadata> partitionKeyColumns;
private final ImmutableList<ColumnMetadata> clusteringColumns;
private final RegularAndStaticColumns regularAndStaticColumns;
public final Indexes indexes;
public final Triggers triggers;
// derived automatically from flags and columns
public final AbstractType<?> partitionKeyType;
public final ClusteringComparator comparator;
/*
* For dense tables, this alias the single non-PK column the table contains (since it can only have one). We keep
* that as convenience to access that column more easily (but we could replace calls by regularAndStaticColumns().iterator().next()
* for those tables in practice).
*/
public final ColumnMetadata compactValueColumn;
// performance hacks; TODO see if all are really necessary
public final DataResource resource;
/**
 * Constructs the immutable metadata from the given {@link Builder}.
 * <p>
 * Note: this sorts the builder's partition key and clustering column lists in place
 * before copying them, so the builder is mutated by construction.
 */
private TableMetadata(Builder builder)
{
    keyspace = builder.keyspace;
    name = builder.name;
    id = builder.id;
    partitioner = builder.partitioner;
    params = builder.params.build();
    flags = Sets.immutableEnumSet(builder.flags);
    isView = builder.isView;
    // Index tables are named "<base>.<index>": a table is an index iff its name contains a dot,
    // and indexName is the part after the dot (null for regular tables).
    indexName = name.contains(".")
              ? name.substring(name.indexOf('.') + 1)
              : null;
    droppedColumns = ImmutableMap.copyOf(builder.droppedColumns);
    // Key columns are ordered by their component index (ColumnMetadata's natural order).
    Collections.sort(builder.partitionKeyColumns);
    partitionKeyColumns = ImmutableList.copyOf(builder.partitionKeyColumns);
    Collections.sort(builder.clusteringColumns);
    clusteringColumns = ImmutableList.copyOf(builder.clusteringColumns);
    regularAndStaticColumns = RegularAndStaticColumns.builder().addAll(builder.regularAndStaticColumns).build();
    columns = ImmutableMap.copyOf(builder.columns);
    indexes = builder.indexes;
    triggers = builder.triggers;
    // Derived values: a multi-component partition key is represented as a CompositeType.
    partitionKeyType = partitionKeyColumns.size() == 1
                     ? partitionKeyColumns.get(0).type
                     : CompositeType.getInstance(transform(partitionKeyColumns, t -> t.type));
    comparator = new ClusteringComparator(transform(clusteringColumns, c -> c.type));
    // Compact tables have a single non-PK column; keep a direct reference to it for convenience.
    compactValueColumn = isCompactTable()
                       ? CompactTables.getCompactValueColumn(regularAndStaticColumns, isSuper())
                       : null;
    resource = DataResource.table(keyspace, name);
}
public static Builder builder(String keyspace, String table)
{
return new Builder(keyspace, table);
}
public static Builder builder(String keyspace, String table, TableId id)
{
return new Builder(keyspace, table, id);
}
public Builder unbuild()
{
return builder(keyspace, name, id)
.partitioner(partitioner)
.params(params)
.flags(flags)
.isView(isView)
.addColumns(columns())
.droppedColumns(droppedColumns)
.indexes(indexes)
.triggers(triggers);
}
public boolean isView()
{
return isView;
}
public boolean isIndex()
{
return indexName != null;
}
public Optional<String> indexName()
{
return Optional.ofNullable(indexName);
}
/*
* We call dense a CF for which each component of the comparator is a clustering column, i.e. no
* component is used to store a regular column names. In other words, non-composite static "thrift"
* and CQL3 CF are *not* dense.
*/
public boolean isDense()
{
return flags.contains(Flag.DENSE);
}
public boolean isCompound()
{
return flags.contains(Flag.COMPOUND);
}
public boolean isSuper()
{
return flags.contains(Flag.SUPER);
}
public boolean isCounter()
{
return flags.contains(Flag.COUNTER);
}
public boolean isCQLTable()
{
return !isSuper() && !isDense() && isCompound();
}
public boolean isCompactTable()
{
return !isCQLTable();
}
public boolean isStaticCompactTable()
{
return !isSuper() && !isDense() && !isCompound();
}
public ImmutableCollection<ColumnMetadata> columns()
{
return columns.values();
}
public Iterable<ColumnMetadata> primaryKeyColumns()
{
return Iterables.concat(partitionKeyColumns, clusteringColumns);
}
public ImmutableList<ColumnMetadata> partitionKeyColumns()
{
return partitionKeyColumns;
}
public ImmutableList<ColumnMetadata> clusteringColumns()
{
return clusteringColumns;
}
public RegularAndStaticColumns regularAndStaticColumns()
{
return regularAndStaticColumns;
}
public Columns regularColumns()
{
return regularAndStaticColumns.regulars;
}
public Columns staticColumns()
{
return regularAndStaticColumns.statics;
}
/**
 * An iterator over all column definitions in the order a {@code SELECT *} would return them:
 * partition key columns first, then clustering columns, then the remaining columns in
 * "select order". This also "hides" the clustering/regular columns for a non-CQL3 non-dense
 * table for backward compatibility's sake.
 */
public Iterator<ColumnMetadata> allColumnsInSelectOrder()
{
    // Snapshot these once up front; the anonymous iterator below captures them.
    final boolean isStaticCompactTable = isStaticCompactTable();
    // Compact tables whose compact value is "empty" expose no non-PK columns at all.
    final boolean noNonPkColumns = isCompactTable() && CompactTables.hasEmptyCompactValue(this);
    return new AbstractIterator<ColumnMetadata>()
    {
        private final Iterator<ColumnMetadata> partitionKeyIter = partitionKeyColumns.iterator();
        // Static compact tables do not expose their clustering columns in SELECT *.
        private final Iterator<ColumnMetadata> clusteringIter =
            isStaticCompactTable ? Collections.emptyIterator() : clusteringColumns.iterator();
        // Static compact tables expose only their static columns; others expose regulars + statics.
        private final Iterator<ColumnMetadata> otherColumns =
            noNonPkColumns
            ? Collections.emptyIterator()
            : (isStaticCompactTable ? staticColumns().selectOrderIterator()
                                    : regularAndStaticColumns.selectOrderIterator());
        protected ColumnMetadata computeNext()
        {
            // Exhaust each sub-iterator in SELECT order: partition key, clustering, then the rest.
            if (partitionKeyIter.hasNext())
                return partitionKeyIter.next();
            if (clusteringIter.hasNext())
                return clusteringIter.next();
            return otherColumns.hasNext() ? otherColumns.next() : endOfData();
        }
    };
}
/**
* Returns the ColumnMetadata for {@code name}.
*/
public ColumnMetadata getColumn(ColumnIdentifier name)
{
return columns.get(name.bytes);
}
/*
* In general it is preferable to work with ColumnIdentifier to make it
* clear that we are talking about a CQL column, not a cell name, but there
* is a few cases where all we have is a ByteBuffer (when dealing with IndexExpression
* for instance) so...
*/
public ColumnMetadata getColumn(ByteBuffer name)
{
return columns.get(name);
}
public ColumnMetadata getDroppedColumn(ByteBuffer name)
{
DroppedColumn dropped = droppedColumns.get(name);
return dropped == null ? null : dropped.column;
}
/**
 * Returns a "fake" ColumnMetadata for the dropped column {@code name}, or {@code null}
 * if no column of that name was dropped.
 * <p>
 * If the caller knows the column was static but the recorded drop is not, a static
 * view of the dropped column is synthesized on the fly.
 *
 * @param name - the column name
 * @param isStatic - whether the column was a static column, if known
 */
public ColumnMetadata getDroppedColumn(ByteBuffer name, boolean isStatic)
{
    DroppedColumn entry = droppedColumns.get(name);
    if (entry == null)
        return null;
    ColumnMetadata column = entry.column;
    boolean needsStaticView = isStatic && !column.isStatic();
    return needsStaticView
         ? ColumnMetadata.staticColumn(this, name, column.type)
         : column;
}
public boolean hasStaticColumns()
{
return !staticColumns().isEmpty();
}
/**
 * Validates this table's metadata, reporting any violation via {@link #except}
 * (which throws a ConfigurationException prefixed with "keyspace.table: ").
 * Checks, in order: keyspace/table name validity, table params, counters in the
 * primary key, counter/non-counter column mixing, presence of a partition key,
 * the compact-table clustering requirement, and index constraints.
 */
public void validate()
{
    if (!isNameValid(keyspace))
        except("Keyspace name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", SchemaConstants.NAME_LENGTH, keyspace);
    if (!isNameValid(name))
        except("Table name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", SchemaConstants.NAME_LENGTH, name);
    params.validate();
    // Counter types are not allowed anywhere in the primary key.
    if (partitionKeyColumns.stream().anyMatch(c -> c.type.isCounter()))
        except("PRIMARY KEY columns cannot contain counters");
    // Mixing counter with non counter columns is not supported (#2614)
    if (isCounter())
    {
        // Super-column map columns are the one permitted exception in counter tables.
        for (ColumnMetadata column : regularAndStaticColumns)
            if (!(column.type.isCounter()) && !CompactTables.isSuperColumnMapColumn(column))
                except("Cannot have a non counter column (\"%s\") in a counter table", column.name);
    }
    else
    {
        for (ColumnMetadata column : regularAndStaticColumns)
            if (column.type.isCounter())
                except("Cannot have a counter column (\"%s\") in a non counter column table", column.name);
    }
    // All tables should have a partition key
    if (partitionKeyColumns.isEmpty())
        except("Missing partition keys for table %s", toString());
    // A compact table should always have a clustering
    if (isCompactTable() && clusteringColumns.isEmpty())
        except("For table %s, isDense=%b, isCompound=%b, clustering=%s", toString(), isDense(), isCompound(), clusteringColumns);
    if (!indexes.isEmpty() && isSuper())
        except("Secondary indexes are not supported on super column families");
    indexes.validate(this);
}
void validateCompatibility(TableMetadata other)
{
if (isIndex())
return;
if (!other.keyspace.equals(keyspace))
except("Keyspace mismatch (found %s; expected %s)", other.keyspace, keyspace);
if (!other.name.equals(name))
except("Table mismatch (found %s; expected %s)", other.name, name);
if (!other.id.equals(id))
except("Table ID mismatch (found %s; expected %s)", other.id, id);
if (!other.flags.equals(flags))
except("Table type mismatch (found %s; expected %s)", other.flags, flags);
if (other.partitionKeyColumns.size() != partitionKeyColumns.size())
except("Partition keys of different length (found %s; expected %s)", other.partitionKeyColumns.size(), partitionKeyColumns.size());
for (int i = 0; i < partitionKeyColumns.size(); i++)
if (!other.partitionKeyColumns.get(i).type.isCompatibleWith(partitionKeyColumns.get(i).type))
except("Partition key column mismatch (found %s; expected %s)", other.partitionKeyColumns.get(i).type, partitionKeyColumns.get(i).type);
if (other.clusteringColumns.size() != clusteringColumns.size())
except("Clustering columns of different length (found %s; expected %s)", other.clusteringColumns.size(), clusteringColumns.size());
for (int i = 0; i < clusteringColumns.size(); i++)
if (!other.clusteringColumns.get(i).type.isCompatibleWith(clusteringColumns.get(i).type))
except("Clustering column mismatch (found %s; expected %s)", other.clusteringColumns.get(i).type, clusteringColumns.get(i).type);
for (ColumnMetadata otherColumn : other.regularAndStaticColumns)
{
ColumnMetadata column = getColumn(otherColumn.name);
if (column != null && !otherColumn.type.isCompatibleWith(column.type))
except("Column mismatch (found %s; expected %s", otherColumn, column);
}
}
public ClusteringComparator partitionKeyAsClusteringComparator()
{
return new ClusteringComparator(partitionKeyColumns.stream().map(c -> c.type).collect(toList()));
}
/**
* The type to use to compare column names in "static compact"
* tables or superColum ones.
* <p>
* This exists because for historical reasons, "static compact" tables as
* well as super column ones can have non-UTF8 column names.
* <p>
* This method should only be called for superColumn tables and "static
* compact" ones. For any other table, all column names are UTF8.
*/
public AbstractType<?> staticCompactOrSuperTableColumnNameType()
{
if (isSuper())
{
assert compactValueColumn != null && compactValueColumn.type instanceof MapType;
return ((MapType) compactValueColumn.type).nameComparator();
}
assert isStaticCompactTable();
return clusteringColumns.get(0).type;
}
public AbstractType<?> columnDefinitionNameComparator(ColumnMetadata.Kind kind)
{
return (isSuper() && kind == ColumnMetadata.Kind.REGULAR) || (isStaticCompactTable() && kind == ColumnMetadata.Kind.STATIC)
? staticCompactOrSuperTableColumnNameType()
: UTF8Type.instance;
}
/**
* Generate a table name for an index corresponding to the given column.
* This is NOT the same as the index's name! This is only used in sstable filenames and is not exposed to users.
*
* @param info A definition of the column with index
*
* @return name of the index table
*/
public String indexTableName(IndexMetadata info)
{
// TODO simplify this when info.index_name is guaranteed to be set
return name + Directories.SECONDARY_INDEX_NAME_SEPARATOR + info.name;
}
/**
 * Tells whether the given updated metadata impacts queries/updates on this table —
 * i.e. any columns or indexes were added, removed, or altered, or the default TTL /
 * gc grace seconds changed. Used to decide whether prepared statements against this
 * table must be re-prepared.
 *
 * @param updated the new version of this table's metadata
 * @return true when prepared statements need re-preparation
 */
boolean changeAffectsPreparedStatements(TableMetadata updated)
{
    // Guard clauses: bail out as soon as any relevant difference is found.
    if (!partitionKeyColumns.equals(updated.partitionKeyColumns))
        return true;
    if (!clusteringColumns.equals(updated.clusteringColumns))
        return true;
    if (!regularAndStaticColumns.equals(updated.regularAndStaticColumns))
        return true;
    if (!indexes.equals(updated.indexes))
        return true;
    return params.defaultTimeToLive != updated.params.defaultTimeToLive
        || params.gcGraceSeconds != updated.params.gcGraceSeconds;
}
/**
* There is a couple of places in the code where we need a TableMetadata object and don't have one readily available
* and know that only the keyspace and name matter. This creates such "fake" metadata. Use only if you know what
* you're doing.
*/
public static TableMetadata minimal(String keyspace, String name)
{
return TableMetadata.builder(keyspace, name)
.addPartitionKeyColumn("key", BytesType.instance)
.build();
}
public TableMetadata updateIndexTableMetadata(TableParams baseTableParams)
{
TableParams.Builder builder =
baseTableParams.unbuild()
.readRepairChance(0.0)
.dcLocalReadRepairChance(0.0)
.gcGraceSeconds(0);
// Depends on parent's cache setting, turn on its index table's cache.
// Row caching is never enabled; see CASSANDRA-5732
builder.caching(baseTableParams.caching.cacheKeys() ? CachingParams.CACHE_KEYS : CachingParams.CACHE_NOTHING);
return unbuild().params(builder.build()).build();
}
private void except(String format, Object... args)
{
throw new ConfigurationException(keyspace + "." + name + ": " +format(format, args));
}
@Override
public boolean equals(Object o)
{
    // Standard identity / type guards.
    if (this == o)
        return true;
    if (!(o instanceof TableMetadata))
        return false;
    TableMetadata that = (TableMetadata) o;
    // Compare the cheap scalar first, then the structural fields.
    return isView == that.isView
        && keyspace.equals(that.keyspace)
        && name.equals(that.name)
        && id.equals(that.id)
        && partitioner.equals(that.partitioner)
        && params.equals(that.params)
        && flags.equals(that.flags)
        && columns.equals(that.columns)
        && droppedColumns.equals(that.droppedColumns)
        && indexes.equals(that.indexes)
        && triggers.equals(that.triggers);
}
@Override
public int hashCode()
{
return Objects.hash(keyspace, name, id, partitioner, params, flags, isView, columns, droppedColumns, indexes, triggers);
}
@Override
public String toString()
{
return String.format("%s.%s", ColumnIdentifier.maybeQuote(keyspace), ColumnIdentifier.maybeQuote(name));
}
public String toDebugString()
{
return MoreObjects.toStringHelper(this)
.add("keyspace", keyspace)
.add("table", name)
.add("id", id)
.add("partitioner", partitioner)
.add("params", params)
.add("flags", flags)
.add("isView", isView)
.add("columns", columns())
.add("droppedColumns", droppedColumns.values())
.add("indexes", indexes)
.add("triggers", triggers)
.toString();
}
public static final class Builder
{
final String keyspace;
final String name;
private TableId id;
private IPartitioner partitioner;
private TableParams.Builder params = TableParams.builder();
// Setting compound as default as "normal" CQL tables are compound and that's what we want by default
private Set<Flag> flags = EnumSet.of(Flag.COMPOUND);
private Triggers triggers = Triggers.none();
private Indexes indexes = Indexes.none();
private final Map<ByteBuffer, DroppedColumn> droppedColumns = new HashMap<>();
private final Map<ByteBuffer, ColumnMetadata> columns = new HashMap<>();
private final List<ColumnMetadata> partitionKeyColumns = new ArrayList<>();
private final List<ColumnMetadata> clusteringColumns = new ArrayList<>();
private final List<ColumnMetadata> regularAndStaticColumns = new ArrayList<>();
private boolean isView;
private Builder(String keyspace, String name, TableId id)
{
this.keyspace = keyspace;
this.name = name;
this.id = id;
}
private Builder(String keyspace, String name)
{
this.keyspace = keyspace;
this.name = name;
}
public TableMetadata build()
{
if (partitioner == null)
partitioner = DatabaseDescriptor.getPartitioner();
if (id == null)
id = TableId.generate();
return new TableMetadata(this);
}
public Builder id(TableId val)
{
id = val;
return this;
}
public Builder partitioner(IPartitioner val)
{
partitioner = val;
return this;
}
public Builder params(TableParams val)
{
params = val.unbuild();
return this;
}
public Builder bloomFilterFpChance(double val)
{
params.bloomFilterFpChance(val);
return this;
}
public Builder caching(CachingParams val)
{
params.caching(val);
return this;
}
public Builder comment(String val)
{
params.comment(val);
return this;
}
public Builder compaction(CompactionParams val)
{
params.compaction(val);
return this;
}
public Builder compression(CompressionParams val)
{
params.compression(val);
return this;
}
public Builder dcLocalReadRepairChance(double val)
{
params.dcLocalReadRepairChance(val);
return this;
}
public Builder defaultTimeToLive(int val)
{
params.defaultTimeToLive(val);
return this;
}
public Builder gcGraceSeconds(int val)
{
params.gcGraceSeconds(val);
return this;
}
public Builder maxIndexInterval(int val)
{
params.maxIndexInterval(val);
return this;
}
public Builder memtableFlushPeriod(int val)
{
params.memtableFlushPeriodInMs(val);
return this;
}
public Builder minIndexInterval(int val)
{
params.minIndexInterval(val);
return this;
}
public Builder readRepairChance(double val)
{
params.readRepairChance(val);
return this;
}
public Builder crcCheckChance(double val)
{
params.crcCheckChance(val);
return this;
}
public Builder speculativeRetry(SpeculativeRetryParam val)
{
params.speculativeRetry(val);
return this;
}
public Builder extensions(Map<String, ByteBuffer> val)
{
params.extensions(val);
return this;
}
public Builder isView(boolean val)
{
isView = val;
return this;
}
public Builder flags(Set<Flag> val)
{
flags = val;
return this;
}
public Builder isSuper(boolean val)
{
return flag(Flag.SUPER, val);
}
public Builder isCounter(boolean val)
{
return flag(Flag.COUNTER, val);
}
public Builder isDense(boolean val)
{
return flag(Flag.DENSE, val);
}
public Builder isCompound(boolean val)
{
return flag(Flag.COMPOUND, val);
}
private Builder flag(Flag flag, boolean set)
{
if (set) flags.add(flag); else flags.remove(flag);
return this;
}
public Builder triggers(Triggers val)
{
triggers = val;
return this;
}
public Builder indexes(Indexes val)
{
indexes = val;
return this;
}
public Builder addPartitionKeyColumn(String name, AbstractType type)
{
return addPartitionKeyColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addPartitionKeyColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, partitionKeyColumns.size(), ColumnMetadata.Kind.PARTITION_KEY));
}
public Builder addClusteringColumn(String name, AbstractType type)
{
return addClusteringColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addClusteringColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, clusteringColumns.size(), ColumnMetadata.Kind.CLUSTERING));
}
public Builder addRegularColumn(String name, AbstractType type)
{
return addRegularColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addRegularColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, ColumnMetadata.NO_POSITION, ColumnMetadata.Kind.REGULAR));
}
public Builder addStaticColumn(String name, AbstractType type)
{
return addStaticColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addStaticColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, ColumnMetadata.NO_POSITION, ColumnMetadata.Kind.STATIC));
}
/**
 * Adds a column definition, routing it to the right collection by kind.
 * Partition key and clustering lists are kept sorted by component index.
 *
 * @param column the column to add
 * @return this builder
 * @throws IllegalArgumentException if a column of the same name already exists
 */
public Builder addColumn(ColumnMetadata column)
{
    if (columns.containsKey(column.name.bytes))
        throw new IllegalArgumentException(String.format("Column %s already exists in %s.%s", column.name, keyspace, name));
    switch (column.kind)
    {
        case PARTITION_KEY:
            partitionKeyColumns.add(column);
            Collections.sort(partitionKeyColumns);
            break;
        case CLUSTERING:
            // Clustering columns must have a comparable type.
            column.type.checkComparable();
            clusteringColumns.add(column);
            Collections.sort(clusteringColumns);
            break;
        default:
            regularAndStaticColumns.add(column);
    }
    columns.put(column.name.bytes, column);
    return this;
}
public Builder addColumns(Iterable<ColumnMetadata> columns)
{
columns.forEach(this::addColumn);
return this;
}
public Builder droppedColumns(Map<ByteBuffer, DroppedColumn> droppedColumns)
{
this.droppedColumns.clear();
this.droppedColumns.putAll(droppedColumns);
return this;
}
/**
 * Records a deprecated column for a system table.
 *
 * @param name the name of the deprecated column
 * @param type the type of the deprecated column
 * @return this builder
 */
public Builder recordDeprecatedSystemColumn(String name, AbstractType<?> type)
{
    // As we play fast and loose with the removal timestamp (Long.MAX_VALUE below),
    // make sure this is not misused for a non-system table.
    assert SchemaConstants.isSystemKeyspace(keyspace);
    recordColumnDrop(ColumnMetadata.regularColumn(keyspace, this.name, name, type), Long.MAX_VALUE);
    return this;
}
public Builder recordColumnDrop(ColumnMetadata column, long timeMicros)
{
droppedColumns.put(column.name.bytes, new DroppedColumn(column, timeMicros));
return this;
}
public Iterable<ColumnMetadata> columns()
{
return columns.values();
}
public Set<String> columnNames()
{
return columns.values().stream().map(c -> c.name.toString()).collect(toSet());
}
public ColumnMetadata getColumn(ColumnIdentifier identifier)
{
return columns.get(identifier.bytes);
}
public ColumnMetadata getColumn(ByteBuffer name)
{
return columns.get(name);
}
public boolean hasRegularColumns()
{
return regularAndStaticColumns.stream().anyMatch(ColumnMetadata::isRegular);
}
/*
* The following methods all assume a Builder with valid set of partition key, clustering, regular and static columns.
*/
public Builder removeRegularOrStaticColumn(ColumnIdentifier identifier)
{
ColumnMetadata column = columns.get(identifier.bytes);
if (column == null || column.isPrimaryKeyColumn())
throw new IllegalArgumentException();
columns.remove(identifier.bytes);
regularAndStaticColumns.remove(column);
return this;
}
/**
 * Renames a primary key (partition key or clustering) column.
 *
 * @param from the current column name
 * @param to the new column name
 * @return this builder
 * @throws IllegalArgumentException if {@code to} already exists, or {@code from}
 *         does not exist or is not a primary key column
 */
public Builder renamePrimaryKeyColumn(ColumnIdentifier from, ColumnIdentifier to)
{
    if (columns.containsKey(to.bytes))
        throw new IllegalArgumentException(String.format("Cannot rename column %s to %s in %s.%s: a column of that name already exists", from, to, keyspace, name));
    ColumnMetadata column = columns.get(from.bytes);
    if (column == null || !column.isPrimaryKeyColumn())
        throw new IllegalArgumentException(String.format("Cannot rename column %s in %s.%s: no such primary key column", from, keyspace, name));
    ColumnMetadata newColumn = column.withNewName(to);
    // Replace the column in the positional list it belongs to.
    if (column.isPartitionKey())
        partitionKeyColumns.set(column.position(), newColumn);
    else
        clusteringColumns.set(column.position(), newColumn);
    columns.remove(from.bytes);
    columns.put(to.bytes, newColumn);
    return this;
}
/**
 * Replaces the type of an existing column.
 *
 * @param name the name of the column to alter
 * @param type the new type
 * @return this builder
 * @throws IllegalArgumentException if no column of that name exists
 */
public Builder alterColumnType(ColumnIdentifier name, AbstractType<?> type)
{
    ColumnMetadata column = columns.get(name.bytes);
    if (column == null)
        throw new IllegalArgumentException(String.format("Unknown column %s in %s.%s", name, keyspace, this.name));
    ColumnMetadata newColumn = column.withNewType(type);
    switch (column.kind)
    {
        case PARTITION_KEY:
            partitionKeyColumns.set(column.position(), newColumn);
            break;
        case CLUSTERING:
            clusteringColumns.set(column.position(), newColumn);
            break;
        case REGULAR:
        case STATIC:
            // No positional ordering here; swap the old definition for the new one.
            regularAndStaticColumns.remove(column);
            regularAndStaticColumns.add(newColumn);
            break;
    }
    columns.put(column.name.bytes, newColumn);
    return this;
}
}
/**
* A table with strict liveness filters/ignores rows without PK liveness info,
* effectively tying the row liveness to its primary key liveness.
*
* Currently this is only used by views with normal base column as PK column
* so updates to other columns do not make the row live when the base column
* is not live. See CASSANDRA-11500.
*/
public boolean enforceStrictLiveness()
{
return isView && Keyspace.open(keyspace).viewManager.getByName(name).enforceStrictLiveness();
}
}
| |
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare;
import com.joelapenna.foursquare.error.FoursquareCredentialsException;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareException;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.http.AbstractHttpApi;
import com.joelapenna.foursquare.http.HttpApi;
import com.joelapenna.foursquare.http.HttpApiWithBasicAuth;
import com.joelapenna.foursquare.http.HttpApiWithOAuth;
import com.joelapenna.foursquare.parsers.AbstractParser;
import com.joelapenna.foursquare.parsers.CategoryParser;
import com.joelapenna.foursquare.parsers.CheckinParser;
import com.joelapenna.foursquare.parsers.CheckinResultParser;
import com.joelapenna.foursquare.parsers.CityParser;
import com.joelapenna.foursquare.parsers.CredentialsParser;
import com.joelapenna.foursquare.parsers.FriendInvitesResultParser;
import com.joelapenna.foursquare.parsers.GroupParser;
import com.joelapenna.foursquare.parsers.ResponseParser;
import com.joelapenna.foursquare.parsers.SettingsParser;
import com.joelapenna.foursquare.parsers.TipParser;
import com.joelapenna.foursquare.parsers.UserParser;
import com.joelapenna.foursquare.parsers.VenueParser;
import com.joelapenna.foursquare.types.Category;
import com.joelapenna.foursquare.types.Checkin;
import com.joelapenna.foursquare.types.CheckinResult;
import com.joelapenna.foursquare.types.City;
import com.joelapenna.foursquare.types.Credentials;
import com.joelapenna.foursquare.types.FriendInvitesResult;
import com.joelapenna.foursquare.types.Group;
import com.joelapenna.foursquare.types.Response;
import com.joelapenna.foursquare.types.Settings;
import com.joelapenna.foursquare.types.Tip;
import com.joelapenna.foursquare.types.User;
import com.joelapenna.foursquare.types.Venue;
import com.joelapenna.foursquared.util.Base64Coder;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* @author Joe LaPenna (joe@joelapenna.com)
*/
class FoursquareHttpApiV1 {
private static final Logger LOG = Logger
.getLogger(FoursquareHttpApiV1.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.DEBUG;
private static final String URL_API_AUTHEXCHANGE = "/authexchange";
private static final String URL_API_ADDVENUE = "/addvenue";
private static final String URL_API_ADDTIP = "/addtip";
private static final String URL_API_CITIES = "/cities";
private static final String URL_API_CHECKINS = "/checkins";
private static final String URL_API_CHECKIN = "/checkin";
private static final String URL_API_USER = "/user";
private static final String URL_API_VENUE = "/venue";
private static final String URL_API_VENUES = "/venues";
private static final String URL_API_TIPS = "/tips";
private static final String URL_API_FRIEND_REQUESTS = "/friend/requests";
private static final String URL_API_FRIEND_APPROVE = "/friend/approve";
private static final String URL_API_FRIEND_DENY = "/friend/deny";
private static final String URL_API_FRIEND_SENDREQUEST = "/friend/sendrequest";
private static final String URL_API_FRIENDS = "/friends";
private static final String URL_API_FIND_FRIENDS_BY_NAME = "/findfriends/byname";
private static final String URL_API_FIND_FRIENDS_BY_PHONE = "/findfriends/byphone";
private static final String URL_API_FIND_FRIENDS_BY_FACEBOOK = "/findfriends/byfacebook";
private static final String URL_API_FIND_FRIENDS_BY_TWITTER = "/findfriends/bytwitter";
private static final String URL_API_CATEGORIES = "/categories";
private static final String URL_API_HISTORY = "/history";
private static final String URL_API_TIP_TODO = "/tip/marktodo";
private static final String URL_API_TIP_DONE = "/tip/markdone";
private static final String URL_API_FIND_FRIENDS_BY_PHONE_OR_EMAIL = "/findfriends/byphoneoremail";
private static final String URL_API_INVITE_BY_EMAIL = "/invite/byemail";
private static final String URL_API_SETPINGS = "/settings/setpings";
private static final String URL_API_VENUE_FLAG_CLOSED = "/venue/flagclosed";
private static final String URL_API_VENUE_FLAG_MISLOCATED = "/venue/flagmislocated";
private static final String URL_API_VENUE_FLAG_DUPLICATE = "/venue/flagduplicate";
private static final String URL_API_VENUE_PROPOSE_EDIT = "/venue/proposeedit";
private static final String URL_API_USER_UPDATE = "/user/update";
private final DefaultHttpClient mHttpClient = AbstractHttpApi.createHttpClient();
private HttpApi mHttpApi;
private final String mApiBaseUrl;
private final AuthScope mAuthScope;
public FoursquareHttpApiV1(String domain, String clientVersion, boolean useOAuth) {
mApiBaseUrl = "http://" + domain + "/v1";
mAuthScope = new AuthScope(domain, 80);
if (useOAuth) {
mHttpApi = new HttpApiWithOAuth(mHttpClient, clientVersion);
} else {
mHttpApi = new HttpApiWithBasicAuth(mHttpClient, clientVersion);
}
}
/**
 * Stores or clears the phone/password credentials on the HTTP client.
 * A null or empty phone or password clears the stored credentials.
 */
void setCredentials(String phone, String password) {
    boolean missing = phone == null || phone.length() == 0
            || password == null || password.length() == 0;
    if (missing) {
        if (DEBUG) LOG.log(Level.FINE, "Clearing Credentials");
        mHttpClient.getCredentialsProvider().clear();
        return;
    }
    // Password is masked in the debug log.
    if (DEBUG) LOG.log(Level.FINE, "Setting Phone/Password: " + phone + "/******");
    mHttpClient.getCredentialsProvider().setCredentials(mAuthScope,
            new UsernamePasswordCredentials(phone, password));
}
public boolean hasCredentials() {
return mHttpClient.getCredentialsProvider().getCredentials(mAuthScope) != null;
}
/**
 * Sets the OAuth consumer key/secret on the underlying OAuth-capable HTTP API.
 * Requires that this instance was constructed with {@code useOAuth == true}.
 *
 * @param oAuthConsumerKey the OAuth consumer key
 * @param oAuthConsumerSecret the OAuth consumer secret (never logged)
 */
public void setOAuthConsumerCredentials(String oAuthConsumerKey, String oAuthConsumerSecret) {
    if (DEBUG) {
        // Security: log only the key; the consumer secret is a credential and must not
        // appear in logs.
        LOG.log(Level.FINE, "Setting consumer key/secret: " + oAuthConsumerKey + " /******");
    }
    ((HttpApiWithOAuth) mHttpApi).setOAuthConsumerCredentials(oAuthConsumerKey,
            oAuthConsumerSecret);
}
/**
 * Sets the OAuth access token and token secret on the underlying OAuth-capable HTTP API.
 * Requires that this instance was constructed with {@code useOAuth == true}.
 *
 * @param token the OAuth access token
 * @param secret the OAuth token secret (never logged)
 */
public void setOAuthTokenWithSecret(String token, String secret) {
    // Security: the token secret is a credential and must not appear in logs.
    if (DEBUG) LOG.log(Level.FINE, "Setting oauth token/secret: " + token + " /******");
    ((HttpApiWithOAuth) mHttpApi).setOAuthTokenWithSecret(token, secret);
}
public boolean hasOAuthTokenWithSecret() {
return ((HttpApiWithOAuth) mHttpApi).hasOAuthTokenWithSecret();
}
/**
 * Exchanges a phone/password pair for OAuth credentials via POST to /authexchange
 * (e.g. /authexchange?oauth_consumer_key=d123...a1bffb5&amp;oauth_consumer_secret=fec...).
 *
 * @param phone the account's phone number (sent as fs_username)
 * @param password the account's password (sent as fs_password)
 * @return the parsed {@link Credentials}
 * @throws IllegalStateException if an OAuth token is already set on the underlying API
 */
public Credentials authExchange(String phone, String password) throws FoursquareException,
        FoursquareCredentialsException, FoursquareError, IOException {
    // Exchanging credentials while a token is already present would silently replace it.
    if (((HttpApiWithOAuth) mHttpApi).hasOAuthTokenWithSecret()) {
        throw new IllegalStateException("Cannot do authExchange with OAuthToken already set");
    }
    HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_AUTHEXCHANGE), //
            new BasicNameValuePair("fs_username", phone), //
            new BasicNameValuePair("fs_password", password));
    return (Credentials) mHttpApi.doHttpRequest(httpPost, new CredentialsParser());
}
/*
* /addtip?vid=1234&text=I%20added%20a%20tip&type=todo (type defaults "tip")
*/
Tip addtip(String vid, String text, String type, String geolat, String geolong, String geohacc,
String geovacc, String geoalt) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_ADDTIP), //
new BasicNameValuePair("vid", vid), //
new BasicNameValuePair("text", text), //
new BasicNameValuePair("type", type), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt));
return (Tip) mHttpApi.doHttpRequest(httpPost, new TipParser());
}
/**
* @param name the name of the venue
* @param address the address of the venue (e.g., "202 1st Avenue")
* @param crossstreet the cross streets (e.g., "btw Grand & Broome")
* @param city the city name where this venue is
* @param state the state where the city is
* @param zip (optional) the ZIP code for the venue
* @param phone (optional) the phone number for the venue
* @return
* @throws FoursquareException
* @throws FoursquareCredentialsException
* @throws FoursquareError
* @throws IOException
*/
Venue addvenue(String name, String address, String crossstreet, String city, String state,
String zip, String phone, String categoryId, String geolat, String geolong, String geohacc,
String geovacc, String geoalt) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_ADDVENUE), //
new BasicNameValuePair("name", name), //
new BasicNameValuePair("address", address), //
new BasicNameValuePair("crossstreet", crossstreet), //
new BasicNameValuePair("city", city), //
new BasicNameValuePair("state", state), //
new BasicNameValuePair("zip", zip), //
new BasicNameValuePair("phone", phone), //
new BasicNameValuePair("primarycategoryid", categoryId), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt) //
);
return (Venue) mHttpApi.doHttpRequest(httpPost, new VenueParser());
}
/*
* /cities
*/
@SuppressWarnings("unchecked")
Group<City> cities() throws FoursquareException, FoursquareCredentialsException,
FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_CITIES));
return (Group<City>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new CityParser()));
}
/*
* /checkins?
*/
@SuppressWarnings("unchecked")
Group<Checkin> checkins(String geolat, String geolong, String geohacc, String geovacc,
String geoalt) throws FoursquareException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_CHECKINS), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt));
return (Group<Checkin>) mHttpApi.doHttpRequest(httpGet,
new GroupParser(new CheckinParser()));
}
/*
* /checkin?vid=1234&venue=Noc%20Noc&shout=Come%20here&private=0&twitter=1
*/
CheckinResult checkin(String vid, String venue, String geolat, String geolong, String geohacc,
String geovacc, String geoalt, String shout, boolean isPrivate, boolean tellFollowers,
boolean twitter, boolean facebook) throws FoursquareException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_CHECKIN), //
new BasicNameValuePair("vid", vid), //
new BasicNameValuePair("venue", venue), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt), //
new BasicNameValuePair("shout", shout), //
new BasicNameValuePair("private", (isPrivate) ? "1" : "0"), //
new BasicNameValuePair("followers", (tellFollowers) ? "1" : "0"), //
new BasicNameValuePair("twitter", (twitter) ? "1" : "0"), //
new BasicNameValuePair("facebook", (facebook) ? "1" : "0"), //
new BasicNameValuePair("markup", "android")); // used only by android for checkin result 'extras'.
return (CheckinResult) mHttpApi.doHttpRequest(httpPost, new CheckinResultParser());
}
/**
* /user?uid=9937
*/
User user(String uid, boolean mayor, boolean badges, String geolat, String geolong,
String geohacc, String geovacc, String geoalt) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_USER), //
new BasicNameValuePair("uid", uid), //
new BasicNameValuePair("mayor", (mayor) ? "1" : "0"), //
new BasicNameValuePair("badges", (badges) ? "1" : "0"), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt) //
);
return (User) mHttpApi.doHttpRequest(httpGet, new UserParser());
}
/**
* /venues?geolat=37.770900&geolong=-122.43698
*/
@SuppressWarnings("unchecked")
Group<Group<Venue>> venues(String geolat, String geolong, String geohacc, String geovacc,
String geoalt, String query, int limit) throws FoursquareException, FoursquareError,
IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_VENUES), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt), //
new BasicNameValuePair("q", query), //
new BasicNameValuePair("l", String.valueOf(limit)));
return (Group<Group<Venue>>) mHttpApi.doHttpRequest(httpGet, new GroupParser(
new GroupParser(new VenueParser())));
}
/**
* /venue?vid=1234
*/
Venue venue(String vid, String geolat, String geolong, String geohacc, String geovacc,
String geoalt) throws FoursquareException, FoursquareCredentialsException,
FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_VENUE), //
new BasicNameValuePair("vid", vid), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt) //
);
return (Venue) mHttpApi.doHttpRequest(httpGet, new VenueParser());
}
/**
* /tips?geolat=37.770900&geolong=-122.436987&l=1
*/
@SuppressWarnings("unchecked")
Group<Group<Tip>> tips(String geolat, String geolong, String geohacc, String geovacc,
String geoalt, int limit) throws FoursquareException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_TIPS), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt), //
new BasicNameValuePair("l", String.valueOf(limit)) //
);
return (Group<Group<Tip>>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new GroupParser(
new TipParser())));
}
/*
* /friends?uid=9937
*/
@SuppressWarnings("unchecked")
Group<User> friends(String uid, String geolat, String geolong, String geohacc, String geovacc,
String geoalt) throws FoursquareException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_FRIENDS), //
new BasicNameValuePair("uid", uid), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt) //
);
return (Group<User>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new UserParser()));
}
/*
* /friend/requests
*/
@SuppressWarnings("unchecked")
Group<User> friendRequests() throws FoursquareException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_FRIEND_REQUESTS));
return (Group<User>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new UserParser()));
}
/*
* /friend/approve?uid=9937
*/
User friendApprove(String uid) throws FoursquareException, FoursquareCredentialsException,
FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_FRIEND_APPROVE), //
new BasicNameValuePair("uid", uid));
return (User) mHttpApi.doHttpRequest(httpPost, new UserParser());
}
/*
* /friend/deny?uid=9937
*/
User friendDeny(String uid) throws FoursquareException, FoursquareCredentialsException,
FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_FRIEND_DENY), //
new BasicNameValuePair("uid", uid));
return (User) mHttpApi.doHttpRequest(httpPost, new UserParser());
}
/*
* /friend/sendrequest?uid=9937
*/
User friendSendrequest(String uid) throws FoursquareException, FoursquareCredentialsException,
FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_FRIEND_SENDREQUEST), //
new BasicNameValuePair("uid", uid));
return (User) mHttpApi.doHttpRequest(httpPost, new UserParser());
}
/**
* /findfriends/byname?q=john doe, mary smith
*/
@SuppressWarnings("unchecked")
public Group<User> findFriendsByName(String text) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_FIND_FRIENDS_BY_NAME), //
new BasicNameValuePair("q", text));
return (Group<User>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new UserParser()));
}
/**
* /findfriends/byphone?q=555-5555,555-5556
*/
@SuppressWarnings("unchecked")
public Group<User> findFriendsByPhone(String text) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_FIND_FRIENDS_BY_PHONE), //
new BasicNameValuePair("q", text));
return (Group<User>) mHttpApi.doHttpRequest(httpPost, new GroupParser(new UserParser()));
}
/**
* /findfriends/byfacebook?q=friendid,friendid,friendid
*/
@SuppressWarnings("unchecked")
public Group<User> findFriendsByFacebook(String text) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_FIND_FRIENDS_BY_FACEBOOK), //
new BasicNameValuePair("q", text));
return (Group<User>) mHttpApi.doHttpRequest(httpPost, new GroupParser(new UserParser()));
}
/**
* /findfriends/bytwitter?q=yourtwittername
*/
@SuppressWarnings("unchecked")
public Group<User> findFriendsByTwitter(String text) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_FIND_FRIENDS_BY_TWITTER), //
new BasicNameValuePair("q", text));
return (Group<User>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new UserParser()));
}
/**
* /categories
*/
@SuppressWarnings("unchecked")
public Group<Category> categories() throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_CATEGORIES));
return (Group<Category>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new CategoryParser()));
}
/**
* /history
*/
@SuppressWarnings("unchecked")
public Group<Checkin> history(String limit, String sinceid) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpGet httpGet = mHttpApi.createHttpGet(fullUrl(URL_API_HISTORY),
new BasicNameValuePair("l", limit),
new BasicNameValuePair("sinceid", sinceid));
return (Group<Checkin>) mHttpApi.doHttpRequest(httpGet, new GroupParser(new CheckinParser()));
}
/**
* /tip/marktodo
*/
public Tip tipMarkTodo(String tipId) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_TIP_TODO), //
new BasicNameValuePair("tid", tipId));
return (Tip) mHttpApi.doHttpRequest(httpPost, new TipParser());
}
/**
* /tip/markdone
*/
public Tip tipMarkDone(String tipId) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_TIP_DONE), //
new BasicNameValuePair("tid", tipId));
return (Tip) mHttpApi.doHttpRequest(httpPost, new TipParser());
}
/**
* /findfriends/byphoneoremail?p=comma-sep-list-of-phones&e=comma-sep-list-of-emails
*/
public FriendInvitesResult findFriendsByPhoneOrEmail(String phones, String emails) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_FIND_FRIENDS_BY_PHONE_OR_EMAIL), //
new BasicNameValuePair("p", phones),
new BasicNameValuePair("e", emails));
return (FriendInvitesResult) mHttpApi.doHttpRequest(httpPost, new FriendInvitesResultParser());
}
/**
* /invite/byemail?q=comma-sep-list-of-emails
*/
public Response inviteByEmail(String emails) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_INVITE_BY_EMAIL), //
new BasicNameValuePair("q", emails));
return (Response) mHttpApi.doHttpRequest(httpPost, new ResponseParser());
}
/**
* /settings/setpings?self=[on|off]
*/
public Settings setpings(boolean on) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_SETPINGS), //
new BasicNameValuePair("self", on ? "on" : "off"));
return (Settings) mHttpApi.doHttpRequest(httpPost, new SettingsParser());
}
/**
* /settings/setpings?uid=userid
*/
public Settings setpings(String userid, boolean on) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_SETPINGS), //
new BasicNameValuePair(userid, on ? "on" : "off"));
return (Settings) mHttpApi.doHttpRequest(httpPost, new SettingsParser());
}
/**
* /venue/flagclosed?vid=venueid
*/
public Response flagclosed(String venueId) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_VENUE_FLAG_CLOSED), //
new BasicNameValuePair("vid", venueId));
return (Response) mHttpApi.doHttpRequest(httpPost, new ResponseParser());
}
/**
* /venue/flagmislocated?vid=venueid
*/
public Response flagmislocated(String venueId) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_VENUE_FLAG_MISLOCATED), //
new BasicNameValuePair("vid", venueId));
return (Response) mHttpApi.doHttpRequest(httpPost, new ResponseParser());
}
/**
* /venue/flagduplicate?vid=venueid
*/
public Response flagduplicate(String venueId) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_VENUE_FLAG_DUPLICATE), //
new BasicNameValuePair("vid", venueId));
return (Response) mHttpApi.doHttpRequest(httpPost, new ResponseParser());
}
/**
* /venue/prposeedit?vid=venueid&name=...
*/
public Response proposeedit(String venueId, String name, String address, String crossstreet,
String city, String state, String zip, String phone, String categoryId, String geolat,
String geolong, String geohacc, String geovacc, String geoalt) throws FoursquareException,
FoursquareCredentialsException, FoursquareError, IOException {
HttpPost httpPost = mHttpApi.createHttpPost(fullUrl(URL_API_VENUE_PROPOSE_EDIT), //
new BasicNameValuePair("vid", venueId), //
new BasicNameValuePair("name", name), //
new BasicNameValuePair("address", address), //
new BasicNameValuePair("crossstreet", crossstreet), //
new BasicNameValuePair("city", city), //
new BasicNameValuePair("state", state), //
new BasicNameValuePair("zip", zip), //
new BasicNameValuePair("phone", phone), //
new BasicNameValuePair("primarycategoryid", categoryId), //
new BasicNameValuePair("geolat", geolat), //
new BasicNameValuePair("geolong", geolong), //
new BasicNameValuePair("geohacc", geohacc), //
new BasicNameValuePair("geovacc", geovacc), //
new BasicNameValuePair("geoalt", geoalt) //
);
return (Response) mHttpApi.doHttpRequest(httpPost, new ResponseParser());
}
private String fullUrl(String url) {
return mApiBaseUrl + url;
}
/**
* /user/update
* Need to bring this method under control like the rest of the api methods. Leaving it
* in this state as authorization will probably switch from basic auth in the near future
* anyway, will have to be updated. Also unlike the other methods, we're sending up data
* which aren't basic name/value pairs.
*/
public User userUpdate(String imagePathToJpg, String username, String password)
throws SocketTimeoutException, IOException, FoursquareError, FoursquareParseException {
String BOUNDARY = "------------------319831265358979362846";
String lineEnd = "\r\n";
String twoHyphens = "--";
int maxBufferSize = 8192;
File file = new File(imagePathToJpg);
FileInputStream fileInputStream = new FileInputStream(file);
HttpURLConnection conn = mHttpApi.createHttpURLConnectionPost(new URL(fullUrl(URL_API_USER_UPDATE)), BOUNDARY);
conn.setRequestProperty("Authorization", "Basic " + Base64Coder.encodeString(username + ":" + password));
// We are always saving the image to a jpg so we can use .jpg as the extension below.
DataOutputStream dos = new DataOutputStream(conn.getOutputStream());
dos.writeBytes(twoHyphens + BOUNDARY + lineEnd);
dos.writeBytes("Content-Disposition: form-data; name=\"image,jpeg\";filename=\"" + "image.jpeg" +"\"" + lineEnd);
dos.writeBytes("Content-Type: " + "image/jpeg" + lineEnd);
dos.writeBytes(lineEnd);
int bytesAvailable = fileInputStream.available();
int bufferSize = Math.min(bytesAvailable, maxBufferSize);
byte[] buffer = new byte[bufferSize];
int bytesRead = fileInputStream.read(buffer, 0, bufferSize);
int totalBytesRead = bytesRead;
while (bytesRead > 0) {
dos.write(buffer, 0, bufferSize);
bytesAvailable = fileInputStream.available();
bufferSize = Math.min(bytesAvailable, maxBufferSize);
bytesRead = fileInputStream.read(buffer, 0, bufferSize);
totalBytesRead = totalBytesRead + bytesRead;
}
dos.writeBytes(lineEnd);
dos.writeBytes(twoHyphens + BOUNDARY + twoHyphens + lineEnd);
fileInputStream.close();
dos.flush();
dos.close();
UserParser parser = new UserParser();
InputStream is = conn.getInputStream();
try {
return parser.parse(AbstractParser.createXmlPullParser(is));
} finally {
is.close();
}
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.actions;
import com.intellij.compiler.CompilerConfiguration;
import com.intellij.compiler.CompilerConfigurationImpl;
import com.intellij.compiler.ant.*;
import com.intellij.compiler.impl.CompilerUtil;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.compiler.CompilerBundle;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.*;
import java.util.*;
public class GenerateAntBuildAction extends CompileActionBase {
@NonNls private static final String XML_EXTENSION = ".xml";
@Override
protected void doAction(DataContext dataContext, final Project project) {
((CompilerConfigurationImpl)CompilerConfiguration.getInstance(project)).convertPatterns();
final GenerateAntBuildDialog dialog = new GenerateAntBuildDialog(project);
if (dialog.showAndGet()) {
final String[] names = dialog.getRepresentativeModuleNames();
final GenerationOptionsImpl[] genOptions = new GenerationOptionsImpl[1];
if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(
(Runnable)() -> genOptions[0] = ReadAction
.compute(() -> new GenerationOptionsImpl(project, dialog.isGenerateSingleFileBuild(), dialog.isFormsCompilationEnabled(),
dialog.isBackupFiles(), dialog.isForceTargetJdk(), dialog.isRuntimeClasspathInlined(),
dialog.isIdeaHomeGenerated(), names, dialog.getOutputFileName())), "Analyzing project structure...", true, project)) {
return;
}
if (!validateGenOptions(project, genOptions[0])) {
return;
}
generate(project, genOptions[0]);
}
}
/**
* Validate generation options and notify user about possible problems
*
* @param project a context project
* @param genOptions a generation optiosn
* @return true if the generator should proceed with current options or if there is not conflict.
*/
private static boolean validateGenOptions(Project project, GenerationOptionsImpl genOptions) {
final Collection<String> EMPTY = Collections.emptyList();
Collection<String> conflicts = EMPTY;
for (ModuleChunk chunk : genOptions.getModuleChunks()) {
final ChunkCustomCompilerExtension[] customeCompilers = chunk.getCustomCompilers();
if (customeCompilers.length > 1) {
if (conflicts == EMPTY) {
conflicts = new LinkedList<>();
}
conflicts.add(chunk.getName());
}
}
if (!conflicts.isEmpty()) {
StringBuilder msg = new StringBuilder();
for (String conflictingChunk : conflicts) {
msg.append(CompilerBundle.message("generate.ant.build.custom.compiler.conflict.message.row", conflictingChunk));
}
int rc = Messages
.showOkCancelDialog(project, CompilerBundle.message("generate.ant.build.custom.compiler.conflict.message", msg.toString()),
CompilerBundle.message("generate.ant.build.custom.compiler.conflict.title"), Messages.getErrorIcon());
if (rc != Messages.OK) {
return false;
}
}
return true;
}
@Override
public void update(@NotNull AnActionEvent e) {
Presentation presentation = e.getPresentation();
presentation.setEnabled(e.getProject() != null);
}
private void generate(final Project project, final GenerationOptions genOptions) {
ApplicationManager.getApplication().saveAll();
final List<File> filesToRefresh = new ArrayList<>();
final IOException[] _ex = new IOException[]{null};
final List<File> _generated = new ArrayList<>();
try {
if (genOptions.generateSingleFile) {
final File projectBuildFileDestDir = VfsUtil.virtualToIoFile(project.getBaseDir());
final File destFile = new File(projectBuildFileDestDir, genOptions.getBuildFileName());
final File propertiesFile = new File(projectBuildFileDestDir, genOptions.getPropertiesFileName());
ensureFilesWritable(project, new File[]{destFile, propertiesFile});
}
else {
final List<File> allFiles = new ArrayList<>();
final File projectBuildFileDestDir = VfsUtil.virtualToIoFile(project.getBaseDir());
allFiles.add(new File(projectBuildFileDestDir, genOptions.getBuildFileName()));
allFiles.add(new File(projectBuildFileDestDir, genOptions.getPropertiesFileName()));
final ModuleChunk[] chunks = genOptions.getModuleChunks();
for (final ModuleChunk chunk : chunks) {
final File chunkBaseDir = BuildProperties.getModuleChunkBaseDir(chunk);
allFiles.add(new File(chunkBaseDir, BuildProperties.getModuleChunkBuildFileName(chunk) + XML_EXTENSION));
}
ensureFilesWritable(project, allFiles.toArray(new File[0]));
}
new Task.Modal(project, CompilerBundle.message("generate.ant.build.title"), false) {
@Override
public void run(@NotNull final ProgressIndicator indicator) {
indicator.setIndeterminate(true);
indicator.setText(CompilerBundle.message("generate.ant.build.progress.message"));
try {
final File[] generated;
if (genOptions.generateSingleFile) {
generated = generateSingleFileBuild(project, genOptions, filesToRefresh);
}
else {
generated = generateMultipleFileBuild(project, genOptions, filesToRefresh);
}
if (generated != null) {
ContainerUtil.addAll(_generated, generated);
}
}
catch (IOException e) {
_ex[0] = e;
}
}
}.queue();
}
catch (IOException e) {
_ex[0] = e;
}
if (_ex[0] != null) {
Messages.showErrorDialog(project, CompilerBundle.message("error.ant.files.generate.failed", _ex[0].getMessage()),
CompilerBundle.message("generate.ant.build.title"));
}
else {
StringBuilder filesString = new StringBuilder();
for (int idx = 0; idx < _generated.size(); idx++) {
final File file = _generated.get(idx);
if (idx > 0) {
filesString.append(",\n");
}
filesString.append(file.getPath());
}
Messages.showInfoMessage(project, CompilerBundle.message("message.ant.files.generated.ok", filesString.toString()),
CompilerBundle.message("generate.ant.build.title"));
}
if (filesToRefresh.size() > 0) {
CompilerUtil.refreshIOFiles(filesToRefresh);
}
}
private boolean backup(final File file, final Project project, GenerationOptions genOptions, List<? super File> filesToRefresh) {
if (!genOptions.backupPreviouslyGeneratedFiles || !file.exists()) {
return true;
}
final String path = file.getPath();
final int extensionIndex = path.lastIndexOf(".");
final String extension = path.substring(extensionIndex);
//noinspection HardCodedStringLiteral
final String backupPath = path.substring(0, extensionIndex) +
"_" +
new Date(file.lastModified()).toString().replaceAll("\\s+", "_").replaceAll(":", "-") +
extension;
final File backupFile = new File(backupPath);
boolean ok;
try {
FileUtil.rename(file, backupFile);
ok = true;
}
catch (IOException e) {
Messages.showErrorDialog(project, CompilerBundle.message("error.ant.files.backup.failed", path),
CompilerBundle.message("generate.ant.build.title"));
ok = false;
}
filesToRefresh.add(backupFile);
return ok;
}
private File[] generateSingleFileBuild(Project project, GenerationOptions genOptions, List<? super File> filesToRefresh) throws IOException {
final File projectBuildFileDestDir = VfsUtil.virtualToIoFile(project.getBaseDir());
projectBuildFileDestDir.mkdirs();
final File destFile = new File(projectBuildFileDestDir, genOptions.getBuildFileName());
final File propertiesFile = new File(projectBuildFileDestDir, genOptions.getPropertiesFileName());
if (!backup(destFile, project, genOptions, filesToRefresh)) {
return null;
}
if (!backup(propertiesFile, project, genOptions, filesToRefresh)) {
return null;
}
generateSingleFileBuild(project, genOptions, destFile, propertiesFile);
filesToRefresh.add(destFile);
filesToRefresh.add(propertiesFile);
return new File[]{destFile, propertiesFile};
}
public static void generateSingleFileBuild(final Project project,
final GenerationOptions genOptions,
final File buildxmlFile,
final File propertiesFile) throws IOException {
FileUtil.createIfDoesntExist(buildxmlFile);
FileUtil.createIfDoesntExist(propertiesFile);
try (PrintWriter dataOutput = makeWriter(buildxmlFile)) {
new SingleFileProjectBuild(project, genOptions).generate(dataOutput);
}
try (PrintWriter propertiesOut = makeWriter(propertiesFile)) {
new PropertyFileGeneratorImpl(project, genOptions).generate(propertiesOut);
}
}
/**
* Create print writer over file with UTF-8 encoding
*
* @param buildxmlFile a file to write to
* @return a created print writer
* @throws UnsupportedEncodingException if endcoding not found
* @throws FileNotFoundException if file not found
*/
private static PrintWriter makeWriter(final File buildxmlFile) throws UnsupportedEncodingException, FileNotFoundException {
return new PrintWriter(new OutputStreamWriter(new FileOutputStream(buildxmlFile), CharsetToolkit.UTF8_CHARSET));
}
private void ensureFilesWritable(Project project, File[] files) throws IOException {
final List<VirtualFile> toCheck = new ArrayList<>(files.length);
final LocalFileSystem lfs = LocalFileSystem.getInstance();
for (File file : files) {
final VirtualFile vFile = lfs.findFileByIoFile(file);
if (vFile != null) {
toCheck.add(vFile);
}
}
final ReadonlyStatusHandler.OperationStatus status =
ReadonlyStatusHandler.getInstance(project).ensureFilesWritable(VfsUtil.toVirtualFileArray(toCheck));
if (status.hasReadonlyFiles()) {
throw new IOException(status.getReadonlyFilesMessage());
}
}
public File[] generateMultipleFileBuild(Project project, GenerationOptions genOptions, List<? super File> filesToRefresh) throws IOException {
final File projectBuildFileDestDir = VfsUtil.virtualToIoFile(project.getBaseDir());
projectBuildFileDestDir.mkdirs();
final List<File> generated = new ArrayList<>();
final File projectBuildFile = new File(projectBuildFileDestDir, genOptions.getBuildFileName());
final File propertiesFile = new File(projectBuildFileDestDir, genOptions.getPropertiesFileName());
final ModuleChunk[] chunks = genOptions.getModuleChunks();
final File[] chunkFiles = new File[chunks.length];
for (int idx = 0; idx < chunks.length; idx++) {
final ModuleChunk chunk = chunks[idx];
final File chunkBaseDir = BuildProperties.getModuleChunkBaseDir(chunk);
chunkFiles[idx] = new File(chunkBaseDir, BuildProperties.getModuleChunkBuildFileName(chunk) + XML_EXTENSION);
}
if (!backup(projectBuildFile, project, genOptions, filesToRefresh)) {
return null;
}
if (!backup(propertiesFile, project, genOptions, filesToRefresh)) {
return null;
}
FileUtil.createIfDoesntExist(projectBuildFile);
try (PrintWriter mainDataOutput = makeWriter(projectBuildFile)) {
final MultipleFileProjectBuild build = new MultipleFileProjectBuild(project, genOptions);
build.generate(mainDataOutput);
generated.add(projectBuildFile);
// the sequence in which modules are imported is important cause output path properties for dependent modules should be defined first
for (int idx = 0; idx < chunks.length; idx++) {
final ModuleChunk chunk = chunks[idx];
final File chunkBuildFile = chunkFiles[idx];
final File chunkBaseDir = chunkBuildFile.getParentFile();
if (chunkBaseDir != null) {
chunkBaseDir.mkdirs();
}
final boolean moduleBackupOk = backup(chunkBuildFile, project, genOptions, filesToRefresh);
if (!moduleBackupOk) {
return null;
}
FileUtil.createIfDoesntExist(chunkBuildFile);
try (PrintWriter out = makeWriter(chunkBuildFile)) {
new ModuleChunkAntProject(project, chunk, genOptions).generate(out);
generated.add(chunkBuildFile);
}
}
}
// properties
try (PrintWriter propertiesOut = makeWriter(propertiesFile)) {
new PropertyFileGeneratorImpl(project, genOptions).generate(propertiesOut);
generated.add(propertiesFile);
}
filesToRefresh.addAll(generated);
return generated.toArray(new File[0]);
}
}
| |
package com.vladmihalcea.hibernate.masterclass.laboratory.cascade;
import com.vladmihalcea.hibernate.masterclass.laboratory.util.AbstractTest;
import org.hibernate.annotations.Immutable;
import org.junit.Test;
import javax.persistence.*;
import java.util.Date;
/**
 * OneToOneCascadeTest - Test to check @OneToOne Cascading
 * <p>
 * Covers a bidirectional one-to-one (Post/PostDetails, with CascadeType.ALL,
 * orphanRemoval, and the child borrowing the parent's primary key via @MapsId)
 * and a unidirectional one-to-one mapped through a join table (Commit/BranchMerge).
 *
 * @author Vlad Mihalcea
 */
public class OneToOneCascadeTest extends AbstractTest {
    @Override
    protected Class<?>[] entities() {
        // Entities registered with the test SessionFactory.
        return new Class<?>[]{
                Post.class,
                PostDetails.class,
                Commit.class,
                BranchMerge.class
        };
    }
    // Persists a Post with attached PostDetails (CascadeType.ALL propagates the
    // persist to the details) and returns the now-detached Post.
    public Post newPost() {
        return doInTransaction(session -> {
            Post post = new Post();
            post.setName("Hibernate Master Class");
            PostDetails details = new PostDetails();
            post.addDetails(details);
            session.persist(post);
            return post;
        });
    }
    @Test
    public void testCascadeTypeMerge() {
        LOGGER.info("Test CascadeType.MERGE");
        Post post = newPost();
        // Modify both the detached parent and child; merge should cascade both changes.
        post.setName("Hibernate Master Class Training Material");
        post.getDetails().setVisible(true);
        doInTransaction(session -> {
            session.merge(post);
        });
    }
    @Test
    public void testOrphanRemoval() {
        LOGGER.info("Test orphan removal");
        newPost();
        doInTransaction(session -> {
            Post post = (Post) session.get(Post.class, 1L);
            // Dissociating the details should trigger orphanRemoval (DELETE of PostDetails).
            post.removeDetails();
        });
    }
    @Test
    public void testCascadeTypeDelete() {
        LOGGER.info("Test CascadeType.DELETE");
        Post post = newPost();
        // Deleting the parent should cascade the delete to PostDetails.
        doInTransaction(session -> {
            session.delete(post);
        });
    }
    @Test
    public void testCascadeForUnidirectionalAssociation() {
        LOGGER.info("Test Cascade for unidirectional");
        doInTransaction(session -> {
            Commit commit = new Commit("Reintegrate feature branch FP-123");
            commit.addBranchMerge(
                    "FP-123",
                    "develop"
            );
            session.persist(commit);
        });
        doInTransaction(session -> {
            Commit commit = (Commit) session.get(Commit.class, 1L);
            // Delete should cascade across the join table to the BranchMerge row too.
            session.delete(commit);
        });
    }
    @Entity(name = "Post")
    public static class Post {
        @Id
        @GeneratedValue(strategy = GenerationType.AUTO)
        private Long id;
        private String name;
        // Inverse side of the association; all lifecycle operations cascade to
        // the details, and a dissociated details row is deleted (orphanRemoval).
        @OneToOne(mappedBy = "post",
                cascade = CascadeType.ALL, orphanRemoval = true)
        private PostDetails details;
        public Long getId() {
            return id;
        }
        public PostDetails getDetails() {
            return details;
        }
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
        // Keeps both sides of the bidirectional association in sync.
        public void addDetails(PostDetails details) {
            this.details = details;
            details.setPost(this);
        }
        // Unlinks both sides; with orphanRemoval=true this deletes the details row.
        public void removeDetails() {
            if (details != null) {
                details.setPost(null);
            }
            this.details = null;
        }
    }
    @Entity(name = "PostDetails")
    public static class PostDetails {
        // Primary key is borrowed from the owning Post via @MapsId (no generator here).
        @Id
        private Long id;
        @Column(name = "created_on")
        @Temporal(TemporalType.TIMESTAMP)
        private Date createdOn = new Date();
        private boolean visible;
        @OneToOne
        @JoinColumn(name = "id")
        @MapsId
        private Post post;
        public Long getId() {
            return id;
        }
        public void setVisible(boolean visible) {
            this.visible = visible;
        }
        public void setPost(Post post) {
            this.post = post;
        }
    }
    @Entity(name = "Commit")
    public static class Commit {
        @Id
        @GeneratedValue(strategy = GenerationType.AUTO)
        private Long id;
        private String comment;
        // Unidirectional one-to-one mapped through a dedicated join table.
        @OneToOne(cascade = CascadeType.ALL)
        @JoinTable(name = "Branch_Merge_Commit",
                joinColumns = @JoinColumn(name = "commit_id", referencedColumnName = "id"),
                inverseJoinColumns = @JoinColumn(name = "branch_merge_id", referencedColumnName = "id")
        )
        private BranchMerge branchMerge;
        public Commit() {
        }
        public Commit(String comment) {
            this.comment = comment;
        }
        public Long getId() {
            return id;
        }
        public void addBranchMerge(String fromBranch, String toBranch) {
            this.branchMerge = new BranchMerge(fromBranch, toBranch);
        }
        public void removeBranchMerge() {
            this.branchMerge = null;
        }
    }
    @Entity(name = "BranchMerge")
    @Immutable
    public static class BranchMerge {
        @Id
        @GeneratedValue(strategy = GenerationType.AUTO)
        private Long id;
        private String fromBranch;
        private String toBranch;
        public BranchMerge() {
        }
        public BranchMerge(String fromBranch, String toBranch) {
            this.fromBranch = fromBranch;
            this.toBranch = toBranch;
        }
        public Long getId() {
            return id;
        }
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.fileEditor.impl.text;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.FileEditorStateLevel;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.ui.EditorNotifications;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.concurrency.Semaphore;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Performs the expensive part of text editor initialization (via
 * {@code TextEditorImpl.loadEditorInBackground()}) on a pooled background thread
 * so that opening a file does not freeze the EDT. Until loading completes, the
 * editor's content panel is hidden (optionally behind a "Loading" indicator) and
 * client actions are queued through {@link #performWhenLoaded(Editor, Runnable)}.
 */
public class AsyncEditorLoader {
  private static final ExecutorService ourExecutor = AppExecutorUtil.createBoundedApplicationPoolExecutor("AsyncEditorLoader Pool", 2);
  // Marker stored in the editor's user data while loading is still in progress;
  // cleared in loadingFinished().
  private static final Key<AsyncEditorLoader> ASYNC_LOADER = Key.create("ASYNC_LOADER");
  // How long the EDT is willing to block waiting for a quick background load
  // before falling back to showing the "Loading" indicator.
  private static final int SYNCHRONOUS_LOADING_WAITING_TIME_MS = 200;
  // Upper bound on waiting for the document to be committed before loading anyway.
  private static final int DOCUMENT_COMMIT_WAITING_TIME_MS = 5_000;
  @NotNull private final Editor myEditor;
  @NotNull private final Project myProject;
  @NotNull private final TextEditorImpl myTextEditor;
  @NotNull private final TextEditorComponent myEditorComponent;
  @NotNull private final TextEditorProvider myProvider;
  // Actions queued via performWhenLoaded(); replayed on EDT once loading finishes.
  private final List<Runnable> myDelayedActions = new ArrayList<>();
  // Editor state handed in before loading completed; its folding is applied later.
  private TextEditorState myDelayedState;
  private final AtomicBoolean myLoadingFinished = new AtomicBoolean();
  AsyncEditorLoader(@NotNull TextEditorImpl textEditor, @NotNull TextEditorComponent component, @NotNull TextEditorProvider provider) {
    myProvider = provider;
    myTextEditor = textEditor;
    myProject = textEditor.myProject;
    myEditorComponent = component;
    myEditor = textEditor.getEditor();
    myEditor.putUserData(ASYNC_LOADER, this);
    // Content stays hidden until loadingFinished() makes it visible.
    myEditorComponent.getContentPanel().setVisible(false);
  }
  // Must be called on EDT: kicks off background loading and either finishes
  // synchronously (if loading was quick) or shows the loading indicator.
  void start() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    Future<Runnable> asyncLoading = scheduleLoading();
    boolean showProgress = true;
    if (worthWaiting()) {
      /*
       * Possible alternatives:
       * 1. show "Loading" from the beginning, then it'll be always noticeable at least in fade-out phase
       * 2. show a gray screen for some time and then "Loading" if it's still loading; it'll produce quick background blinking for all editors
       * 3. show non-highlighted and unfolded editor as "Loading" background and allow it to relayout at the end of loading phase
       * 4. freeze EDT a bit and hope that for small editors it'll suffice and for big ones show "Loading" after that.
       * This strategy seems to produce minimal blinking annoyance.
       */
      Runnable continuation = resultInTimeOrNull(asyncLoading);
      if (continuation != null) {
        // Background load finished within the grace period: no indicator needed.
        showProgress = false;
        loadingFinished(continuation);
      }
    }
    if (showProgress) {
      myEditorComponent.startLoading();
    }
  }
  private Future<Runnable> scheduleLoading() {
    long commitDeadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(DOCUMENT_COMMIT_WAITING_TIME_MS);
    // we can't return the result of "nonBlocking" call below because it's only finished on EDT later,
    // but we need to get the result of bg calculation in the same EDT event, if it's quick
    CompletableFuture<Runnable> future = new CompletableFuture<>();
    ReadAction
      .nonBlocking(() -> {
        waitForCommit(commitDeadline);
        Runnable runnable = myTextEditor.loadEditorInBackground();
        future.complete(runnable);
        return runnable;
      })
      .expireWith(myEditorComponent)
      .expireWith(myProject)
      .finishOnUiThread(ModalityState.any(), result -> loadingFinished(result))
      .submit(ourExecutor);
    return future;
  }
  // Polls in short intervals until the document is committed or the deadline
  // passes; honors progress cancellation between polls.
  private void waitForCommit(long commitDeadlineNs) {
    Document document = myEditor.getDocument();
    PsiDocumentManager pdm = PsiDocumentManager.getInstance(myProject);
    if (!pdm.isCommitted(document) && System.nanoTime() < commitDeadlineNs) {
      Semaphore semaphore = new Semaphore(1);
      pdm.performForCommittedDocument(document, semaphore::up);
      while (System.nanoTime() < commitDeadlineNs && !semaphore.waitFor(10)) {
        ProgressManager.checkCanceled();
      }
    }
  }
  private boolean isDone() {
    return myLoadingFinished.get();
  }
  private boolean worthWaiting() {
    // cannot perform commitAndRunReadAction in parallel to EDT waiting
    return !PsiDocumentManager.getInstance(myProject).hasUncommitedDocuments() &&
           !ApplicationManager.getApplication().isWriteAccessAllowed();
  }
  // Waits a short, bounded time for the future; returns null on timeout or interrupt.
  private static <T> T resultInTimeOrNull(@NotNull Future<T> future) {
    try {
      return future.get(SYNCHRONOUS_LOADING_WAITING_TIME_MS, TimeUnit.MILLISECONDS);
    }
    catch (InterruptedException | TimeoutException ignored) {
    }
    catch (ExecutionException e) {
      throw new RuntimeException(e);
    }
    return null;
  }
  // Reachable from both the synchronous fast path in start() and the UI-thread
  // callback in scheduleLoading(); the CAS ensures the body runs at most once.
  private void loadingFinished(Runnable continuation) {
    if (!myLoadingFinished.compareAndSet(false, true)) return;
    myEditor.putUserData(ASYNC_LOADER, null);
    if (myEditorComponent.isDisposed()) return;
    if (continuation != null) {
      continuation.run();
    }
    if (myEditorComponent.isLoading()) {
      myEditorComponent.stopLoading();
    }
    myEditorComponent.getContentPanel().setVisible(true);
    if (myDelayedState != null && PsiDocumentManager.getInstance(myProject).isCommitted(myEditor.getDocument())) {
      TextEditorState state = new TextEditorState();
      state.RELATIVE_CARET_POSITION = Integer.MAX_VALUE; // don't do any scrolling
      state.setFoldingState(myDelayedState.getFoldingState());
      myProvider.setStateImpl(myProject, myEditor, state, true);
      myDelayedState = null;
    }
    // Replay queued client actions with scroll animation suppressed.
    for (Runnable runnable : myDelayedActions) {
      myEditor.getScrollingModel().disableAnimation();
      runnable.run();
    }
    myEditor.getScrollingModel().enableAnimation();
    if (FileEditorManager.getInstance(myProject).getSelectedTextEditor() == myEditor) {
      IdeFocusManager.getInstance(myProject).requestFocusInProject(myTextEditor.getPreferredFocusedComponent(), myProject);
    }
    EditorNotifications.getInstance(myProject).updateNotifications(myTextEditor.myFile);
  }
  /**
   * Runs {@code runnable} immediately if the editor has already finished loading,
   * otherwise queues it to be run (still on EDT) when loading completes.
   */
  public static void performWhenLoaded(@NotNull Editor editor, @NotNull Runnable runnable) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    AsyncEditorLoader loader = editor.getUserData(ASYNC_LOADER);
    if (loader == null) {
      runnable.run();
    }
    else {
      loader.myDelayedActions.add(runnable);
    }
  }
  @NotNull
  TextEditorState getEditorState(@NotNull FileEditorStateLevel level) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    TextEditorState state = myProvider.getStateImpl(myProject, myEditor, level);
    // While still loading, expose the folding state that was set before loading,
    // so state round-trips do not lose it.
    if (!isDone() && myDelayedState != null) {
      state.setDelayedFoldState(myDelayedState::getFoldingState);
    }
    return state;
  }
  void setEditorState(@NotNull final TextEditorState state, boolean exactState) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (!isDone()) {
      // Remember the state so folding can be re-applied after loading finishes.
      myDelayedState = state;
    }
    myProvider.setStateImpl(myProject, myEditor, state, exactState);
  }
  public static boolean isEditorLoaded(@NotNull Editor editor) {
    return editor.getUserData(ASYNC_LOADER) == null;
  }
}
| |
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pluto.container.bean.processor;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.enterprise.context.spi.Contextual;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import javax.portlet.PortletRequest;
import javax.portlet.StateAwareResponse;
import javax.portlet.annotations.PortletSerializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This is a container for RenderStateScoped CDI beans.
 * <p>
 * One holder is installed per request-processing thread through a ThreadLocal.
 * Bean instances are created lazily on first access and initialized from the
 * request's render parameters; when the holder is removed with a
 * {@link StateAwareResponse}, each bean's state is serialized back into render
 * parameters before the bean is destroyed.
 *
 * @author Scott Nicklous
 *
 */
public class PortletStateScopedBeanHolder implements Serializable {
   /** Logger. */
   private static final Logger LOG = LoggerFactory.getLogger(PortletStateScopedBeanHolder.class);
   private static final boolean isTrace = LOG.isTraceEnabled();
   private static final long serialVersionUID = 6014843414216617217L;
   // The ThreadLocal manages the holders so that there is one holder per thread.
   private static final ThreadLocal<PortletStateScopedBeanHolder> holders =
         new ThreadLocal<PortletStateScopedBeanHolder>();
   /**
    * Pairs a bean instance with the CreationalContext it was created from.
    * Declared static so that a serialized BeanInstance does not drag along a
    * hidden reference to the enclosing holder (a non-static inner class would).
    */
   private static class BeanInstance<T> implements Serializable {
      private static final long serialVersionUID = -4173708394115905180L;
      CreationalContext<T> crco;
      T instance;
   }
   // Each instance of the bean holder gets its own map.
   // Key: Bean Type, Value: Structure containing CreationalContext and the specific
   // bean instance. Final: the map itself is never replaced, only mutated.
   private final Map<Contextual<?>, BeanInstance<?>> beans =
         new ConcurrentHashMap<Contextual<?>, BeanInstance<?>>();
   private final PortletRequest request;
   private final PortletStateScopedConfig config;
   /**
    * private constructor
    */
   private PortletStateScopedBeanHolder(PortletRequest req, PortletStateScopedConfig config) {
      this.request = req;
      this.config = config;
   }
   /**
    * Sets the portlet session bean holder in a ThreadLocal object for the given
    * portlet session. If no bean holder exists in the session, a new one is created.
    *
    * @param req The portlet request
    * @param config The render state scope configuration
    */
   public static void setBeanHolder(PortletRequest req, PortletStateScopedConfig config) {
      if (isTrace) {
         StringBuilder txt = new StringBuilder(80);
         txt.append("Setting render state bean holder.");
         txt.append(" ThreadId=").append(Thread.currentThread().getId());
         txt.append(", config: ").append((config == null) ? "null" : config.getConfigAsString());
         LOG.trace(txt.toString());
      }
      PortletStateScopedBeanHolder holder = new PortletStateScopedBeanHolder(req, config);
      holders.set(holder);
   }
   /**
    * Removes the bean holder for the current request. Deletes all beans contained therein.
    * If response is provided, the beans are serialized into render parameters before removal.
    *
    * @param resp The response for setting the bean values; may be null
    */
   public static void removeBeanHolder(StateAwareResponse resp) {
      PortletStateScopedBeanHolder bh = getBeanHolder();
      if (bh != null) {
         bh.removeAll(resp);
      }
      holders.remove();
      if (isTrace) {
         StringBuilder txt = new StringBuilder(80);
         txt.append("Removed render state bean holder.");
         txt.append(" ThreadId=").append(Thread.currentThread().getId());
         LOG.trace(txt.toString());
      }
   }
   /**
    * Returns the portlet session bean holder that was set for the thread.
    *
    * @return the holder, or null if none was set for this thread
    */
   public static PortletStateScopedBeanHolder getBeanHolder() {
      return holders.get();
   }
   /**
    * Removes the bean holder for the current thread and
    * returns the removed instance to the caller.
    *
    * @return the removed bean holder
    */
   public static PortletStateScopedBeanHolder deregister() {
      PortletStateScopedBeanHolder holder = holders.get();
      holders.remove();
      return holder;
   }
   /**
    * Registers the provided bean holder for the current thread.
    *
    * @param holder the bean holder to register
    */
   public static void register(PortletStateScopedBeanHolder holder) {
      holders.set(holder);
   }
   /**
    * Returns existing instance of object, or null if no instance exists.
    *
    * @param bean The bean type
    * @return The bean instance
    */
   @SuppressWarnings("unchecked")
   public <T> T getBean(Contextual<T> bean) {
      BeanInstance<?> bi = beans.get(bean);
      return (bi == null) ? null : (T) bi.instance;
   }
   /**
    * Returns an instance for the contextual type. If no existing bean is available,
    * a new instance is created and initialized from the request's render parameters.
    *
    * @param bean Contextual type (Bean) for which an instance is desired
    * @param crco The creational context used if a new instance must be created
    * @return The instance
    */
   @SuppressWarnings("unchecked")
   public <T> T getBean(Contextual<T> bean, CreationalContext<T> crco) {
      BeanInstance<?> bi = beans.get(bean);
      if (bi == null) {
         // No bean available, so create one.
         BeanInstance<T> newbi = new BeanInstance<T>();
         newbi.crco = crco;
         newbi.instance = bean.create(crco);
         // RenderStateScoped beans are required to implement PortletSerializable.
         assert newbi.instance instanceof PortletSerializable;
         bi = newbi;
         // Determine the parameter name.
         // initialize the bean with the proper values.
         assert bean instanceof Bean<?>;
         String parmName = config.getParamName((Bean<?>) bean);
         String[] vals = request.getRenderParameters().getValues(parmName);
         if (vals == null) {
            vals = new String[] {};
         }
         PortletSerializable thisBean = (PortletSerializable) newbi.instance;
         thisBean.deserialize(vals);
         beans.put(bean, newbi);
         if (isTrace) {
            StringBuilder txt = new StringBuilder(80);
            txt.append("Created bean: ");
            txt.append(((Bean<?>) bean).getBeanClass().getSimpleName());
            txt.append(", Render parameter name: ").append(parmName);
            txt.append(", Values: ").append(Arrays.toString(vals));
            LOG.trace(txt.toString());
         }
      }
      return (T) bi.instance;
   }
   public String getParameterName(Class<?> beanClass) {
      return config.getParamName(beanClass);
   }
   /**
    * Removes & destroys the given bean
    * @param bean the contextual type to remove
    */
   @SuppressWarnings("unchecked")
   protected <T> void remove(Contextual<T> bean) {
      BeanInstance<?> bi = beans.get(bean);
      if (isTrace) {
         StringBuilder txt = new StringBuilder(80);
         txt.append("Removing render state scoped bean: ");
         if (bean instanceof Bean<?>) {
            Bean<?> b = (Bean<?>) bean;
            txt.append(b.getBeanClass().getSimpleName());
         }
         if (bi == null) {
            txt.append(", instance is null.");
         }
         LOG.trace(txt.toString());
      }
      if (bi != null) {
         beans.remove(bean);
         // Release dependent objects, then destroy the bean itself.
         bi.crco.release();
         bean.destroy((T)bi.instance, (CreationalContext<T>)bi.crco);
      }
   }
   /**
    * Remove & destroy all beans. if a response is provided, store the bean state.
    *
    * @param resp The state aware response; may be null
    */
   protected void removeAll(StateAwareResponse resp) {
      // ConcurrentHashMap's key set is weakly consistent, so removing entries
      // via remove(bean) while iterating is safe here.
      for (Contextual<?> bean : beans.keySet()) {
         if (resp != null) {
            PortletSerializable thisBean = (PortletSerializable) beans.get(bean).instance;
            String[] vals = thisBean.serialize();
            String pn = config.getParamName((Bean<?>) bean);
            resp.getRenderParameters().setValues(pn, vals);
            if (isTrace) {
               StringBuilder txt = new StringBuilder(128);
               txt.append("Stored parameter for portlet with namespace: ");
               txt.append(resp.getNamespace());
               txt.append(", paramName: ").append(pn);
               txt.append(", Values: ").append(Arrays.toString(vals));
               LOG.trace(txt.toString());
            }
         }
         remove(bean);
      }
   }
}
| |
/**
* Copyright 2014 Cisco Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cisco.oss.foundation.directory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import com.cisco.oss.foundation.directory.entity.OperationalStatus;
import com.cisco.oss.foundation.directory.entity.ProvidedServiceInstance;
import com.cisco.oss.foundation.directory.entity.ServiceInstance;
import com.cisco.oss.foundation.directory.exception.ServiceException;
import com.cisco.oss.foundation.directory.query.ServiceInstanceQuery;
import com.cisco.oss.foundation.directory.test.TestServiceDirectoryManagerFactory;
/**
 * Integration-style tests that exercise registration, lookup, update, and query
 * operations against the in-memory test implementation of the Service Directory
 * ({@code TestServiceDirectoryManagerFactory}).
 * <p>
 * Note: all {@code Assert.assertEquals} calls pass arguments in JUnit's
 * (expected, actual) order so failure messages read correctly.
 */
public class InMemTestCase {
    @Before
    public void build() throws ServiceException {
        // For the Application do integration test, it just need to see the ServiceDirectoryManagerFactory configuration.
        // ServiceDirectory.getServiceDirectoryConfig().setProperty(ConfigurationConstants.SD_API_SERVICE_DIRECTORY_MANAGER_FACTORY_PROVIDER_PROPERTY,
        //         "com.cisco.oss.foundation.directory.test.TestServiceDirectoryManagerFactory");
        // Set the ServiceDirectoryManagerFactory in runtime.
        ServiceDirectoryManagerFactory manager = new TestServiceDirectoryManagerFactory();
        ServiceDirectory.reinitServiceDirectoryManagerFactory(manager);
    }
    /** Register an UP instance, look it up, unregister, verify it is gone. */
    @Test
    public void testRegisterService() throws ServiceException{
        String serviceName = "InMemTest-01";
        ProvidedServiceInstance ist = createInstance(serviceName);
        ist.setStatus(OperationalStatus.UP);
        RegistrationManager register = ServiceDirectory.getRegistrationManager();
        LookupManager lookup = ServiceDirectory.getLookupManager();
        register.registerService(ist);
        ServiceInstance instance = lookup.lookupInstance(serviceName);
        Assert.assertEquals(ist.getProviderId(), instance.getInstanceId());
        register.unregisterService(serviceName, ist.getProviderId());
        ServiceInstance instance1 = lookup.lookupInstance(serviceName);
        Assert.assertNull(instance1);
    }
    /** Same as testRegisterService, but the status is set after createInstance. */
    @Test
    public void testRegisterWithStatus() throws ServiceException{
        String serviceName = "InMemTest-01";
        ProvidedServiceInstance ist = createInstance(serviceName);
        RegistrationManager register = ServiceDirectory.getRegistrationManager();
        LookupManager lookup = ServiceDirectory.getLookupManager();
        ist.setStatus(OperationalStatus.UP);
        register.registerService(ist);
        ServiceInstance instance = lookup.lookupInstance(serviceName);
        // (expected, actual): the registered provider id should be the looked-up instance id.
        Assert.assertEquals(ist.getProviderId(), instance.getInstanceId());
        register.unregisterService(serviceName, ist.getProviderId());
        ServiceInstance instance1 = lookup.lookupInstance(serviceName);
        Assert.assertNull(instance1);
    }
    /** Update URI/metadata and operational status; DOWN instances disappear from lookup. */
    @Test
    public void testUpdateServiceInstance() throws ServiceException{
        String serviceName = "MockSvc001";
        ProvidedServiceInstance instance = createInstance(serviceName);
        RegistrationManager register = ServiceDirectory.getRegistrationManager();
        instance.setStatus(OperationalStatus.UP);
        register.registerService(instance);
        LookupManager lookup = ServiceDirectory.getLookupManager();
        ServiceInstance sInstance = lookup.lookupInstance(serviceName);
        Assert.assertEquals(instance.getProviderId(), sInstance.getInstanceId());
        // Update the URI and metadata in the ServiceInstance.
        String newUri = "http://new.cisco.test:8081/test";
        instance.setUri(newUri);
        instance.getMetadata().put("keynew", "valuenew");
        register.updateService(instance);
        sInstance = lookup.lookupInstance(serviceName);
        Assert.assertEquals(instance.getProviderId(), sInstance.getInstanceId());
        Assert.assertEquals(newUri, sInstance.getUri());
        Assert.assertEquals("valuenew", sInstance.getMetadata().get("keynew"));
        // Update the OperationalStatus in the ServiceInstance.
        register.updateServiceOperationalStatus(serviceName, instance.getProviderId(), OperationalStatus.DOWN);
        sInstance = lookup.lookupInstance(serviceName);
        // The ServiceInstance switched to DOWN, can not be found in LookupManager.
        Assert.assertNull(sInstance);
        // cleanup.
        register.unregisterService(serviceName, instance.getProviderId());
        ServiceInstance sInstance1 = lookup.lookupInstance(serviceName);
        Assert.assertNull(sInstance1);
    }
    /** Query instances of one service by exact metadata value and by metadata key presence. */
    @Test
    public void testQueryServiceInstances() throws ServiceException{
        String serviceName = "InMemTest-01";
        ProvidedServiceInstance ist = createInstance(serviceName);
        RegistrationManager register = ServiceDirectory.getRegistrationManager();
        LookupManager lookup = ServiceDirectory.getLookupManager();
        // Re-register the same ProvidedServiceInstance with a different port each
        // time; each (address, port) pair becomes a distinct instance.
        ist.getMetadata().put("key", "instance1");
        ist.setPort(8091);
        ist.setStatus(OperationalStatus.UP);
        register.registerService(ist);
        ist.getMetadata().put("key", "instance2");
        ist.setPort(8092);
        register.registerService(ist);
        ist.getMetadata().put("key", "instance3");
        ist.setPort(8093);
        register.registerService(ist);
        ist.getMetadata().put("key", "instance4");
        ist.setPort(8094);
        register.registerService(ist);
        ServiceInstanceQuery query = new ServiceInstanceQuery();
        query.addQueryCriterion(new ServiceInstanceQuery.EqualQueryCriterion("key", "instance1"));
        ServiceInstance inst = lookup.queryInstanceByName(serviceName, query);
        Assert.assertEquals("127.0.0.1-8091", inst.getInstanceId());
        query = new ServiceInstanceQuery();
        query.addQueryCriterion(new ServiceInstanceQuery.EqualQueryCriterion("key", "instance2"));
        inst = lookup.queryInstanceByName(serviceName, query);
        Assert.assertEquals("127.0.0.1-8092", inst.getInstanceId());
        // A "contains key" criterion matches all four registered instances.
        query = new ServiceInstanceQuery();
        query.addQueryCriterion(new ServiceInstanceQuery.ContainQueryCriterion("key"));
        List<ServiceInstance> insts = lookup.queryInstancesByName(serviceName, query);
        Assert.assertEquals(4, insts.size());
        register.unregisterService(serviceName, "127.0.0.1-8091");
        register.unregisterService(serviceName, "127.0.0.1-8092");
        register.unregisterService(serviceName, "127.0.0.1-8093");
        register.unregisterService(serviceName, "127.0.0.1-8094");
        ServiceInstance instance1 = lookup.lookupInstance(serviceName);
        Assert.assertNull(instance1);
    }
    /** Cross-service metadata queries; repeated single-instance queries distribute evenly. */
    @Test
    public void testQueryServiceByMetadataKey() throws ServiceException{
        String serviceName1 = "InMemTest-01";
        String serviceName2 = "InMemTest-02";
        RegistrationManager register = ServiceDirectory.getRegistrationManager();
        LookupManager lookup = ServiceDirectory.getLookupManager();
        ProvidedServiceInstance ist1 = createInstance(serviceName1);
        ist1.getMetadata().put("datacenter", "dc01");
        ist1.setPort(8091);
        ist1.setStatus(OperationalStatus.UP);
        register.registerService(ist1);
        ProvidedServiceInstance ist2 = createInstance(serviceName1);
        ist2.getMetadata().put("datacenter", "dc02");
        ist2.setPort(8092);
        ist2.setStatus(OperationalStatus.UP);
        register.registerService(ist2);
        ProvidedServiceInstance ist3 = createInstance(serviceName2);
        ist3.getMetadata().put("datacenter", "dc01");
        ist3.setPort(8093);
        ist3.setStatus(OperationalStatus.UP);
        register.registerService(ist3);
        ProvidedServiceInstance ist4 = createInstance(serviceName2);
        ist4.getMetadata().put("datacenter", "dc02");
        ist4.setPort(8094);
        ist4.setStatus(OperationalStatus.UP);
        register.registerService(ist4);
        // All four instances carry meta1=value1 from createInstance().
        ServiceInstanceQuery query = new ServiceInstanceQuery().getEqualQueryCriterion("meta1", "value1");
        List<ServiceInstance> list1 = lookup.queryInstancesByMetadataKey(query);
        Assert.assertEquals(4, list1.size());
        query.getEqualQueryCriterion("datacenter", "dc01");
        List<ServiceInstance> list2 = lookup.queryInstancesByMetadataKey(query);
        Assert.assertEquals(2, list2.size());
        // Query a single dc02 instance four times; the two matching instances
        // should each be returned exactly twice (round-robin style balancing).
        Map<String, Integer> countMap = new HashMap<String, Integer>();
        countMap.put(ist2.getProviderId(), 0);
        countMap.put(ist4.getProviderId(), 0);
        ServiceInstanceQuery query1 = new ServiceInstanceQuery().getEqualQueryCriterion("datacenter", "dc02");
        for (int i = 0; i < 4; i++) {
            ServiceInstance queryInstance = lookup.queryInstanceByMetadataKey(query1);
            Integer count = countMap.get(queryInstance.getInstanceId());
            countMap.put(queryInstance.getInstanceId(), count + 1);
        }
        Assert.assertEquals(2, countMap.size());
        Assert.assertTrue(countMap.get(ist2.getProviderId()) == 2);
        Assert.assertTrue(countMap.get(ist4.getProviderId()) == 2);
        Assert.assertEquals(4, lookup.getAllInstances().size());
        register.unregisterService(ist1.getServiceName(), ist1.getProviderId());
        register.unregisterService(ist2.getServiceName(), ist2.getProviderId());
        register.unregisterService(ist3.getServiceName(), ist3.getProviderId());
        register.unregisterService(ist4.getServiceName(), ist4.getProviderId());
        ServiceInstance instance1 = lookup.lookupInstance(serviceName1);
        Assert.assertNull(instance1);
    }
    /** getAllInstances / getInstance / filtered retrieval, including DOWN instances. */
    @Test
    public void testGetInstances() throws ServiceException{
        String serviceName = "MockSvc001";
        String serviceName1 = "OtherSvc001";
        String key1 = "datacenter";
        String key2 = "version";
        ProvidedServiceInstance instance1 = createInstance(serviceName);
        instance1.setPort(8091);
        instance1.setStatus(OperationalStatus.DOWN);
        instance1.getMetadata().put(key1, "dc01");
        instance1.getMetadata().put(key2, "1.1.2");
        ProvidedServiceInstance instance2 = createInstance(serviceName);
        instance2.setPort(8092);
        instance2.setStatus(OperationalStatus.UP);
        instance2.getMetadata().put(key1, "dc02");
        ProvidedServiceInstance instance3 = createInstance(serviceName1);
        instance3.setPort(8093);
        instance3.setStatus(OperationalStatus.DOWN);
        instance3.getMetadata().put(key1, "dc01");
        ProvidedServiceInstance instance4 = createInstance(serviceName1);
        instance4.setPort(8094);
        instance4.setStatus(OperationalStatus.UP);
        instance4.getMetadata().put(key2, "1.1.1");
        RegistrationManager register = ServiceDirectory.getRegistrationManager();
        register.registerService(instance1);
        register.registerService(instance2);
        register.registerService(instance3);
        register.registerService(instance4);
        waitForCacheSync();
        LookupManager lookup = ServiceDirectory.getLookupManager();
        List<ServiceInstance> instances = lookup.getAllInstances(serviceName);
        // Unlike lookupInstance, getAllInstances also returns DOWN instances.
        Assert.assertEquals(2, instances.size());
        ServiceInstance fetchedInstance = lookup.getInstance(serviceName, instance2.getProviderId());
        Assert.assertEquals(instance2.getUri(), fetchedInstance.getUri());
        Assert.assertEquals(instance2.getStatus(), fetchedInstance.getStatus());
        ServiceInstanceQuery query = new ServiceInstanceQuery().getEqualQueryCriterion("datacenter", "dc01");
        instances = lookup.getAllInstances(serviceName, query);
        Assert.assertEquals(1, instances.size());
        instances = lookup.getAllInstancesByMetadataKey(query);
        Assert.assertEquals(2, instances.size());
        // cleanup.
        register.unregisterService(serviceName, instance1.getProviderId());
        register.unregisterService(serviceName, instance2.getProviderId());
        register.unregisterService(serviceName1, instance3.getProviderId());
        register.unregisterService(serviceName1, instance4.getProviderId());
        waitForCacheSync();
        ServiceInstance sInstance1 = lookup.lookupInstance(serviceName);
        Assert.assertNull(sInstance1);
    }
    // Sleep 2 seconds to wait for cache sync between registration and lookup.
    private static void waitForCacheSync() {
        try {
            TimeUnit.SECONDS.sleep(2);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the interruption.
            Thread.currentThread().interrupt();
        }
    }
    // Builds a baseline instance: 127.0.0.1:8990, UP, with meta1/meta2 metadata.
    private ProvidedServiceInstance createInstance(String serviceName) {
        String address = "127.0.0.1";
        int port = 8990;
        ProvidedServiceInstance si = new ProvidedServiceInstance(serviceName, address, port);
        si.setUri("http://www.sina.com.cn");
        Map<String, String> pair = new HashMap<String, String>();
        pair.put("meta1", "value1");
        pair.put("meta2", "value2");
        si.setMetadata(pair);
        si.setStatus(OperationalStatus.UP);
        return si;
    }
}
| |
/*
* Javassist, a Java-bytecode translator toolkit.
* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. Alternatively, the contents of this file may be used under
* the terms of the GNU Lesser General Public License Version 2.1 or later,
* or the Apache License Version 2.0.
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*/
package javassist.convert;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.NotFoundException;
import javassist.CodeConverter.ArrayAccessReplacementMethodNames;
import javassist.bytecode.BadBytecode;
import javassist.bytecode.CodeIterator;
import javassist.bytecode.ConstPool;
import javassist.bytecode.Descriptor;
import javassist.bytecode.MethodInfo;
import javassist.bytecode.analysis.Analyzer;
import javassist.bytecode.analysis.Frame;
/**
 * A transformer which replaces array access opcodes (xALOAD / xASTORE) with
 * invocations of static replacement methods declared on a user-supplied class.
 * For {@code AALOAD} a {@code CHECKCAST} to the statically known element type
 * is appended after the call, based on data-flow analysis of the method.
 *
 * @author <a href="kabir.khan@jboss.com">Kabir Khan</a>
 * @author Jason T. Greene
 * @version $Revision: 1.8 $
 */
public final class TransformAccessArrayField extends Transformer {
    /** Name of the class declaring the static replacement methods. */
    private final String methodClassname;

    /** Maps each array-access opcode to the name of its replacement method. */
    private final ArrayAccessReplacementMethodNames names;

    /** Data-flow frames of the method being transformed; lazily initialized. */
    private Frame[] frames;

    /**
     * Number of bytes inserted into the code so far, used to map a current
     * bytecode position back to the position the frames were computed for.
     * A value of -1 means change tracking is disabled.
     */
    private int offset;

    public TransformAccessArrayField(Transformer next, String methodClassname,
            ArrayAccessReplacementMethodNames names) throws NotFoundException {
        super(next);
        this.methodClassname = methodClassname;
        this.names = names;
    }

    /**
     * Performs the entire rewrite of the method's code attribute up front.
     *
     * @param cp    constant pool to add method/class references to
     * @param clazz class declaring {@code minfo}
     * @param minfo method whose array accesses are replaced
     * @throws CannotCompileException if analysis or bytecode editing fails
     */
    public void initialize(ConstPool cp, CtClass clazz, MethodInfo minfo) throws CannotCompileException {
        /*
         * This transformer must be isolated from other transformers, since some
         * of them affect the local variable and stack maximums without updating
         * the code attribute to reflect the changes. This screws up the
         * data-flow analyzer, since it relies on consistent code state. Even
         * if the attribute values were updated correctly, we would have to
         * detect it, and redo analysis, which is not cheap. Instead, we are
         * better off doing all changes in initialize() before everyone else has
         * a chance to muck things up.
         */
        CodeIterator iterator = minfo.getCodeAttribute().iterator();
        while (iterator.hasNext()) {
            try {
                int pos = iterator.next();
                int c = iterator.byteAt(pos);

                // Only AALOAD needs type information (for the CHECKCAST), so
                // the analyzer is run lazily the first time one is seen.
                if (c == AALOAD)
                    initFrames(clazz, minfo);

                if (c == AALOAD || c == BALOAD || c == CALOAD || c == DALOAD
                        || c == FALOAD || c == IALOAD || c == LALOAD
                        || c == SALOAD) {
                    pos = replace(cp, iterator, pos, c, getLoadReplacementSignature(c));
                } else if (c == AASTORE || c == BASTORE || c == CASTORE
                        || c == DASTORE || c == FASTORE || c == IASTORE
                        || c == LASTORE || c == SASTORE) {
                    pos = replace(cp, iterator, pos, c, getStoreReplacementSignature(c));
                }
            } catch (Exception e) {
                throw new CannotCompileException(e);
            }
        }
    }

    /** Releases per-method analysis state so this transformer can be reused. */
    public void clean() {
        frames = null;
        offset = -1;
    }

    public int transform(CtClass tclazz, int pos, CodeIterator iterator,
            ConstPool cp) throws BadBytecode {
        // Do nothing, see the comment in initialize(): all work happens there.
        return pos;
    }

    /** Returns the analysis frame for {@code pos}, compensating for inserted bytes. */
    private Frame getFrame(int pos) throws BadBytecode {
        return frames[pos - offset]; // Adjust pos
    }

    /** Runs the data-flow analyzer once per method and starts offset tracking. */
    private void initFrames(CtClass clazz, MethodInfo minfo) throws BadBytecode {
        if (frames == null) {
            frames = new Analyzer().analyze(clazz, minfo);
            offset = 0; // start tracking changes
        }
    }

    /** Advances {@code pos} by {@code increment}, recording the shift for getFrame(). */
    private int updatePos(int pos, int increment) {
        if (offset > -1)
            offset += increment;

        return pos + increment;
    }

    /**
     * Returns the JVM name of the type on top of the operand stack at
     * {@code pos}, or {@code null} when no frame or type is available.
     */
    private String getTopType(int pos) throws BadBytecode {
        Frame frame = getFrame(pos);
        if (frame == null)
            return null;

        CtClass clazz = frame.peek().getCtClass();
        return clazz != null ? Descriptor.toJvmName(clazz) : null;
    }

    /**
     * Replaces the single array-access instruction at {@code pos} with an
     * INVOKESTATIC of the replacement method (plus a CHECKCAST for AALOAD
     * when the element type is known and not java/lang/Object).
     *
     * @return the position to continue scanning from
     */
    private int replace(ConstPool cp, CodeIterator iterator, int pos,
            int opcode, String signature) throws BadBytecode {
        String castType = null;
        String methodName = getMethodName(opcode);
        if (methodName != null) {
            // See if the object must be cast
            if (opcode == AALOAD) {
                castType = getTopType(iterator.lookAhead());
                // Do not replace an AALOAD instruction that we do not have a type for
                // This happens when the state is guaranteed to be null (Type.UNINIT)
                // So we don't really care about this case.
                if (castType == null)
                    return pos;
                if ("java/lang/Object".equals(castType))
                    castType = null;
            }

            // The gap may include extra padding
            // Write a nop in case the padding pushes the instruction forward
            iterator.writeByte(NOP, pos);
            CodeIterator.Gap gap
                = iterator.insertGapAt(pos, castType != null ? 5 : 2, false);
            pos = gap.position;
            int mi = cp.addClassInfo(methodClassname);
            int methodref = cp.addMethodrefInfo(mi, methodName, signature);
            iterator.writeByte(INVOKESTATIC, pos);
            iterator.write16bit(methodref, pos + 1);

            if (castType != null) {
                int index = cp.addClassInfo(castType);
                iterator.writeByte(CHECKCAST, pos + 3);
                iterator.write16bit(index, pos + 4);
            }

            pos = updatePos(pos, gap.length);
        }

        return pos;
    }

    /**
     * Looks up the replacement method name for {@code opcode}.
     *
     * @return the configured name, or {@code null} when the opcode is not an
     *         array access or the configured name is empty (meaning "skip")
     */
    private String getMethodName(int opcode) {
        String methodName = null;
        switch (opcode) {
        case AALOAD:
            methodName = names.objectRead();
            break;
        case BALOAD:
            methodName = names.byteOrBooleanRead();
            break;
        case CALOAD:
            methodName = names.charRead();
            break;
        case DALOAD:
            methodName = names.doubleRead();
            break;
        case FALOAD:
            methodName = names.floatRead();
            break;
        case IALOAD:
            methodName = names.intRead();
            break;
        case SALOAD:
            methodName = names.shortRead();
            break;
        case LALOAD:
            methodName = names.longRead();
            break;
        case AASTORE:
            methodName = names.objectWrite();
            break;
        case BASTORE:
            methodName = names.byteOrBooleanWrite();
            break;
        case CASTORE:
            methodName = names.charWrite();
            break;
        case DASTORE:
            methodName = names.doubleWrite();
            break;
        case FASTORE:
            methodName = names.floatWrite();
            break;
        case IASTORE:
            methodName = names.intWrite();
            break;
        case SASTORE:
            methodName = names.shortWrite();
            break;
        case LASTORE:
            methodName = names.longWrite();
            break;
        }

        // Fix: null-safe emptiness check. The original called
        // methodName.equals("") and would NPE for any opcode that left
        // methodName null (i.e. a non-array-access opcode).
        if ("".equals(methodName))
            methodName = null;

        return methodName;
    }

    /** Returns the JVM descriptor of the replacement method for a load opcode. */
    private String getLoadReplacementSignature(int opcode) throws BadBytecode {
        switch (opcode) {
        case AALOAD:
            return "(Ljava/lang/Object;I)Ljava/lang/Object;";
        case BALOAD:
            return "(Ljava/lang/Object;I)B";
        case CALOAD:
            return "(Ljava/lang/Object;I)C";
        case DALOAD:
            return "(Ljava/lang/Object;I)D";
        case FALOAD:
            return "(Ljava/lang/Object;I)F";
        case IALOAD:
            return "(Ljava/lang/Object;I)I";
        case SALOAD:
            return "(Ljava/lang/Object;I)S";
        case LALOAD:
            return "(Ljava/lang/Object;I)J";
        }

        throw new BadBytecode(opcode);
    }

    /** Returns the JVM descriptor of the replacement method for a store opcode. */
    private String getStoreReplacementSignature(int opcode) throws BadBytecode {
        switch (opcode) {
        case AASTORE:
            return "(Ljava/lang/Object;ILjava/lang/Object;)V";
        case BASTORE:
            return "(Ljava/lang/Object;IB)V";
        case CASTORE:
            return "(Ljava/lang/Object;IC)V";
        case DASTORE:
            return "(Ljava/lang/Object;ID)V";
        case FASTORE:
            return "(Ljava/lang/Object;IF)V";
        case IASTORE:
            return "(Ljava/lang/Object;II)V";
        case SASTORE:
            return "(Ljava/lang/Object;IS)V";
        case LASTORE:
            return "(Ljava/lang/Object;IJ)V";
        }

        throw new BadBytecode(opcode);
    }
}
| |
package com.google.devtools.clouddebugger.v2;
import static io.grpc.stub.ClientCalls.asyncUnaryCall;
import static io.grpc.stub.ClientCalls.asyncServerStreamingCall;
import static io.grpc.stub.ClientCalls.asyncClientStreamingCall;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ClientCalls.blockingUnaryCall;
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
import static io.grpc.stub.ClientCalls.futureUnaryCall;
import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ServerCalls.asyncUnaryCall;
import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall;
/**
 * <pre>
 * The Debugger service provides the API that allows users to collect run-time
 * information from a running application, without stopping or slowing it down
 * and without modifying its state. An application may include one or
 * more replicated processes performing the same work.
 * The application is represented using the Debuggee concept. The Debugger
 * service provides a way to query for available Debuggees, but does not
 * provide a way to create one. A debuggee is created using the Controller
 * service, usually by running a debugger agent with the application.
 * The Debugger service enables the client to set one or more Breakpoints on a
 * Debuggee and collect the results of the set Breakpoints.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler (version 1.0.3)",
    comments = "Source: google/devtools/clouddebugger/v2/debugger.proto")
public class Debugger2Grpc {

  // NOTE: generated by the gRPC proto compiler — do not edit by hand;
  // regenerate from google/devtools/clouddebugger/v2/debugger.proto instead.

  private Debugger2Grpc() {}

  /** Fully-qualified proto service name used to build the method descriptors below. */
  public static final String SERVICE_NAME = "google.devtools.clouddebugger.v2.Debugger2";

  // Static method descriptors that strictly reflect the proto.
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.devtools.clouddebugger.v2.SetBreakpointRequest,
      com.google.devtools.clouddebugger.v2.SetBreakpointResponse> METHOD_SET_BREAKPOINT =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.devtools.clouddebugger.v2.Debugger2", "SetBreakpoint"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.SetBreakpointRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.SetBreakpointResponse.getDefaultInstance()));
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.devtools.clouddebugger.v2.GetBreakpointRequest,
      com.google.devtools.clouddebugger.v2.GetBreakpointResponse> METHOD_GET_BREAKPOINT =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.devtools.clouddebugger.v2.Debugger2", "GetBreakpoint"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.GetBreakpointRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.GetBreakpointResponse.getDefaultInstance()));
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest,
      com.google.protobuf.Empty> METHOD_DELETE_BREAKPOINT =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.devtools.clouddebugger.v2.Debugger2", "DeleteBreakpoint"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.getDefaultInstance()));
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.devtools.clouddebugger.v2.ListBreakpointsRequest,
      com.google.devtools.clouddebugger.v2.ListBreakpointsResponse> METHOD_LIST_BREAKPOINTS =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.devtools.clouddebugger.v2.Debugger2", "ListBreakpoints"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.ListBreakpointsRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.ListBreakpointsResponse.getDefaultInstance()));
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.devtools.clouddebugger.v2.ListDebuggeesRequest,
      com.google.devtools.clouddebugger.v2.ListDebuggeesResponse> METHOD_LIST_DEBUGGEES =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.devtools.clouddebugger.v2.Debugger2", "ListDebuggees"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.ListDebuggeesRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.devtools.clouddebugger.v2.ListDebuggeesResponse.getDefaultInstance()));

  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static Debugger2Stub newStub(io.grpc.Channel channel) {
    return new Debugger2Stub(channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static Debugger2BlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    return new Debugger2BlockingStub(channel);
  }

  /**
   * Creates a new ListenableFuture-style stub that supports unary and streaming output calls on the service
   */
  public static Debugger2FutureStub newFutureStub(
      io.grpc.Channel channel) {
    return new Debugger2FutureStub(channel);
  }

  /**
   * <pre>
   * The Debugger service provides the API that allows users to collect run-time
   * information from a running application, without stopping or slowing it down
   * and without modifying its state. An application may include one or
   * more replicated processes performing the same work.
   * The application is represented using the Debuggee concept. The Debugger
   * service provides a way to query for available Debuggees, but does not
   * provide a way to create one. A debuggee is created using the Controller
   * service, usually by running a debugger agent with the application.
   * The Debugger service enables the client to set one or more Breakpoints on a
   * Debuggee and collect the results of the set Breakpoints.
   * </pre>
   */
  public static abstract class Debugger2ImplBase implements io.grpc.BindableService {

    /**
     * <pre>
     * Sets the breakpoint to the debuggee.
     * </pre>
     */
    public void setBreakpoint(com.google.devtools.clouddebugger.v2.SetBreakpointRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.SetBreakpointResponse> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_SET_BREAKPOINT, responseObserver);
    }

    /**
     * <pre>
     * Gets breakpoint information.
     * </pre>
     */
    public void getBreakpoint(com.google.devtools.clouddebugger.v2.GetBreakpointRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.GetBreakpointResponse> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_GET_BREAKPOINT, responseObserver);
    }

    /**
     * <pre>
     * Deletes the breakpoint from the debuggee.
     * </pre>
     */
    public void deleteBreakpoint(com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_DELETE_BREAKPOINT, responseObserver);
    }

    /**
     * <pre>
     * Lists all breakpoints for the debuggee.
     * </pre>
     */
    public void listBreakpoints(com.google.devtools.clouddebugger.v2.ListBreakpointsRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.ListBreakpointsResponse> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_LIST_BREAKPOINTS, responseObserver);
    }

    /**
     * <pre>
     * Lists all the debuggees that the user can set breakpoints to.
     * </pre>
     */
    public void listDebuggees(com.google.devtools.clouddebugger.v2.ListDebuggeesRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.ListDebuggeesResponse> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_LIST_DEBUGGEES, responseObserver);
    }

    // Binds every proto method to the corresponding handler of this implementation.
    @java.lang.Override public io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            METHOD_SET_BREAKPOINT,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.devtools.clouddebugger.v2.SetBreakpointRequest,
                com.google.devtools.clouddebugger.v2.SetBreakpointResponse>(
                  this, METHODID_SET_BREAKPOINT)))
          .addMethod(
            METHOD_GET_BREAKPOINT,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.devtools.clouddebugger.v2.GetBreakpointRequest,
                com.google.devtools.clouddebugger.v2.GetBreakpointResponse>(
                  this, METHODID_GET_BREAKPOINT)))
          .addMethod(
            METHOD_DELETE_BREAKPOINT,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest,
                com.google.protobuf.Empty>(
                  this, METHODID_DELETE_BREAKPOINT)))
          .addMethod(
            METHOD_LIST_BREAKPOINTS,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.devtools.clouddebugger.v2.ListBreakpointsRequest,
                com.google.devtools.clouddebugger.v2.ListBreakpointsResponse>(
                  this, METHODID_LIST_BREAKPOINTS)))
          .addMethod(
            METHOD_LIST_DEBUGGEES,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.devtools.clouddebugger.v2.ListDebuggeesRequest,
                com.google.devtools.clouddebugger.v2.ListDebuggeesResponse>(
                  this, METHODID_LIST_DEBUGGEES)))
          .build();
    }
  }

  /**
   * <pre>
   * The Debugger service provides the API that allows users to collect run-time
   * information from a running application, without stopping or slowing it down
   * and without modifying its state. An application may include one or
   * more replicated processes performing the same work.
   * The application is represented using the Debuggee concept. The Debugger
   * service provides a way to query for available Debuggees, but does not
   * provide a way to create one. A debuggee is created using the Controller
   * service, usually by running a debugger agent with the application.
   * The Debugger service enables the client to set one or more Breakpoints on a
   * Debuggee and collect the results of the set Breakpoints.
   * </pre>
   */
  public static final class Debugger2Stub extends io.grpc.stub.AbstractStub<Debugger2Stub> {
    private Debugger2Stub(io.grpc.Channel channel) {
      super(channel);
    }

    private Debugger2Stub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected Debugger2Stub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new Debugger2Stub(channel, callOptions);
    }

    /**
     * <pre>
     * Sets the breakpoint to the debuggee.
     * </pre>
     */
    public void setBreakpoint(com.google.devtools.clouddebugger.v2.SetBreakpointRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.SetBreakpointResponse> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_SET_BREAKPOINT, getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Gets breakpoint information.
     * </pre>
     */
    public void getBreakpoint(com.google.devtools.clouddebugger.v2.GetBreakpointRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.GetBreakpointResponse> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_GET_BREAKPOINT, getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Deletes the breakpoint from the debuggee.
     * </pre>
     */
    public void deleteBreakpoint(com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_DELETE_BREAKPOINT, getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Lists all breakpoints for the debuggee.
     * </pre>
     */
    public void listBreakpoints(com.google.devtools.clouddebugger.v2.ListBreakpointsRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.ListBreakpointsResponse> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_LIST_BREAKPOINTS, getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Lists all the debuggees that the user can set breakpoints to.
     * </pre>
     */
    public void listDebuggees(com.google.devtools.clouddebugger.v2.ListDebuggeesRequest request,
        io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.ListDebuggeesResponse> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_LIST_DEBUGGEES, getCallOptions()), request, responseObserver);
    }
  }

  /**
   * <pre>
   * The Debugger service provides the API that allows users to collect run-time
   * information from a running application, without stopping or slowing it down
   * and without modifying its state. An application may include one or
   * more replicated processes performing the same work.
   * The application is represented using the Debuggee concept. The Debugger
   * service provides a way to query for available Debuggees, but does not
   * provide a way to create one. A debuggee is created using the Controller
   * service, usually by running a debugger agent with the application.
   * The Debugger service enables the client to set one or more Breakpoints on a
   * Debuggee and collect the results of the set Breakpoints.
   * </pre>
   */
  public static final class Debugger2BlockingStub extends io.grpc.stub.AbstractStub<Debugger2BlockingStub> {
    private Debugger2BlockingStub(io.grpc.Channel channel) {
      super(channel);
    }

    private Debugger2BlockingStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected Debugger2BlockingStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new Debugger2BlockingStub(channel, callOptions);
    }

    /**
     * <pre>
     * Sets the breakpoint to the debuggee.
     * </pre>
     */
    public com.google.devtools.clouddebugger.v2.SetBreakpointResponse setBreakpoint(com.google.devtools.clouddebugger.v2.SetBreakpointRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_SET_BREAKPOINT, getCallOptions(), request);
    }

    /**
     * <pre>
     * Gets breakpoint information.
     * </pre>
     */
    public com.google.devtools.clouddebugger.v2.GetBreakpointResponse getBreakpoint(com.google.devtools.clouddebugger.v2.GetBreakpointRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_GET_BREAKPOINT, getCallOptions(), request);
    }

    /**
     * <pre>
     * Deletes the breakpoint from the debuggee.
     * </pre>
     */
    public com.google.protobuf.Empty deleteBreakpoint(com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_DELETE_BREAKPOINT, getCallOptions(), request);
    }

    /**
     * <pre>
     * Lists all breakpoints for the debuggee.
     * </pre>
     */
    public com.google.devtools.clouddebugger.v2.ListBreakpointsResponse listBreakpoints(com.google.devtools.clouddebugger.v2.ListBreakpointsRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_LIST_BREAKPOINTS, getCallOptions(), request);
    }

    /**
     * <pre>
     * Lists all the debuggees that the user can set breakpoints to.
     * </pre>
     */
    public com.google.devtools.clouddebugger.v2.ListDebuggeesResponse listDebuggees(com.google.devtools.clouddebugger.v2.ListDebuggeesRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_LIST_DEBUGGEES, getCallOptions(), request);
    }
  }

  /**
   * <pre>
   * The Debugger service provides the API that allows users to collect run-time
   * information from a running application, without stopping or slowing it down
   * and without modifying its state. An application may include one or
   * more replicated processes performing the same work.
   * The application is represented using the Debuggee concept. The Debugger
   * service provides a way to query for available Debuggees, but does not
   * provide a way to create one. A debuggee is created using the Controller
   * service, usually by running a debugger agent with the application.
   * The Debugger service enables the client to set one or more Breakpoints on a
   * Debuggee and collect the results of the set Breakpoints.
   * </pre>
   */
  public static final class Debugger2FutureStub extends io.grpc.stub.AbstractStub<Debugger2FutureStub> {
    private Debugger2FutureStub(io.grpc.Channel channel) {
      super(channel);
    }

    private Debugger2FutureStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected Debugger2FutureStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new Debugger2FutureStub(channel, callOptions);
    }

    /**
     * <pre>
     * Sets the breakpoint to the debuggee.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.devtools.clouddebugger.v2.SetBreakpointResponse> setBreakpoint(
        com.google.devtools.clouddebugger.v2.SetBreakpointRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_SET_BREAKPOINT, getCallOptions()), request);
    }

    /**
     * <pre>
     * Gets breakpoint information.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.devtools.clouddebugger.v2.GetBreakpointResponse> getBreakpoint(
        com.google.devtools.clouddebugger.v2.GetBreakpointRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_GET_BREAKPOINT, getCallOptions()), request);
    }

    /**
     * <pre>
     * Deletes the breakpoint from the debuggee.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteBreakpoint(
        com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_DELETE_BREAKPOINT, getCallOptions()), request);
    }

    /**
     * <pre>
     * Lists all breakpoints for the debuggee.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.devtools.clouddebugger.v2.ListBreakpointsResponse> listBreakpoints(
        com.google.devtools.clouddebugger.v2.ListBreakpointsRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_LIST_BREAKPOINTS, getCallOptions()), request);
    }

    /**
     * <pre>
     * Lists all the debuggees that the user can set breakpoints to.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.devtools.clouddebugger.v2.ListDebuggeesResponse> listDebuggees(
        com.google.devtools.clouddebugger.v2.ListDebuggeesRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_LIST_DEBUGGEES, getCallOptions()), request);
    }
  }

  // Numeric ids used by MethodHandlers to dispatch to the right service method.
  private static final int METHODID_SET_BREAKPOINT = 0;
  private static final int METHODID_GET_BREAKPOINT = 1;
  private static final int METHODID_DELETE_BREAKPOINT = 2;
  private static final int METHODID_LIST_BREAKPOINTS = 3;
  private static final int METHODID_LIST_DEBUGGEES = 4;

  // Single generic handler class shared by all methods; the methodId selects
  // which service method the incoming call is routed to.
  private static class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final Debugger2ImplBase serviceImpl;
    private final int methodId;

    public MethodHandlers(Debugger2ImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_SET_BREAKPOINT:
          serviceImpl.setBreakpoint((com.google.devtools.clouddebugger.v2.SetBreakpointRequest) request,
              (io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.SetBreakpointResponse>) responseObserver);
          break;
        case METHODID_GET_BREAKPOINT:
          serviceImpl.getBreakpoint((com.google.devtools.clouddebugger.v2.GetBreakpointRequest) request,
              (io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.GetBreakpointResponse>) responseObserver);
          break;
        case METHODID_DELETE_BREAKPOINT:
          serviceImpl.deleteBreakpoint((com.google.devtools.clouddebugger.v2.DeleteBreakpointRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_LIST_BREAKPOINTS:
          serviceImpl.listBreakpoints((com.google.devtools.clouddebugger.v2.ListBreakpointsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.ListBreakpointsResponse>) responseObserver);
          break;
        case METHODID_LIST_DEBUGGEES:
          serviceImpl.listDebuggees((com.google.devtools.clouddebugger.v2.ListDebuggeesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.devtools.clouddebugger.v2.ListDebuggeesResponse>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // No streaming methods are defined on this service, so any streaming
      // dispatch is a generator/runtime inconsistency.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  // Aggregates all method descriptors into the service descriptor consumed by
  // bindService() and by channel/server registration.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    return new io.grpc.ServiceDescriptor(SERVICE_NAME,
        METHOD_SET_BREAKPOINT,
        METHOD_GET_BREAKPOINT,
        METHOD_DELETE_BREAKPOINT,
        METHOD_LIST_BREAKPOINTS,
        METHOD_LIST_DEBUGGEES);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.lang.IgnitePredicate;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_REFLECTION_CACHE_SIZE;
/**
* Reflection field and method cache for classes.
*/
public class GridReflectionCache implements Externalizable {
/** */
private static final long serialVersionUID = 0L;
/** Compares fields by name. */
private static final Comparator<Field> FIELD_NAME_COMPARATOR = new Comparator<Field>() {
@Override public int compare(Field f1, Field f2) {
return f1.getName().compareTo(f2.getName());
}
};
/** Compares methods by name. */
private static final Comparator<Method> METHOD_NAME_COMPARATOR = new Comparator<Method>() {
@Override public int compare(Method m1, Method m2) {
return m1.getName().compareTo(m2.getName());
}
};
/** @see IgniteSystemProperties#IGNITE_REFLECTION_CACHE_SIZE */
public static final int DFLT_REFLECTION_CACHE_SIZE = 128;
/** Cache size. */
private static final int CACHE_SIZE = Integer.getInteger(IGNITE_REFLECTION_CACHE_SIZE, DFLT_REFLECTION_CACHE_SIZE);
/** Fields cache. */
private ConcurrentMap<Class, List<Field>> fields = new GridBoundedConcurrentLinkedHashMap<>(
CACHE_SIZE, CACHE_SIZE);
/** Methods cache. */
private ConcurrentMap<Class, List<Method>> mtds = new GridBoundedConcurrentLinkedHashMap<>(
CACHE_SIZE, CACHE_SIZE);
/** Field predicate. */
private IgnitePredicate<Field> fp;
/** Method predicate. */
private IgnitePredicate<Method> mp;
/**
* Reflection cache without any method or field predicates.
*/
public GridReflectionCache() {
// No-op.
}
/**
* Reflection cache with specified field and method predicates.
* @param fp Field predicate.
* @param mp Method predicate.
*/
public GridReflectionCache(@Nullable IgnitePredicate<Field> fp, @Nullable IgnitePredicate<Method> mp) {
this.fp = fp;
this.mp = mp;
}
/**
* Gets field value for object.
*
* @param o Key to get affinity key for.
* @return Value of the field for given object or {@code null} if field was not found.
* @throws IgniteCheckedException If failed.
*/
@Nullable public Object firstFieldValue(Object o) throws IgniteCheckedException {
assert o != null;
Field f = firstField(o.getClass());
if (f != null) {
try {
return f.get(o);
}
catch (IllegalAccessException e) {
throw new IgniteCheckedException("Failed to access field for object [field=" + f + ", obj=" + o + ']', e);
}
}
return null;
}
/**
* Gets method return value for object.
*
* @param o Key to get affinity key for.
* @return Method return value for given object or {@code null} if method was not found.
* @throws IgniteCheckedException If failed.
*/
@Nullable public Object firstMethodValue(Object o) throws IgniteCheckedException {
assert o != null;
Method m = firstMethod(o.getClass());
if (m != null) {
try {
return m.invoke(o);
}
catch (IllegalAccessException | InvocationTargetException e) {
throw new IgniteCheckedException("Failed to invoke method for object [mtd=" + m + ", obj=" + o + ']', e);
}
}
return null;
}
/**
* Gets first field in the class list of fields.
*
* @param cls Class.
* @return First field.
*/
@Nullable public Field firstField(Class<?> cls) {
assert cls != null;
List<Field> l = fields(cls);
return l.isEmpty() ? null : l.get(0);
}
/**
 * Gets the first method from the cached, ordered list of methods of a class.
 *
 * @param cls Class.
 * @return First method, or {@code null} if no method passed the predicate.
 */
@Nullable public Method firstMethod(Class<?> cls) {
    assert cls != null;

    List<Method> mtds = methods(cls);

    if (mtds.isEmpty())
        return null;

    return mtds.get(0);
}
/**
 * Gets the ordered list of fields for a class, walking the hierarchy up to
 * (but excluding) {@code Object} and caching the result.
 *
 * @param cls Class.
 * @return Fields that passed the field predicate ({@code null} predicate accepts all),
 *      sorted by name within each class of the hierarchy.
 */
public List<Field> fields(Class<?> cls) {
    assert cls != null;

    List<Field> fieldsList = fields.get(cls);

    if (fieldsList == null) {
        fieldsList = new ArrayList<>();

        for (Class<?> c = cls; c != null && !c.equals(Object.class); c = c.getSuperclass()) {
            List<Field> l = new ArrayList<>();

            for (Field f : c.getDeclaredFields()) {
                if (fp == null || fp.apply(f)) {
                    f.setAccessible(true);

                    l.add(f);
                }
            }

            if (!l.isEmpty()) {
                Collections.sort(l, FIELD_NAME_COMPARATOR);

                fieldsList.addAll(l);
            }
        }

        // Fix: honor the result of putIfAbsent so that concurrent callers all
        // observe the single canonical list instance stored in the cache,
        // instead of each thread keeping its own locally-built copy.
        List<Field> old = fields.putIfAbsent(cls, fieldsList);

        if (old != null)
            fieldsList = old;
    }

    return fieldsList;
}
/**
 * Gets the ordered list of methods for a class, walking the hierarchy up to
 * (but excluding) {@code Object} and caching the result.
 *
 * @param cls Class.
 * @return Methods that passed the method predicate ({@code null} predicate accepts all),
 *      sorted by name within each class of the hierarchy.
 */
public List<Method> methods(Class<?> cls) {
    assert cls != null;

    List<Method> mtdsList = mtds.get(cls);

    if (mtdsList == null) {
        mtdsList = new ArrayList<>();

        for (Class<?> c = cls; c != null && !c.equals(Object.class); c = c.getSuperclass()) {
            List<Method> l = new ArrayList<>();

            for (Method m : c.getDeclaredMethods()) {
                if (mp == null || mp.apply(m)) {
                    m.setAccessible(true);

                    l.add(m);
                }
            }

            if (!l.isEmpty()) {
                Collections.sort(l, METHOD_NAME_COMPARATOR);

                mtdsList.addAll(l);
            }
        }

        // Fix: honor the result of putIfAbsent so that concurrent callers all
        // observe the single canonical list instance stored in the cache,
        // instead of each thread keeping its own locally-built copy.
        List<Method> old = mtds.putIfAbsent(cls, mtdsList);

        if (old != null)
            mtdsList = old;
    }

    return mtdsList;
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // Write order must match readExternal(): field predicate first, then method predicate.
    out.writeObject(fp);
    out.writeObject(mp);
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Read order must match writeExternal(): field predicate first, then method predicate.
    fp = (IgnitePredicate<Field>)in.readObject();
    mp = (IgnitePredicate<Method>)in.readObject();
}
}
| |
package de.tum.in.www1.exerciseapp.web.rest;
import de.tum.in.www1.exerciseapp.ArTEMiSApp;
import de.tum.in.www1.exerciseapp.domain.ModelingExercise;
import de.tum.in.www1.exerciseapp.repository.ModelingExerciseRepository;
import de.tum.in.www1.exerciseapp.web.rest.errors.ExceptionTranslator;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
 * Integration test for the {@link ModelingExerciseResource} REST controller.
 *
 * Exercises the CRUD endpoints under {@code /api/modeling-exercises} against a
 * standalone MockMvc instance backed by the real repository.
 *
 * @see ModelingExerciseResource
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ArTEMiSApp.class)
public class ModelingExerciseResourceIntTest {
    // Default value used when creating the test entity, and the value it is updated to.
    private static final String DEFAULT_BASE_FILE_PATH = "AAAAAAAAAA";
    private static final String UPDATED_BASE_FILE_PATH = "BBBBBBBBBB";
    @Autowired
    private ModelingExerciseRepository modelingExerciseRepository;
    @Autowired
    private MappingJackson2HttpMessageConverter jacksonMessageConverter;
    @Autowired
    private PageableHandlerMethodArgumentResolver pageableArgumentResolver;
    @Autowired
    private ExceptionTranslator exceptionTranslator;
    @Autowired
    private EntityManager em;
    // Standalone MockMvc built in setup(); no full web application context is started.
    private MockMvc restModelingExerciseMockMvc;
    // Entity under test; re-created before every test by initTest().
    private ModelingExercise modelingExercise;
    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        final ModelingExerciseResource modelingExerciseResource = new ModelingExerciseResource(modelingExerciseRepository);
        this.restModelingExerciseMockMvc = MockMvcBuilders.standaloneSetup(modelingExerciseResource)
            .setCustomArgumentResolvers(pageableArgumentResolver)
            .setControllerAdvice(exceptionTranslator)
            .setMessageConverters(jacksonMessageConverter).build();
    }
    /**
     * Create a fresh (not yet persisted) entity for this test.
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     */
    public static ModelingExercise createEntity(EntityManager em) {
        ModelingExercise modelingExercise = new ModelingExercise()
            .baseFilePath(DEFAULT_BASE_FILE_PATH);
        return modelingExercise;
    }
    @Before
    public void initTest() {
        modelingExercise = createEntity(em);
    }
    @Test
    @Transactional
    public void createModelingExercise() throws Exception {
        int databaseSizeBeforeCreate = modelingExerciseRepository.findAll().size();
        // Create the ModelingExercise
        restModelingExerciseMockMvc.perform(post("/api/modeling-exercises")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(modelingExercise)))
            .andExpect(status().isCreated());
        // Validate the ModelingExercise in the database
        List<ModelingExercise> modelingExerciseList = modelingExerciseRepository.findAll();
        assertThat(modelingExerciseList).hasSize(databaseSizeBeforeCreate + 1);
        ModelingExercise testModelingExercise = modelingExerciseList.get(modelingExerciseList.size() - 1);
        assertThat(testModelingExercise.getBaseFilePath()).isEqualTo(DEFAULT_BASE_FILE_PATH);
    }
    @Test
    @Transactional
    public void createModelingExerciseWithExistingId() throws Exception {
        int databaseSizeBeforeCreate = modelingExerciseRepository.findAll().size();
        // Create the ModelingExercise with an existing ID
        modelingExercise.setId(1L);
        // An entity with an existing ID cannot be created, so this API call must fail
        restModelingExerciseMockMvc.perform(post("/api/modeling-exercises")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(modelingExercise)))
            .andExpect(status().isBadRequest());
        // Validate the ModelingExercise in the database: the size must be unchanged
        List<ModelingExercise> modelingExerciseList = modelingExerciseRepository.findAll();
        assertThat(modelingExerciseList).hasSize(databaseSizeBeforeCreate);
    }
    @Test
    @Transactional
    public void getAllModelingExercises() throws Exception {
        // Initialize the database
        modelingExerciseRepository.saveAndFlush(modelingExercise);
        // Get all the modelingExerciseList
        restModelingExerciseMockMvc.perform(get("/api/modeling-exercises?sort=id,desc"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.[*].id").value(hasItem(modelingExercise.getId().intValue())))
            .andExpect(jsonPath("$.[*].baseFilePath").value(hasItem(DEFAULT_BASE_FILE_PATH.toString())));
    }
    @Test
    @Transactional
    public void getModelingExercise() throws Exception {
        // Initialize the database
        modelingExerciseRepository.saveAndFlush(modelingExercise);
        // Get the modelingExercise
        restModelingExerciseMockMvc.perform(get("/api/modeling-exercises/{id}", modelingExercise.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.id").value(modelingExercise.getId().intValue()))
            .andExpect(jsonPath("$.baseFilePath").value(DEFAULT_BASE_FILE_PATH.toString()));
    }
    @Test
    @Transactional
    public void getNonExistingModelingExercise() throws Exception {
        // Get the modelingExercise with an id that is not present in the database
        restModelingExerciseMockMvc.perform(get("/api/modeling-exercises/{id}", Long.MAX_VALUE))
            .andExpect(status().isNotFound());
    }
    @Test
    @Transactional
    public void updateModelingExercise() throws Exception {
        // Initialize the database
        modelingExerciseRepository.saveAndFlush(modelingExercise);
        int databaseSizeBeforeUpdate = modelingExerciseRepository.findAll().size();
        // Update the modelingExercise
        ModelingExercise updatedModelingExercise = modelingExerciseRepository.findOne(modelingExercise.getId());
        updatedModelingExercise
            .baseFilePath(UPDATED_BASE_FILE_PATH);
        restModelingExerciseMockMvc.perform(put("/api/modeling-exercises")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(updatedModelingExercise)))
            .andExpect(status().isOk());
        // Validate the ModelingExercise in the database
        List<ModelingExercise> modelingExerciseList = modelingExerciseRepository.findAll();
        assertThat(modelingExerciseList).hasSize(databaseSizeBeforeUpdate);
        ModelingExercise testModelingExercise = modelingExerciseList.get(modelingExerciseList.size() - 1);
        assertThat(testModelingExercise.getBaseFilePath()).isEqualTo(UPDATED_BASE_FILE_PATH);
    }
    @Test
    @Transactional
    public void updateNonExistingModelingExercise() throws Exception {
        int databaseSizeBeforeUpdate = modelingExerciseRepository.findAll().size();
        // Create the ModelingExercise
        // If the entity doesn't have an ID, it will be created instead of just being updated
        restModelingExerciseMockMvc.perform(put("/api/modeling-exercises")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(modelingExercise)))
            .andExpect(status().isCreated());
        // Validate the ModelingExercise in the database
        List<ModelingExercise> modelingExerciseList = modelingExerciseRepository.findAll();
        assertThat(modelingExerciseList).hasSize(databaseSizeBeforeUpdate + 1);
    }
    @Test
    @Transactional
    public void deleteModelingExercise() throws Exception {
        // Initialize the database
        modelingExerciseRepository.saveAndFlush(modelingExercise);
        int databaseSizeBeforeDelete = modelingExerciseRepository.findAll().size();
        // Delete the modelingExercise
        restModelingExerciseMockMvc.perform(delete("/api/modeling-exercises/{id}", modelingExercise.getId())
            .accept(TestUtil.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk());
        // Validate the entity was removed from the database
        List<ModelingExercise> modelingExerciseList = modelingExerciseRepository.findAll();
        assertThat(modelingExerciseList).hasSize(databaseSizeBeforeDelete - 1);
    }
    @Test
    @Transactional
    public void equalsVerifier() throws Exception {
        TestUtil.equalsVerifier(ModelingExercise.class);
        // Two instances with the same id must be equal
        ModelingExercise modelingExercise1 = new ModelingExercise();
        modelingExercise1.setId(1L);
        ModelingExercise modelingExercise2 = new ModelingExercise();
        modelingExercise2.setId(modelingExercise1.getId());
        assertThat(modelingExercise1).isEqualTo(modelingExercise2);
        // Different ids must not be equal
        modelingExercise2.setId(2L);
        assertThat(modelingExercise1).isNotEqualTo(modelingExercise2);
        // A null id must not be equal to a non-null id
        modelingExercise1.setId(null);
        assertThat(modelingExercise1).isNotEqualTo(modelingExercise2);
    }
}
| |
/*
* Copyright 2020-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.kubevirtnode.util;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import io.fabric8.kubernetes.api.model.Node;
import io.fabric8.kubernetes.api.model.NodeAddress;
import io.fabric8.kubernetes.api.model.NodeSpec;
import io.fabric8.kubernetes.api.model.Taint;
import io.fabric8.kubernetes.client.ConfigBuilder;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;
import org.apache.commons.lang.StringUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.onlab.packet.IpAddress;
import org.onosproject.kubevirtnode.api.DefaultKubevirtNode;
import org.onosproject.kubevirtnode.api.DefaultKubevirtPhyInterface;
import org.onosproject.kubevirtnode.api.KubevirtApiConfig;
import org.onosproject.kubevirtnode.api.KubevirtNode;
import org.onosproject.kubevirtnode.api.KubevirtNodeState;
import org.onosproject.kubevirtnode.api.KubevirtPhyInterface;
import org.onosproject.net.Device;
import org.onosproject.net.behaviour.BridgeConfig;
import org.onosproject.net.behaviour.BridgeName;
import org.onosproject.net.device.DeviceService;
import org.onosproject.ovsdb.controller.OvsdbClientService;
import org.onosproject.ovsdb.controller.OvsdbController;
import org.onosproject.ovsdb.controller.OvsdbNodeId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xbill.DNS.Address;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Dictionary;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.onlab.util.Tools.get;
import static org.onosproject.kubevirtnode.api.Constants.SONA_PROJECT_DOMAIN;
import static org.onosproject.kubevirtnode.api.KubevirtNode.Type.GATEWAY;
import static org.onosproject.kubevirtnode.api.KubevirtNode.Type.MASTER;
import static org.onosproject.kubevirtnode.api.KubevirtNode.Type.OTHER;
import static org.onosproject.kubevirtnode.api.KubevirtNode.Type.WORKER;
/**
 * Utility class used in the KubeVirt node app.
 */
public final class KubevirtNodeUtil {

    private static final Logger log = LoggerFactory.getLogger(KubevirtNodeUtil.class);

    private static final String COLON_SLASH = "://";
    private static final String COLON = ":";
    private static final int HEX_LENGTH = 16;
    private static final String OF_PREFIX = "of:";
    private static final String ZERO = "0";
    private static final String INTERNAL_IP = "InternalIP";
    private static final String K8S_ROLE = "node-role.kubernetes.io";
    private static final String PHYSNET_CONFIG_KEY = SONA_PROJECT_DOMAIN + "/physnet-config";
    private static final String DATA_IP_KEY = SONA_PROJECT_DOMAIN + "/data-ip";
    private static final String GATEWAY_CONFIG_KEY = SONA_PROJECT_DOMAIN + "/gateway-config";
    private static final String GATEWAY_BRIDGE_NAME = "gatewayBridgeName";
    private static final String NETWORK_KEY = "network";
    private static final String INTERFACE_KEY = "interface";
    private static final int PORT_NAME_MAX_LENGTH = 15;
    private static final String NO_SCHEDULE_EFFECT = "NoSchedule";
    private static final String KUBEVIRT_IO_KEY = "kubevirt.io/drain";
    private static final String DRAINING_VALUE = "draining";

    /**
     * Prevents object instantiation from external.
     */
    private KubevirtNodeUtil() {
    }

    /**
     * Generates endpoint URL by referring to scheme, ipAddress and port.
     *
     * @param apiConfig kubernetes API config
     * @return generated endpoint URL
     */
    public static String endpoint(KubevirtApiConfig apiConfig) {
        return endpoint(apiConfig.scheme(), apiConfig.ipAddress(), apiConfig.port());
    }

    /**
     * Generates endpoint URL by referring to scheme, ipAddress and port.
     *
     * @param scheme scheme
     * @param ipAddress IP address
     * @param port port number
     * @return generated endpoint URL (e.g., https://10.0.0.1:6443)
     */
    public static String endpoint(KubevirtApiConfig.Scheme scheme, IpAddress ipAddress, int port) {
        StringBuilder endpoint = new StringBuilder();
        String protocol = StringUtils.lowerCase(scheme.name());

        endpoint.append(protocol);
        endpoint.append(COLON_SLASH);
        endpoint.append(ipAddress.toString());
        endpoint.append(COLON);
        endpoint.append(port);

        return endpoint.toString();
    }

    /**
     * Generates a DPID (of:0000000000000001) from an index value.
     *
     * @param index index value
     * @return generated DPID, or null for a negative index
     */
    public static String genDpid(long index) {
        if (index < 0) {
            return null;
        }

        String hexStr = Long.toHexString(index);

        // Left-pad the hex value with zeros up to 16 hex digits.
        StringBuilder zeroPadding = new StringBuilder();
        for (int i = 0; i < HEX_LENGTH - hexStr.length(); i++) {
            zeroPadding.append(ZERO);
        }

        return OF_PREFIX + zeroPadding.toString() + hexStr;
    }

    /**
     * Generates string format based on the given string length list.
     *
     * @param stringLengths a list of string lengths
     * @return string format (e.g., %-28s%-15s%-24s%-20s%-15s)
     */
    public static String genFormatString(List<Integer> stringLengths) {
        StringBuilder fsb = new StringBuilder();
        stringLengths.forEach(length -> {
            fsb.append("%-");
            fsb.append(length);
            fsb.append("s");
        });
        return fsb.toString();
    }

    /**
     * Prints out the JSON string in pretty format.
     *
     * @param mapper Object mapper
     * @param jsonString JSON string
     * @return pretty formatted JSON string, or null if the input could not be parsed
     */
    public static String prettyJson(ObjectMapper mapper, String jsonString) {
        try {
            Object jsonObject = mapper.readValue(jsonString, Object.class);
            return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonObject);
        } catch (IOException e) {
            // Fix: pass the exception as the throwable argument (no placeholder)
            // so SLF4J logs the full stack trace instead of consuming it as a
            // formatting argument.
            log.debug("Json string parsing exception", e);
        }
        return null;
    }

    /**
     * Obtains workable kubernetes client.
     *
     * @param config kubernetes API config
     * @return kubernetes client, or null if no config was given
     */
    public static KubernetesClient k8sClient(KubevirtApiConfig config) {
        if (config == null) {
            log.warn("Kubernetes API server config is empty.");
            return null;
        }

        String endpoint = endpoint(config);

        ConfigBuilder configBuilder = new ConfigBuilder().withMasterUrl(endpoint);

        if (config.scheme() == KubevirtApiConfig.Scheme.HTTPS) {
            configBuilder.withTrustCerts(true)
                    .withCaCertData(config.caCertData())
                    .withClientCertData(config.clientCertData())
                    .withClientKeyData(config.clientKeyData());

            // Token-based authentication is optional and only added when configured.
            if (StringUtils.isNotEmpty(config.token())) {
                configBuilder.withOauthToken(config.token());
            }
        }

        return new DefaultKubernetesClient(configBuilder.build());
    }

    /**
     * Gets the ovsdb client with supplied openstack node.
     *
     * @param node kubernetes node
     * @param ovsdbPort ovsdb port
     * @param ovsdbController ovsdb controller
     * @return ovsdb client
     */
    public static OvsdbClientService getOvsdbClient(KubevirtNode node,
                                                    int ovsdbPort,
                                                    OvsdbController ovsdbController) {
        OvsdbNodeId ovsdb = new OvsdbNodeId(node.managementIp(), ovsdbPort);
        return ovsdbController.getOvsdbClient(ovsdb);
    }

    /**
     * Checks whether the controller has a connection with an OVSDB that resides
     * inside the given kubernetes node.
     *
     * @param node kubernetes node
     * @param ovsdbPort OVSDB port
     * @param ovsdbController OVSDB controller
     * @param deviceService device service
     * @return true if the controller is connected to the OVSDB, false otherwise
     */
    public static boolean isOvsdbConnected(KubevirtNode node,
                                           int ovsdbPort,
                                           OvsdbController ovsdbController,
                                           DeviceService deviceService) {
        OvsdbClientService client = getOvsdbClient(node, ovsdbPort, ovsdbController);

        return deviceService.isAvailable(node.ovsdb()) &&
                client != null &&
                client.isConnected();
    }

    /**
     * Adds or removes a network interface (aka port) into a given bridge of kubernetes node.
     *
     * @param k8sNode kubernetes node
     * @param bridgeName bridge name
     * @param intfName interface name
     * @param deviceService device service
     * @param addOrRemove add port is true, remove it otherwise
     */
    public static synchronized void addOrRemoveSystemInterface(KubevirtNode k8sNode,
                                                               String bridgeName,
                                                               String intfName,
                                                               DeviceService deviceService,
                                                               boolean addOrRemove) {
        Device device = deviceService.getDevice(k8sNode.ovsdb());
        if (device == null || !device.is(BridgeConfig.class)) {
            log.info("device is null or this device if not ovsdb device");
            return;
        }
        BridgeConfig bridgeConfig = device.as(BridgeConfig.class);

        if (addOrRemove) {
            bridgeConfig.addPort(BridgeName.bridgeName(bridgeName), intfName);
        } else {
            bridgeConfig.deletePort(BridgeName.bridgeName(bridgeName), intfName);
        }
    }

    /**
     * Re-structures the OVS port name.
     * The length of OVS port name should be not large than 15.
     *
     * @param portName original port name
     * @return re-structured OVS port name (truncated to 15 characters if needed)
     */
    public static String structurePortName(String portName) {
        // The size of OVS port name should not be larger than 15
        if (portName.length() > PORT_NAME_MAX_LENGTH) {
            return StringUtils.substring(portName, 0, PORT_NAME_MAX_LENGTH);
        }

        return portName;
    }

    /**
     * Gets Boolean property from the propertyName
     * Return null if propertyName is not found.
     *
     * @param properties properties to be looked up
     * @param propertyName the name of the property to look up
     * @return value when the propertyName is defined or return null
     */
    public static Boolean getBooleanProperty(Dictionary<?, ?> properties,
                                             String propertyName) {
        Boolean value;
        try {
            String s = get(properties, propertyName);
            value = Strings.isNullOrEmpty(s) ? null : Boolean.valueOf(s);
        } catch (ClassCastException e) {
            value = null;
        }
        return value;
    }

    /**
     * Returns the kubevirt node from the node.
     *
     * @param node a raw node object returned from a k8s client
     * @return kubevirt node
     */
    public static KubevirtNode buildKubevirtNode(Node node) {
        String hostname = node.getMetadata().getName();
        IpAddress managementIp = null;
        IpAddress dataIp = null;

        // Use the node's InternalIP for both management and data plane by default;
        // the data IP may be overridden by the data-ip annotation below.
        for (NodeAddress nodeAddress:node.getStatus().getAddresses()) {
            if (nodeAddress.getType().equals(INTERNAL_IP)) {
                managementIp = IpAddress.valueOf(nodeAddress.getAddress());
                dataIp = IpAddress.valueOf(nodeAddress.getAddress());
            }
        }

        // Fix: guard against a node object without labels to avoid NPE.
        Set<String> rolesFull = new HashSet<>();
        Map<String, String> labels = node.getMetadata().getLabels();
        if (labels != null) {
            rolesFull = labels.keySet().stream()
                    .filter(l -> l.contains(K8S_ROLE))
                    .collect(Collectors.toSet());
        }

        KubevirtNode.Type nodeType = WORKER;

        for (String roleStr : rolesFull) {
            String role = roleStr.split("/")[1];
            if (MASTER.name().equalsIgnoreCase(role)) {
                nodeType = MASTER;
                break;
            }
        }

        // start to parse kubernetes annotation
        // Fix: guard against a node object without annotations to avoid NPE.
        Map<String, String> annots = node.getMetadata().getAnnotations();
        String physnetConfig = annots == null ? null : annots.get(PHYSNET_CONFIG_KEY);
        String gatewayConfig = annots == null ? null : annots.get(GATEWAY_CONFIG_KEY);
        String dataIpStr = annots == null ? null : annots.get(DATA_IP_KEY);
        Set<KubevirtPhyInterface> phys = new HashSet<>();
        String gatewayBridgeName = null;
        try {
            if (physnetConfig != null) {
                JSONArray configJson = new JSONArray(physnetConfig);

                for (int i = 0; i < configJson.length(); i++) {
                    JSONObject object = configJson.getJSONObject(i);
                    String network = object.getString(NETWORK_KEY);
                    String intf = object.getString(INTERFACE_KEY);

                    if (network != null && intf != null) {
                        phys.add(DefaultKubevirtPhyInterface.builder()
                                .network(network).intf(intf).build());
                    }
                }
            }

            if (dataIpStr != null) {
                dataIp = IpAddress.valueOf(dataIpStr);
            }

            if (gatewayConfig != null) {
                JsonNode jsonNode = new ObjectMapper().readTree(gatewayConfig);

                nodeType = GATEWAY;
                gatewayBridgeName = jsonNode.get(GATEWAY_BRIDGE_NAME).asText();
            }
        } catch (JSONException | JsonProcessingException e) {
            log.error("Failed to parse physnet config or gateway config object", e);
        }

        // if the node is taint with kubevirt.io key configured,
        // we mark this node as OTHER type, and do not add it into the cluster
        NodeSpec spec = node.getSpec();
        if (spec.getTaints() != null) {
            for (Taint taint : spec.getTaints()) {
                String effect = taint.getEffect();
                String key = taint.getKey();
                String value = taint.getValue();

                if (StringUtils.equals(effect, NO_SCHEDULE_EFFECT) &&
                        StringUtils.equals(key, KUBEVIRT_IO_KEY) &&
                        StringUtils.equals(value, DRAINING_VALUE)) {
                    nodeType = OTHER;
                }
            }
        }

        return DefaultKubevirtNode.builder()
                .hostname(hostname)
                .managementIp(managementIp)
                .dataIp(dataIp)
                .type(nodeType)
                .state(KubevirtNodeState.ON_BOARDED)
                .phyIntfs(phys)
                .gatewayBridgeName(gatewayBridgeName)
                .build();
    }

    /**
     * Resolve a DNS with the given DNS server and hostname.
     *
     * @param hostname hostname to be resolved
     * @return resolved IP address, or null if resolution failed
     */
    public static IpAddress resolveHostname(String hostname) {
        try {
            InetAddress addr = Address.getByName(hostname);
            return IpAddress.valueOf(IpAddress.Version.INET, addr.getAddress());
        } catch (UnknownHostException e) {
            log.warn("Failed to resolve IP address of host {}", hostname);
        }
        return null;
    }

    /**
     * Waits for the given length of time.
     *
     * @param timeSecond the amount of time for wait in second unit
     */
    public static void waitFor(int timeSecond) {
        try {
            Thread.sleep(timeSecond * 1000L);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            log.error(e.toString());
        } catch (Exception e) {
            log.error(e.toString());
        }
    }
}
| |
/*
* GetChannelAuthenticationCapabilities.java
* Created on 2011-07-21
*
* Copyright (c) Verax Systems 2011.
* All rights reserved.
*
* This software is furnished under a license. Use, duplication,
* disclosure and all other uses are restricted to the rights
* specified in the written license agreement.
*/
package ipmi.coding.commands.session;
import ipmi.coding.commands.CommandCodes;
import ipmi.coding.commands.IpmiCommandCoder;
import ipmi.coding.commands.IpmiVersion;
import ipmi.coding.commands.PrivilegeLevel;
import ipmi.coding.commands.ResponseData;
import ipmi.coding.payload.CompletionCode;
import ipmi.coding.payload.IpmiPayload;
import ipmi.coding.payload.lan.IPMIException;
import ipmi.coding.payload.lan.IpmiLanRequest;
import ipmi.coding.payload.lan.IpmiLanResponse;
import ipmi.coding.payload.lan.NetworkFunction;
import ipmi.coding.protocol.AuthenticationType;
import ipmi.coding.protocol.IpmiMessage;
import ipmi.coding.protocol.Ipmiv15Message;
import ipmi.coding.security.CipherSuite;
import ipmi.common.TypeConverter;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
/**
 * Wrapper for the IPMI Get Channel Authentication Capabilities request.
 */
public class GetChannelAuthenticationCapabilities extends IpmiCommandCoder {
    // Maximum privilege level requested from the BMC.
    private PrivilegeLevel requestedPrivilegeLevel;
    // Channel number encoded into the request (low nibble of payload byte 0).
    private byte channelNumber;
    // IPMI version advertised in the request; V20 sets the extended-data bit.
    private IpmiVersion requestVersion;
    public void setRequestedPrivilegeLevel(
            PrivilegeLevel requestedPrivilegeLevel) {
        this.requestedPrivilegeLevel = requestedPrivilegeLevel;
    }
    public PrivilegeLevel getRequestedPrivilegeLevel() {
        return requestedPrivilegeLevel;
    }
    /**
     * Sets the channel number that will be put into IPMI command.
     *
     * @param channelNumber
     *            - must be 0h-Bh or Eh-Fh <br>
     *            Eh = retrieve information for channel this request was issued
     *            on
     * @throws IllegalArgumentException
     *             when the channel number is outside 0h-Bh / Eh-Fh
     */
    public void setChannelNumber(int channelNumber)
            throws IllegalArgumentException {
        // Ch and Dh are reserved channel numbers; everything else in 0h-Fh is valid.
        if (channelNumber < 0 || channelNumber > 0xF || channelNumber == 0xC
                || channelNumber == 0xD) {
            throw new IllegalArgumentException("Invalid channel number");
        }
        this.channelNumber = TypeConverter.intToByte(channelNumber);
    }
    public int getChannelNumber() {
        return TypeConverter.byteToInt(channelNumber);
    }
    protected void setRequestVersion(IpmiVersion requestVersion) {
        this.requestVersion = requestVersion;
    }
    protected IpmiVersion getRequestVersion() {
        return requestVersion;
    }
    /**
     * Initiates class for decoding in 1.5 version. Sets requested privilege
     * level to user. Sets channel number to 14 indicating that response will
     * contain information for channel this request was issued on. Sets session
     * parameters to default.
     *
     * @see IpmiCommandCoder#setSessionParameters(IpmiVersion, CipherSuite,
     *      AuthenticationType)
     * @see IpmiVersion
     */
    public GetChannelAuthenticationCapabilities() {
        super();
        setRequestedPrivilegeLevel(PrivilegeLevel.User);
        // 14 == Eh == "the channel this request was issued on".
        setChannelNumber(14);
    }
    /**
     * Initiates class. Sets IPMI version to version. Sets requested privilege
     * level to user. Sets channel number to 14 indicating that response will
     * contain information for channel this request was issued on.
     *
     * @param version
     *            - Version of IPMI protocol used
     * @param requestVersion
     *            - If Get Channel Authentication Capabilities command is sent
     *            to BMC with requestVersion = {@link IpmiVersion#V15} it will
     *            respond, that it does not support IPMI v2.0 even if it does.
     * @param cipherSuite
     *            - {@link CipherSuite} containing authentication,
     *            confidentiality and integrity algorithms for this session.
     * @see IpmiVersion
     */
    public GetChannelAuthenticationCapabilities(IpmiVersion version,
            IpmiVersion requestVersion, CipherSuite cipherSuite) {
        super(version, cipherSuite, AuthenticationType.None);
        this.setRequestVersion(requestVersion);
        setRequestedPrivilegeLevel(PrivilegeLevel.User);
        // 14 == Eh == "the channel this request was issued on".
        setChannelNumber(14);
    }
    /**
     * Initiates class. Sets IPMI version to version. Sets requested privilege
     * level privilegeLevel. Sets channel number to channelNumber.
     *
     * @param version
     *            - Version of IPMI protocol used
     * @param requestVersion
     *            - If Get Channel Authentication Capabilities command is sent
     *            to BMC with requestVersion = {@link IpmiVersion#V15} it will
     *            respond, that it does not support IPMI v2.0 even if it does.
     * @param cipherSuite
     *            - {@link CipherSuite} containing authentication,
     *            confidentiality and integrity algorithms for this session.
     * @param privilegeLevel
     *            - Maximum requested privilege level. Can't be
     *            {@link PrivilegeLevel#MaximumAvailable}.
     * @param channelNumber
     *            - must be 0h-Bh or Eh-Fh <br>
     *            Eh = retrieve information for channel this request was issued
     *            on.
     * @see IpmiVersion
     * @see PrivilegeLevel
     */
    public GetChannelAuthenticationCapabilities(IpmiVersion version,
            IpmiVersion requestVersion, CipherSuite cipherSuite,
            PrivilegeLevel privilegeLevel, byte channelNumber) {
        super(version, cipherSuite, AuthenticationType.None);
        this.setRequestVersion(requestVersion);
        setRequestedPrivilegeLevel(privilegeLevel);
        setChannelNumber(channelNumber);
    }
    @Override
    public IpmiMessage encodeCommand(int sequenceNumber, int sessionId)
            throws InvalidKeyException, NoSuchAlgorithmException {
        if (getIpmiVersion() == IpmiVersion.V15) {
            // This command is sent outside a session in v1.5, so sequence
            // number and session ID are forced to zero.
            if (sessionId != 0) {
                throw new IllegalArgumentException("Session ID must be 0");
            }
            Ipmiv15Message message = new Ipmiv15Message();
            message.setAuthenticationType(getAuthenticationType());
            message.setSessionSequenceNumber(0);
            message.setSessionID(0);
            message.setPayload(preparePayload(sequenceNumber));
            return message;
        } else {
            setAuthenticationType(AuthenticationType.RMCPPlus);
            return super.encodeCommand(sequenceNumber, sessionId);
        }
    }
    @Override
    protected IpmiPayload preparePayload(int sequenceNumber) {
        byte[] payload = new byte[2];
        // payload[0] = TypeConverter.intToByte(sequenceNumber % 256);
        // Byte 0: channel number in the low nibble; bit 7 set when IPMI v2.0+
        // extended data is requested.
        payload[0] = 0;
        if (getRequestVersion() == IpmiVersion.V20) {
            payload[0] |= TypeConverter.intToByte(0x80);
        }
        payload[0] |= channelNumber;
        // Byte 1: requested maximum privilege level.
        payload[1] = encodePrivilegeLevel(requestedPrivilegeLevel);
        return new IpmiLanRequest(getNetworkFunction(), getCommandCode(),
                payload, TypeConverter.intToByte(sequenceNumber % 64));
        // return payload;
    }
    @Override
    public byte getCommandCode() {
        return CommandCodes.GET_CHANNEL_AUTHENTICATION_CAPABILITIES;
    }
    @Override
    public NetworkFunction getNetworkFunction() {
        return NetworkFunction.ApplicationRequest;
    }
    @Override
    public ResponseData getResponseData(IpmiMessage message)
            throws IllegalArgumentException, IPMIException {
        // Validate that the message is a well-formed, successful response to
        // this command before decoding the 8-byte data block.
        if (!isCommandResponse(message)) {
            throw new IllegalArgumentException(
                    "This is not a response for Get Channel Authentication Capabilities command");
        }
        if (!(message.getPayload() instanceof IpmiLanResponse)) {
            throw new IllegalArgumentException("Invalid response payload");
        }
        if (((IpmiLanResponse) message.getPayload()).getCompletionCode() != CompletionCode.Ok) {
            throw new IPMIException(
                    ((IpmiLanResponse) message.getPayload())
                            .getCompletionCode());
        }
        GetChannelAuthenticationCapabilitiesResponseData responseData = new GetChannelAuthenticationCapabilitiesResponseData();
        byte[] raw = message.getPayload().getIpmiCommandData();
        if (raw.length != 8) {
            throw new IllegalArgumentException("Data has invalid length");
        }
        responseData.setChannelNumber(raw[0]);
        // raw[1] bit 7 -> IPMI v2.0 support; bits 5/4/2/1/0 -> supported
        // authentication types (OEM, straight password, MD5, MD2, none).
        responseData.setIpmiv20Support(!((raw[1] & 0x80) == 0));
        responseData
                .setAuthenticationTypes(new ArrayList<AuthenticationType>());
        if ((raw[1] & 0x20) != 0) {
            responseData.getAuthenticationTypes().add(AuthenticationType.Oem);
        }
        if ((raw[1] & 0x10) != 0) {
            responseData.getAuthenticationTypes()
                    .add(AuthenticationType.Simple);
        }
        if ((raw[1] & 0x04) != 0) {
            responseData.getAuthenticationTypes().add(AuthenticationType.Md5);
        }
        if ((raw[1] & 0x02) != 0) {
            responseData.getAuthenticationTypes().add(AuthenticationType.Md2);
        }
        if ((raw[1] & 0x01) != 0) {
            responseData.getAuthenticationTypes().add(AuthenticationType.None);
        }
        // raw[2] flags; note that per-message and user-level authentication
        // bits are inverted: a 0 bit means authentication is ENABLED.
        responseData.setKgEnabled(!((raw[2] & 0x20) == 0));
        responseData.setPerMessageAuthenticationEnabled((raw[2] & 0x10) == 0);
        responseData.setUserLevelAuthenticationEnabled((raw[2] & 0x08) == 0);
        responseData.setNonNullUsernamesEnabled(!((raw[2] & 0x04) == 0));
        responseData.setNullUsernamesEnabled(!((raw[2] & 0x02) == 0));
        responseData.setAnonymusLoginEnabled(!((raw[2] & 0x01) == 0));
        // raw[4..6] hold a 3-byte little-endian OEM ID; pad to 4 bytes for
        // conversion to int. raw[7] carries OEM auxiliary data.
        byte[] oemId = new byte[4];
        System.arraycopy(raw, 4, oemId, 0, 3);
        oemId[3] = 0;
        responseData.setOemId(TypeConverter.littleEndianByteArrayToInt(oemId));
        responseData.setOemData(raw[7]);
        return responseData;
    }
    /**
     * Sets session parameters.
     *
     * @param version
     *            - IPMI version of the command.
     * @param cipherSuite
     *            - {@link CipherSuite} containing authentication,
     *            confidentiality and integrity algorithms for this session.
     * @param authenticationType
     *            - Type of authentication used. Must be RMCPPlus for IPMI v2.0.
     */
    @Override
    public void setSessionParameters(IpmiVersion version,
            CipherSuite cipherSuite, AuthenticationType authenticationType) {
        if (version == IpmiVersion.V20
                && authenticationType != AuthenticationType.RMCPPlus
                && authenticationType != AuthenticationType.None) {
            throw new IllegalArgumentException(
                    "Authentication Type must be RMCPPlus for IPMI v2.0 messages");
        }
        setIpmiVersion(version);
        setAuthenticationType(authenticationType);
        setCipherSuite(cipherSuite);
    }
}
| |
/*
* Copyright 2014-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.metrics.model;
import static java.lang.Double.NaN;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* {@link BucketPoint} for availability metrics.
*
* @author Thomas Segismont
* @author Jay Shaughnessy
*/
public final class AvailabilityBucketPoint extends BucketPoint {
    private final Map<AvailabilityType, Long> durationMap;
    private final Long lastNotUptime;
    private final Double uptimeRatio;
    private final Long notUpCount;
    private final Long samples;
    protected AvailabilityBucketPoint(long start, long end, Map<AvailabilityType, Long> durationMap,
            long lastNotUptime, double uptimeRatio, long notUpCount, long samples) {
        super(start, end);
        this.durationMap = durationMap;
        this.lastNotUptime = lastNotUptime;
        // getDoubleValue(NaN) produces null, which is what isEmpty() keys off.
        this.uptimeRatio = getDoubleValue(uptimeRatio);
        this.notUpCount = notUpCount;
        this.samples = samples;
    }
    /**
     * Duration recorded for the given availability type, defaulting to 0 when
     * the bucket holds no entry for it; {@code null} when the bucket is empty.
     */
    private Long duration(AvailabilityType type) {
        return isEmpty() ? null : durationMap.getOrDefault(type, 0L);
    }
    /**
     * @return The number of segments of where the availability type is not UP. A segment can combine multiple
     * NotUP statuses (e.g. a change from DOWN to UNKNOWN does not increment the count).
     */
    public Long getNotUpCount() {
        return isEmpty() ? null : notUpCount;
    }
    public Map<AvailabilityType, Long> getDurationMap() {
        return durationMap;
    }
    public Long getAdminDuration() {
        return duration(AvailabilityType.ADMIN);
    }
    public Long getDownDuration() {
        return duration(AvailabilityType.DOWN);
    }
    public Long getUnknownDuration() {
        return duration(AvailabilityType.UNKNOWN);
    }
    public Long getUpDuration() {
        return duration(AvailabilityType.UP);
    }
    /** Total time spent in any non-UP state (ADMIN + DOWN + UNKNOWN). */
    public Long getNotUpDuration() {
        if (isEmpty()) {
            return null;
        }
        return duration(AvailabilityType.ADMIN)
                + duration(AvailabilityType.DOWN)
                + duration(AvailabilityType.UNKNOWN);
    }
    public Long getLastNotUptime() {
        return isEmpty() ? null : lastNotUptime;
    }
    public Double getUptimeRatio() {
        return uptimeRatio;
    }
    // NOTE(review): unlike the duration getters, samples and upCount are
    // returned without an isEmpty() guard — an empty bucket yields 0 here.
    public Long getSamples() {
        return samples;
    }
    /**
     * @return Convenience method to return number of up segments <code>(samples - notUpCount)</code>
     */
    public Long getUpCount() {
        return samples - notUpCount;
    }
    @Override
    public boolean isEmpty() {
        // uptimeRatio is null exactly when the constructor received NaN.
        return uptimeRatio == null;
    }
    @Override
    public String toString() {
        return "AvailabilityBucketPoint [durationMap=" + durationMap + ", lastNotUptime=" + lastNotUptime
                + ", uptimeRatio=" + uptimeRatio + ", notUpCount=" + notUpCount + ", samples=" + samples + "]";
    }
    /**
     * @see BucketPoint#toList(Map, Buckets, java.util.function.BiFunction)
     */
    public static List<AvailabilityBucketPoint> toList(Map<Long, AvailabilityBucketPoint> pointMap, Buckets buckets) {
        // Gaps in pointMap are filled with empty bucket points.
        return BucketPoint.toList(pointMap, buckets, (start, end) -> new Builder(start, end).build());
    }
    /** Mutable builder; an un-configured builder yields an empty bucket point. */
    public static class Builder {
        private final long start;
        private final long end;
        private Map<AvailabilityType, Long> durationMap = new HashMap<>();
        private long lastNotUptime = 0;
        private double uptimeRatio = NaN;
        private long notUpCount = 0;
        private long samples = 0;
        /**
         * Creates a builder for an initially empty instance, configurable with the builder setters.
         *
         * @param start the start timestamp of this bucket point
         * @param end the end timestamp of this bucket point
         */
        public Builder(long start, long end) {
            this.start = start;
            this.end = end;
        }
        public Builder setDurationMap(Map<AvailabilityType, Long> durationMap) {
            // Merge rather than replace, so per-type setters can coexist.
            this.durationMap.putAll(durationMap);
            return this;
        }
        public Builder setAdminDuration(long adminDuration) {
            this.durationMap.put(AvailabilityType.ADMIN, adminDuration);
            return this;
        }
        public Builder setDownDuration(long downDuration) {
            this.durationMap.put(AvailabilityType.DOWN, downDuration);
            return this;
        }
        public Builder setUnknownDuration(long unknownDuration) {
            this.durationMap.put(AvailabilityType.UNKNOWN, unknownDuration);
            return this;
        }
        public Builder setUpDuration(long upDuration) {
            this.durationMap.put(AvailabilityType.UP, upDuration);
            return this;
        }
        public Builder setLastNotUptime(long lastNotUptime) {
            this.lastNotUptime = lastNotUptime;
            return this;
        }
        public Builder setUptimeRatio(double uptimeRatio) {
            this.uptimeRatio = uptimeRatio;
            return this;
        }
        public Builder setNotUptimeCount(long notUptimeCount) {
            this.notUpCount = notUptimeCount;
            return this;
        }
        public Builder setSamples(long samples) {
            this.samples = samples;
            return this;
        }
        public AvailabilityBucketPoint build() {
            return new AvailabilityBucketPoint(start, end, durationMap, lastNotUptime, uptimeRatio, notUpCount,
                    samples);
        }
    }
}
| |
package org.mikala.testqwondo.api.model;
import java.io.Serializable;
import java.util.LinkedHashSet;
import java.util.Objects;
import java.util.Set;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.Size;
import org.hibernate.validator.constraints.NotBlank;
@Entity
@Table(name="users")
public class User implements Serializable {
    private static final long serialVersionUID = -8526367903243161996L;
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private Long id;
    // Unique short handle used for authentication.
    @Basic
    @Column(length=50, unique=true, nullable=false)
    @NotBlank
    @Size(min=2,max=50)
    private String login;
    @Basic
    @Column(length=200, unique=true, nullable=false)
    @NotBlank
    @Size(min=2,max=200)
    private String name;
    @Basic
    @Column( length=200, nullable=false)
    @NotBlank
    @Size(min=5,max=200)
    private String email;
    // Optional Jabber/XMPP address.
    @Basic
    @Column( length=200, nullable=true)
    @Size(max=200)
    private String jabber;
    // NOTE(review): column length is 100 but @Size caps at 50 — the extra
    // 50 characters can never be used through validated paths; confirm which
    // limit is intended. Also, this looks like a plain-text password field —
    // verify it is hashed before persisting.
    @Basic
    @Column(length=100, nullable=false)
    @NotBlank
    @Size(min=8, max=50)
    private String password;
    @OneToMany(mappedBy="user", fetch=FetchType.LAZY,
            targetEntity=UserRole.class, cascade={CascadeType.ALL})
    private Set<UserRole> userRoles;
    /** No-arg constructor required by JPA. */
    public User() {
    }
    public User(String login, String password, String name, String email, String jabber) {
        super();
        this.login = login;
        this.password = password;
        this.name = name;
        this.email = email;
        this.jabber = jabber;
    }
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getLogin() {
        return login;
    }
    public void setLogin(String login) {
        this.login = login;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getEmail() {
        return email;
    }
    public void setEmail(String email) {
        this.email = email;
    }
    public String getJabber() {
        return jabber;
    }
    public void setJabber(String jabber) {
        this.jabber = jabber;
    }
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    public Set<UserRole> getUserRoles() {
        return userRoles;
    }
    public void setUserRoles(Set<UserRole> userRoles) {
        this.userRoles = userRoles;
    }
    /**
     * Adds the role to this user's role set (creating the set lazily) and
     * sets the owning side of the bidirectional association.
     *
     * @return the same {@code userRole}, for chaining
     */
    public UserRole addUserRole(UserRole userRole) {
        if(getUserRoles()==null){
            setUserRoles(new LinkedHashSet<UserRole>());
        }
        getUserRoles().add(userRole);
        userRole.setUser(this);
        return userRole;
    }
    /**
     * Removes the role from this user's role set and clears the owning side
     * of the association. If no role set exists yet, the role is returned
     * unchanged.
     */
    public UserRole removeUserRole(UserRole userRole) {
        if(getUserRoles()==null){
            //TODO co by tu zwrocic?
            return userRole;
        }
        getUserRoles().remove(userRole);
        userRole.setUser(null);
        return userRole;
    }
    public static long getSerialversionuid() {
        return serialVersionUID;
    }
    /* (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        // Objects.hash uses the identical 31-based algorithm (and field
        // order) as the previous hand-rolled version, so hash values are
        // unchanged. userRoles stays excluded to avoid recursing through
        // the bidirectional association.
        return Objects.hash(email, id, jabber, login, name, password);
    }
    /* (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the former null check.
        if (!(obj instanceof User)) {
            return false;
        }
        User other = (User) obj;
        // userRoles intentionally excluded, mirroring hashCode().
        return Objects.equals(email, other.email)
                && Objects.equals(id, other.id)
                && Objects.equals(jabber, other.jabber)
                && Objects.equals(login, other.login)
                && Objects.equals(name, other.name)
                && Objects.equals(password, other.password);
    }
}
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.kotlin.idea.completion.test.weighers;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.idea.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.idea.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.jetbrains.kotlin.idea.test.TestRoot;
import org.junit.runner.RunWith;
/**
 * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}.
 * DO NOT MODIFY MANUALLY.
 */
@SuppressWarnings("all")
@TestRoot("completion/tests")
@TestDataPath("$CONTENT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/weighers/basic")
public abstract class BasicCompletionWeigherTestGenerated extends AbstractBasicCompletionWeigherTest {
    // Tests for testData/weighers/basic/contextualReturn; concrete cases live
    // in the nested NoReturnType / WithReturnType classes.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/contextualReturn")
    public abstract static class ContextualReturn extends AbstractBasicCompletionWeigherTest {
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic/contextualReturn/noReturnType")
        public static class NoReturnType extends AbstractBasicCompletionWeigherTest {
            // Delegates to doTest with the per-case test-data file.
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("BeginOfNestedBlock.kt")
            public void testBeginOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/BeginOfNestedBlock.kt");
            }
            @TestMetadata("BeginOfTopLevelBlock.kt")
            public void testBeginOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/BeginOfTopLevelBlock.kt");
            }
            @TestMetadata("EndOfNestedBlock.kt")
            public void testEndOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/EndOfNestedBlock.kt");
            }
            @TestMetadata("EndOfTopLevelBlock.kt")
            public void testEndOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/EndOfTopLevelBlock.kt");
            }
            @TestMetadata("ForWithBody.kt")
            public void testForWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/ForWithBody.kt");
            }
            @TestMetadata("ForWithoutBody.kt")
            public void testForWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/ForWithoutBody.kt");
            }
            @TestMetadata("IfWithoutBody.kt")
            public void testIfWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/IfWithoutBody.kt");
            }
            @TestMetadata("InElvis.kt")
            public void testInElvis() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InElvis.kt");
            }
            @TestMetadata("InElvisWhenSmartCompletionWins.kt")
            public void testInElvisWhenSmartCompletionWins() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InElvisWhenSmartCompletionWins.kt");
            }
            @TestMetadata("InWhenSingleExpression.kt")
            public void testInWhenSingleExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InWhenSingleExpression.kt");
            }
            @TestMetadata("InWhenWithBody.kt")
            public void testInWhenWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InWhenWithBody.kt");
            }
        }
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic/contextualReturn/withReturnType")
        public static class WithReturnType extends AbstractBasicCompletionWeigherTest {
            // Delegates to doTest with the per-case test-data file.
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("BeginOfNestedBlock.kt")
            public void testBeginOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/BeginOfNestedBlock.kt");
            }
            @TestMetadata("BeginOfTopLevelBlock.kt")
            public void testBeginOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/BeginOfTopLevelBlock.kt");
            }
            @TestMetadata("EndOfNestedBlock.kt")
            public void testEndOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/EndOfNestedBlock.kt");
            }
            @TestMetadata("EndOfTopLevelBlock.kt")
            public void testEndOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/EndOfTopLevelBlock.kt");
            }
            @TestMetadata("ForWithBody.kt")
            public void testForWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/ForWithBody.kt");
            }
            @TestMetadata("ForWithoutBody.kt")
            public void testForWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/ForWithoutBody.kt");
            }
            @TestMetadata("IfWithoutBody.kt")
            public void testIfWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/IfWithoutBody.kt");
            }
            @TestMetadata("InElvis.kt")
            public void testInElvis() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InElvis.kt");
            }
            @TestMetadata("InElvisInReturn.kt")
            public void testInElvisInReturn() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InElvisInReturn.kt");
            }
            @TestMetadata("InElvisWhenSmartCompletionWins.kt")
            public void testInElvisWhenSmartCompletionWins() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InElvisWhenSmartCompletionWins.kt");
            }
            @TestMetadata("InIfAsReturnedExpression.kt")
            public void testInIfAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InIfAsReturnedExpression.kt");
            }
            @TestMetadata("InIfInWhenWithBodyAsReturnedExpression.kt")
            public void testInIfInWhenWithBodyAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InIfInWhenWithBodyAsReturnedExpression.kt");
            }
            @TestMetadata("InNotElvisBinaryOperator.kt")
            public void testInNotElvisBinaryOperator() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InNotElvisBinaryOperator.kt");
            }
            @TestMetadata("InWhenAsReturnedExpression.kt")
            public void testInWhenAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenAsReturnedExpression.kt");
            }
            @TestMetadata("InWhenSingleExpression.kt")
            public void testInWhenSingleExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenSingleExpression.kt");
            }
            @TestMetadata("InWhenWithBody.kt")
            public void testInWhenWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenWithBody.kt");
            }
            @TestMetadata("InWhenWithBodyAsReturnedExpression.kt")
            public void testInWhenWithBodyAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenWithBodyAsReturnedExpression.kt");
            }
        }
    }
    // Tests for testData/weighers/basic/expectedInfo.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/expectedInfo")
    public static class ExpectedInfo extends AbstractBasicCompletionWeigherTest {
        // Delegates to doTest with the per-case test-data file.
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("AfterAs.kt")
        public void testAfterAs() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/AfterAs.kt");
        }
        @TestMetadata("CompanionObjectMethod.kt")
        public void testCompanionObjectMethod() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/CompanionObjectMethod.kt");
        }
        @TestMetadata("EnumEntries.kt")
        public void testEnumEntries() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/EnumEntries.kt");
        }
        @TestMetadata("ExpectedType.kt")
        public void testExpectedType() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/ExpectedType.kt");
        }
        @TestMetadata("ExpectedType2.kt")
        public void testExpectedType2() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/ExpectedType2.kt");
        }
        @TestMetadata("LambdaValue.kt")
        public void testLambdaValue() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/LambdaValue.kt");
        }
        @TestMetadata("MultiArgsItem.kt")
        public void testMultiArgsItem() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/MultiArgsItem.kt");
        }
        @TestMetadata("NameSimilarity.kt")
        public void testNameSimilarity() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NameSimilarity.kt");
        }
        @TestMetadata("NameSimilarityAndNoExpectedType.kt")
        public void testNameSimilarityAndNoExpectedType() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NameSimilarityAndNoExpectedType.kt");
        }
        @TestMetadata("NameSimilarityAndNoExpectedType2.kt")
        public void testNameSimilarityAndNoExpectedType2() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NameSimilarityAndNoExpectedType2.kt");
        }
        @TestMetadata("NoStupidComparison.kt")
        public void testNoStupidComparison() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NoStupidComparison.kt");
        }
        @TestMetadata("Null.kt")
        public void testNull() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/Null.kt");
        }
        @TestMetadata("PreferMatchingThis.kt")
        public void testPreferMatchingThis() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/PreferMatchingThis.kt");
        }
        @TestMetadata("TrueFalse.kt")
        public void testTrueFalse() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/TrueFalse.kt");
        }
        @TestMetadata("WhenByEnum.kt")
        public void testWhenByEnum() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/WhenByEnum.kt");
        }
    }
    // Tests for testData/weighers/basic/expectedType.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/expectedType")
    public static class ExpectedType extends AbstractBasicCompletionWeigherTest {
        // Delegates to doTest with the per-case test-data file.
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("ifConditionQualified.kt")
        public void testIfConditionQualified() throws Exception {
            runTest("testData/weighers/basic/expectedType/ifConditionQualified.kt");
        }
        @TestMetadata("returnFromFunction.kt")
        public void testReturnFromFunction() throws Exception {
            runTest("testData/weighers/basic/expectedType/returnFromFunction.kt");
        }
        @TestMetadata("returnFromFunctionQualifiedSelector.kt")
        public void testReturnFromFunctionQualifiedSelector() throws Exception {
            runTest("testData/weighers/basic/expectedType/returnFromFunctionQualifiedSelector.kt");
        }
        @TestMetadata("returnFromLambda.kt")
        public void testReturnFromLambda() throws Exception {
            runTest("testData/weighers/basic/expectedType/returnFromLambda.kt");
        }
        @TestMetadata("whileConditionQualified.kt")
        public void testWhileConditionQualified() throws Exception {
            runTest("testData/weighers/basic/expectedType/whileConditionQualified.kt");
        }
    }
    // Tests for testData/weighers/basic/parameterNameAndType.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/parameterNameAndType")
    public static class ParameterNameAndType extends AbstractBasicCompletionWeigherTest {
        // Delegates to doTest with the per-case test-data file.
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("Deprecated.kt")
        public void testDeprecated() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/Deprecated.kt");
        }
        @TestMetadata("FromCurrentFilePriority.kt")
        public void testFromCurrentFilePriority() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/FromCurrentFilePriority.kt");
        }
        @TestMetadata("ImportedFirst.kt")
        public void testImportedFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/ImportedFirst.kt");
        }
        @TestMetadata("MoreWordsMatchFirst.kt")
        public void testMoreWordsMatchFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/MoreWordsMatchFirst.kt");
        }
        @TestMetadata("ShorterFirst.kt")
        public void testShorterFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/ShorterFirst.kt");
        }
        @TestMetadata("StartMatchFirst.kt")
        public void testStartMatchFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/StartMatchFirst.kt");
        }
        @TestMetadata("UserPrefix.kt")
        public void testUserPrefix() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/UserPrefix.kt");
        }
    }
    // Tests directly under testData/weighers/basic (no subdirectory).
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic")
    public static class Uncategorized extends AbstractBasicCompletionWeigherTest {
        // Delegates to doTest with the per-case test-data file.
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("AfterNullable.kt")
        public void testAfterNullable() throws Exception {
            runTest("testData/weighers/basic/AfterNullable.kt");
        }
        @TestMetadata("CallableReference_NothingLast.kt")
        public void testCallableReference_NothingLast() throws Exception {
            runTest("testData/weighers/basic/CallableReference_NothingLast.kt");
        }
        @TestMetadata("Callables.kt")
        public void testCallables() throws Exception {
            runTest("testData/weighers/basic/Callables.kt");
        }
        @TestMetadata("DelegateToOtherObject.kt")
        public void testDelegateToOtherObject() throws Exception {
            runTest("testData/weighers/basic/DelegateToOtherObject.kt");
        }
        @TestMetadata("DeprecatedFun.kt")
        public void testDeprecatedFun() throws Exception {
            runTest("testData/weighers/basic/DeprecatedFun.kt");
        }
        @TestMetadata("DeprecatedJavaClass.kt")
        public void testDeprecatedJavaClass() throws Exception {
            runTest("testData/weighers/basic/DeprecatedJavaClass.kt");
        }
        @TestMetadata("DeprecatedSinceKotlinFun.kt")
        public void testDeprecatedSinceKotlinFun() throws Exception {
            runTest("testData/weighers/basic/DeprecatedSinceKotlinFun.kt");
        }
        @TestMetadata("DslCallWithExpectedType.kt")
        public void testDslCallWithExpectedType() throws Exception {
            runTest("testData/weighers/basic/DslCallWithExpectedType.kt");
        }
        @TestMetadata("DslCalls.kt")
        public void testDslCalls() throws Exception {
            runTest("testData/weighers/basic/DslCalls.kt");
        }
        @TestMetadata("DslCallsAnnotatedFunctionType.kt")
        public void testDslCallsAnnotatedFunctionType() throws Exception {
            runTest("testData/weighers/basic/DslCallsAnnotatedFunctionType.kt");
        }
        @TestMetadata("DslCallsWithMultipleReceivers.kt")
        public void testDslCallsWithMultipleReceivers() throws Exception {
            runTest("testData/weighers/basic/DslCallsWithMultipleReceivers.kt");
        }
        @TestMetadata("DslMemberCalls.kt")
        public void testDslMemberCalls() throws Exception {
            runTest("testData/weighers/basic/DslMemberCalls.kt");
        }
        @TestMetadata("ExactMatchForKeyword.kt")
        public void testExactMatchForKeyword() throws Exception {
            runTest("testData/weighers/basic/ExactMatchForKeyword.kt");
        }
        @TestMetadata("ImportedFirst.kt")
        public void testImportedFirst() throws Exception {
            runTest("testData/weighers/basic/ImportedFirst.kt");
        }
        @TestMetadata("ImportedFirstForJavaClass.kt")
        public void testImportedFirstForJavaClass() throws Exception {
            runTest("testData/weighers/basic/ImportedFirstForJavaClass.kt");
        }
        @TestMetadata("ImportedOrder.kt")
        public void testImportedOrder() throws Exception {
            runTest("testData/weighers/basic/ImportedOrder.kt");
        }
        @TestMetadata("KT-25588_1.kts")
        public void testKT_25588_1() throws Exception {
            runTest("testData/weighers/basic/KT-25588_1.kts");
        }
        @TestMetadata("KT-25588_2.kts")
        public void testKT_25588_2() throws Exception {
            runTest("testData/weighers/basic/KT-25588_2.kts");
        }
        @TestMetadata("KeywordsLast.kt")
        public void testKeywordsLast() throws Exception {
            runTest("testData/weighers/basic/KeywordsLast.kt");
        }
        @TestMetadata("LambdaSignature.kt")
        public void testLambdaSignature() throws Exception {
            runTest("testData/weighers/basic/LambdaSignature.kt");
        }
        @TestMetadata("LocalFileBeforeImported.kt")
        public void testLocalFileBeforeImported() throws Exception {
            runTest("testData/weighers/basic/LocalFileBeforeImported.kt");
        }
        @TestMetadata("LocalValuesAndParams.kt")
        public void testLocalValuesAndParams() throws Exception {
            runTest("testData/weighers/basic/LocalValuesAndParams.kt");
        }
        @TestMetadata("LocalsBeforeKeywords.kt")
        public void testLocalsBeforeKeywords() throws Exception {
            runTest("testData/weighers/basic/LocalsBeforeKeywords.kt");
        }
        @TestMetadata("LocalsPropertiesKeywords.kt")
        public void testLocalsPropertiesKeywords() throws Exception {
            runTest("testData/weighers/basic/LocalsPropertiesKeywords.kt");
        }
        @TestMetadata("NamedParameters.kt")
        public void testNamedParameters() throws Exception {
            runTest("testData/weighers/basic/NamedParameters.kt");
        }
        @TestMetadata("NamedParameters2.kt")
        public void testNamedParameters2() throws Exception {
            runTest("testData/weighers/basic/NamedParameters2.kt");
        }
        @TestMetadata("NamedParameters3.kt")
        public void testNamedParameters3() throws Exception {
            runTest("testData/weighers/basic/NamedParameters3.kt");
        }
        @TestMetadata("NoExpectedType.kt")
        public void testNoExpectedType() throws Exception {
            runTest("testData/weighers/basic/NoExpectedType.kt");
        }
        @TestMetadata("NullArgForInfixFunctionOnTheLeft.kt")
        public void testNullArgForInfixFunctionOnTheLeft() throws Exception {
            runTest("testData/weighers/basic/NullArgForInfixFunctionOnTheLeft.kt");
        }
        @TestMetadata("NullArgForInfixFunctionOnTheRight.kt")
        public void testNullArgForInfixFunctionOnTheRight() throws Exception {
            runTest("testData/weighers/basic/NullArgForInfixFunctionOnTheRight.kt");
        }
        @TestMetadata("NullArgForNotImportedFunction.kt")
        public void testNullArgForNotImportedFunction() throws Exception {
            runTest("testData/weighers/basic/NullArgForNotImportedFunction.kt");
        }
        @TestMetadata("NullForIfConditionOnTheLeft.kt")
        public void testNullForIfConditionOnTheLeft() throws Exception {
            runTest("testData/weighers/basic/NullForIfConditionOnTheLeft.kt");
        }
        @TestMetadata("NullForIfConditionOnTheRight.kt")
        public void testNullForIfConditionOnTheRight() throws Exception {
            runTest("testData/weighers/basic/NullForIfConditionOnTheRight.kt");
        }
        @TestMetadata("Packages.kt")
        public void testPackages() throws Exception {
            runTest("testData/weighers/basic/Packages.kt");
        }
        @TestMetadata("ParametersBeforeKeywords.kt")
        public void testParametersBeforeKeywords() throws Exception {
            runTest("testData/weighers/basic/ParametersBeforeKeywords.kt");
        }
        @TestMetadata("PreferFromJdk.kt")
        public void testPreferFromJdk() throws Exception {
            runTest("testData/weighers/basic/PreferFromJdk.kt");
        }
        @TestMetadata("PreferGetMethodToProperty.kt")
        public void testPreferGetMethodToProperty() throws Exception {
            runTest("testData/weighers/basic/PreferGetMethodToProperty.kt");
        }
        @TestMetadata("Prefix.kt")
        public void testPrefix() throws Exception {
            runTest("testData/weighers/basic/Prefix.kt");
        }
        @TestMetadata("PropertiesBeforeKeywords.kt")
        public void testPropertiesBeforeKeywords() throws Exception {
            runTest("testData/weighers/basic/PropertiesBeforeKeywords.kt");
        }
        @TestMetadata("StaticMembers.kt")
        public void testStaticMembers() throws Exception {
            runTest("testData/weighers/basic/StaticMembers.kt");
        }
        @TestMetadata("SuperMembers.kt")
        public void testSuperMembers() throws Exception {
            runTest("testData/weighers/basic/SuperMembers.kt");
        }
        @TestMetadata("TopLevelKeywordWithClassName.kt")
        public void testTopLevelKeywordWithClassName() throws Exception {
            runTest("testData/weighers/basic/TopLevelKeywordWithClassName.kt");
        }
        @TestMetadata("UnavailableDslReceiver.kt")
        public void testUnavailableDslReceiver() throws Exception {
            runTest("testData/weighers/basic/UnavailableDslReceiver.kt");
        }
    }
}
| |
package com.zigorsalvador.phoenix.android.activity;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.os.StrictMode;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.zigorsalvador.phoenix.android.R;
import com.zigorsalvador.phoenix.android.protocol.ServiceProtocol;
import com.zigorsalvador.phoenix.android.service.MobilityService;
public class MobilityActivity extends Activity implements OnClickListener
{
    // Helpers for talking to MobilityService (created lazily in onResume).
    private MessageSender sender;
    private MessageReceiver receiver;
    // Action buttons, wired to this activity's OnClickListener in onCreate.
    private Button wirelessButton;
    private Button discoverButton;
    private Button subscribeButton;
    // Status icons swapped to a "transition" drawable while a request runs.
    private ImageView wirelessImage;
    private ImageView discoverImage;
    private ImageView subscribeImage;
    private TextView brokerValue;
    private TextView progressValue;
    private ProgressBar progressBar;
    // Per-button re-entrancy guards: set true on click to ignore repeats.
    private Boolean blockButton1 = false;
    private Boolean blockButton2 = false;
    private Boolean blockButton3 = false;
    // Maximum value applied to progressBar in onCreate.
    private Integer limit = 1000;
//////////
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
Log.v(getClass().getName(), "onCreate...");
StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder().permitAll().build());
wirelessButton = (Button) this.findViewById(R.id.button1);
discoverButton = (Button) this.findViewById(R.id.button2);
subscribeButton = (Button) this.findViewById(R.id.button3);
wirelessButton.setOnClickListener(this);
discoverButton.setOnClickListener(this);
subscribeButton.setOnClickListener(this);
wirelessImage = (ImageView) this.findViewById(R.id.imageView2);
discoverImage = (ImageView) this.findViewById(R.id.imageView4);
subscribeImage = (ImageView) this.findViewById(R.id.imageView6);
brokerValue = (TextView) this.findViewById(R.id.textView2);
progressBar = (ProgressBar) this.findViewById(R.id.progressBar1);
progressValue = (TextView) this.findViewById(R.id.textView4);
progressBar.setMax(limit);
}
//////////
@Override
public void onResume()
{
super.onResume();
Log.v(getClass().getName(), "onResume...");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
progressValue.setText(getResources().getString(R.string.progress_label));
if (sender == null) sender = new MessageSender();
if (receiver == null) receiver = new MessageReceiver();
IntentFilter filter = new IntentFilter();
filter.addAction(ServiceProtocol.REFRESH_RESPONSE);
filter.addAction(ServiceProtocol.EVENT_NOTIFICATION);
registerReceiver(receiver, filter);
sender.sendStartService(ServiceProtocol.REFRESH_REQUEST);
}
//////////
@Override
public void onPause()
{
super.onPause();
Log.v(getClass().getName(), "onPause...");
sender.sendStopService(ServiceProtocol.SHUTDOWN_REQUEST);
unregisterReceiver(receiver);
finish();
}
//////////
@Override
public void onClick(View view)
{
if (view.getId() == R.id.button1 && !blockButton1)
{
blockButton1 = true;
wirelessImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_transition));
sender.sendStartService(ServiceProtocol.WIRELESS_BUTTON);
}
if (view.getId() == R.id.button2 && !blockButton2)
{
blockButton2 = true;
discoverImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_transition));
sender.sendStartService(ServiceProtocol.DISCOVER_BUTTON);
}
if (view.getId() == R.id.button3 && !blockButton3)
{
blockButton3 = true;
subscribeImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_transition));
sender.sendStartService(ServiceProtocol.SUBSCRIBE_BUTTON);
}
}
//////////
@Override
public boolean onKeyDown(int keyCode, KeyEvent event)
{
return true;
}
//////////
public class MessageSender
{
public void sendStartService(String action)
{
Intent intent = new Intent(getApplicationContext(), MobilityService.class);
intent.setAction(action);
startService(intent);
}
//////////
public void sendStopService(String action)
{
Intent intent = new Intent(getApplicationContext(), MobilityService.class);
stopService(intent);
}
}
//////////
public class MessageReceiver extends BroadcastReceiver
{
@Override
public void onReceive(Context context, Intent intent)
{
if (intent.getAction().equals(ServiceProtocol.REFRESH_RESPONSE))
{
Boolean wireless = intent.getExtras().getBoolean(ServiceProtocol.RESPONSE_EXTRA_1);
Boolean discover = intent.getExtras().getBoolean(ServiceProtocol.RESPONSE_EXTRA_2);
Boolean subscribe = intent.getExtras().getBoolean(ServiceProtocol.RESPONSE_EXTRA_3);
String currentBroker = intent.getExtras().getString(ServiceProtocol.RESPONSE_EXTRA_4);
String formerBroker = intent.getExtras().getString(ServiceProtocol.RESPONSE_EXTRA_5);
processRefresh(wireless, discover, subscribe, currentBroker, formerBroker);
}
else if (intent.getAction().equals(ServiceProtocol.EVENT_NOTIFICATION))
{
Long number = intent.getExtras().getLong(ServiceProtocol.NOTIFICATION_EXTRA);
Log.v(getClass().getName(), "Received event number " + number + "...");
increaseProgressBar();
}
}
}
//////////
public void increaseProgressBar()
{
progressBar.setProgress(progressBar.getProgress() + 1);
progressValue.setText(progressBar.getProgress() + " / " + limit);
if (progressBar.getProgress() == limit)
{
sender.sendStopService(ServiceProtocol.SHUTDOWN_REQUEST);
blockButton1 = true;
blockButton2 = true;
blockButton3 = true;
}
}
//////////
public void processRefresh(Boolean wireless, Boolean discover, Boolean subscribe, String currentBroker, String formerBroker)
{
if (wireless)
{
wirelessImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_enabled));
discoverButton.setEnabled(true);
blockButton1 = false;
}
else
{
wirelessImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_disabled));
discoverImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_disabled));
subscribeImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_disabled));
discoverButton.setEnabled(false);
subscribeButton.setEnabled(false);
blockButton1 = false;
blockButton2 = false;
}
if (discover)
{
discoverImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_enabled));
subscribeButton.setEnabled(true);
if (brokerValue.getText().equals(getResources().getString(R.string.none_label)))
{
if (!currentBroker.equals(getResources().getString(R.string.none_label)) && !formerBroker.equals(getResources().getString(R.string.none_label)))
{
subscribeButton.setText(R.string.reconnect_button);
}
}
}
else
{
discoverImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_disabled));
subscribeButton.setEnabled(false);
blockButton2 = false;
}
if (subscribe)
{
subscribeImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_enabled));
}
else
{
subscribeImage.setImageDrawable(getResources().getDrawable(R.drawable.icon_disabled));
blockButton3 = false;
}
brokerValue.setText(currentBroker);
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v9.services;
import com.google.ads.googleads.v9.resources.AccessibleBiddingStrategy;
import com.google.ads.googleads.v9.resources.AccessibleBiddingStrategyName;
import com.google.ads.googleads.v9.services.stub.AccessibleBiddingStrategyServiceStub;
import com.google.ads.googleads.v9.services.stub.AccessibleBiddingStrategyServiceStubSettings;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service to read accessible bidding strategies.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* try (AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
* AccessibleBiddingStrategyServiceClient.create()) {
* AccessibleBiddingStrategyName resourceName =
* AccessibleBiddingStrategyName.of("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]");
* AccessibleBiddingStrategy response =
* accessibleBiddingStrategyServiceClient.getAccessibleBiddingStrategy(resourceName);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the AccessibleBiddingStrategyServiceClient object to clean
* up resources such as threads. In the example above, try-with-resources is used, which
* automatically calls close().
*
* <p>The surface of this class includes several types of Java methods for each of the API's
* methods:
*
* <ol>
* <li> A "flattened" method. With this type of method, the fields of the request type have been
* converted into function parameters. It may be the case that not all fields are available as
* parameters, and not every API method will have a flattened method entry point.
* <li> A "request object" method. This type of method only takes one parameter, a request object,
* which must be constructed before the call. Not every API method will have a request object
* method.
* <li> A "callable" method. This type of method takes no parameters and returns an immutable API
* callable object, which can be used to initiate calls to the service.
* </ol>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of
* AccessibleBiddingStrategyServiceSettings to create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* AccessibleBiddingStrategyServiceSettings accessibleBiddingStrategyServiceSettings =
* AccessibleBiddingStrategyServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
* AccessibleBiddingStrategyServiceClient.create(accessibleBiddingStrategyServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* AccessibleBiddingStrategyServiceSettings accessibleBiddingStrategyServiceSettings =
* AccessibleBiddingStrategyServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
* AccessibleBiddingStrategyServiceClient.create(accessibleBiddingStrategyServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class AccessibleBiddingStrategyServiceClient implements BackgroundResource {
  /** Settings this client was created with; null when created directly from a stub. */
  private final AccessibleBiddingStrategyServiceSettings settings;
  /** Transport-level stub that performs the actual RPC calls. */
  private final AccessibleBiddingStrategyServiceStub stub;

  /** Constructs an instance of AccessibleBiddingStrategyServiceClient with default settings. */
  public static final AccessibleBiddingStrategyServiceClient create() throws IOException {
    return create(AccessibleBiddingStrategyServiceSettings.newBuilder().build());
  }

  /**
   * Constructs an instance of AccessibleBiddingStrategyServiceClient, using the given settings. The
   * channels are created based on the settings passed in, or defaults for any settings that are not
   * set.
   */
  public static final AccessibleBiddingStrategyServiceClient create(
      AccessibleBiddingStrategyServiceSettings settings) throws IOException {
    return new AccessibleBiddingStrategyServiceClient(settings);
  }

  /**
   * Constructs an instance of AccessibleBiddingStrategyServiceClient, using the given stub for
   * making calls. This is for advanced usage - prefer using
   * create(AccessibleBiddingStrategyServiceSettings).
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public static final AccessibleBiddingStrategyServiceClient create(
      AccessibleBiddingStrategyServiceStub stub) {
    return new AccessibleBiddingStrategyServiceClient(stub);
  }

  /**
   * Constructs an instance of AccessibleBiddingStrategyServiceClient, using the given settings.
   * This is protected so that it is easy to make a subclass, but otherwise, the static factory
   * methods should be preferred.
   */
  protected AccessibleBiddingStrategyServiceClient(
      AccessibleBiddingStrategyServiceSettings settings) throws IOException {
    this.settings = settings;
    this.stub =
        ((AccessibleBiddingStrategyServiceStubSettings) settings.getStubSettings()).createStub();
  }

  /** Constructs a client around a pre-built stub; {@code settings} will be null in this case. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  protected AccessibleBiddingStrategyServiceClient(AccessibleBiddingStrategyServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }

  /** Returns the settings used to create this client; null if it was created from a bare stub. */
  public final AccessibleBiddingStrategyServiceSettings getSettings() {
    return settings;
  }

  /** Returns the underlying transport stub. For advanced usage only. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public AccessibleBiddingStrategyServiceStub getStub() {
    return stub;
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested accessible bidding strategy in full detail.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
   *     AccessibleBiddingStrategyServiceClient.create()) {
   *   AccessibleBiddingStrategyName resourceName =
   *       AccessibleBiddingStrategyName.of("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]");
   *   AccessibleBiddingStrategy response =
   *       accessibleBiddingStrategyServiceClient.getAccessibleBiddingStrategy(resourceName);
   * }
   * }</pre>
   *
   * @param resourceName Required. The resource name of the accessible bidding strategy to fetch.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AccessibleBiddingStrategy getAccessibleBiddingStrategy(
      AccessibleBiddingStrategyName resourceName) {
    GetAccessibleBiddingStrategyRequest request =
        GetAccessibleBiddingStrategyRequest.newBuilder()
            .setResourceName(resourceName == null ? null : resourceName.toString())
            .build();
    return getAccessibleBiddingStrategy(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested accessible bidding strategy in full detail.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
   *     AccessibleBiddingStrategyServiceClient.create()) {
   *   String resourceName =
   *       AccessibleBiddingStrategyName.of("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]").toString();
   *   AccessibleBiddingStrategy response =
   *       accessibleBiddingStrategyServiceClient.getAccessibleBiddingStrategy(resourceName);
   * }
   * }</pre>
   *
   * @param resourceName Required. The resource name of the accessible bidding strategy to fetch.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AccessibleBiddingStrategy getAccessibleBiddingStrategy(String resourceName) {
    GetAccessibleBiddingStrategyRequest request =
        GetAccessibleBiddingStrategyRequest.newBuilder().setResourceName(resourceName).build();
    return getAccessibleBiddingStrategy(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested accessible bidding strategy in full detail.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
   *     AccessibleBiddingStrategyServiceClient.create()) {
   *   GetAccessibleBiddingStrategyRequest request =
   *       GetAccessibleBiddingStrategyRequest.newBuilder()
   *           .setResourceName(
   *               AccessibleBiddingStrategyName.of("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]")
   *                   .toString())
   *           .build();
   *   AccessibleBiddingStrategy response =
   *       accessibleBiddingStrategyServiceClient.getAccessibleBiddingStrategy(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AccessibleBiddingStrategy getAccessibleBiddingStrategy(
      GetAccessibleBiddingStrategyRequest request) {
    return getAccessibleBiddingStrategyCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested accessible bidding strategy in full detail.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AccessibleBiddingStrategyServiceClient accessibleBiddingStrategyServiceClient =
   *     AccessibleBiddingStrategyServiceClient.create()) {
   *   GetAccessibleBiddingStrategyRequest request =
   *       GetAccessibleBiddingStrategyRequest.newBuilder()
   *           .setResourceName(
   *               AccessibleBiddingStrategyName.of("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]")
   *                   .toString())
   *           .build();
   *   ApiFuture<AccessibleBiddingStrategy> future =
   *       accessibleBiddingStrategyServiceClient
   *           .getAccessibleBiddingStrategyCallable()
   *           .futureCall(request);
   *   // Do something.
   *   AccessibleBiddingStrategy response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetAccessibleBiddingStrategyRequest, AccessibleBiddingStrategy>
      getAccessibleBiddingStrategyCallable() {
    return stub.getAccessibleBiddingStrategyCallable();
  }

  /** Releases the stub's resources (channels, threads). Call when done with the client. */
  @Override
  public final void close() {
    stub.close();
  }

  // The BackgroundResource lifecycle below is delegated entirely to the stub.
  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.query.profile.types.test;
import com.yahoo.component.ComponentId;
import com.yahoo.container.jdisc.HttpRequest;
import com.yahoo.language.Language;
import com.yahoo.language.process.Embedder;
import com.yahoo.tensor.Tensor;
import com.yahoo.tensor.TensorType;
import com.yahoo.yolean.Exceptions;
import com.yahoo.search.Query;
import com.yahoo.processing.request.CompoundName;
import com.yahoo.search.query.profile.QueryProfile;
import com.yahoo.search.query.profile.QueryProfileRegistry;
import com.yahoo.search.query.profile.compiled.CompiledQueryProfile;
import com.yahoo.search.query.profile.QueryProfileProperties;
import com.yahoo.search.query.profile.compiled.CompiledQueryProfileRegistry;
import com.yahoo.search.query.profile.types.FieldDescription;
import com.yahoo.search.query.profile.types.FieldType;
import com.yahoo.search.query.profile.types.QueryProfileType;
import com.yahoo.search.query.profile.types.QueryProfileTypeRegistry;
import org.junit.Before;
import org.junit.Test;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* tests query profiles with/and types
*
* @author bratseth
*/
public class QueryProfileTypeTestCase {
    // Recreated for each test in setUp(); shared by all helper methods below.
    private QueryProfileRegistry registry;
    // Profile types under test: a permissive type, an empty type inheriting it,
    // a strict variant, and the "user" subprofile type in permissive and strict flavors.
    private QueryProfileType testtype, emptyInheritingTesttype, testtypeStrict, user, userStrict;
    @Before
    public void setUp() {
        registry = new QueryProfileRegistry();
        // "testtype" inherits the built-in "native" type
        testtype = new QueryProfileType(new ComponentId("testtype"));
        testtype.inherited().add(registry.getTypeRegistry().getComponent(new ComponentId("native")));
        emptyInheritingTesttype = new QueryProfileType(new ComponentId("emptyInheritingTesttype"));
        emptyInheritingTesttype.inherited().add(testtype);
        // Strict types reject fields not declared in the type
        testtypeStrict = new QueryProfileType(new ComponentId("testtypeStrict"));
        testtypeStrict.setStrict(true);
        user = new QueryProfileType(new ComponentId("user"));
        userStrict = new QueryProfileType(new ComponentId("userStrict"));
        userStrict.setStrict(true);
        // All types are registered before fields are added — presumably so that
        // references such as "query-profile:user" below can resolve; confirm.
        registry.getTypeRegistry().register(testtype);
        registry.getTypeRegistry().register(emptyInheritingTesttype);
        registry.getTypeRegistry().register(testtypeStrict);
        registry.getTypeRegistry().register(user);
        registry.getTypeRegistry().register(userStrict);
        addTypeFields(testtype, registry.getTypeRegistry());
        testtype.addField(new FieldDescription("myUserQueryProfile", FieldType.fromString("query-profile:user", registry.getTypeRegistry())));
        addTypeFields(testtypeStrict, registry.getTypeRegistry());
        testtypeStrict.addField(new FieldDescription("myUserQueryProfile", FieldType.fromString("query-profile:userStrict", registry.getTypeRegistry())));
        addUserFields(user, registry.getTypeRegistry());
        addUserFields(userStrict, registry.getTypeRegistry());
    }
private void addTypeFields(QueryProfileType type, QueryProfileTypeRegistry registry) {
type.addField(new FieldDescription("myString", FieldType.fromString("string", registry)), registry);
type.addField(new FieldDescription("myInteger", FieldType.fromString("integer", registry),"int"), registry);
type.addField(new FieldDescription("myLong", FieldType.fromString("long", registry)), registry);
type.addField(new FieldDescription("myFloat", FieldType.fromString("float", registry)), registry);
type.addField(new FieldDescription("myDouble", FieldType.fromString("double", registry)), registry);
type.addField(new FieldDescription("myBoolean", FieldType.fromString("boolean", registry)), registry);
type.addField(new FieldDescription("myBoolean", FieldType.fromString("boolean", registry)), registry);
type.addField(new FieldDescription("ranking.features.query(myTensor1)", FieldType.fromString("tensor(a{},b{})", registry)), registry);
type.addField(new FieldDescription("ranking.features.query(myTensor2)", FieldType.fromString("tensor(x[2],y[2])", registry)), registry);
type.addField(new FieldDescription("ranking.features.query(myTensor3)", FieldType.fromString("tensor<float>(x{})",registry)), registry);
type.addField(new FieldDescription("ranking.features.query(myTensor4)", FieldType.fromString("tensor<float>(x[5])",registry)), registry);
type.addField(new FieldDescription("myQuery", FieldType.fromString("query", registry)), registry);
type.addField(new FieldDescription("myQueryProfile", FieldType.fromString("query-profile", registry),"qp"), registry);
}
private void addUserFields(QueryProfileType user, QueryProfileTypeRegistry registry) {
user.addField(new FieldDescription("myUserString",FieldType.fromString("string",registry)), registry);
user.addField(new FieldDescription("myUserInteger",FieldType.fromString("integer",registry),"uint"), registry);
}
    @Test
    public void testTypedOfPrimitivesAssignmentNonStrict() {
        QueryProfile profile = new QueryProfile("test");
        profile.setType(testtype);
        registry.register(profile);
        profile.set("myString", "anyValue", registry);
        profile.set("nontypedString", "anyValueToo", registry); // legal because this is not strict
        // Values which cannot be converted to the declared field type are rejected
        assertWrongType(profile, "integer", "myInteger", "notInteger");
        assertWrongType(profile, "integer", "myInteger", "1.5");
        profile.set("myInteger", 3, registry);
        assertWrongType(profile, "long", "myLong", "notLong");
        assertWrongType(profile, "long", "myLong", "1.5");
        profile.set("myLong", 4000000000000L, registry);
        assertWrongType(profile, "float", "myFloat", "notFloat");
        profile.set("myFloat", 3.14f, registry);
        assertWrongType(profile, "double", "myDouble", "notDouble");
        profile.set("myDouble", 2.18, registry);
        profile.set("myBoolean", true, registry);
        // NOTE(review): these tensor literals end in "}}" (one closing brace too many) —
        // apparently tolerated by the tensor parser since the assertions below pass; confirm.
        String tensorString1 = "{{a:a1, b:b1}:1.0, {a:a2, b:b1}:2.0}}";
        profile.set("ranking.features.query(myTensor1)", tensorString1, registry);
        String tensorString2 = "{{x:0, y:0}:1.0, {x:0, y:1}:2.0}}";
        profile.set("ranking.features.query(myTensor2)", tensorString2, registry);
        String tensorString3 = "{{x:x1}:1.0, {x:x2}:2.0}}";
        profile.set("ranking.features.query(myTensor3)", tensorString3, registry);
        profile.set("myQuery", "...", registry); // TODO
        profile.set("myQueryProfile.anyString", "value1", registry);
        profile.set("myQueryProfile.anyDouble", 8.76, registry);
        profile.set("myUserQueryProfile.myUserString", "value2", registry);
        profile.set("myUserQueryProfile.anyString", "value3", registry); // Legal because user is not strict
        assertWrongType(profile, "integer", "myUserQueryProfile.myUserInteger", "notInteger");
        profile.set("myUserQueryProfile.uint", 1337, registry); // Set using alias
        profile.set("myUserQueryProfile.anyDouble", 9.13, registry); // Legal because user is not strict
        CompiledQueryProfileRegistry cRegistry = registry.compile();
        QueryProfileProperties properties = new QueryProfileProperties(cRegistry.findQueryProfile("test"));
        assertEquals("anyValue", properties.get("myString"));
        assertEquals("anyValueToo", properties.get("nontypedString"));
        assertEquals(3, properties.get("myInteger"));
        // Aliases appear to be matched case-insensitively ("Int" here, "QP"/"qp" below) — confirm
        assertEquals(3, properties.get("Int"));
        assertEquals(4000000000000L, properties.get("myLong"));
        assertEquals(3.14f, properties.get("myFloat"));
        assertEquals(2.18, properties.get("myDouble"));
        assertEquals(true, properties.get("myBoolean"));
        assertEquals(Tensor.from(tensorString1), properties.get("ranking.features.query(myTensor1)"));
        assertEquals(Tensor.from("tensor(x[2],y[2])", tensorString2), properties.get("ranking.features.query(myTensor2)"));
        assertEquals(Tensor.from("tensor<float>(x{})", tensorString3), properties.get("ranking.features.query(myTensor3)"));
        // TODO: assertEquals(..., cprofile.get("myQuery"));
        assertEquals("value1", properties.get("myQueryProfile.anyString"));
        assertEquals("value1", properties.get("QP.anyString"));
        assertEquals(8.76, properties.get("myQueryProfile.anyDouble"));
        assertEquals(8.76, properties.get("qp.anyDouble"));
        assertEquals("value2", properties.get("myUserQueryProfile.myUserString"));
        assertEquals("value3", properties.get("myUserQueryProfile.anyString"));
        assertEquals(1337, properties.get("myUserQueryProfile.myUserInteger"));
        assertEquals(1337, properties.get("myUserQueryProfile.uint"));
        assertEquals(9.13, properties.get("myUserQueryProfile.anyDouble"));
        assertNull(properties.get("nonExisting"));
        // Setting through the compiled properties view also resolves the (case-insensitive) alias
        properties.set("INt", 51);
        assertEquals(51, properties.get("InT"));
        assertEquals(51, properties.get("myInteger"));
    }
    @Test
    public void testTypedOfPrimitivesAssignmentStrict() {
        QueryProfile profile = new QueryProfile("test");
        profile.setType(testtypeStrict);
        profile.set("myString", "anyValue", registry);
        // Unlike the non-strict case, undeclared fields are rejected outright
        assertNotPermitted(profile, "nontypedString", "anyValueToo"); // Illegal because this is strict
        assertWrongType(profile, "integer", "myInteger", "notInteger");
        assertWrongType(profile, "integer", "myInteger", "1.5");
        profile.set("myInteger", 3, registry);
        assertWrongType(profile, "long", "myLong", "notLong");
        assertWrongType(profile, "long", "myLong", "1.5");
        profile.set("myLong", 4000000000000L, registry);
        assertWrongType(profile, "float", "myFloat", "notFloat");
        profile.set("myFloat", 3.14f, registry);
        assertWrongType(profile, "double", "myDouble", "notDouble");
        profile.set("myDouble", 2.18, registry);
        // "myQueryProfile" is declared without a subprofile type, so any subfield passes
        profile.set("myQueryProfile.anyString", "value1", registry);
        profile.set("myQueryProfile.anyDouble", 8.76, registry);
        profile.set("myUserQueryProfile.myUserString", "value2", registry);
        assertNotPermitted(profile, "myUserQueryProfile.anyString", "value3"); // Illegal because this is strict
        assertWrongType(profile, "integer", "myUserQueryProfile.myUserInteger", "notInteger");
        profile.set("myUserQueryProfile.myUserInteger", 1337, registry);
        assertNotPermitted(profile, "myUserQueryProfile.anyDouble", 9.13); // Illegal because this is strict
        CompiledQueryProfile cprofile = profile.compile(null);
        assertEquals("anyValue", cprofile.get("myString"));
        assertNull(cprofile.get("nontypedString")); // rejected above, so absent
        assertEquals(3, cprofile.get("myInteger"));
        assertEquals(4000000000000L, cprofile.get("myLong"));
        assertEquals(3.14f, cprofile.get("myFloat"));
        assertEquals(2.18, cprofile.get("myDouble"));
        assertEquals("value1", cprofile.get("myQueryProfile.anyString"));
        assertEquals(8.76, cprofile.get("myQueryProfile.anyDouble"));
        assertEquals("value2", cprofile.get("myUserQueryProfile.myUserString"));
        assertNull(cprofile.get("myUserQueryProfile.anyString"));
        assertEquals(1337, cprofile.get("myUserQueryProfile.myUserInteger"));
        assertNull(cprofile.get("myUserQueryProfile.anyDouble"));
    }
/** Tests assigning a subprofile directly */
@Test
public void testTypedAssignmentOfQueryProfilesNonStrict() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtype);
QueryProfile map1=new QueryProfile("myMap1");
map1.set("key1", "value1", registry);
QueryProfile map2=new QueryProfile("myMap2");
map2.set("key2", "value2", registry);
QueryProfile myUser=new QueryProfile("myUser");
myUser.setType(user);
myUser.set("myUserString", "userValue1", registry);
myUser.set("myUserInteger", 442, registry);
assertWrongType(profile,"reference to a query profile","myQueryProfile","aString");
profile.set("myQueryProfile", map1, registry);
profile.set("someMap", map2, registry); // Legal because this is not strict
assertWrongType(profile, "reference to a query profile of type 'user'", "myUserQueryProfile", map1);
profile.set("myUserQueryProfile", myUser, registry);
CompiledQueryProfile cprofile = profile.compile(null);
assertEquals("value1", cprofile.get("myQueryProfile.key1"));
assertEquals("value2", cprofile.get("someMap.key2"));
assertEquals("userValue1", cprofile.get("myUserQueryProfile.myUserString"));
assertEquals(442, cprofile.get("myUserQueryProfile.myUserInteger"));
}
/** Tests assigning a subprofile directly */
@Test
public void testTypedAssignmentOfQueryProfilesStrict() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtypeStrict);
QueryProfile map1=new QueryProfile("myMap1");
map1.set("key1", "value1", registry);
QueryProfile map2=new QueryProfile("myMap2");
map2.set("key2", "value2", registry);
QueryProfile myUser = new QueryProfile("myUser");
myUser.setType(userStrict);
myUser.set("myUserString", "userValue1", registry);
myUser.set("myUserInteger", 442, registry);
assertWrongType(profile,"reference to a query profile","myQueryProfile","aString");
profile.set("myQueryProfile", map1, registry);
assertNotPermitted(profile,"someMap", map2);
assertWrongType(profile,"reference to a query profile of type 'userStrict'", "myUserQueryProfile", map1);
profile.set("myUserQueryProfile", myUser, registry);
CompiledQueryProfile cprofile = profile.compile(null);
assertEquals("value1", cprofile.get("myQueryProfile.key1"));
assertNull(cprofile.get("someMap.key2"));
assertEquals("userValue1", cprofile.get("myUserQueryProfile.myUserString"));
assertEquals(442, cprofile.get("myUserQueryProfile.myUserInteger"));
}
/** Tests assigning a subprofile as an id string */
@Test
public void testTypedAssignmentOfQueryProfileReferencesNonStrict() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtype);
QueryProfile map1 = new QueryProfile("myMap1");
map1.set("key1", "value1", registry);
QueryProfile map2 = new QueryProfile("myMap2");
map2.set("key2", "value2", registry);
QueryProfile myUser = new QueryProfile("myUser");
myUser.setType(user);
myUser.set("myUserString", "userValue1", registry);
myUser.set("myUserInteger", 442, registry);
registry.register(profile);
registry.register(map1);
registry.register(map2);
registry.register(myUser);
assertWrongType(profile,"reference to a query profile", "myQueryProfile", "aString");
registry.register(map1);
profile.set("myQueryProfile", "myMap1", registry);
registry.register(map2);
profile.set("someMap", "myMap2", registry); // NOTICE: Will set as a string because we cannot know this is a reference
assertWrongType(profile, "reference to a query profile of type 'user'", "myUserQueryProfile", "myMap1");
registry.register(myUser);
profile.set("myUserQueryProfile","myUser", registry);
CompiledQueryProfileRegistry cRegistry = registry.compile();
CompiledQueryProfile cprofile = cRegistry.getComponent("test");
assertEquals("value1", cprofile.get("myQueryProfile.key1"));
assertEquals("myMap2", cprofile.get("someMap"));
assertNull("Asking for an value which cannot be completely resolved returns null", cprofile.get("someMap.key2"));
assertEquals("userValue1", cprofile.get("myUserQueryProfile.myUserString"));
assertEquals(442, cprofile.get("myUserQueryProfile.myUserInteger"));
}
    /**
     * Tests overriding a subprofile as an id string through the query.
     * Here there exists a user profile already, and then a new one is overwritten
     */
    @Test
    public void testTypedOverridingOfQueryProfileReferencesNonStrictThroughQuery() {
        QueryProfile profile = new QueryProfile("test");
        profile.setType(testtype);
        QueryProfile myUser = new QueryProfile("myUser");
        myUser.setType(user);
        // NOTE(review): the set(...) calls here resolve 'registry' to the *field*
        // (the local declared below is not yet in scope), while registration uses a
        // fresh local registry — looks accidental but apparently harmless; confirm.
        myUser.set("myUserString", "userValue1", registry);
        myUser.set("myUserInteger", 442, registry);
        QueryProfile newUser = new QueryProfile("newUser");
        newUser.setType(user);
        newUser.set("myUserString", "newUserValue1", registry);
        newUser.set("myUserInteger", 845, registry);
        // Shadows the instance field 'registry'
        QueryProfileRegistry registry = new QueryProfileRegistry();
        registry.register(profile);
        registry.register(myUser);
        registry.register(newUser);
        CompiledQueryProfileRegistry cRegistry = registry.compile();
        CompiledQueryProfile cprofile = cRegistry.getComponent("test");
        // The request parameter overrides which user profile "myUserQueryProfile" references
        Query query = new Query(HttpRequest.createTestRequest("?myUserQueryProfile=newUser",
                                                              com.yahoo.jdisc.http.HttpRequest.Method.GET),
                                cprofile);
        assertEquals(0, query.errors().size());
        assertEquals("newUserValue1", query.properties().get("myUserQueryProfile.myUserString"));
        assertEquals(845, query.properties().get("myUserQueryProfile.myUserInteger"));
    }
/**
* Tests overriding a subprofile as an id string through the query.
* Here no user profile is set before it is assigned in the query
*/
@Test
public void testTypedAssignmentOfQueryProfileReferencesNonStrictThroughQuery() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtype);
QueryProfile newUser = new QueryProfile("newUser");
newUser.setType(user);
newUser.set("myUserString", "newUserValue1", registry);
newUser.set("myUserInteger", 845, registry);
registry.register(profile);
registry.register(newUser);
CompiledQueryProfileRegistry cRegistry = registry.compile();
CompiledQueryProfile cprofile = cRegistry.getComponent("test");
Query query = new Query(HttpRequest.createTestRequest("?myUserQueryProfile=newUser",
com.yahoo.jdisc.http.HttpRequest.Method.GET),
cprofile);
assertEquals(0, query.errors().size());
assertEquals("newUserValue1", query.properties().get("myUserQueryProfile.myUserString"));
assertEquals(845, query.properties().get("myUserQueryProfile.myUserInteger"));
}
/**
* Tests overriding a subprofile as an id string through the query.
* Here no user profile is set before it is assigned in the query
*/
@Test
public void testTypedAssignmentOfQueryProfileReferencesStrictThroughQuery() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtypeStrict);
QueryProfile newUser = new QueryProfile("newUser");
newUser.setType(userStrict);
newUser.set("myUserString", "newUserValue1", registry);
newUser.set("myUserInteger", 845, registry);
registry.register(profile);
registry.register(newUser);
CompiledQueryProfileRegistry cRegistry = registry.compile();
Query query = new Query(HttpRequest.createTestRequest("?myUserQueryProfile=newUser", com.yahoo.jdisc.http.HttpRequest.Method.GET), cRegistry.getComponent("test"));
assertEquals(0, query.errors().size());
assertEquals("newUserValue1", query.properties().get("myUserQueryProfile.myUserString"));
assertEquals(845, query.properties().get("myUserQueryProfile.myUserInteger"));
try {
query.properties().set("myUserQueryProfile.someKey", "value");
fail("Should not be allowed to set this");
}
catch (IllegalArgumentException e) {
assertEquals("Could not set 'myUserQueryProfile.someKey' to 'value': 'someKey' is not declared in query profile type 'userStrict', and the type is strict",
Exceptions.toMessageString(e));
}
}
@Test
public void testTensorRankFeatureInRequest() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtype);
registry.register(profile);
CompiledQueryProfileRegistry cRegistry = registry.compile();
String tensorString = "{{a:a1, b:b1}:1.0, {a:a2, b:b1}:2.0}}";
Query query = new Query(HttpRequest.createTestRequest("?" + urlEncode("ranking.features.query(myTensor1)") +
"=" + urlEncode(tensorString),
com.yahoo.jdisc.http.HttpRequest.Method.GET),
cRegistry.getComponent("test"));
assertEquals(0, query.errors().size());
assertEquals(Tensor.from(tensorString), query.properties().get("ranking.features.query(myTensor1)"));
assertEquals(Tensor.from(tensorString), query.getRanking().getFeatures().getTensor("query(myTensor1)").get());
}
// Expected to work exactly as testTensorRankFeatureInRequest
@Test
public void testTensorRankFeatureInRequestWithInheritedQueryProfileType() {
QueryProfile profile = new QueryProfile("test");
profile.setType(emptyInheritingTesttype);
registry.register(profile);
CompiledQueryProfileRegistry cRegistry = registry.compile();
String tensorString = "{{a:a1, b:b1}:1.0, {a:a2, b:b1}:2.0}}";
Query query = new Query(HttpRequest.createTestRequest("?" + urlEncode("ranking.features.query(myTensor1)") +
"=" + urlEncode(tensorString),
com.yahoo.jdisc.http.HttpRequest.Method.GET),
cRegistry.getComponent("test"));
assertEquals(0, query.errors().size());
assertEquals(Tensor.from(tensorString), query.properties().get("ranking.features.query(myTensor1)"));
assertEquals(Tensor.from(tensorString), query.getRanking().getFeatures().getTensor("query(myTensor1)").get());
}
@Test
public void testUnembeddedTensorRankFeatureInRequest() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtype);
registry.register(profile);
CompiledQueryProfileRegistry cRegistry = registry.compile();
String textToEmbed = "text to embed into a tensor";
String destinationFeature = "query(myTensor4)";
Tensor expectedTensor = Tensor.from("tensor<float>(x[5]):[3,7,4,0,0]]");
Query query1 = new Query.Builder().setRequest(HttpRequest.createTestRequest("?" + urlEncode("ranking.features." + destinationFeature) +
"=" + urlEncode("embed(" + textToEmbed + ")"),
com.yahoo.jdisc.http.HttpRequest.Method.GET))
.setQueryProfile(cRegistry.getComponent("test"))
.setEmbedder(new MockEmbedder(textToEmbed,
Language.UNKNOWN,
destinationFeature,
expectedTensor))
.build();
assertEquals(0, query1.errors().size());
assertEquals(expectedTensor, query1.properties().get("ranking.features.query(myTensor4)"));
assertEquals(expectedTensor, query1.getRanking().getFeatures().getTensor("query(myTensor4)").get());
// Explicit language
Query query2 = new Query.Builder().setRequest(HttpRequest.createTestRequest("?" + urlEncode("ranking.features." + destinationFeature) +
"=" + urlEncode("embed(" + textToEmbed + ")") +
"&language=en",
com.yahoo.jdisc.http.HttpRequest.Method.GET))
.setQueryProfile(cRegistry.getComponent("test"))
.setEmbedder(new MockEmbedder(textToEmbed,
Language.ENGLISH,
destinationFeature,
expectedTensor))
.build();
assertEquals(0, query2.errors().size());
assertEquals(expectedTensor, query2.properties().get("ranking.features.query(myTensor4)"));
assertEquals(expectedTensor, query2.getRanking().getFeatures().getTensor("query(myTensor4)").get());
}
    /** Returns the given string URL-encoded using UTF-8. */
    private String urlEncode(String s) {
        return URLEncoder.encode(s, StandardCharsets.UTF_8);
    }
@Test
public void testIllegalStrictAssignmentFromRequest() {
QueryProfile profile = new QueryProfile("test");
profile.setType(testtypeStrict);
QueryProfile newUser = new QueryProfile("newUser");
newUser.setType(userStrict);
profile.set("myUserQueryProfile", newUser, registry);
try {
new Query(HttpRequest.createTestRequest("?myUserQueryProfile.nondeclared=someValue",
com.yahoo.jdisc.http.HttpRequest.Method.GET),
profile.compile(null));
fail("Above statement should throw");
} catch (IllegalArgumentException e) {
// As expected.
assertTrue(Exceptions.toMessageString(e).contains(
"Could not set 'myUserQueryProfile.nondeclared' to 'someValue': 'nondeclared' is not declared in query profile type 'userStrict', and the type is strict"));
}
}
/**
* Tests overriding a subprofile as an id string through the query.
* Here there exists a user profile already, and then a new one is overwritten.
* The whole thing is accessed through a two levels of nontyped top-level profiles
*/
@Test
public void testTypedOverridingOfQueryProfileReferencesNonStrictThroughQueryNestedInAnUntypedProfile() {
QueryProfile topMap = new QueryProfile("topMap");
QueryProfile subMap = new QueryProfile("topSubMap");
topMap.set("subMap", subMap, registry);
QueryProfile test = new QueryProfile("test");
test.setType(testtype);
subMap.set("typeProfile", test, registry);
QueryProfile myUser = new QueryProfile("myUser");
myUser.setType(user);
myUser.set("myUserString", "userValue1", registry);
myUser.set("myUserInteger", 442, registry);
test.set("myUserQueryProfile", myUser, registry);
QueryProfile newUser = new QueryProfile("newUser");
newUser.setType(user);
newUser.set("myUserString", "newUserValue1", registry);
newUser.set("myUserInteger", 845, registry);
registry.register(topMap);
registry.register(subMap);
registry.register(test);
registry.register(myUser);
registry.register(newUser);
CompiledQueryProfileRegistry cRegistry = registry.compile();
Query query = new Query(HttpRequest.createTestRequest("?subMap.typeProfile.myUserQueryProfile=newUser",
com.yahoo.jdisc.http.HttpRequest.Method.GET),
cRegistry.getComponent("topMap"));
assertEquals(0, query.errors().size());
assertEquals("newUserValue1", query.properties().get("subMap.typeProfile.myUserQueryProfile.myUserString"));
assertEquals(845, query.properties().get("subMap.typeProfile.myUserQueryProfile.myUserInteger"));
}
/**
* Same as previous test but using the untyped myQueryProfile reference instead of the typed myUserQueryProfile
*/
@Test
public void testAnonTypedOverridingOfQueryProfileReferencesNonStrictThroughQueryNestedInAnUntypedProfile() {
QueryProfile topMap = new QueryProfile("topMap");
QueryProfile subMap = new QueryProfile("topSubMap");
topMap.set("subMap", subMap, registry);
QueryProfile test = new QueryProfile("test");
test.setType(testtype);
subMap.set("typeProfile", test, registry);
QueryProfile myUser = new QueryProfile("myUser");
myUser.setType(user);
myUser.set("myUserString", "userValue1", registry);
myUser.set("myUserInteger", 442, registry);
test.set("myQueryProfile", myUser, registry);
QueryProfile newUser = new QueryProfile("newUser");
newUser.setType(user);
newUser.set("myUserString", "newUserValue1", registry);
newUser.set("myUserInteger", 845, registry);
registry.register(topMap);
registry.register(subMap);
registry.register(test);
registry.register(myUser);
registry.register(newUser);
CompiledQueryProfileRegistry cRegistry = registry.compile();
Query query = new Query(HttpRequest.createTestRequest("?subMap.typeProfile.myQueryProfile=newUser", com.yahoo.jdisc.http.HttpRequest.Method.GET), cRegistry.getComponent("topMap"));
assertEquals(0, query.errors().size());
assertEquals("newUserValue1",query.properties().get("subMap.typeProfile.myQueryProfile.myUserString"));
assertEquals(845,query.properties().get("subMap.typeProfile.myQueryProfile.myUserInteger"));
}
/**
* Tests setting a illegal value in a strict profile nested under untyped maps
*/
@Test
public void testSettingValueInStrictTypeNestedUnderUntypedMaps() {
QueryProfile topMap = new QueryProfile("topMap");
QueryProfile subMap = new QueryProfile("topSubMap");
topMap.set("subMap", subMap, registry);
QueryProfile test = new QueryProfile("test");
test.setType(testtypeStrict);
subMap.set("typeProfile", test, registry);
registry.register(topMap);
registry.register(subMap);
registry.register(test);
CompiledQueryProfileRegistry cRegistry = registry.compile();
try {
new Query(
HttpRequest.createTestRequest("?subMap.typeProfile.someValue=value",
com.yahoo.jdisc.http.HttpRequest.Method.GET),
cRegistry.getComponent("topMap"));
fail("Above statement should throw");
} catch (IllegalArgumentException e) {
// As expected.
assertTrue(Exceptions.toMessageString(e).contains(
"Could not set 'subMap.typeProfile.someValue' to 'value': 'someValue' is not declared in query profile type 'testtypeStrict', and the type is strict"));
}
}
/**
* Tests overriding a subprofile as an id string through the query.
* Here, no user profile is set before it is assigned in the query
* The whole thing is accessed through a two levels of nontyped top-level profiles
*/
@Test
public void testTypedSettingOfQueryProfileReferencesNonStrictThroughQueryNestedInAnUntypedProfile() {
QueryProfile topMap = new QueryProfile("topMap");
QueryProfile subMap = new QueryProfile("topSubMap");
topMap.set("subMap",subMap, registry);
QueryProfile test = new QueryProfile("test");
test.setType(testtype);
subMap.set("typeProfile",test, registry);
QueryProfile newUser = new QueryProfile("newUser");
newUser.setType(user);
newUser.set("myUserString", "newUserValue1", registry);
newUser.set("myUserInteger", 845, registry);
registry.register(topMap);
registry.register(subMap);
registry.register(test);
registry.register(newUser);
CompiledQueryProfileRegistry cRegistry = registry.compile();
Query query = new Query(HttpRequest.createTestRequest("?subMap.typeProfile.myUserQueryProfile=newUser",
com.yahoo.jdisc.http.HttpRequest.Method.GET),
cRegistry.getComponent("topMap"));
assertEquals(0, query.errors().size());
assertEquals("newUserValue1", query.properties().get("subMap.typeProfile.myUserQueryProfile.myUserString"));
assertEquals(845, query.properties().get("subMap.typeProfile.myUserQueryProfile.myUserInteger"));
}
@Test
public void testNestedTypeName() {
ComponentId.resetGlobalCountersForTests();
QueryProfileRegistry registry = new QueryProfileRegistry();
QueryProfileType type = new QueryProfileType("testType");
registry.getTypeRegistry().register(type);
type.addField(new FieldDescription("ranking.features.query(embedding_profile)",
"tensor<float>(model{},x[128])"),
registry.getTypeRegistry());
QueryProfile test = new QueryProfile("test");
registry.register(test);
test.setType(type);
CompiledQueryProfileRegistry cRegistry = registry.compile();
Query query = new Query("?query=foo", cRegistry.getComponent("test"));
// With a prefix we're not in the built-in type space
query.properties().set("prefix.ranking.foo", 0.1);
assertEquals(0.1, query.properties().get("prefix.ranking.foo"));
}
@Test
public void testNestedTypeNameUsingBuiltInTypes() {
ComponentId.resetGlobalCountersForTests();
QueryProfileRegistry registry = new QueryProfileRegistry();
QueryProfileType type = new QueryProfileType("testType");
type.inherited().add(Query.getArgumentType()); // Include native type checking
registry.getTypeRegistry().register(type);
type.addField(new FieldDescription("ranking.features.query(embedding_profile)",
"tensor<float>(model{},x[128])"),
registry.getTypeRegistry());
QueryProfile test = new QueryProfile("test");
registry.register(test);
test.setType(type);
CompiledQueryProfileRegistry cRegistry = registry.compile();
Query query = new Query("?query=foo", cRegistry.getComponent("test"));
// Cannot set a property in a strict built-in type
try {
query.properties().set("ranking.foo", 0.1);
fail("Expected exception");
}
catch (IllegalArgumentException e) {
assertEquals("'foo' is not a valid property in 'ranking'. See the query api for valid keys starting by 'ranking'.",
e.getCause().getMessage());
}
// With a prefix we're not in the built-in type space
query.properties().set("prefix.ranking.foo", 0.1);
assertEquals(0.1, query.properties().get("prefix.ranking.foo"));
}
private void assertWrongType(QueryProfile profile,String typeName,String name,Object value) {
try {
profile.set(name,value, registry);
fail("Should fail setting " + name + " to " + value);
}
catch (IllegalArgumentException e) {
assertEquals("Could not set '" + name + "' to '" + value + "': '" + value + "' is not a " + typeName,
Exceptions.toMessageString(e));
}
}
private void assertNotPermitted(QueryProfile profile,String name,Object value) {
String localName = new CompoundName(name).last();
try {
profile.set(name, value, registry);
fail("Should fail setting " + name + " to " + value);
}
catch (IllegalArgumentException e) {
assertTrue(Exceptions.toMessageString(e).startsWith("Could not set '" + name + "' to '" + value + "': '" + localName + "' is not declared"));
}
}
private static final class MockEmbedder implements Embedder {
private final String expectedText;
private final Language expectedLanguage;
private final String expectedDestination;
private final Tensor tensorToReturn;
public MockEmbedder(String expectedText,
Language expectedLanguage,
String expectedDestination,
Tensor tensorToReturn) {
this.expectedText = expectedText;
this.expectedLanguage = expectedLanguage;
this.expectedDestination = expectedDestination;
this.tensorToReturn = tensorToReturn;
}
@Override
public List<Integer> embed(String text, Embedder.Context context) {
fail("Unexpected call");
return null;
}
@Override
public Tensor embed(String text, Embedder.Context context, TensorType tensorType) {
assertEquals(expectedText, text);
assertEquals(expectedLanguage, context.getLanguage());
assertEquals(expectedDestination, context.getDestination());
assertEquals(tensorToReturn.type(), tensorType);
return tensorToReturn;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.segment;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.lang.invoke.MethodHandles;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapFileOutputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.nutch.crawl.CrawlDatum;
import org.apache.nutch.crawl.NutchWritable;
import org.apache.nutch.parse.ParseData;
import org.apache.nutch.parse.ParseText;
import org.apache.nutch.protocol.Content;
import org.apache.nutch.util.HadoopFSUtil;
import org.apache.nutch.util.NutchConfiguration;
import org.apache.nutch.util.NutchJob;
/** Dump the content of a segment. */
public class SegmentReader extends Configured implements Tool,
Reducer<Text, NutchWritable, Text, Text> {
  private static final Logger LOG = LoggerFactory
      .getLogger(MethodHandles.lookup().lookupClass());

  // Running record counter used to label each record emitted by reduce()
  long recNo = 0L;

  // Flags selecting which segment subdirectories to read (see dump()):
  // co=content, fe=crawl_fetch, ge=crawl_generate, pa=crawl_parse,
  // pd=parse_data, pt=parse_text
  private boolean co, fe, ge, pa, pd, pt;
  private FileSystem fs;
public static class InputCompatMapper extends MapReduceBase implements
Mapper<WritableComparable<?>, Writable, Text, NutchWritable> {
private Text newKey = new Text();
public void map(WritableComparable<?> key, Writable value,
OutputCollector<Text, NutchWritable> collector, Reporter reporter)
throws IOException {
// convert on the fly from old formats with UTF8 keys.
// UTF8 deprecated and replaced by Text.
if (key instanceof Text) {
newKey.set(key.toString());
key = newKey;
}
collector.collect((Text) key, new NutchWritable(value));
}
}
/** Implements a text output format */
public static class TextOutputFormat extends
FileOutputFormat<WritableComparable<?>, Writable> {
public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(
final FileSystem fs, JobConf job, String name,
final Progressable progress) throws IOException {
final Path segmentDumpFile = new Path(
FileOutputFormat.getOutputPath(job), name);
// Get the old copy out of the way
if (fs.exists(segmentDumpFile))
fs.delete(segmentDumpFile, true);
final PrintStream printStream = new PrintStream(
fs.create(segmentDumpFile));
return new RecordWriter<WritableComparable<?>, Writable>() {
public synchronized void write(WritableComparable<?> key, Writable value)
throws IOException {
printStream.println(value);
}
public synchronized void close(Reporter reporter) throws IOException {
printStream.close();
}
};
}
}
  /**
   * No-arg constructor; configuration must be supplied later, e.g. via
   * {@link #configure(JobConf)} when used by the MapReduce framework.
   */
  public SegmentReader() {
    super(null);
  }
  /**
   * @param conf Hadoop configuration
   * @param co read the content directory
   * @param fe read the crawl_fetch directory
   * @param ge read the crawl_generate directory
   * @param pa read the crawl_parse directory
   * @param pd read the parse_data directory
   * @param pt read the parse_text directory
   */
  public SegmentReader(Configuration conf, boolean co, boolean fe, boolean ge,
      boolean pa, boolean pd, boolean pt) {
    super(conf);
    this.co = co;
    this.fe = fe;
    this.ge = ge;
    this.pa = pa;
    this.pd = pd;
    this.pt = pt;
    try {
      this.fs = FileSystem.get(getConf());
    } catch (IOException e) {
      // fs stays null on failure; only logged here, later use would fail
      LOG.error("IOException:", e);
    }
  }
  /**
   * MapReduce configuration hook: reads the segment.reader.* flags (all
   * defaulting to true) and opens the default FileSystem.
   */
  public void configure(JobConf job) {
    setConf(job);
    this.co = getConf().getBoolean("segment.reader.co", true);
    this.fe = getConf().getBoolean("segment.reader.fe", true);
    this.ge = getConf().getBoolean("segment.reader.ge", true);
    this.pa = getConf().getBoolean("segment.reader.pa", true);
    this.pd = getConf().getBoolean("segment.reader.pd", true);
    this.pt = getConf().getBoolean("segment.reader.pt", true);
    try {
      this.fs = FileSystem.get(getConf());
    } catch (IOException e) {
      // fs stays null on failure; only logged here, later use would fail
      LOG.error("IOException:", e);
    }
  }
  /**
   * Builds a job configuration carrying this reader's include flags so that
   * {@link #configure(JobConf)} restores them on the task side.
   */
  private JobConf createJobConf() {
    JobConf job = new NutchJob(getConf());
    job.setBoolean("segment.reader.co", this.co);
    job.setBoolean("segment.reader.fe", this.fe);
    job.setBoolean("segment.reader.ge", this.ge);
    job.setBoolean("segment.reader.pa", this.pa);
    job.setBoolean("segment.reader.pd", this.pd);
    job.setBoolean("segment.reader.pt", this.pt);
    return job;
  }
  /** No resources to release here; required by the Reducer lifecycle. */
  public void close() {
  }
public void reduce(Text key, Iterator<NutchWritable> values,
OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
StringBuffer dump = new StringBuffer();
dump.append("\nRecno:: ").append(recNo++).append("\n");
dump.append("URL:: " + key.toString() + "\n");
while (values.hasNext()) {
Writable value = values.next().get(); // unwrap
if (value instanceof CrawlDatum) {
dump.append("\nCrawlDatum::\n").append(((CrawlDatum) value).toString());
} else if (value instanceof Content) {
dump.append("\nContent::\n").append(((Content) value).toString());
} else if (value instanceof ParseData) {
dump.append("\nParseData::\n").append(((ParseData) value).toString());
} else if (value instanceof ParseText) {
dump.append("\nParseText::\n").append(((ParseText) value).toString());
} else if (LOG.isWarnEnabled()) {
LOG.warn("Unrecognized type: " + value.getClass());
}
}
output.collect(key, new Text(dump.toString()));
}
  /**
   * Dumps the selected parts of {@code segment} as text into {@code output}
   * (file name taken from "segment.dump.dir", default "dump"). Runs a
   * MapReduce job into a temporary directory, then concatenates the part
   * files while renumbering the "Recno::" lines.
   *
   * @param segment segment directory to read
   * @param output directory receiving the concatenated dump file
   */
  public void dump(Path segment, Path output) throws IOException {
    if (LOG.isInfoEnabled()) {
      LOG.info("SegmentReader: dump segment: " + segment);
    }
    JobConf job = createJobConf();
    job.setJobName("read " + segment);
    // Add only the subdirectories selected by the include flags
    if (ge)
      FileInputFormat.addInputPath(job, new Path(segment,
          CrawlDatum.GENERATE_DIR_NAME));
    if (fe)
      FileInputFormat.addInputPath(job, new Path(segment,
          CrawlDatum.FETCH_DIR_NAME));
    if (pa)
      FileInputFormat.addInputPath(job, new Path(segment,
          CrawlDatum.PARSE_DIR_NAME));
    if (co)
      FileInputFormat.addInputPath(job, new Path(segment, Content.DIR_NAME));
    if (pd)
      FileInputFormat.addInputPath(job, new Path(segment, ParseData.DIR_NAME));
    if (pt)
      FileInputFormat.addInputPath(job, new Path(segment, ParseText.DIR_NAME));
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setMapperClass(InputCompatMapper.class);
    job.setReducerClass(SegmentReader.class);
    // Job output goes to a randomly named temp dir, removed again below
    Path tempDir = new Path(job.get("hadoop.tmp.dir", "/tmp") + "/segread-"
        + new java.util.Random().nextInt());
    fs.delete(tempDir, true);
    FileOutputFormat.setOutputPath(job, tempDir);
    job.setOutputFormat(TextOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NutchWritable.class);
    JobClient.runJob(job);
    // concatenate the output
    Path dumpFile = new Path(output, job.get("segment.dump.dir", "dump"));
    // remove the old file
    fs.delete(dumpFile, true);
    FileStatus[] fstats = fs.listStatus(tempDir,
        HadoopFSUtil.getPassAllFilter());
    Path[] files = HadoopFSUtil.getPaths(fstats);
    PrintWriter writer = null;
    int currentRecordNumber = 0;
    // writer stays null (and no dump file is written) when the job produced no parts
    if (files.length > 0) {
      writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(
          fs.create(dumpFile))));
      try {
        for (int i = 0; i < files.length; i++) {
          Path partFile = files[i];
          try {
            currentRecordNumber = append(fs, job, partFile, writer,
                currentRecordNumber);
          } catch (IOException exception) {
            // Best effort: log and continue concatenating the remaining parts
            if (LOG.isWarnEnabled()) {
              LOG.warn("Couldn't copy the content of " + partFile.toString()
                  + " into " + dumpFile.toString());
              LOG.warn(exception.getMessage());
            }
          }
        }
      } finally {
        writer.close();
      }
    }
    fs.delete(tempDir, true);
    if (LOG.isInfoEnabled()) {
      LOG.info("SegmentReader: done");
    }
  }
/** Appends two files and updates the Recno counter */
private int append(FileSystem fs, Configuration conf, Path src,
PrintWriter writer, int currentRecordNumber) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(
fs.open(src)))) {
String line = reader.readLine();
while (line != null) {
if (line.startsWith("Recno:: ")) {
line = "Recno:: " + currentRecordNumber++;
}
writer.println(line);
line = reader.readLine();
}
return currentRecordNumber;
}
}
  // Maps each data-directory flag to the section header printed by get();
  // the order here defines the order of the sections in the output.
  private static final String[][] keys = new String[][] {
      { "co", "Content::\n" }, { "ge", "Crawl Generate::\n" },
      { "fe", "Crawl Fetch::\n" }, { "pa", "Crawl Parse::\n" },
      { "pd", "ParseData::\n" }, { "pt", "ParseText::\n" } };
  /**
   * Looks up {@code key} in each selected part of the segment concurrently
   * (one thread per part), stores the matches in {@code results} keyed by the
   * two-letter part id, and writes the collected records to {@code writer}
   * in the order defined by {@link #keys}.
   */
  public void get(final Path segment, final Text key, Writer writer,
      final Map<String, List<Writable>> results) throws Exception {
    LOG.info("SegmentReader: get '" + key + "'");
    ArrayList<Thread> threads = new ArrayList<>();
    if (co)
      threads.add(new Thread() {
        public void run() {
          try {
            List<Writable> res = getMapRecords(new Path(segment,
                Content.DIR_NAME), key);
            results.put("co", res);
          } catch (Exception e) {
            LOG.error("Exception:", e);
          }
        }
      });
    if (fe)
      threads.add(new Thread() {
        public void run() {
          try {
            List<Writable> res = getMapRecords(new Path(segment,
                CrawlDatum.FETCH_DIR_NAME), key);
            results.put("fe", res);
          } catch (Exception e) {
            LOG.error("Exception:", e);
          }
        }
      });
    if (ge)
      threads.add(new Thread() {
        public void run() {
          try {
            List<Writable> res = getSeqRecords(new Path(segment,
                CrawlDatum.GENERATE_DIR_NAME), key);
            results.put("ge", res);
          } catch (Exception e) {
            LOG.error("Exception:", e);
          }
        }
      });
    if (pa)
      threads.add(new Thread() {
        public void run() {
          try {
            List<Writable> res = getSeqRecords(new Path(segment,
                CrawlDatum.PARSE_DIR_NAME), key);
            results.put("pa", res);
          } catch (Exception e) {
            LOG.error("Exception:", e);
          }
        }
      });
    if (pd)
      threads.add(new Thread() {
        public void run() {
          try {
            List<Writable> res = getMapRecords(new Path(segment,
                ParseData.DIR_NAME), key);
            results.put("pd", res);
          } catch (Exception e) {
            LOG.error("Exception:", e);
          }
        }
      });
    if (pt)
      threads.add(new Thread() {
        public void run() {
          try {
            List<Writable> res = getMapRecords(new Path(segment,
                ParseText.DIR_NAME), key);
            results.put("pt", res);
          } catch (Exception e) {
            LOG.error("Exception:", e);
          }
        }
      });
    Iterator<Thread> it = threads.iterator();
    while (it.hasNext())
      it.next().start();
    // Poll until all lookup threads have finished.
    // NOTE(review): this is a 5-second-interval busy wait; Thread.join() would
    // avoid the polling, and the empty catch swallows InterruptedException.
    int cnt;
    do {
      cnt = 0;
      try {
        Thread.sleep(5000);
      } catch (Exception e) {
      }
      ;
      it = threads.iterator();
      while (it.hasNext()) {
        if (it.next().isAlive())
          cnt++;
      }
      if ((cnt > 0) && (LOG.isDebugEnabled())) {
        LOG.debug("(" + cnt + " to retrieve)");
      }
    } while (cnt > 0);
    // Emit the collected records, one labelled section per part type
    for (int i = 0; i < keys.length; i++) {
      List<Writable> res = results.get(keys[i][0]);
      if (res != null && res.size() > 0) {
        for (int k = 0; k < res.size(); k++) {
          writer.write(keys[i][1]);
          writer.write(res.get(k) + "\n");
        }
      }
      writer.flush();
    }
  }
private List<Writable> getMapRecords(Path dir, Text key) throws Exception {
MapFile.Reader[] readers = MapFileOutputFormat.getReaders(fs, dir,
getConf());
ArrayList<Writable> res = new ArrayList<>();
Class<?> keyClass = readers[0].getKeyClass();
Class<?> valueClass = readers[0].getValueClass();
if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
throw new IOException("Incompatible key (" + keyClass.getName() + ")");
Writable value = (Writable) valueClass.newInstance();
// we don't know the partitioning schema
for (int i = 0; i < readers.length; i++) {
if (readers[i].get(key, value) != null) {
res.add(value);
value = (Writable) valueClass.newInstance();
Text aKey = (Text) keyClass.newInstance();
while (readers[i].next(aKey, value) && aKey.equals(key)) {
res.add(value);
value = (Writable) valueClass.newInstance();
}
}
readers[i].close();
}
return res;
}
private List<Writable> getSeqRecords(Path dir, Text key) throws Exception {
SequenceFile.Reader[] readers = SequenceFileOutputFormat.getReaders(
getConf(), dir);
ArrayList<Writable> res = new ArrayList<>();
Class<?> keyClass = readers[0].getKeyClass();
Class<?> valueClass = readers[0].getValueClass();
if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
throw new IOException("Incompatible key (" + keyClass.getName() + ")");
Writable aKey = (Writable) keyClass.newInstance();
Writable value = (Writable) valueClass.newInstance();
for (int i = 0; i < readers.length; i++) {
while (readers[i].next(aKey, value)) {
if (aKey.equals(key)) {
res.add(value);
value = (Writable) valueClass.newInstance();
}
}
readers[i].close();
}
return res;
}
  /** Aggregated counters for one segment; -1 means "unknown / not computed". */
  public static class SegmentReaderStats {
    public long start = -1L;        // earliest fetch time seen (ms since epoch)
    public long end = -1L;          // latest fetch time seen (ms since epoch)
    public long generated = -1L;    // entries in crawl_generate
    public long fetched = -1L;      // entries in crawl_fetch
    public long fetchErrors = -1L;  // never assigned in the visible code
    public long parsed = -1L;       // entries in parse_data
    public long parseErrors = -1L;  // parse_data entries whose status is not success
  }
  // Formats fetch timestamps for list(). NOTE(review): SimpleDateFormat is not
  // thread-safe; in the visible code it is only referenced from list().
  SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
public void list(List<Path> dirs, Writer writer) throws Exception {
writer
.write("NAME\t\tGENERATED\tFETCHER START\t\tFETCHER END\t\tFETCHED\tPARSED\n");
for (int i = 0; i < dirs.size(); i++) {
Path dir = dirs.get(i);
SegmentReaderStats stats = new SegmentReaderStats();
getStats(dir, stats);
writer.write(dir.getName() + "\t");
if (stats.generated == -1)
writer.write("?");
else
writer.write(stats.generated + "");
writer.write("\t\t");
if (stats.start == -1)
writer.write("?\t");
else
writer.write(sdf.format(new Date(stats.start)));
writer.write("\t");
if (stats.end == -1)
writer.write("?");
else
writer.write(sdf.format(new Date(stats.end)));
writer.write("\t");
if (stats.fetched == -1)
writer.write("?");
else
writer.write(stats.fetched + "");
writer.write("\t");
if (stats.parsed == -1)
writer.write("?");
else
writer.write(stats.parsed + "");
writer.write("\n");
writer.flush();
}
}
  /**
   * Fills {@code stats} with counts from the segment: generated entries (when
   * ge is set), fetched entries plus earliest/latest fetch time (when fe is
   * set), and parsed entries plus parse-error count (when pd is set).
   */
  public void getStats(Path segment, final SegmentReaderStats stats)
      throws Exception {
    long cnt = 0L;
    Text key = new Text();
    if (ge) {
      // Count records across all crawl_generate sequence files
      SequenceFile.Reader[] readers = SequenceFileOutputFormat.getReaders(
          getConf(), new Path(segment, CrawlDatum.GENERATE_DIR_NAME));
      for (int i = 0; i < readers.length; i++) {
        while (readers[i].next(key))
          cnt++;
        readers[i].close();
      }
      stats.generated = cnt;
    }
    if (fe) {
      Path fetchDir = new Path(segment, CrawlDatum.FETCH_DIR_NAME);
      if (fs.exists(fetchDir) && fs.getFileStatus(fetchDir).isDirectory()) {
        cnt = 0L;
        long start = Long.MAX_VALUE;
        long end = Long.MIN_VALUE;
        CrawlDatum value = new CrawlDatum();
        MapFile.Reader[] mreaders = MapFileOutputFormat.getReaders(fs, fetchDir,
            getConf());
        for (int i = 0; i < mreaders.length; i++) {
          while (mreaders[i].next(key, value)) {
            cnt++;
            // Track the earliest and latest fetch times seen
            if (value.getFetchTime() < start)
              start = value.getFetchTime();
            if (value.getFetchTime() > end)
              end = value.getFetchTime();
          }
          mreaders[i].close();
        }
        stats.start = start;
        stats.end = end;
        stats.fetched = cnt;
      }
    }
    if (pd) {
      Path parseDir = new Path(segment, ParseData.DIR_NAME);
      if (fs.exists(parseDir) && fs.getFileStatus(parseDir).isDirectory()) {
        cnt = 0L;
        long errors = 0L;
        ParseData value = new ParseData();
        MapFile.Reader[] mreaders = MapFileOutputFormat.getReaders(fs, parseDir,
            getConf());
        for (int i = 0; i < mreaders.length; i++) {
          while (mreaders[i].next(key, value)) {
            cnt++;
            if (!value.getStatus().isSuccess())
              errors++;
          }
          mreaders[i].close();
        }
        stats.parsed = cnt;
        stats.parseErrors = errors;
      }
    }
  }
  // Command modes selected by args[0] in run(): -dump, -list, -get
  private static final int MODE_DUMP = 0;
  private static final int MODE_LIST = 1;
  private static final int MODE_GET = 2;
/**
 * Tool entry point. args[0] selects the operation (-dump, -list or -get);
 * the remaining arguments are the operation's positional arguments plus
 * optional "-no*" flags that disable reading individual segment
 * sub-directories (content, crawl_fetch, crawl_generate, crawl_parse,
 * parse_data, parse_text).
 *
 * NOTE(review): recognized option flags are nulled out in args[], but the
 * -dump and -get branches read args[1]/args[2] positionally without skipping
 * nulled entries — options must therefore come after the positional
 * arguments on the command line. Confirm this matches the documented usage.
 *
 * @param args command-line arguments; see usage()
 * @return 0 on success, -1 on a usage error
 * @throws Exception propagated from the underlying read/dump operations
 */
public int run(String[] args) throws Exception {
if (args.length < 2) {
usage();
return -1;
}
// Map the operation keyword to a mode constant; anything unrecognized
// leaves mode == -1 and falls through to the default (error) case.
int mode = -1;
if (args[0].equals("-dump"))
mode = MODE_DUMP;
else if (args[0].equals("-list"))
mode = MODE_LIST;
else if (args[0].equals("-get"))
mode = MODE_GET;
// Every sub-directory is read unless explicitly disabled below.
boolean co = true;
boolean fe = true;
boolean ge = true;
boolean pa = true;
boolean pd = true;
boolean pt = true;
// collect general options; matched flags are nulled in place so the
// -list branch can skip them when gathering directory arguments.
for (int i = 1; i < args.length; i++) {
if (args[i].equals("-nocontent")) {
co = false;
args[i] = null;
} else if (args[i].equals("-nofetch")) {
fe = false;
args[i] = null;
} else if (args[i].equals("-nogenerate")) {
ge = false;
args[i] = null;
} else if (args[i].equals("-noparse")) {
pa = false;
args[i] = null;
} else if (args[i].equals("-noparsedata")) {
pd = false;
args[i] = null;
} else if (args[i].equals("-noparsetext")) {
pt = false;
args[i] = null;
}
}
Configuration conf = NutchConfiguration.create();
final FileSystem fs = FileSystem.get(conf);
SegmentReader segmentReader = new SegmentReader(conf, co, fe, ge, pa, pd,
pt);
// collect required args
switch (mode) {
case MODE_DUMP:
// Dump mode operates on this instance's own fields rather than the
// freshly constructed segmentReader above.
// NOTE(review): the local segmentReader is unused in this branch —
// looks redundant; confirm whether dump() should use it instead.
this.co = co;
this.fe = fe;
this.ge = ge;
this.pa = pa;
this.pd = pd;
this.pt = pt;
try {
this.fs = FileSystem.get(getConf());
} catch (IOException e) {
LOG.error("IOException:", e);
}
String input = args[1];
if (input == null) {
System.err.println("Missing required argument: <segment_dir>");
usage();
return -1;
}
String output = args.length > 2 ? args[2] : null;
if (output == null) {
System.err.println("Missing required argument: <output>");
usage();
return -1;
}
dump(new Path(input), new Path(output));
return 0;
case MODE_LIST:
// Gather every explicitly listed segment directory, plus (for -dir)
// all sub-directories of the named parent directory.
ArrayList<Path> dirs = new ArrayList<>();
for (int i = 1; i < args.length; i++) {
if (args[i] == null)
continue;
if (args[i].equals("-dir")) {
Path dir = new Path(args[++i]);
FileStatus[] fstats = fs.listStatus(dir,
HadoopFSUtil.getPassDirectoriesFilter(fs));
Path[] files = HadoopFSUtil.getPaths(fstats);
if (files != null && files.length > 0) {
dirs.addAll(Arrays.asList(files));
}
} else
dirs.add(new Path(args[i]));
}
segmentReader.list(dirs, new OutputStreamWriter(System.out, "UTF-8"));
return 0;
case MODE_GET:
input = args[1];
if (input == null) {
System.err.println("Missing required argument: <segment_dir>");
usage();
return -1;
}
String key = args.length > 2 ? args[2] : null;
if (key == null) {
System.err.println("Missing required argument: <keyValue>");
usage();
return -1;
}
segmentReader.get(new Path(input), new Text(key), new OutputStreamWriter(
System.out, "UTF-8"), new HashMap<>());
return 0;
default:
System.err.println("Invalid operation: " + args[0]);
usage();
return -1;
}
}
/**
 * Prints command-line usage information for all three operations
 * (-dump, -list, -get) to System.err, one line per println call.
 */
private static void usage() {
  // Lines are printed in order; empty entries produce blank separator lines.
  final String[] lines = {
      "Usage: SegmentReader (-dump ... | -list ... | -get ...) [general options]\n",
      "* General options:",
      "\t-nocontent\tignore content directory",
      "\t-nofetch\tignore crawl_fetch directory",
      "\t-nogenerate\tignore crawl_generate directory",
      "\t-noparse\tignore crawl_parse directory",
      "\t-noparsedata\tignore parse_data directory",
      "\t-noparsetext\tignore parse_text directory",
      "",
      "* SegmentReader -dump <segment_dir> <output> [general options]",
      "  Dumps content of a <segment_dir> as a text file to <output>.\n",
      "\t<segment_dir>\tname of the segment directory.",
      "\t<output>\tname of the (non-existent) output directory.",
      "",
      "* SegmentReader -list (<segment_dir1> ... | -dir <segments>) [general options]",
      "  List a synopsis of segments in specified directories, or all segments in",
      "  a directory <segments>, and print it on System.out\n",
      "\t<segment_dir1> ...\tlist of segment directories to process",
      "\t-dir <segments>\t\tdirectory that contains multiple segments",
      "",
      "* SegmentReader -get <segment_dir> <keyValue> [general options]",
      "  Get a specified record from a segment, and print it on System.out.\n",
      "\t<segment_dir>\tname of the segment directory.",
      "\t<keyValue>\tvalue of the key (url).",
      "\t\tNote: put double-quotes around strings with spaces."
  };
  for (String line : lines) {
    System.err.println(line);
  }
}
/**
 * Standard JVM entry point: runs SegmentReader through Hadoop's ToolRunner
 * and exits the process with the tool's return code.
 */
public static void main(String[] args) throws Exception {
  final int exitCode = ToolRunner.run(NutchConfiguration.create(),
      new SegmentReader(), args);
  System.exit(exitCode);
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * Exact-match OXM entry for the CONN_TRACKING_ZONE field in the OpenFlow 1.5
 * wire encoding: a fixed 4-byte type/length header (0x1d402) followed by a
 * 2-byte unsigned value. Instances are immutable; use createBuilder() (or
 * the standalone Builder) to derive modified copies.
 *
 * Generated by the LoxiGen compiler — do not hand-edit the serialization
 * logic; regenerate from the template instead.
 */
class OFOxmConnTrackingZoneVer15 implements OFOxmConnTrackingZone {
private static final Logger logger = LoggerFactory.getLogger(OFOxmConnTrackingZoneVer15.class);
// version: 1.5
final static byte WIRE_VERSION = 6;
// Serialized size in bytes: 4 (typeLen header) + 2 (u16 value), per Writer below.
final static int LENGTH = 6;
private final static U16 DEFAULT_VALUE = U16.ZERO;
// OF message fields
private final U16 value;
//
// Immutable default instance
final static OFOxmConnTrackingZoneVer15 DEFAULT = new OFOxmConnTrackingZoneVer15(
DEFAULT_VALUE
);
// package private constructor - used by readers, builders, and factory
OFOxmConnTrackingZoneVer15(U16 value) {
if(value == null) {
throw new NullPointerException("OFOxmConnTrackingZoneVer15: property value cannot be null");
}
this.value = value;
}
// Accessors for OF message fields
@Override
public long getTypeLen() {
return 0x1d402L;
}
@Override
public U16 getValue() {
return value;
}
@Override
public MatchField<U16> getMatchField() {
return MatchField.CONN_TRACKING_ZONE;
}
@Override
public boolean isMasked() {
// This class models the exact-match (unmasked) form only.
return false;
}
public OFOxm<U16> getCanonical() {
// exact match OXM is always canonical
return this;
}
@Override
public U16 getMask()throws UnsupportedOperationException {
// Exact-match entries carry no mask.
throw new UnsupportedOperationException("Property mask not supported in version 1.5");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_15;
}
// Returns a builder pre-populated from this instance.
public OFOxmConnTrackingZone.Builder createBuilder() {
return new BuilderWithParent(this);
}
// Builder seeded from an existing message; unset fields fall back to the
// parent message's values at build() time.
static class BuilderWithParent implements OFOxmConnTrackingZone.Builder {
final OFOxmConnTrackingZoneVer15 parentMessage;
// OF message fields
private boolean valueSet;
private U16 value;
BuilderWithParent(OFOxmConnTrackingZoneVer15 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public long getTypeLen() {
return 0x1d402L;
}
@Override
public U16 getValue() {
return value;
}
@Override
public OFOxmConnTrackingZone.Builder setValue(U16 value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public MatchField<U16> getMatchField() {
return MatchField.CONN_TRACKING_ZONE;
}
@Override
public boolean isMasked() {
return false;
}
@Override
public OFOxm<U16> getCanonical()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.5");
}
@Override
public U16 getMask()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.5");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_15;
}
@Override
public OFOxmConnTrackingZone build() {
// Use the explicitly set value, otherwise inherit from the parent.
U16 value = this.valueSet ? this.value : parentMessage.value;
if(value == null)
throw new NullPointerException("Property value must not be null");
//
return new OFOxmConnTrackingZoneVer15(
value
);
}
}
// Stand-alone builder; unset fields fall back to the class defaults.
static class Builder implements OFOxmConnTrackingZone.Builder {
// OF message fields
private boolean valueSet;
private U16 value;
@Override
public long getTypeLen() {
return 0x1d402L;
}
@Override
public U16 getValue() {
return value;
}
@Override
public OFOxmConnTrackingZone.Builder setValue(U16 value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public MatchField<U16> getMatchField() {
return MatchField.CONN_TRACKING_ZONE;
}
@Override
public boolean isMasked() {
return false;
}
@Override
public OFOxm<U16> getCanonical()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.5");
}
@Override
public U16 getMask()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.5");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_15;
}
//
@Override
public OFOxmConnTrackingZone build() {
U16 value = this.valueSet ? this.value : DEFAULT_VALUE;
if(value == null)
throw new NullPointerException("Property value must not be null");
return new OFOxmConnTrackingZoneVer15(
value
);
}
}
final static Reader READER = new Reader();
// Deserializer: validates the fixed typeLen header, then reads the u16 value.
static class Reader implements OFMessageReader<OFOxmConnTrackingZone> {
@Override
public OFOxmConnTrackingZone readFrom(ByteBuf bb) throws OFParseError {
// fixed value property typeLen == 0x1d402L
int typeLen = bb.readInt();
if(typeLen != 0x1d402)
throw new OFParseError("Wrong typeLen: Expected=0x1d402L(0x1d402L), got="+typeLen);
U16 value = U16.of(bb.readShort());
OFOxmConnTrackingZoneVer15 oxmConnTrackingZoneVer15 = new OFOxmConnTrackingZoneVer15(
value
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", oxmConnTrackingZoneVer15);
return oxmConnTrackingZoneVer15;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFOxmConnTrackingZoneVer15Funnel FUNNEL = new OFOxmConnTrackingZoneVer15Funnel();
// Feeds the message identity (fixed header constant + value) into a hash sink.
static class OFOxmConnTrackingZoneVer15Funnel implements Funnel<OFOxmConnTrackingZoneVer15> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFOxmConnTrackingZoneVer15 message, PrimitiveSink sink) {
// fixed value property typeLen = 0x1d402L
sink.putInt(0x1d402);
message.value.putTo(sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
// Serializer: mirrors Reader — 4-byte header followed by the raw u16 value.
static class Writer implements OFMessageWriter<OFOxmConnTrackingZoneVer15> {
@Override
public void write(ByteBuf bb, OFOxmConnTrackingZoneVer15 message) {
// fixed value property typeLen = 0x1d402L
bb.writeInt(0x1d402);
bb.writeShort(message.value.getRaw());
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFOxmConnTrackingZoneVer15(");
b.append("value=").append(value);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFOxmConnTrackingZoneVer15 other = (OFOxmConnTrackingZoneVer15) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
}
| |
package com.netflix.astyanax.recipes.queue;
import com.google.common.collect.Lists;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.NotFoundException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Equality;
import com.netflix.astyanax.model.RangeEndpoint;
import com.netflix.astyanax.recipes.locks.BusyLockException;
import com.netflix.astyanax.recipes.queue.triggers.Trigger;
import com.netflix.astyanax.util.RangeBuilder;
import com.netflix.astyanax.util.TimeUUIDUtils;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Message consumer implementation based on the sharded queue.
 *
 * Reads messages by cycling through shards (per the queue's shard reader
 * policy), taking a per-shard lock — either via the queue's ShardLockManager
 * or via a column-based busy-lock protocol — and popping message columns
 * inside a single mutation batch.
 *
 * @author pbhattacharyya
 */
class MessageConsumerImpl implements MessageConsumer {
private static final Logger LOG = LoggerFactory.getLogger(MessageConsumerImpl.class);
// Owning queue; supplies keyspace, column families, stats, policies and locks.
private final ShardedDistributedMessageQueue queue;
public MessageConsumerImpl(ShardedDistributedMessageQueue q) {
this.queue = q;
}
// Convenience overload: timeout of 0 means "poll shards without a deadline".
@Override
public List<MessageContext> readMessages(int itemsToPop) throws MessageQueueException, BusyLockException, InterruptedException {
return readMessages(itemsToPop, 0, null);
}
/**
 * Reads up to itemsToPop messages, polling successive shards until some
 * messages are found or the timeout elapses (timeout == 0 disables the
 * deadline). Returns an empty list on timeout.
 */
@Override
public List<MessageContext> readMessages(int itemsToPop, long timeout, TimeUnit units) throws MessageQueueException, BusyLockException, InterruptedException {
long timeoutTime = (timeout == 0) ? 0 : System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(timeout, units);
// Loop while trying to get messages.
// TODO: Make it possible to cancel this loop
// TODO: Read full itemsToPop instead of just stopping when we get the first successful set
List<MessageContext> messages = null;
while (true) {
boolean success = false;
MessageQueueShard partition = queue.shardReaderPolicy.nextShard();
if (partition != null) {
try {
messages = readAndReturnShard(partition, itemsToPop);
success = true;
if (messages != null && !messages.isEmpty()) {
return messages;
}
} finally {
// releaseShard needs to know how many messages were proceed OR if there was an error fetching messages (-1)
queue.shardReaderPolicy.releaseShard(partition, success ? (messages == null ? 0 : messages.size()) : -1);
}
}
if (timeoutTime != 0 && System.currentTimeMillis() > timeoutTime) {
return Lists.newLinkedList();
}
Thread.sleep(queue.shardReaderPolicy.getPollInterval());
}
}
@Override
public List<Message> peekMessages(int itemsToPeek) throws MessageQueueException {
// Peek does not lock or consume; delegate straight to the queue.
return queue.peekMessages(itemsToPeek);
}
// Reads one shard, recording an "empty partition" stat when nothing (or an
// error) came back so the reader policy can throttle idle shards.
private List<MessageContext> readAndReturnShard(MessageQueueShard shard, int itemsToPop) throws MessageQueueException, BusyLockException, InterruptedException {
List<MessageContext> messages = null;
try {
messages = readMessagesFromShard(shard.getName(), itemsToPop);
} finally {
if (messages == null || messages.isEmpty()) {
queue.stats.incEmptyPartitionCount();
}
}
return messages;
}
// Chooses the locking strategy: an external lock manager when configured,
// otherwise the column-based busy-lock protocol below.
@Override
public List<MessageContext> readMessagesFromShard(String shardName, int itemsToPop) throws MessageQueueException, BusyLockException {
if(queue.lockManager != null) {
return readMessagesFromShardUsingLockManager(shardName, itemsToPop);
}
return readMessagesFromShardUsingDefaultLock(shardName, itemsToPop);
}
/**
 * Reads messages while holding a lock from the queue's ShardLockManager.
 * No lock columns are involved, so lockColumnCount is 0 and no lock column
 * is passed down to readMessagesInternal.
 */
List<MessageContext> readMessagesFromShardUsingLockManager(String shardName, int itemToPop) throws MessageQueueException, BusyLockException {
ShardLock lock = null;
try {
lock = queue.lockManager.acquireLock(shardName);
MutationBatch m = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
ColumnListMutation<MessageQueueEntry> rowMutation = m.withRow(queue.queueColumnFamily, shardName);
long curTimeMicros = TimeUUIDUtils.getMicrosTimeFromUUID(TimeUUIDUtils.getUniqueTimeUUIDinMicros());
return readMessagesInternal(shardName, itemToPop, 0, null, rowMutation, m, curTimeMicros);
} catch (BusyLockException e) {
queue.stats.incLockContentionCount();
throw e;
} catch (Exception e) {
LOG.error("Error reading shard " + shardName, e);
throw new MessageQueueException("Error", e);
} finally {
queue.lockManager.releaseLock(lock);
}
}
/**
 * Column-based busy-lock protocol:
 *   1. write a lock column stamped with this reader's TimeUUID,
 *   2. read back all lock columns for the shard,
 *   3. acquire only if ours is the first pending (non-stale, non-acquired)
 *      lock,
 *   4. re-write our lock in the Acquired state, then pop messages.
 * Stale (expired) lock columns encountered along the way are deleted.
 *
 * NOTE(review): lockColumnCount is initialized to result.size() and then
 * incremented again for every Lock-typed column of the same result set, so
 * it ends up roughly double the real count. It is only used to widen the
 * read limit in readMessagesInternal, so this looks harmless but
 * unintentional — confirm against upstream.
 */
List<MessageContext> readMessagesFromShardUsingDefaultLock(String shardName, int itemsToPop) throws MessageQueueException, BusyLockException {
MutationBatch m = null;
MessageQueueEntry lockColumn = null;
ColumnListMutation<MessageQueueEntry> rowMutation = null;
int lockColumnCount = 0;
// Try locking first
try {
// 1. Write the lock column
lockColumn = MessageQueueEntry.newLockEntry(MessageQueueEntryState.None);
long curTimeMicros = TimeUUIDUtils.getMicrosTimeFromUUID(lockColumn.getTimestamp());
m = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
m.withRow(queue.queueColumnFamily, shardName).putColumn(lockColumn, curTimeMicros + queue.lockTimeout, queue.lockTtl);
m.execute();
// 2. Read back lock columns and entries
ColumnList<MessageQueueEntry> result = queue.keyspace.prepareQuery(queue.queueColumnFamily).setConsistencyLevel(queue.consistencyLevel).getKey(shardName)
.withColumnRange(ShardedDistributedMessageQueue.entrySerializer
.buildRange()
.greaterThanEquals((byte) MessageQueueEntryType.Lock.ordinal())
.lessThanEquals((byte) MessageQueueEntryType.Lock.ordinal())
.build()
)
.execute()
.getResult();
m = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
rowMutation = m.withRow(queue.queueColumnFamily, shardName);
rowMutation.deleteColumn(lockColumn);
int lockCount = 0;
boolean lockAcquired = false;
lockColumnCount = result.size();
for (Column<MessageQueueEntry> column : result) {
MessageQueueEntry lock = column.getName();
if (lock.getType() == MessageQueueEntryType.Lock) {
lockColumnCount++;
// Stale lock so we can discard it
if (column.getLongValue() < curTimeMicros) {
queue.stats.incExpiredLockCount();
rowMutation.deleteColumn(lock);
} else if (lock.getState() == MessageQueueEntryState.Acquired) {
throw new BusyLockException("Not first lock");
} else {
// Still pending; we win only if ours is the very first pending lock.
lockCount++;
if (lockCount == 1 && lock.getTimestamp().equals(lockColumn.getTimestamp())) {
lockAcquired = true;
}
}
}
}
if (!lockAcquired) {
throw new BusyLockException("Not first lock");
}
// Write the acquired lock column
lockColumn = MessageQueueEntry.newLockEntry(lockColumn.getTimestamp(), MessageQueueEntryState.Acquired);
rowMutation.putColumn(lockColumn, curTimeMicros + queue.lockTimeout, queue.lockTtl);
} catch (BusyLockException e) {
queue.stats.incLockContentionCount();
throw e;
} catch (ConnectionException e) {
LOG.error("Error reading shard " + shardName, e);
throw new MessageQueueException("Error", e);
} finally {
// Flush the pending lock-cleanup/acquire mutations even on failure.
// NOTE(review): if prepareMutationBatch() itself threw, m is still null
// here and this NPEs, masking the original exception — confirm.
try {
m.execute();
} catch (Exception e) {
throw new MessageQueueException("Error committing lock", e);
}
}
long curTimeMicros = TimeUUIDUtils.getMicrosTimeFromUUID(lockColumn.getTimestamp());
m = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
// First, release the lock column
rowMutation = m.withRow(queue.queueColumnFamily, shardName);
rowMutation.deleteColumn(lockColumn);
return readMessagesInternal(shardName, itemsToPop, lockColumnCount, lockColumn, rowMutation, m, curTimeMicros);
}
/**
 * Acks a single message: removes its timeout (busy) column, updates the key
 * index/history as needed, and commits the mutation immediately.
 */
@Override
public void ackMessage(MessageContext context) throws MessageQueueException {
MutationBatch mb = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
fillAckMutation(context, mb);
try {
mb.execute();
} catch (ConnectionException e) {
throw new MessageQueueException("Failed to ack message", e);
}
}
// Batch variant of ackMessage: all acks are committed in one mutation batch.
@Override
public void ackMessages(Collection<MessageContext> messages) throws MessageQueueException {
MutationBatch mb = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
for (MessageContext context : messages) {
fillAckMutation(context, mb);
}
try {
mb.execute();
} catch (ConnectionException e) {
throw new MessageQueueException("Failed to ack messages", e);
}
}
// Adds the mutations needed to ack one message: delete its timeout column,
// tombstone its key-index entry, record history, run ack hooks, and enqueue
// the follow-up message when the context carries one.
private void fillAckMutation(MessageContext context, MutationBatch mb) {
queue.stats.incAckMessageCount();
Message message = context.getMessage();
// Token refers to the timeout event. If 0 (i.e. no) timeout was specified
// then the token will not exist
if (message.getToken() != null) {
MessageQueueEntry entry = MessageQueueEntry.newBusyEntry(message);
// Remove timeout entry from the queue
mb.withRow(queue.queueColumnFamily, queue.getShardKey(message)).deleteColumn(entry);
// Remove entry lookup from the key, if one exists
if (message.hasKey()) {
mb.withRow(queue.keyIndexColumnFamily, queue.getCompositeKey(queue.getName(), message.getKey()))
.putEmptyColumn(MessageMetadataEntry.newMessageId(queue.getCompositeKey(queue.getShardKey(message), entry.getMessageId())), queue.metadataDeleteTTL);
if (message.isKeepHistory()) {
MessageHistory history = context.getHistory();
if (history.getStatus() == MessageStatus.RUNNING) {
history.setStatus(MessageStatus.DONE);
}
history.setEndTime(TimeUnit.MICROSECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS));
try {
mb.withRow(queue.historyColumnFamily, message.getKey())
.putColumn(history.getToken(), queue.serializeToString(context.getHistory()), queue.metadata.getHistoryTtl()); // TTL
} catch (Exception e) {
// Best effort: a history serialization failure does not fail the ack.
LOG.warn("Error serializing message history for " + message.getKey(), e);
}
}
}
// Run hooks
for (MessageQueueHooks hook : queue.hooks) {
hook.beforeAckMessage(message, mb);
}
}
if (context.getNextMessage() != null) {
try {
queue.fillMessageMutation(mb, context.getNextMessage());
} catch (MessageQueueException e) {
LOG.warn("Error filling nextMessage for " + message.getKey(), e);
}
}
}
@Override
public void ackPoisonMessage(MessageContext context) throws MessageQueueException {
// TODO: Remove bad message and add to poison queue
MutationBatch mb = queue.keyspace.prepareMutationBatch().setConsistencyLevel(queue.consistencyLevel);
fillAckMutation(context, mb);
try {
mb.execute();
} catch (ConnectionException e) {
queue.stats.incPersistError();
throw new MessageQueueException("Failed to ack messages", e);
}
}
/**
 * Core pop loop, run while the shard lock is held. Reads message columns up
 * to the lock's timestamp (or curTimeMicros when locking was external),
 * deletes each popped column, re-enqueues unfinished repeating triggers,
 * maintains the key-index and history rows, and schedules a Busy timeout
 * entry for messages that declare a timeout. The accumulated mutation batch
 * (which also releases the lock) is committed in the finally block.
 */
private List<MessageContext> readMessagesInternal(String shardName,
int itemsToPop,
int lockColumnCount,
MessageQueueEntry lockColumn,
ColumnListMutation<MessageQueueEntry> rowMutation,
MutationBatch m,
long curTimeMicros) throws BusyLockException, MessageQueueException {
try {
List<MessageContext> entries = Lists.newArrayList();
// Range end: Message-typed columns with priority 0, up to our cutoff time.
RangeEndpoint re = ShardedDistributedMessageQueue.entrySerializer
.makeEndpoint((byte) MessageQueueEntryType.Message.ordinal(), Equality.EQUAL)
.append(0L, Equality.EQUAL);
if(lockColumn!=null) {
re.append(lockColumn.getTimestamp(), Equality.LESS_THAN_EQUALS);
} else {
re.append(TimeUUIDUtils.getMicrosTimeUUID(curTimeMicros), Equality.LESS_THAN_EQUALS);
}
// Widen the limit to account for lock columns mixed into the range.
ColumnList<MessageQueueEntry> result = queue.keyspace.prepareQuery(queue.queueColumnFamily)
.setConsistencyLevel(queue.consistencyLevel).getKey(shardName).
withColumnRange(new RangeBuilder()
.setLimit(itemsToPop + (lockColumn == null? 0:(lockColumnCount + 1)))
.setEnd(re.toBytes())
.build()).execute().getResult();
for (Column<MessageQueueEntry> column : result) {
if (itemsToPop == 0) {
break;
}
MessageQueueEntry entry = column.getName();
switch (entry.getType()) {
case Lock:
// TODO: Track number of locks read and make sure we don't exceed itemsToPop
// We have the lock
if (lockColumn != null && entry.getState() == MessageQueueEntryState.Acquired) {
if (!entry.getTimestamp().equals(lockColumn.getTimestamp())) {
throw new BusyLockException("Someone else snuck in");
}
}
break;
case Message:
{
try {
itemsToPop--;
// First, we always want to remove the old item
String messageId = queue.getCompositeKey(shardName, entry.getMessageId());
rowMutation.deleteColumn(entry);
// Next, parse the message metadata and add a timeout entry
final Message message = queue.extractMessageFromColumn(column);
// Update the message state
if (message != null) {
MessageContext context = new MessageContext();
context.setMessage(message);
// Message has a trigger so we need to figure out if it is an
// unfinished repeating trigger and re-add it.
if (message.hasTrigger()) {
// Read back all messageIds associated with this key and check to see if we have duplicates.
String groupRowKey = queue.getCompositeKey(queue.getName(), message.getKey());
try {
// Use consistency level
ColumnList<MessageMetadataEntry> columns = queue.keyspace.prepareQuery(queue.keyIndexColumnFamily).getRow(groupRowKey).withColumnRange(ShardedDistributedMessageQueue.metadataSerializer.buildRange().greaterThanEquals((byte) MessageMetadataEntryType.MessageId.ordinal()).lessThanEquals((byte) MessageMetadataEntryType.MessageId.ordinal()).build()).execute().getResult();
MessageMetadataEntry mostRecentMessageMetadata = null;
long mostRecentTriggerTime = 0;
for (Column<MessageMetadataEntry> currMessageEntry : columns) {
MessageQueueEntry pendingMessageEntry = MessageQueueEntry.fromMetadata(currMessageEntry.getName());
if (currMessageEntry.getTtl() == 0) {
long currMessageTriggerTime = pendingMessageEntry.getTimestamp(TimeUnit.MICROSECONDS);
// First message we found, so treat as the most recent
if (mostRecentMessageMetadata == null) {
mostRecentMessageMetadata = currMessageEntry.getName();
mostRecentTriggerTime = currMessageTriggerTime;
} else {
// This message's trigger time is after what we thought was the most recent.
// Discard the previous 'most' recent and accept this one instead
if (currMessageTriggerTime > mostRecentTriggerTime) {
LOG.warn("Need to discard : " + entry.getMessageId() + " => " + mostRecentMessageMetadata.getName());
m.withRow(queue.keyIndexColumnFamily,
queue.getCompositeKey(queue.getName(), message.getKey())).putEmptyColumn(mostRecentMessageMetadata, queue.metadataDeleteTTL);
mostRecentTriggerTime = currMessageTriggerTime;
mostRecentMessageMetadata = currMessageEntry.getName();
} else {
LOG.warn("Need to discard : " + entry.getMessageId() + " => " + currMessageEntry.getName());
m.withRow(queue.keyIndexColumnFamily,
queue.getCompositeKey(queue.getName(), message.getKey())).putEmptyColumn(currMessageEntry.getName(), queue.metadataDeleteTTL);
}
}
}
}
if (mostRecentMessageMetadata != null) {
if (!mostRecentMessageMetadata.getName().endsWith(entry.getMessageId())) {
throw new DuplicateMessageException("Duplicate trigger for " + messageId);
}
}
} catch (NotFoundException e) {
// No pending message ids recorded for this key; nothing to reconcile.
} catch (ConnectionException e) {
throw new MessageQueueException("Error fetching row " + groupRowKey, e);
}
// Update the trigger
final Message nextMessage;
Trigger trigger = message.getTrigger().nextTrigger();
if (trigger != null) {
nextMessage = message.clone();
nextMessage.setTrigger(trigger);
context.setNextMessage(nextMessage);
if (message.isAutoCommitTrigger()) {
queue.fillMessageMutation(m, nextMessage);
}
}
}
// Message has a key so we remove this item from the messages by key index.
// A timeout item will be added later
if (message.hasKey()) {
m.withRow(queue.keyIndexColumnFamily,
queue.getCompositeKey(queue.getName(), message.getKey()))
.putEmptyColumn(MessageMetadataEntry.newMessageId(messageId), queue.metadataDeleteTTL);
LOG.debug("Removing from key : " + queue.getCompositeKey(queue.getName(), message.getKey()) + " : " + messageId);
if (message.isKeepHistory()) {
MessageHistory history = context.getHistory();
history.setToken(entry.getTimestamp());
history.setStartTime(curTimeMicros);
history.setTriggerTime(message.getTrigger().getTriggerTime());
history.setStatus(MessageStatus.RUNNING);
try {
m.withRow(queue.historyColumnFamily, message.getKey()).putColumn(entry.getTimestamp(), queue.serializeToString(history)
, queue.metadata.getHistoryTtl());
} catch (Exception e) {
// Best effort: history failures do not abort the pop.
LOG.warn("Error serializing history for key '" + message.getKey() + "'", e);
}
}
}
// Message has a timeout so we add a timeout event.
if (message.getTimeout() > 0) {
MessageQueueEntry timeoutEntry = MessageQueueEntry.newMessageEntry((byte) 0,
TimeUUIDUtils.getMicrosTimeUUID(curTimeMicros
+ TimeUnit.MICROSECONDS.convert(message.getTimeout(), TimeUnit.SECONDS)
+ (queue.counter.incrementAndGet() % 1000)), MessageQueueEntryState.Busy);
message.setToken(timeoutEntry.getTimestamp());
message.setRandom(timeoutEntry.getRandom());
m.withRow(queue.queueColumnFamily, queue.getShardKey(message))
.putColumn(timeoutEntry, column.getStringValue(), queue.metadata.getRetentionTimeout());
MessageMetadataEntry messageIdEntry = MessageMetadataEntry.newMessageId(queue.getCompositeKey(queue.getShardKey(message), timeoutEntry.getMessageId()));
// Add the timeout column to the key
if (message.hasKey()) {
m.withRow(queue.keyIndexColumnFamily, queue.getCompositeKey(queue.getName(), message.getKey()))
.putEmptyColumn(messageIdEntry, queue.metadata.getRetentionTimeout());
}
context.setAckMessageId(messageIdEntry.getName());
} else {
message.setToken(null);
}
// Update some stats
switch (entry.getState()) {
case Waiting:
queue.stats.incProcessCount();
break;
case Busy:
queue.stats.incReprocessCount();
break;
default:
LOG.warn("Unknown message state: " + entry.getState());
// TODO:
break;
}
entries.add(context);
} else {
queue.stats.incInvalidMessageCount();
// TODO: Add to poison queue
}
} catch (DuplicateMessageException e) {
// OK to ignore this error. All the proper columns will have been deleted in the batch.
}
break;
}
default:
{
// TODO: Error: Unknown type
break;
}
}
}
return entries;
} catch (BusyLockException e) {
queue.stats.incLockContentionCount();
throw e;
} catch (Exception e) {
throw new MessageQueueException("Error processing queue shard : " + shardName, e);
} finally {
// Commit everything accumulated above (pops, lock release, timeouts).
try {
m.execute();
} catch (Exception e) {
throw new MessageQueueException("Error processing queue shard : " + shardName, e);
}
}
}
}
| |
/**
* Copyright (c) 2007, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the University of California, Los Angeles nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Created Oct 11, 2007
*/
package avrora.sim.radio;
import avrora.sim.clock.Clock;
import avrora.sim.clock.Synchronizer;
import avrora.sim.Simulator;
import avrora.sim.util.TransactionalList;
import java.util.*;
import cck.util.Arithmetic;
/**
* The <code>Medium</code> definition drives the timming in the transmission and
* reception of packets
*
* @author Ben L. Titzer
* @author Rodolfo de Paz
*/
public class Medium {
// Number of bits in one transmitted byte.
private static final int BYTE_SIZE = 8;
// NOTE(review): both of the following are mutable statics shared by every
// Medium instance in the simulation — confirm that per-instance values are
// not needed before making them configurable.
private static int Pn = -95;//Noise Power in dBm
private static double Pr = (double) Pn;//Received Power in dBm
/**
 * The <code>Arbitrator</code> interface is the pluggable policy that decides
 * how concurrent transmissions are perceived by a receiver (locking onto a
 * transmission, merging interfering ones, received power, and noise).
 */
public interface Arbitrator {
// Presumably decides whether the receiver can synchronize with (lock onto)
// the given transmission at the given time — TODO confirm with implementors.
public boolean lockTransmission(Receiver receiver, Transmission tran, int Milliseconds);
// Merges the listed concurrent transmissions into the value the receiver
// observes at the given bit time.
public char mergeTransmissions(Receiver receiver, List<Medium.Transmission> trans, long bit, int Milliseconds);
// Computes the power (in dBm, by the convention of Pn/Pr above) at which
// the receiver hears transmission t.
public double computeReceivedPower(Medium.Transmission t, Medium.Receiver receiver, int Milliseconds);
// Returns a noise sample for the given index — semantics of the index are
// implementation-defined; confirm against callers.
public int getNoise(int index);
}
/**
* The <code>Probe</code> interface defined method to insert and removes
* probes before and after transmit and receive.
*/
public interface Probe {
public void fireBeforeTransmit(Transmitter t, byte val);
public void fireBeforeTransmitEnd(Transmitter t);
public void fireAfterReceive(Receiver r, char val);
public void fireAfterReceiveEnd(Receiver r);
public class Empty implements Probe {
public void fireBeforeTransmit(Transmitter t, byte val) { }
public void fireBeforeTransmitEnd(Transmitter t) { }
public void fireAfterReceive(Receiver r, char val) { }
public void fireAfterReceiveEnd(Receiver r) { }
}
/**
* The <code>List</code> class inherits from TransactionalList several
* methods to implement all methods of the interface Probe
*/
public class List extends TransactionalList implements Probe {
public void fireBeforeTransmit(Transmitter t, byte val) {
beginTransaction();
for (Link pos = head; pos != null; pos = pos.next)
((Probe) pos.object).fireBeforeTransmit(t, val);
endTransaction();
}
public void fireBeforeTransmitEnd(Transmitter t) {
beginTransaction();
for (Link pos = head; pos != null; pos = pos.next)
((Probe) pos.object).fireBeforeTransmitEnd(t);
endTransaction();
}
public void fireAfterReceive(Receiver r, char val) {
beginTransaction();
for (Link pos = head; pos != null; pos = pos.next)
((Probe) pos.object).fireAfterReceive(r, val);
endTransaction();
}
public void fireAfterReceiveEnd(Receiver r) {
beginTransaction();
for (Link pos = head; pos != null; pos = pos.next)
((Probe) pos.object).fireAfterReceiveEnd(r);
endTransaction();
}
}
}
/**
* The <code>Medium.TXRX</code> static class represents a Medium where
* transmitter and receiver exchange bytes
*/
protected static class TXRX {
public final Medium medium;
public final Clock clock;
public final long cyclesPerByte;
public final long leadCycles;
public final long cyclesPerBit;
protected Probe.List probeList;
public boolean activated;
/**
* The <code>TXRX</code> constructor method
*
* @param m Medium
* @param c Clock
*/
protected TXRX(Medium m, Clock c) {
medium = m;
clock = c;
long hz = c.getHZ();
int bps = medium.bitsPerSecond;
assert hz > bps;
cyclesPerBit = (hz / bps);
cyclesPerByte = BYTE_SIZE * cyclesPerBit;
leadCycles = (medium.leadBits * hz / bps);
}
protected long getBitNum(long time) {
return time / cyclesPerBit;
}
protected long getCycleTime(long bit) {
return bit * cyclesPerBit;
}
public void insertProbe(Medium.Probe probe) {
if (this.probeList == null) this.probeList = new Probe.List();
this.probeList.add(probe);
}
public void removeProbe(Medium.Probe probe) {
if (this.probeList != null) this.probeList.remove(probe);
}
}
/**
* The <code>Medium.Transmitter</code> class represents an object that is capable of
* making transmissions into the medium. When activated, it begins transmitting bytes
* into the medium after the lead time. Internally, this class implements its own
* clock-level synchronization so that clients only have to implement the
* <code>nextByte()</code> routine.
*/
public static abstract class Transmitter extends TXRX {
protected Transmission transmission;
protected final Transmitter.Ticker ticker;
protected boolean shutdown;
/**
* The constructor <code>Transmitter</code> creates an extension of TXRX
* constructor adding an instance of <code>Ticker</code>
*/
protected Transmitter(Medium m, Clock c) {
super(m, c);
ticker = new Ticker();
}
/**
* The <code>beginTransmit</code> method creates a new transmission instatiating
* <code>medium.newtransmission</code> and inserts a new ticker Event in
* the simulator in a leadCycles time
*
* @param pow power for the new transmission (dBm)
* @param freq frequency for the new transmission (Mhz)
*/
public final void beginTransmit(double pow, double freq) {
if (!activated) {
transmission = medium.newTransmission(this, pow, freq);
activated = true;
clock.insertEvent(ticker, leadCycles);
}
}
/**
* The <code>endTransmit</code> method shutdowns the transmitter and
* ends the transmission calling the <code>transmission.end</code> method
*/
public final void endTransmit() {
if (activated) {
shutdown = true;
transmission.end();
}
}
/**
* The <code>nextByte</code> abstract method which has to be implemented
* by the Radio implementation
*/
public abstract byte nextByte();
/**
* The <code>Ticker</code> class implements a Simulator Event call Ticker
* that is fired when a timed event occurs within the simulator in order
* to model a mote transmission
*/
protected class Ticker implements Simulator.Event {
public void fire() {
if (shutdown) {
// shut down the transmitter
if (probeList != null) probeList.fireBeforeTransmitEnd(Transmitter.this);
transmission = null;
shutdown = false;
activated = false;
} else if (activated) {
// otherwise, transmit a single byte and add it to the buffer
int indx = transmission.counter++;
byte val = nextByte();
if (indx >= transmission.data.length) {
// grow the transmission length when necessary
byte[] ndata = new byte[transmission.data.length + 16];
System.arraycopy(transmission.data, 0, ndata, 0, transmission.data.length);
transmission.data = ndata;
}
transmission.data[indx] = val;
if (probeList != null) probeList.fireBeforeTransmit(Transmitter.this, val);
clock.insertEvent(this, cyclesPerByte);
}
}
}
}
/**
* The <code>Medium.Receiver</code> class represents an object that can receive transmissions
* from the medium. When activated, it listens for transmissions synchronously using
* its own clock-level synchronization. It receives transmissions that may be the
* result of multiple interfering transmissions.
*/
public static abstract class Receiver extends TXRX {
private static final int BIT_DELAY = 1;
protected boolean locked;
protected double frequency;
public Receiver.Ticker ticker;
//Receiver class constructor
protected Receiver(Medium m, Clock c) {
super(m, c);
ticker = new Ticker();
}
//Begin receiving. Insert a event.
public final void beginReceive(double freq) {
frequency = freq;
if (!activated) {
activated = true;
clock.insertEvent(ticker, leadCycles + cyclesPerByte);
}
}
//Ending reception. Remove event.
public final void endReceive() {
// Reception has been terminated, but check if receiver was still locked onto some transmission
if (locked) {
nextByte(false, (byte) 0);
if (probeList != null) probeList.fireAfterReceiveEnd(Receiver.this);
}
activated = false;
locked = false;
clock.removeEvent(ticker);
}
public abstract byte nextByte(boolean lock, byte b);
public abstract void setRSSI(double rssi);
public abstract void setBER(double BER);
/**
* The <code>Ticker</code> class implements a Simulator Event call Ticker
* that is fired when a timed event occurs within the simulator in order
* to model a mote reception
*/
protected class Ticker implements Simulator.Event {
public void fire() {
if (activated) {
if (locked) {
// if receiver is locked onto some transmission, wait for neighbors' byte(s)
fireLocked(clock.getCount());
} else {
// if receiver is not locked, determine whether a lock will occur this interval
fireUnlocked(clock.getCount());
}
}
}
/**
* The <code>fireUnlocked</code> method is done when the receiver
* is not locked onto some transmission.
*
* @param time
*/
private void fireUnlocked(long time) {
long oneBitBeforeNow = getBitNum(time) - BIT_DELAY;
//wait until all neighbors are in time before a possible tx to this thread
waitForNeighbors(time - cyclesPerByte);
//find the earliest new transmission and store it in tx
Transmission tx = earliestNewTransmission(oneBitBeforeNow - BYTE_SIZE);
if (tx != null) {
// there is a new transmission; calculate delivery of first byte.
long dcycle = getCycleTime(tx.firstBit + BYTE_SIZE + BIT_DELAY);
long delta = dcycle - time;
//assert dcycle >= time;
if (delta <= 0) {
// lock on and deliver the first byte right now.
locked = true;
deliverByte(oneBitBeforeNow);
return;
} else if (delta < leadCycles) {
// lock on and insert event at delivery time of first bit.
locked = true;
clock.insertEvent(this, delta);
return;
} else if (delta < leadCycles + cyclesPerByte) {
// don't lock on yet, but wait for delivery time
clock.insertEvent(this, delta);
return;
}
}
// there is no transmission. Remain unlocked.
clock.insertEvent(this, leadCycles);
}
/**
* The <code>fireLocked</code> method is done when the receiver
* is locked onto some transmission
*
* @param time
*/
private void fireLocked(long time) {
long oneBitBeforeNow = getBitNum(time) - BIT_DELAY; // there is a one bit delay
waitForNeighbors(time - cyclesPerByte);
deliverByte(oneBitBeforeNow);
}
/**
* The <code>deliverByte</code> method delivers bytes to receiver
*
* @param oneBitBeforeNow
*/
private void deliverByte(long oneBitBeforeNow) {
List<Transmission> it = getIntersection(oneBitBeforeNow - BYTE_SIZE);
if (it != null) {//there is a transmission
boolean one = false;
double rssi = 0.0;
double SNR = 0;
assert it.size() > 0;
for (Transmission t : it) {
if (one) {//more than one transmission
double I = medium.arbitrator.computeReceivedPower(t, Receiver.this, (int) clock.cyclesToMillis(clock.getCount()));
//add interference to received power in linear scale
rssi = 10 * Math.log10(Math.pow(10, rssi / 10) + Math.pow(10, I / 10));
SNR = SNR - I;
} else {//only one transmission - no interference -
one = true;
Pr = medium.arbitrator.computeReceivedPower(t, Receiver.this, (int) clock.cyclesToMillis(clock.getCount()));
Pn = medium.arbitrator.getNoise((int) clock.cyclesToMillis(clock.getCount()));
rssi = Pr;
SNR = Pr - Pn;
}
}
double snr = Math.pow(10D, (SNR / 10D));
//ebno = snr / spectral efficiency = snr / log(1 + snr)
double ebno = snr / Math.log(1 + snr);
//BER vs Ebno in AWGN channel
double x = Math.sqrt(2 * ebno);
double x2 = Math.pow(x, 2);
double BER = Math.exp(-x2 / 2) / (1.64D * x + Math.sqrt(0.76D * (x2) + 4D));
setBER(BER);
setRSSI(rssi);
// merge transmissions into a single byte and send it to receiver
// we return val in order to get rssi and corr value
char val = medium.arbitrator.mergeTransmissions(Receiver.this, it, oneBitBeforeNow - BYTE_SIZE, (int) clock.cyclesToMillis(clock.getCount()));
//store high byte for corrupted bytes
int newval = (int) (val & 0xff00);
newval |= (int) (0xff & nextByte(true, (byte) val));
val = (char) newval;
if (probeList != null) probeList.fireAfterReceive(Receiver.this, val);
clock.insertEvent(this, cyclesPerByte);
} else {//no transmissions intersect
// all transmissions are over.
locked = false;
nextByte(false, (byte) 0);
if (probeList != null) probeList.fireAfterReceiveEnd(Receiver.this);
clock.insertEvent(this, leadCycles);
}
}
}
/**
* The <code>isChannelClear</code> method determines wether the channel is clear
* or not
*
* @return true if channel is clear and false otherwise
*/
public final boolean isChannelClear(int RSSI_reg, int MDMCTRL0_reg) {
if (activated && locked) {
// this is the only shortcut: receiver is on and locked to a transmission
return false;
}
else {
// the receiver could be off or it is not locked to a transmission
// the latter could happen also if the receiver was just turned on and the TX has been started before!
long time = clock.getCount();
long bit = getBitNum(time) - BIT_DELAY; // there is a one bit delay
waitForNeighbors(time - cyclesPerByte);
List<Transmission> it = getIntersection(bit - BYTE_SIZE);
if (it != null) { //if there is a transmission
//There are 3 modes (ED, 802.15.4 compliant detection, both)
int cca_mode = (MDMCTRL0_reg & 0x00c0) >>> 6;
//cca modes 1 and 3 compare threshold with rssi to determine CCA
if (cca_mode == 1 || cca_mode == 3) {
boolean one = false;
double rssi = 0.0;
assert it.size() > 0;
for (Transmission t : it) {
if (one) {//more than one transmission
double I = medium.arbitrator.computeReceivedPower(t, Receiver.this, (int) clock.cyclesToMillis(clock.getCount()));
//add interference to received power in linear scale
rssi = 10 * Math.log10(Math.pow(10, rssi / 10) + Math.pow(10, I / 10));
} else {//only one transmission - no interference -
one = true;
Pr = medium.arbitrator.computeReceivedPower(t, Receiver.this, (int) clock.cyclesToMillis(clock.getCount()));
Pn = medium.arbitrator.getNoise((int) clock.cyclesToMillis(clock.getCount()));
rssi = Pr;
}
}
int cca_hyst = (MDMCTRL0_reg & 0x0700) >>> 8;
int cca_thr = (RSSI_reg & 0xff00) >>> 8;
if (cca_thr > 127)
cca_thr -= 256;
int rssi_val = (int) rssi + 45;
return rssi_val < cca_thr - cca_hyst;
}
else return false; //other modes false since we have transmissions
}
else return true; //no transmissions: CCA true in all cases
}
}
/**
* The <code>earliestNewTransmission</code> method determines if there is a
* new transmission from the other threads
*
* @param bit equal to oneBitBeforeNow - BYTE_SIZE
* @return tx new transmission
*/
private Transmission earliestNewTransmission(long bit) {
Transmission tx = null;
synchronized (medium) {
Iterator<Transmission> i = medium.transmissions.iterator();
while (i.hasNext()) {
Transmission t = i.next();
if (bit <= t.firstBit && medium.arbitrator.lockTransmission(Receiver.this, t, (int) clock.cyclesToMillis(clock.getCount()))) {
if (tx == null) tx = t;
else if (t.firstBit < tx.firstBit) tx = t;
} else if (bit - 8 - 2 * medium.leadBits > t.lastBit) {
// remove older transmissions
i.remove();
}
}
}
return tx;
}
/**
* The <code>getIntersection</code> method calculate if transmissions intersect
*
* @param bit time in which calculate if tx intersect (oneBitBeforeNow - BYTE_SIZE)
* @return it representing the list of transmissions that intersect
*/
private List<Transmission> getIntersection(long bit) {
List<Transmission> it = null;
synchronized (medium) {
for (Transmission t : medium.transmissions) {
if (intersect(bit, t)) {
if (it == null) it = new LinkedList<Transmission>();
it.add(t);
}
}
}
return it;
}
/**
* The method <code>intersect</code> calculates if byte to transmit intersect
* with another transmission
*
* @param bit time in which calculate if tx intersect (oneBitBeforeNow - BYTE_SIZE)
* @param t Transmission to find out if intersects
* @return true if they intersect, false otherwise
*/
private boolean intersect(long bit, Transmission t) {
return bit >= t.firstBit && bit < t.lastBit;
}
private void waitForNeighbors(long gtime) {
if (medium.synch != null) medium.synch.waitForNeighbors(gtime);
}
}
public static class BasicArbitrator implements Arbitrator {
public boolean lockTransmission(Receiver receiver, Transmission trans, int Milliseconds) {
return true;
}
public char mergeTransmissions(Receiver receiver, List<Transmission> it, long bit, int Milliseconds) {
assert it.size() > 0;
Iterator<Transmission> i = it.iterator();
Transmission first = i.next();
int value = 0xff & first.getByteAtTime(bit);
while (i.hasNext()) {
Transmission next = i.next();
int nval = 0xff & next.getByteAtTime(bit);
value |= (nval << 8) ^ (value << 8); // compute corrupted bits
value |= nval;
}
return (char) value;
}
public double computeReceivedPower(Medium.Transmission t, Medium.Receiver receiver, int Milliseconds) {
return Pr;
}
public int getNoise(int index) {
return Pn;
}
}
/**
* The {@code Transmission} class represents a transmission originating from
* a particular {@code Transmitter} to this medium. A transmission consists
* of a sequences of bytes sent one after another into the medium. Each transmission
* has a start time and a power level.
*/
public class Transmission {
public final Transmitter origin;
public final long start;
public final long firstBit;
public final double power;
public final double Pt;
public final double f;
public long lastBit;
public long end;
protected int counter;
protected byte[] data;
/**
* The constructor for the <code> Transmission </code> class creates a new
* transmission with several properties like start and end times, first
* and last bit to be transmitted and the data itself.
*
* @param o Transmitter object
* @param pow Power for the transmission
* @param freq the frequency for the transmission
*/
protected Transmission(Transmitter o, double pow, double freq) {
origin = o;
power = pow;
Pt = pow;
f = freq;
start = o.clock.getCount();
end = Long.MAX_VALUE;
long l = start + o.leadCycles;
firstBit = origin.getBitNum(l);
lastBit = Long.MAX_VALUE;
data = new byte[Arithmetic.roundup(o.medium.maxLength, BYTE_SIZE)];
}
/**
* The method <code>end()</code> finishes the transmission and it updates
* end time and last bit transmitted
*/
public void end() {
end = origin.clock.getCount();
lastBit = firstBit + counter * BYTE_SIZE;
}
/**
* The method <code>getByteAtTime()</code> gets the transmission data byte
* at the time of bit
*
* @param bit time in bits for getting the byte
* @return hi byte gotten
*/
public byte getByteAtTime(long bit) {
assert bit >= firstBit;
int offset = (int) (bit - firstBit);
int shift = offset & 0x7;
int indx = offset / BYTE_SIZE;
int hi = 0xff & data[indx] << shift;
if (shift > 0) {
int low = 0xff & data[1 + indx];
return (byte) (hi | low >> (BYTE_SIZE - shift));
}
return (byte) hi;
}
}
public final Synchronizer synch;
public final Arbitrator arbitrator;
public final int bitsPerSecond;
public final int leadBits;
public final int minLength;
public final int maxLength;
protected List<Transmission> transmissions = new LinkedList<Transmission>();
/**
* The constructor for the <code>Medium</code> class creates a new shared transmission
* medium with the specified properties, including the bits per second, the lead time
* before beginning transmission, and the minimum transmission size in bits. These
* parameters are used to configure the medium and to ensure maximum possible simulation
* performance.
*
* @param synch the synchronizer used to synchronize concurrent senders and receivers
* @param arb the arbitrator that determines how to merge received transmissions
* @param bps the bits per second throughput of this medium
* @param ltb the lead time in bits before beginning a transmission and the first bit
* @param mintl the minimum transmission length
* @param maxtl the maximum transmission length
*/
public Medium(Synchronizer synch, Arbitrator arb, int bps, int ltb, int mintl, int maxtl) {
this.synch = synch;
bitsPerSecond = bps;
leadBits = ltb;
minLength = mintl;
maxLength = maxtl;
if (arb == null)
arbitrator = new BasicArbitrator();
else
arbitrator = arb;
}
/**
* The synchronized class <code>newTransmission</code> creates a new Transmission
* object and adds it to the list of transmissions
*
* @param o Transmitter that creates the new transmission
* @param p power for the new transmission
* @return tx new transmission created
*/
protected synchronized Transmission newTransmission(Transmitter o, double p, double f) {
Transmission tx = new Transmission(o, p, f);
transmissions.add(tx);
return tx;
}
/**
* The method <code>isCorruptedByte</code> computes if the byte is corrupted or not
*
* @param c byte to be computed
* @return true if it is corrupted, false otherwise
*/
public static boolean isCorruptedByte(char c) {
return (c & 0xff00) != 0;
}
public static byte getCorruptedBits(char c) {
return (byte) (c >> 8);
}
public static byte getTransmittedBits(char c) {
return (byte) c;
}
}
| |
package org.sagebionetworks.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
import org.sagebionetworks.schema.id.DotSeparatedAlphaNumeric;
import org.sagebionetworks.schema.id.OrganizationName;
import org.sagebionetworks.schema.id.SchemaId;
import org.sagebionetworks.schema.id.SchemaName;
import org.sagebionetworks.schema.parser.ParseException;
import org.sagebionetworks.schema.parser.SchemaIdParser;
import org.sagebionetworks.schema.semantic.version.AlphanumericIdentifier;
import org.sagebionetworks.schema.semantic.version.Build;
import org.sagebionetworks.schema.semantic.version.NumericIdentifier;
import org.sagebionetworks.schema.semantic.version.Prerelease;
import org.sagebionetworks.schema.semantic.version.PrereleaseIdentifier;
import org.sagebionetworks.schema.semantic.version.SemanticVersion;
import org.sagebionetworks.schema.semantic.version.VersionCore;
public class SchemaIdParserTest {
@Test
public void testNumericIdentifier() throws ParseException {
// test all numbers from 0 to 100
for(long i = 0; i<100; i++) {
numericTest(i, Long.toString(i));
}
}
void numericTest(Long expected, String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
NumericIdentifier numericId = parser.numericIdentifier();
assertEquals(new NumericIdentifier(expected), numericId);
}
@Test
public void testVersionCore() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("1.0.2");
VersionCore core = parser.versionCore();
assertNotNull(core);
assertEquals(1L, core.getMajor().getValue());
assertEquals(0L, core.getMinor().getValue());
assertEquals(2L, core.getPatch().getValue());
}
@Test
public void testVersionCoreAllZeros() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("0.0.0");
VersionCore core = parser.versionCore();
assertNotNull(core);
assertEquals(0L, core.getMajor().getValue());
assertEquals(0L, core.getMinor().getValue());
assertEquals(0L, core.getPatch().getValue());
}
@Test
public void testVersionCoreMajorLeadingZeror() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("01.0.2");
assertThrows(ParseException.class, ()->{
parser.versionCore();
});
}
@Test
public void testVersionCoreMinorLeadingZeror() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("1.05.2");
assertThrows(ParseException.class, ()->{
parser.versionCore();
});
}
@Test
public void testVersionCorePatchLeadingZeror() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("1.5.02");
// while this will parse everything after the zero is lost, so it would fail in a larger context
VersionCore core = parser.versionCore();
// confirm the 2 is lost
assertEquals("1.5.0", core.toString());
}
@Test
public void testAlphaNumeric() throws ParseException {
testAlphanumericIdentifier("a");
testAlphanumericIdentifier("a1123");
testAlphanumericIdentifier("aaa123");
testAlphanumericIdentifier("abcdefghijklmnopqurstuvwxyz");
testAlphanumericIdentifier("ABCDEFGHIJKLMNOPQRSTUVWXYZ");
}
public void testAlphanumericIdentifier(String toTest) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(toTest);
AlphanumericIdentifier alphanumeric = parser.alphanumericIdentifier();
assertEquals(toTest, alphanumeric.toString());
}
@Test
public void testAlphaNumericWithDash() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("-");
assertThrows(ParseException.class, ()->{
parser.alphanumericIdentifier();
});
}
@Test
public void testAlphaNumericWithSlash() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("/");
assertThrows(ParseException.class, ()->{
parser.alphanumericIdentifier();
});
}
@Test
public void testAlphaNumericStartWithNumber() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("9abc");
assertThrows(ParseException.class, ()->{
parser.alphanumericIdentifier();
});
}
@Test
public void testAlphaNumericStartWithZero() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("0123");
assertThrows(ParseException.class, ()->{
parser.alphanumericIdentifier();
});
}
@Test
public void testAlphaNumericWithZero() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("0");
assertThrows(ParseException.class, ()->{
parser.alphanumericIdentifier();
});
}
@Test
public void testPrereleaseIdentifier() throws ParseException {
testPrereleaseIdentifier("123");
testPrereleaseIdentifier("abc");
}
public void testPrereleaseIdentifier(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
PrereleaseIdentifier prereleaseIdentifier = parser.prereleaseIdentifier();
assertEquals(prereleaseIdentifier.toString(), input);
}
@Test
public void testPrereleaseIdentifierStartWithZero() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("045");
// while this does not fail it does not parse anything after the zero
PrereleaseIdentifier prereleaseIdentifier = parser.prereleaseIdentifier();
// digits after the zero are lost
assertEquals("0", prereleaseIdentifier.toString());
}
@Test
public void testPrerelease() throws ParseException {
testPrerelease("alpha");
testPrerelease("alpha.1");
testPrerelease("0.3.7");
testPrerelease("x.7.z.92");
}
public void testPrerelease(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
Prerelease prerelease = parser.prerelease();
assertEquals(prerelease.toString(), input);
}
@Test
public void testBuild() throws ParseException {
testBuild("001");
testBuild("20130313144700");
testBuild("exp.sha.5114f85");
}
public void testBuild(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
Build build = parser.build();
assertEquals(build.toString(), input);
}
@Test
public void testSemanticVersion() throws ParseException {
testSemanticVersion("0.0.0");
testSemanticVersion("1.23.456");
testSemanticVersion("1.23.456-x.7.z.92");
testSemanticVersion("1.23.456-x.7.z.92+exp.sha.5114f85");
testSemanticVersion("1.23.456+exp.sha.5114f85");
}
@Test
public void testSemanticVersionWithDash() {
assertThrows(ParseException.class, ()->{
testSemanticVersion("-");
});
}
public void testSemanticVersion(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
SemanticVersion semanticVersion = parser.semanticVersion();
assertEquals(input, semanticVersion.toString());
}
@Test
public void testDotSeparatedAlphanumeric() throws ParseException {
testDotSeparatedAlphanumeric("abc");
testDotSeparatedAlphanumeric("abc.xyz");
testDotSeparatedAlphanumeric("a1.b3.c4123");
}
@Test
public void testDotSeparatedAlphanumericStrartDot() {
assertThrows(ParseException.class, ()->{
testDotSeparatedAlphanumeric(".abc");
});
}
@Test
public void testDotSeparatedAlphanumericEndDot() {
assertThrows(ParseException.class, ()->{
testDotSeparatedAlphanumeric("abc.");
});
}
public void testDotSeparatedAlphanumeric(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
DotSeparatedAlphaNumeric dotSeparatedAlphaNumeric = parser.dotSeparatedAlphaNumeric();
assertEquals(input, dotSeparatedAlphaNumeric.toString());
}
@Test
public void testDotSeparatedAlphanumericStartsWithNumber() {
assertThrows(ParseException.class, ()->{
testDotSeparatedAlphanumeric("1abc");
});
}
@Test
public void testDotSeparatedAlphanumericStartsWithZero() {
assertThrows(ParseException.class, ()->{
testDotSeparatedAlphanumeric("0");
});
}
@Test
public void testOrganziationName() throws ParseException {
testOrganizationName("foo");
testOrganizationName("foo.bar.a1");
}
@Test
public void testOrganziationNameStartWithDot() {
assertThrows(ParseException.class, ()->{
testOrganizationName(".abc");
});
}
@Test
public void testOrganziationNameEndWithDot() {
assertThrows(ParseException.class, ()->{
testOrganizationName("abc.");
});
}
@Test
public void testOrganziationNameContainsNumers() {
assertThrows(ParseException.class, ()->{
testOrganizationName("foo.123.bar");
});
}
@Test
public void testOrganziationNameContainsZero() {
assertThrows(ParseException.class, ()->{
testOrganizationName("foo.0.bar");
});
}
public void testOrganizationName(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
OrganizationName organizationName = parser.organizationName();
assertEquals(input, organizationName.toString());
}
@Test
public void testSchemaName() throws ParseException {
testSchemaName("SomeClass");
testSchemaName("repo.model.SomeClass");
}
public void testSchemaName(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
SchemaName schemaName = parser.schemaName();
assertEquals(input, schemaName.toString());
}
@Test
public void testSchemaIdNoVersion() throws ParseException {
SchemaId id = testSchemaId("org.myorg-path.SomeClass");
assertNotNull(id);
assertNotNull(id.getOrganizationName());
assertEquals("org.myorg",id.getOrganizationName().toString());
assertNotNull(id.getSchemaName());
assertEquals("path.SomeClass", id.getSchemaName().toString());
assertNull(id.getSemanticVersion());
}
@Test
public void testSchemaIdWithVersion() throws ParseException {
SchemaId id = testSchemaId("org.myorg-path.SomeClass-1.2.3-alpha+1234f");
assertNotNull(id);
assertNotNull(id.getOrganizationName());
assertEquals("org.myorg",id.getOrganizationName().toString());
assertNotNull(id.getSchemaName());
assertEquals("path.SomeClass", id.getSchemaName().toString());
assertNotNull(id.getSemanticVersion());
assertEquals("1.2.3-alpha+1234f", id.getSemanticVersion().toString());
}
@Test
public void testSchemaIdWhiteSpace() throws ParseException {
SchemaIdParser parser = new SchemaIdParser("\n org.myorg-path.SomeClass-1.2.3-alpha+1234f \t");
SchemaId schemaId = parser.schemaId();
assertEquals("org.myorg-path.SomeClass-1.2.3-alpha+1234f", schemaId.toString());
}
@Test
public void testSchemaIdWithVersionVersionPatchLeadingZero() throws ParseException {
assertThrows(ParseException.class, ()->{
testSchemaId("org.myorg-path.SomeClass-1.2.03");
});
}
@Test
public void testSchemaIdWithVersionVersionEndSlash() throws ParseException {
assertThrows(ParseException.class, ()->{
testSchemaId("org.myorg-path.SomeClass-");
});
}
@Test
public void testSchemaIdWithVersionVersionEndDot() throws ParseException {
assertThrows(ParseException.class, ()->{
testSchemaId("org.myorg-path.SomeClass.");
});
}
public SchemaId testSchemaId(String input) throws ParseException {
SchemaIdParser parser = new SchemaIdParser(input);
SchemaId schemaId = parser.schemaId();
assertEquals(input, schemaId.toString());
return schemaId;
}
@Test
public void testParseSchemaId() {
SchemaId id = SchemaIdParser.parseSchemaId("org.valid-name");
assertNotNull(id);
assertEquals("org.valid-name", id.toString());
}
@Test
public void testParseSchemaIdInvalid() {
IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, ()->{
SchemaIdParser.parseSchemaId("org.valid-name-0");
});
assertTrue(exception.getMessage().startsWith("Invalid '$id' : 'org.valid-name-0'"));
assertTrue(exception.getCause() instanceof ParseException);
}
@Test
public void testParseSchemaIdNullId() {
String id = null;
String message = assertThrows(IllegalArgumentException.class, ()->{
SchemaIdParser.parseSchemaId(id);
}).getMessage();
assertEquals("$id cannot be null", message);
}
@Test
public void testParseOrganizationName() {
OrganizationName orgName = SchemaIdParser.parseOrganizationName("org.valid");
assertNotNull(orgName);
assertEquals("org.valid", orgName.toString());
}
@Test
public void testParseOrganizationNameInvalid() {
IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, ()->{
SchemaIdParser.parseOrganizationName("org.valid.0");
});
assertTrue(exception.getMessage().startsWith("Invalid 'organizationName' : 'org.valid.0"));
assertTrue(exception.getCause() instanceof ParseException);
}
@Test
public void testParseOrganizationNameContainsSlash() {
IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, ()->{
SchemaIdParser.parseOrganizationName("org.valid/bar");
});
assertTrue(exception.getMessage().startsWith("Invalid 'organizationName' : 'org.valid/bar"));
}
@Test
public void testParseOrganizationNameContainsDash() {
IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, ()->{
SchemaIdParser.parseOrganizationName("org.valid-bar");
});
assertTrue(exception.getMessage().startsWith("Invalid 'organizationName' : 'org.valid-bar"));
}
@Test
public void testParseOrganizationNameNull() {
String name = null;
String message = assertThrows(IllegalArgumentException.class, ()->{
SchemaIdParser.parseOrganizationName(name);
}).getMessage();
assertEquals("Organization name cannot be null", message);
}
/**
 * A fully-qualified id (organization, schema name, semantic version with pre-release
 * and build metadata) parses and round-trips unchanged.
 */
@Test
public void testParseSchemaIdFull() {
    String fullId = "test.integeration.organization-integration.test.Schema.json-1.45.67-alpha+beta";
    SchemaId parsed = SchemaIdParser.parseSchemaId(fullId);
    assertNotNull(parsed);
    assertEquals(fullId, parsed.toString());
}
/** An absolute registered-schema URL is reduced to its relative $id portion. */
@Test
public void testExtractRelative() {
    String absolute =
            "https://repo-prod.prod.sagebase.org/repo/v1/schema/type/registered/test.integeration.organization-integration.test.Schema.json-1.45.67-alpha+beta";
    assertEquals(
            "test.integeration.organization-integration.test.Schema.json-1.45.67-alpha+beta",
            SchemaIdParser.extractRelative$id(absolute));
}
/** An already-relative $id passes through extraction unchanged. */
@Test
public void testExtractRelativeWithRelative() {
    String alreadyRelative =
            "test.integeration.organization-integration.test.Schema.json-1.45.67-alpha+beta";
    assertEquals(alreadyRelative, SchemaIdParser.extractRelative$id(alreadyRelative));
}
/** Extraction is null-tolerant: a null input yields a null result. */
@Test
public void testExtractRelativeWithNull() {
    String nullInput = null;
    assertEquals(null, SchemaIdParser.extractRelative$id(nullInput));
}
/** Parsing an absolute URL id strips the host prefix and parses the relative portion. */
@Test
public void testParseSchemaIdWithAbsoluteFull() {
    String absoluteId =
            "https://repo-prod.prod.sagebase.org/repo/v1/schema/type/registered/test.integeration.organization-integration.test.Schema.json-1.45.67-alpha+beta";
    SchemaId parsed = SchemaIdParser.parseSchemaId(absoluteId);
    assertNotNull(parsed);
    assertEquals(
            "test.integeration.organization-integration.test.Schema.json-1.45.67-alpha+beta",
            parsed.toString());
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.testutil.TestJar;
import com.facebook.buck.zip.JarBuilder;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import javax.tools.DiagnosticCollector;
import javax.tools.JavaFileObject;
import javax.tools.StandardLocation;
import javax.tools.ToolProvider;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/** Tests {@link JavaInMemoryFileManager} */
public class JavaInMemoryFileManagerTest {
  /** Scratch directory for the jar files produced by {@link #writeToJar()}. */
  @Rule public TemporaryFolder temp = new TemporaryFolder();

  // Unit under test; rebuilt for every test in setUp().
  private JavaInMemoryFileManager inMemoryFileManager;

  @Before
  public void setUp() {
    DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
    inMemoryFileManager =
        new JavaInMemoryFileManager(
            ToolProvider.getSystemJavaCompiler().getStandardFileManager(diagnostics, null, null),
            // Jar-root style URI: output files are addressed as entries inside this jar.
            Paths.get(URI.create("file:///tmp/test.jar!/")),
            /*classesToBeRemovedFromJar */ RemoveClassesPatternsMatcher.EMPTY);
  }

  /** A dotted class name maps to a slash-separated ".class" entry name. */
  @Test
  public void testJavaFileName() throws Exception {
    JavaFileObject fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "com.facebook.buck.jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    assertEquals(JavaFileObject.Kind.CLASS, fileObject.getKind());
    assertEquals("com/facebook/buck/jvm/java/JavaFileParser.class", fileObject.getName());
  }

  /** Bytes written through an opened-and-closed stream become the entry's content. */
  @Test
  public void testWriteContent() throws Exception {
    JavaFileObject fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT, "JavaFileParser", JavaFileObject.Kind.CLASS, null);
    OutputStream stream = fileObject.openOutputStream();
    stream.write("Hello World!".getBytes());
    stream.close();
    TestJar jar = writeToJar();
    List<String> entries = jar.getEntriesContent();
    // Indices 0 and 1 are META-INF/ and the manifest (see the ordering tests below);
    // index 2 is the file written above.
    assertEquals(3, entries.size());
    assertEquals("Hello World!", entries.get(2));
  }

  /** Jar entries come out sorted regardless of the order the files were created in. */
  @Test
  public void testFilesWrittenInSortedOrder() throws Exception {
    // Deliberately create B.C, A, B, B$D out of order.
    JavaFileObject fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT, "B.C", JavaFileObject.Kind.CLASS, null);
    fileObject.openOutputStream().close();
    fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT, "A", JavaFileObject.Kind.CLASS, null);
    fileObject.openOutputStream().close();
    fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT, "B", JavaFileObject.Kind.CLASS, null);
    fileObject.openOutputStream().close();
    fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT, "B$D", JavaFileObject.Kind.CLASS, null);
    fileObject.openOutputStream().close();
    TestJar jar = writeToJar();
    // Matchers.contains is order-sensitive: this pins the exact sorted entry sequence.
    assertThat(
        jar.getZipEntries().stream().map(ZipEntry::getName).collect(Collectors.toList()),
        Matchers.contains(
            "META-INF/",
            JarFile.MANIFEST_NAME,
            "A.class",
            "B$D.class",
            "B.class",
            "B/",
            "B/C.class"));
  }

  /** Writing a.b.C also materializes the a/ and a/b/ directory entries in the jar. */
  @Test
  public void testIntermediateDirectoriesAreCreated() throws Exception {
    JavaFileObject fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    fileObject.openOutputStream().close();
    TestJar jar = writeToJar();
    List<String> zipEntries =
        jar.getZipEntries().stream().map(ZipEntry::getName).collect(Collectors.toList());
    assertThat(
        zipEntries,
        Matchers.contains(
            "META-INF/",
            JarFile.MANIFEST_NAME,
            "jvm/",
            "jvm/java/",
            "jvm/java/JavaFileParser.class"));
  }

  /** Two files in the same package share directory entries; directories are not duplicated. */
  @Test
  public void testMultipleFilesInSamePackage() throws Exception {
    JavaFileObject fileObject1 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    JavaFileObject fileObject2 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaInMemoryFileManager",
            JavaFileObject.Kind.CLASS,
            null);
    fileObject1.openOutputStream().close();
    fileObject2.openOutputStream().close();
    TestJar jar = writeToJar();
    List<String> zipEntries =
        jar.getZipEntries().stream().map(ZipEntry::getName).collect(Collectors.toList());
    assertThat(
        zipEntries,
        Matchers.contains(
            "META-INF/",
            JarFile.MANIFEST_NAME,
            "jvm/",
            "jvm/java/",
            "jvm/java/JavaFileParser.class",
            "jvm/java/JavaInMemoryFileManager.class"));
  }

  /** File objects for different class names are not the same file. */
  @Test
  public void testIsNotSameFile() throws Exception {
    JavaFileObject fileObject1 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    JavaFileObject fileObject2 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaInMemoryFileManager",
            JavaFileObject.Kind.CLASS,
            null);
    assertFalse(inMemoryFileManager.isSameFile(fileObject1, fileObject2));
  }

  /** Requesting the same class name twice yields objects the manager treats as the same file. */
  @Test
  public void testIsSameFile() throws Exception {
    JavaFileObject fileObject1 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    JavaFileObject fileObject2 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    assertTrue(inMemoryFileManager.isSameFile(fileObject1, fileObject2));
  }

  /** A non-recursive list of the exact package returns the file created in it. */
  @Test
  public void testNonRecursiveListOperationReturnsNewlyCreatedFile() throws Exception {
    JavaFileObject fileObject1 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    Iterator<JavaFileObject> nonRecursiveIterable =
        inMemoryFileManager
            .list(
                StandardLocation.CLASS_OUTPUT,
                "jvm.java",
                Collections.singleton(JavaFileObject.Kind.CLASS),
                false)
            .iterator();
    assertEquals(fileObject1, nonRecursiveIterable.next());
    assertFalse(nonRecursiveIterable.hasNext());
  }

  /** A non-recursive list of a parent package must NOT see files in sub-packages. */
  @Test
  public void testNonRecursiveListOperationDoesntReturnNewlyCreatedFileOnOtherDir()
      throws Exception {
    JavaFileObject fileObject1 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    assertEquals("jvm/java/JavaFileParser.class", fileObject1.getName());
    // NOTE(review): variable name says "recursive" but the list call below passes
    // recurse=false; the name is misleading, the behavior under test is non-recursive.
    Iterator<JavaFileObject> recursiveIterable =
        inMemoryFileManager
            .list(
                StandardLocation.CLASS_OUTPUT,
                "jvm",
                Collections.singleton(JavaFileObject.Kind.CLASS),
                false)
            .iterator();
    assertFalse(recursiveIterable.hasNext());
  }

  /** A recursive list of a parent package does see files in sub-packages. */
  @Test
  public void testRecursiveListOperationReturnsNewlyCreatedFile() throws Exception {
    JavaFileObject fileObject1 =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.CLASS,
            null);
    Iterator<JavaFileObject> recursiveIterable =
        inMemoryFileManager
            .list(
                StandardLocation.CLASS_OUTPUT,
                "jvm",
                Collections.singleton(JavaFileObject.Kind.CLASS),
                true)
            .iterator();
    assertEquals(fileObject1, recursiveIterable.next());
    assertFalse(recursiveIterable.hasNext());
  }

  /** Merely requesting a file for output, without opening it, writes no jar entry. */
  @Test
  public void testGetFileForOutputWithoutOpeningWritesNothing() throws IOException {
    inMemoryFileManager.getFileForOutput(
        StandardLocation.CLASS_OUTPUT, "jvm.java", "JavaFileParser", null);
    TestJar jar = writeToJar();
    assertThat(
        jar.getZipEntries().stream().map(ZipEntry::getName).collect(Collectors.toList()),
        Matchers.contains("META-INF/", JarFile.MANIFEST_NAME));
  }

  /** Same as above, via the Java-file variant of the output API. */
  @Test
  public void testGetJavaFileForOutputWithoutOpeningWritesNothing() throws IOException {
    inMemoryFileManager.getJavaFileForOutput(
        StandardLocation.CLASS_OUTPUT, "jvm.java.JavaFileParser", JavaFileObject.Kind.OTHER, null);
    TestJar jar = writeToJar();
    assertThat(
        jar.getZipEntries().stream().map(ZipEntry::getName).collect(Collectors.toList()),
        Matchers.contains("META-INF/", JarFile.MANIFEST_NAME));
  }

  /** Writing to a stream without closing it must not commit the entry to the jar. */
  @Test
  public void testWriteToStreamWithoutClosingWritesNothing() throws IOException {
    JavaFileObject fileObject =
        inMemoryFileManager.getJavaFileForOutput(
            StandardLocation.CLASS_OUTPUT,
            "jvm.java.JavaFileParser",
            JavaFileObject.Kind.OTHER,
            null);
    fileObject.openOutputStream().write("Hello".getBytes());
    TestJar jar = writeToJar();
    assertThat(
        jar.getZipEntries().stream().map(ZipEntry::getName).collect(Collectors.toList()),
        Matchers.contains("META-INF/", JarFile.MANIFEST_NAME));
  }

  /**
   * Flushes everything the file manager holds into a fresh jar file under {@link #temp}
   * and wraps it in a TestJar for inspection.
   */
  private TestJar writeToJar() throws IOException {
    File jarFile = temp.newFile();
    JarBuilder jarBuilder = new JarBuilder();
    inMemoryFileManager.writeToJar(jarBuilder);
    jarBuilder.createJarFile(jarFile.toPath());
    return new TestJar(jarFile);
  }
}
| |
/**
* Copyright (C) 2012 KRM Associates, Inc. healtheme@krminc.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.krminc.phr.api.vitals.service;
import com.krminc.phr.api.service.Api;
import com.krminc.phr.api.vitals.converter.BloodPressureConverter;
import com.krminc.phr.api.vitals.converter.BloodPressuresConverter;
import com.krminc.phr.dao.PersistenceService;
import com.krminc.phr.domain.vitals.BloodPressure;
import com.sun.jersey.api.core.ResourceContext;
import java.util.Collection;
import javax.persistence.EntityManager;
import javax.ws.rs.Path;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Produces;
import javax.ws.rs.Consumes;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.MediaType;
import oracle.toplink.essentials.config.CascadePolicy;
import oracle.toplink.essentials.config.HintValues;
import oracle.toplink.essentials.config.TopLinkQueryHints;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
/**
 * Blood Pressures API RESTful resource class and mapping.
 *
 * @author cmccall
 */
public class BloodPressuresResource {
    final Logger logger = LoggerFactory.getLogger(BloodPressuresResource.class);

    @Context
    protected UriInfo uriInfo;
    @Context
    protected ResourceContext resourceContext;
    @Context
    protected transient HttpServletRequest servletRequest;

    // Health record whose blood-pressure readings this resource exposes.
    protected Long healthRecordId;

    public BloodPressuresResource() {
    }

    public void setHealthRecordId(Long healthRecordId) {
        this.healthRecordId = healthRecordId;
    }

    /**
     * Get method for retrieving a page of BloodPressure instances in JSON or XML format.
     *
     * @param start zero-based offset of the first result; negative values are clamped to 0
     * @param max page size, clamped to the range [1, 100]
     * @param source reading source selector (accepted but not used by the query here)
     * @param orderBy "observeddate", "value" (systolic), or "diastolic"; anything else
     *        orders by dateAdded
     * @param desc greater than 0 for descending order, otherwise ascending
     * @return an instance of BloodPressuresConverter wrapping the selected page
     */
    @GET
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public BloodPressuresConverter get(
            @QueryParam("start") @DefaultValue("0") int start,
            @QueryParam("max") @DefaultValue("10") int max,
            @QueryParam("source") @DefaultValue("self") String source,
            @QueryParam("orderBy") @DefaultValue("dateadded") String orderBy,
            @QueryParam("desc") @DefaultValue("1") int desc) {
        PersistenceService persistenceSvc = PersistenceService.getInstance();
        try {
            persistenceSvc.beginTx();
            return new BloodPressuresConverter(
                    getEntities(start, max, source, orderBy, desc),
                    uriInfo.getAbsolutePath(),
                    Api.DEFAULT_EXPAND_LEVEL);
        } finally {
            persistenceSvc.commitTx();
            persistenceSvc.close();
        }
    }

    /**
     * Post method for creating an instance of BloodPressure using JSON or XML as the input format.
     *
     * <p>The health record id is always taken from the caller's HTTP session, never from the
     * payload, so a client cannot create readings under another user's record. A missing
     * session attribute surfaces as a NullPointerException on the getAttribute(...).toString()
     * chain and is mapped to 403 Forbidden.
     *
     * @param data a BloodPressureConverter entity deserialized from the request body
     * @return a 201 Created response whose Location header points at the new reading
     */
    @POST
    @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public Response post(BloodPressureConverter data) {
        // Check healthrecord is genuine. The previous code boxed with `new Long(...)` and then
        // null-checked the result — dead code, since toString() would already have thrown.
        try {
            data.setHealthRecordId(
                    Long.valueOf(servletRequest.getSession().getAttribute("healthRecordId").toString()));
        } catch (NullPointerException ex) {
            throw new WebApplicationException(Response.Status.FORBIDDEN);
        }
        PersistenceService persistenceSvc = PersistenceService.getInstance();
        try {
            if (data.hasError) {
                throw new WebApplicationException(Response.Status.PRECONDITION_FAILED);
            }
            persistenceSvc.beginTx();
            EntityManager em = persistenceSvc.getEntityManager();
            // Resolve the entity exactly once and persist that same instance. The previous code
            // called resolveEntity(em) a second time inside createEntity(...), persisting a
            // different instance than the one whose id builds the Location header below.
            BloodPressure entity = data.resolveEntity(em);
            createEntity(entity);
            persistenceSvc.commitTx();
            return Response.created(
                    uriInfo.getAbsolutePath().resolve(entity.getBloodPressureId() + "/")).build();
        } finally {
            persistenceSvc.close();
        }
    }

    /**
     * Returns a dynamic instance of BloodPressureResource used for entity navigation.
     *
     * @param id bloodPressureId
     * @return an instance of BloodPressureResource scoped to the given id
     */
    @Path("{bloodPressureId: \\d+}/")
    public BloodPressureResource getBloodPressureResource(@PathParam("bloodPressureId") Long id) {
        BloodPressureResource resource = resourceContext.getResource(BloodPressureResource.class);
        resource.setId(id);
        return resource;
    }

    /**
     * Returns {"count": n} where n is the number of readings in this health record.
     *
     * @param source reading source selector (accepted but not used by the count query)
     * @return a JSON object with a single "count" member
     */
    @Path("count/")
    @GET
    @Produces({MediaType.APPLICATION_JSON})
    public JSONObject getCount(
            @QueryParam("source") @DefaultValue("self") String source) {
        EntityManager em = PersistenceService.getInstance().getEntityManager();
        Long result = (Long) em.createNamedQuery("BloodPressure.countByHealthRecordId")
                .setParameter("healthRecordId", this.healthRecordId)
                .getSingleResult();
        JSONObject jsonResult = new JSONObject();
        try {
            jsonResult.put("count", result);
        } catch (JSONException ex) {
            throw new WebApplicationException(Response.Status.NOT_FOUND);
        }
        return jsonResult;
    }

    /**
     * Returns the page of entities associated with this resource.
     *
     * @param start zero-based offset, clamped to >= 0
     * @param max page size, clamped to [1, 100]
     * @param source reading source selector (currently unused)
     * @param orderBy sort column selector; see {@link #get}
     * @param desc greater than 0 for descending, otherwise ascending
     * @return a collection of BloodPressure instances
     */
    protected Collection<BloodPressure> getEntities(int start, int max, String source, String orderBy, int desc) {
        EntityManager em = PersistenceService.getInstance().getEntityManager();
        // Clamp paging parameters to sane bounds.
        if (start < 0) {
            start = 0;
        }
        if (max < 1) {
            max = 1;
        }
        if (max > 100) {
            max = 100;
        }
        // The ORDER BY column is chosen from a fixed whitelist below, so no user-controlled
        // text is ever concatenated into the JPQL string.
        StringBuilder query = new StringBuilder(
                "SELECT e FROM BloodPressure e WHERE e.healthRecordId = :healthRecordId");
        if (orderBy.equalsIgnoreCase("observeddate")) {
            query.append(" ORDER BY e.observedDate");
        } else if (orderBy.equalsIgnoreCase("value")) {
            query.append(" ORDER BY e.systolic");
        } else if (orderBy.equalsIgnoreCase("diastolic")) {
            query.append(" ORDER BY e.diastolic");
        } else {
            query.append(" ORDER BY e.dateAdded");
        }
        query.append(desc > 0 ? " DESC" : " ASC");
        return em.createQuery(query.toString())
                .setParameter("healthRecordId", healthRecordId)
                .setHint(TopLinkQueryHints.REFRESH, HintValues.TRUE)
                .setHint(TopLinkQueryHints.REFRESH_CASCADE, CascadePolicy.CascadeAllParts)
                .setFirstResult(start)
                .setMaxResults(max)
                .getResultList();
    }

    /**
     * Persist the given entity, always as a new row: any client-supplied id is discarded
     * so the persistence provider assigns a fresh one.
     *
     * @param entity the entity to persist
     */
    protected void createEntity(BloodPressure entity) {
        entity.setBloodPressureId(null);
        EntityManager em = PersistenceService.getInstance().getEntityManager();
        em.persist(entity);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.server.query;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import static org.apache.lens.api.error.LensCommonErrorCode.INTERNAL_SERVER_ERROR;
import static org.apache.lens.cube.error.LensCubeErrorCode.COLUMN_UNAVAILABLE_IN_TIME_RANGE;
import static org.apache.lens.cube.error.LensCubeErrorCode.SYNTAX_ERROR;
import static org.apache.lens.server.common.RestAPITestUtil.*;
import static org.apache.lens.server.common.TestDataUtils.*;
import static org.apache.lens.server.error.LensServerErrorCode.*;
import static org.testng.Assert.assertTrue;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.datatype.DatatypeConfigurationException;
import org.apache.lens.api.LensConf;
import org.apache.lens.api.LensSessionHandle;
import org.apache.lens.api.SupportedOperations;
import org.apache.lens.api.jaxb.LensJAXBContextResolver;
import org.apache.lens.api.metastore.*;
import org.apache.lens.api.result.LensAPIResult;
import org.apache.lens.api.result.LensErrorTO;
import org.apache.lens.api.util.MoxyJsonConfigurationContextResolver;
import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
import org.apache.lens.cube.metadata.HDFSStorage;
import org.apache.lens.server.LensJerseyTest;
import org.apache.lens.server.LensRequestLoggingFilter;
import org.apache.lens.server.common.ErrorResponseExpectedData;
import org.apache.lens.server.common.RestAPITestUtil;
import org.apache.lens.server.error.GenericExceptionMapper;
import org.apache.lens.server.error.LensJAXBValidationExceptionMapper;
import org.apache.lens.server.metastore.MetastoreResource;
import org.apache.lens.server.session.SessionResource;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.moxy.json.MoxyJsonFeature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.TestProperties;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import lombok.NonNull;
@Test(groups = "unit-test")
public class QueryAPIErrorResponseTest extends LensJerseyTest {
private static final String MOCK_QUERY = "mock-query";
private static final String INVALID_OPERATION = "invalid-operation";
/** Starts the embedded Jersey test container once before the test run. */
@BeforeTest
public void setUp() throws Exception {
    super.setUp();
}
/** Shuts the embedded Jersey test container down after the test run. */
@AfterTest
public void tearDown() throws Exception {
    super.tearDown();
}
/**
 * Assembles the test application: session, metastore and query resources plus the
 * exception mappers and JSON providers the error-response assertions depend on.
 *
 * <p>Fix: {@code LensRequestLoggingFilter} was previously registered twice (it appeared
 * both first and again mid-list); it is registered exactly once now.
 */
@Override
protected Application configure() {
    enable(TestProperties.LOG_TRAFFIC);
    enable(TestProperties.DUMP_ENTITY);
    return new ResourceConfig(
        LensRequestLoggingFilter.class,
        SessionResource.class,
        MetastoreResource.class,
        QueryServiceResource.class,
        MultiPartFeature.class,
        GenericExceptionMapper.class,
        LensJAXBContextResolver.class,
        LensJAXBValidationExceptionMapper.class,
        MoxyJsonConfigurationContextResolver.class,
        MoxyJsonFeature.class);
}
/** Estimating without a session id must yield 400 with SESSION_ID_NOT_PROVIDED. */
@Test(dataProvider = "mediaTypeData")
public void testErrorResponseWhenSessionIdIsAbsent(MediaType mt) {
    final String expectedErrMsg = "Session id not provided. Please provide a session id.";
    Response resp = estimate(target(), Optional.<LensSessionHandle>absent(), Optional.of(MOCK_QUERY), mt);
    LensErrorTO expectedError = LensErrorTO.composedOf(
        SESSION_ID_NOT_PROVIDED.getLensErrorInfo().getErrorCode(), expectedErrMsg, MOCK_STACK_TRACE);
    new ErrorResponseExpectedData(BAD_REQUEST, expectedError).verify(resp);
}
/** Estimating with an absent query must yield 400 with NULL_OR_EMPTY_OR_BLANK_QUERY. */
@Test(dataProvider = "mediaTypeData")
public void testErrorResponseWhenQueryIsAbsent(MediaType mt) {
    LensSessionHandle session = openSession(target(), "foo", "bar", new LensConf(), mt);
    Response resp = estimate(target(), Optional.of(session), Optional.<String>absent(), mt);
    final String expectedErrMsg = "Query is not provided, or it is empty or blank. Please provide a valid query.";
    LensErrorTO expectedError = LensErrorTO.composedOf(
        NULL_OR_EMPTY_OR_BLANK_QUERY.getLensErrorInfo().getErrorCode(), expectedErrMsg, MOCK_STACK_TRACE);
    new ErrorResponseExpectedData(BAD_REQUEST, expectedError).verify(resp);
    closeSession(target(), session, mt);
}
/** Posting an unknown operation must yield 400 listing the supported operations. */
@Test(dataProvider = "mediaTypeData")
public void testErrorResponseWhenInvalidOperationIsSubmitted(MediaType mt) {
    LensSessionHandle session = openSession(target(), "foo", "bar", new LensConf(), mt);
    Response resp = postQuery(target(), Optional.of(session), Optional.of(MOCK_QUERY),
        Optional.of(INVALID_OPERATION), mt);
    final String expectedErrMsg = "Provided Operation is not supported. Supported Operations are: "
        + "[estimate, execute, explain, execute_with_timeout]";
    LensErrorTO expectedError = LensErrorTO.composedOf(
        UNSUPPORTED_OPERATION.getLensErrorInfo().getErrorCode(),
        expectedErrMsg, MOCK_STACK_TRACE, new SupportedOperations());
    new ErrorResponseExpectedData(BAD_REQUEST, expectedError).verify(resp);
    closeSession(target(), session, mt);
}
/**
 * Estimating over a missing table produces a multi-cause failure; the response message
 * must match one of the two accepted semantic-error texts.
 */
@Test(dataProvider = "mediaTypeData")
public void testErrorResponseWhenLensMultiCauseExceptionOccurs(MediaType mt) {
    LensSessionHandle session = openSession(target(), "foo", "bar", mt);
    Response resp = estimate(target(), Optional.of(session),
        Optional.of("select * from non_existing_table"), mt);
    final String acceptedMsg1 = "Semantic Error : Error while compiling statement: "
        + "FAILED: SemanticException [Error 10001]: Line 1:31 Table not found 'non_existing_table'";
    final String acceptedMsg2 = "Semantic Error : user lacks privilege or object not found: NON_EXISTING_TABLE";
    LensErrorTO actual = resp.readEntity(LensAPIResult.class).getLensErrorTO();
    assertTrue(acceptedMsg1.equals(actual.getMessage()) || acceptedMsg2.equals(actual.getMessage()),
        "Message is " + actual.getMessage());
    closeSession(target(), session, mt);
}
/** A syntactically invalid query must yield 400 with SYNTAX_ERROR. */
@Test(dataProvider = "mediaTypeData")
public void testErrorResponseWithSyntaxErrorInQuery(MediaType mt) {
    LensSessionHandle session = openSession(target(), "foo", "bar", new LensConf(), mt);
    Response resp = estimate(target(), Optional.of(session), Optional.of(MOCK_QUERY), mt);
    LensErrorTO expectedError = LensErrorTO.composedOf(
        SYNTAX_ERROR.getLensErrorInfo().getErrorCode(),
        "Syntax Error: line 1:0 cannot recognize input near 'mock' '-' 'query'",
        MOCK_STACK_TRACE);
    new ErrorResponseExpectedData(BAD_REQUEST, expectedError).verify(resp);
    closeSession(target(), session, mt);
}
/**
 * Column with both a start and an end date, queried over a range entirely outside
 * that window: the error must cite both bounds.
 */
@Test(dataProvider = "mediaTypeData")
public void testQueryColumnWithBothStartDateAndEndDate(MediaType mt) throws DatatypeConfigurationException {
    DateTime availableFrom = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
    DateTime availableTill = new DateTime(2015, 1, 30, 23, 0, DateTimeZone.UTC);
    DateTime rangeStart = new DateTime(2014, 1, 1, 0, 0, DateTimeZone.UTC);
    DateTime rangeEnd = new DateTime(2014, 1, 3, 0, 0, DateTimeZone.UTC);
    final String expectedErrMsgSuffix = " can only be queried after Thursday, January 1, 2015 12:00:00 AM UTC and "
        + "before Friday, January 30, 2015 11:00:00 PM UTC. Please adjust the selected time range accordingly.";
    testColUnAvailableInTimeRange(Optional.of(availableFrom), Optional.of(availableTill),
        rangeStart, rangeEnd, expectedErrMsgSuffix, mt);
}
/**
 * Column with only a start date, queried over a range before it: the error must
 * cite the start bound only.
 */
@Test(dataProvider = "mediaTypeData")
public void testQueryColumnWithOnlyStartDate(MediaType mt) throws DatatypeConfigurationException {
    DateTime availableFrom = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
    DateTime rangeStart = new DateTime(2014, 1, 1, 0, 0, DateTimeZone.UTC);
    DateTime rangeEnd = new DateTime(2014, 1, 3, 0, 0, DateTimeZone.UTC);
    final String expectedErrMsgSuffix = " can only be queried after Thursday, January 1, 2015 12:00:00 AM UTC. "
        + "Please adjust the selected time range accordingly.";
    testColUnAvailableInTimeRange(Optional.of(availableFrom),
        Optional.<DateTime>absent(), rangeStart, rangeEnd, expectedErrMsgSuffix, mt);
}
/**
 * Column with only an end date, queried over a range after it: the error must
 * cite the end bound only.
 */
@Test(dataProvider = "mediaTypeData")
public void testQueryColumnWithOnlyEndDate(MediaType mt) throws DatatypeConfigurationException {
    DateTime availableTill = new DateTime(2015, 1, 30, 23, 0, DateTimeZone.UTC);
    DateTime rangeStart = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC);
    DateTime rangeEnd = new DateTime(2016, 1, 3, 0, 0, DateTimeZone.UTC);
    final String expectedErrMsgSuffix = " can only be queried before Friday, January 30, 2015 11:00:00 PM UTC. "
        + "Please adjust the selected time range accordingly.";
    testColUnAvailableInTimeRange(Optional.<DateTime>absent(),
        Optional.of(availableTill), rangeStart, rangeEnd, expectedErrMsgSuffix, mt);
}
/**
 * Shared scaffolding for the column-availability tests: builds a throwaway db, cube,
 * storage and fact whose test dimension carries the given availability window, runs an
 * estimate over [queryFrom, queryTill], and verifies a 400 response carrying
 * COLUMN_UNAVAILABLE_IN_TIME_RANGE with the expected message suffix and error payload.
 * The db and session are always torn down, even when verification fails.
 *
 * @param colStartDate optional start of the column's availability window
 * @param colEndDate optional end of the column's availability window
 * @param queryFrom start of the queried time range
 * @param queryTill end of the queried time range
 * @param expectedErrorMsgSuffix expected error message after the column name
 * @param mt media type under test (from the mediaTypeData provider)
 */
private void testColUnAvailableInTimeRange(@NonNull final Optional<DateTime> colStartDate,
    @NonNull final Optional<DateTime> colEndDate, @NonNull DateTime queryFrom, @NonNull DateTime queryTill,
    @NonNull final String expectedErrorMsgSuffix, @NonNull final MediaType mt) throws DatatypeConfigurationException {
    final WebTarget target = target();
    // Randomized names keep parallel/repeated runs from colliding in the metastore.
    final String testDb = getRandomDbName();
    final String testCube = getRandomCubeName();
    final String testDimensionField = getRandomDimensionField();
    final String testFact = getRandomFactName();
    final String testStorage = getRandomStorageName();
    /* Setup: Begin */
    LensSessionHandle sessionId = openSession(target, "foo", "bar", new LensConf(), mt);
    try {
        createAndSetCurrentDbFailFast(target, sessionId, testDb, mt);
        /* Create a test cube with test dimension field having a start Date and end Date */
        XDimAttribute testXDim = createXDimAttribute(testDimensionField, colStartDate, colEndDate);
        XCube xcube = createXCubeWithDummyMeasure(testCube, Optional.of("dt"), testXDim);
        createCubeFailFast(target, sessionId, xcube, mt);
        /* Create Storage */
        XStorage xs = new XStorage();
        xs.setClassname(HDFSStorage.class.getCanonicalName());
        xs.setName(testStorage);
        RestAPITestUtil.createStorageFailFast(target, sessionId, xs, mt);
        /* Create a fact with test dimension field */
        XColumn xColumn = createXColumn(testDimensionField);
        XFactTable xFactTable = createXFactTableWithColumns(testFact, testCube, xColumn);
        //Create a StorageTable
        XStorageTables tables = new XStorageTables();
        tables.getStorageTable().add(createStorageTblElement(testStorage, "DAILY"));
        xFactTable.setStorageTables(tables);
        createFactFailFast(target, sessionId, xFactTable, mt);
        /* Setup: End */
        DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd-HH");
        final String testQuery = "cube select " + testDimensionField + " from " + testCube + " where TIME_RANGE_IN(dt, "
            + "\"" + dtf.print(queryFrom) + "\",\"" + dtf.print(queryTill) + "\")";
        Response response = estimate(target, Optional.of(sessionId), Optional.of(testQuery), mt);
        final String expectedErrMsg = testDimensionField + expectedErrorMsgSuffix;
        // Error payload echoes the window bounds as epoch millis; absent bounds stay null.
        Long expecAvailableFrom = colStartDate.isPresent() ? colStartDate.get().getMillis() : null;
        Long expecAvailableTill = colEndDate.isPresent() ? colEndDate.get().getMillis() : null;
        final ColUnAvailableInTimeRange expectedErrorPayload = new ColUnAvailableInTimeRange(testDimensionField,
            expecAvailableFrom, expecAvailableTill);
        LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(
            COLUMN_UNAVAILABLE_IN_TIME_RANGE.getLensErrorInfo().getErrorCode(),
            expectedErrMsg, MOCK_STACK_TRACE, expectedErrorPayload, null);
        ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(BAD_REQUEST, expectedLensErrorTO);
        expectedData.verify(response);
    } finally {
        dropDatabaseFailFast(target, sessionId, testDb, mt);
        closeSessionFailFast(target, sessionId, mt);
    }
}
/**
 * Test explain failure with the selected driver throwing a RuntimeException: the
 * response must be a 500 with the driver's message.
 *
 * @throws InterruptedException the interrupted exception
 */
@Test(dataProvider = "mediaTypeData")
public void testExplainRuntimeException(MediaType mt) throws InterruptedException {
    LensSessionHandle session = openSession(target(), "foo", "bar", new LensConf(), mt);
    try {
        Response resp = explain(target(), Optional.of(session),
            Optional.of("select fail, execute_runtime " + " from non_exist"), mt);
        LensErrorTO expectedError = LensErrorTO.composedOf(
            INTERNAL_SERVER_ERROR.getValue(),
            "Internal server error:Runtime exception from query explain",
            MOCK_STACK_TRACE);
        new ErrorResponseExpectedData(Response.Status.INTERNAL_SERVER_ERROR, expectedError).verify(resp);
    } finally {
        closeSessionFailFast(target(), session, mt);
    }
}
/**
* Test execute failure in with selected driver throwing webapp exception.
*
* @throws InterruptedException the interrupted exception
*/
@Test(dataProvider = "mediaTypeData")
public void testExplainWebappException(MediaType mt) throws InterruptedException {
LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf(), mt);
try {
Response response = explain(target(), Optional.of(sessionId), Optional.of("select fail, webappexception "
+ " from non_exist"), mt);
final String expectedErrMsg = "Not found from mock driver";
LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(
NOT_FOUND.getStatusCode(), expectedErrMsg, MOCK_STACK_TRACE);
ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(Response.Status.NOT_FOUND,
expectedLensErrorTO);
expectedData.verify(response);
} finally {
closeSessionFailFast(target(), sessionId, mt);
}
}
private XStorageTableElement createStorageTblElement(String storageName, String... updatePeriod) {
XStorageTableElement tbl = new XStorageTableElement();
tbl.setUpdatePeriods(new XUpdatePeriods());
tbl.setStorageName(storageName);
if (updatePeriod != null) {
for (String p : updatePeriod) {
tbl.getUpdatePeriods().getUpdatePeriod().add(XUpdatePeriod.valueOf(p));
}
}
tbl.setTableDesc(new XStorageTableDesc());
return tbl;
}
}
| |
package es.tid.emulator.node;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.util.Properties;
import es.tid.vntm.topology.elements.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import es.tid.emulator.node.transport.lsp.te.TechnologyParameters;
/**
 * Loads the configuration of an emulated node from two .properties files and
 * exposes it through getters: node identifiers, PCE location, node technology
 * and behaviour flags.
 */
public class NodeInformation {
	/** Path of the main configuration file (PCE address/port, technology flags, traces). */
	private final String mainNode;
	/** Path of the node-local configuration file (nodeId, local bind address). */
	private final String defaultNode;
	/**
	 * Node ID: IPv4
	 */
	private Inet4Address id;
	/**
	 * Local Address to bind the socket
	 */
	private Inet4Address localNodeAddress;
	/**
	 * Technology of the Node (one of the TechnologyParameters constants).
	 */
	private int nodeTechnology = 0;
	/**
	 * Topology Name
	 */
	private String topologyName;
	/**
	 * IPv4 of the PCE
	 */
	private Inet4Address pceID;
	/**
	 * Port of the PCE
	 */
	private int pcePort;
	/** Whether traces are enabled (read from the "SetTraces" property). */
	private boolean setTraces;
	/** Whether RSVP mode is enabled (read from the "RSVPMode" property). */
	private boolean rsvpMode;
	private Logger log;
	// NOTE(review): the three fields below are never populated from the
	// configuration files; they are only exposed via getters/setters/toString().
	private boolean isStatefull = false;
	private boolean isSRCapable = false;
	private int MSD = 0;

	/**
	 * Default class constructor: uses the default configuration file locations.
	 */
	public NodeInformation() {
		this("/usr/local/nodeConfig/defaultConfiguration.properties", "/usr/local/MynodeConfig/defaultConfiguration.properties");
		log = LoggerFactory.getLogger("ROADM");
		log.info("Using default .properties configuration files and directories.");
	}

	/**
	 * Class constructor with two parameters.
	 *
	 * @param mainNode Main node properties configuration file.
	 * @param defaultNode Default node properties configuration file.
	 */
	public NodeInformation(String mainNode, String defaultNode) {
		log = LoggerFactory.getLogger("ROADM");
		this.mainNode = mainNode;
		this.defaultNode = defaultNode;
	}

	/**
	 * Reads both configuration files and populates the node fields.
	 * <p>
	 * I/O failures are logged and leave the fields untouched. As before, a
	 * missing mandatory property results in a NullPointerException from the
	 * {@code .trim()} calls (behaviour preserved).
	 */
	public void readNodeConfiguration() {
		Properties props = new Properties();
		Properties propsNode = new Properties();
		try {
			log.debug("Reading from " + this.mainNode);
			// try-with-resources: the original code leaked both FileInputStreams.
			try (FileInputStream mainIn = new FileInputStream(this.mainNode)) {
				props.load(mainIn);
			}
			log.debug("Reading local nodeId from " + this.defaultNode);
			try (FileInputStream nodeIn = new FileInputStream(this.defaultNode)) {
				propsNode.load(nodeIn);
			}
			String nodeId = propsNode.getProperty("nodeId").trim();
			// The local bind address defaults to the node id unless overridden.
			String localNodeAddressString = nodeId;
			if (propsNode.getProperty("localNodeAddress") != null) {
				localNodeAddressString = propsNode.getProperty("localNodeAddress").trim();
			}
			String pceAddress = props.getProperty("PCEAddress").trim();
			String pcepPort = props.getProperty("PCEPPort").trim();
			String flexi = props.getProperty("flexi").trim();
			String mpls = props.getProperty("mpls").trim();
			String rsvpM = props.getProperty("RSVPMode").trim();
			String traces = props.getProperty("SetTraces").trim();
			topologyName = props.getProperty("networkDescriptionFile");
			rsvpMode = Boolean.parseBoolean(rsvpM);
			pceID = (Inet4Address) Inet4Address.getByName(pceAddress);
			pcePort = Integer.parseInt(pcepPort);
			id = (Inet4Address) InetAddress.getByName(nodeId);
			localNodeAddress = (Inet4Address) InetAddress.getByName(localNodeAddressString);
			setTraces = Boolean.parseBoolean(traces);
			// Technology selection: flexi (SSON) wins over mpls (MPLS); default is WSON.
			if (Boolean.parseBoolean(flexi)) {
				nodeTechnology = TechnologyParameters.SSON;
			} else if (Boolean.parseBoolean(mpls)) {
				nodeTechnology = TechnologyParameters.MPLS;
			} else {
				nodeTechnology = TechnologyParameters.WSON;
			}
		} catch (IOException e) {
			// FIXME: decide whether a read failure should be fatal; for now it is only logged.
			log.error("Could not read node configuration: " + e.getMessage(), e);
		}
	}

	/** @return the node technology (a TechnologyParameters constant). */
	public int getNodeTechnology() {
		return nodeTechnology;
	}

	public void setNodeTechnology(int nodeTechnology) {
		this.nodeTechnology = nodeTechnology;
	}

	/**
	 * Method to extract the node id.
	 * @return the node identifier as an IPv4 address.
	 */
	public Inet4Address getId() {
		return id;
	}

	/**
	 * @param id the node identifier to set
	 */
	public void setId(Inet4Address id) {
		this.id = id;
	}

	public String getTopologyName() {
		return topologyName;
	}

	public void setTopologyName(String topologyName) {
		this.topologyName = topologyName;
	}

	public Inet4Address getPceID() {
		return pceID;
	}

	public void setPceID(Inet4Address pceID) {
		this.pceID = pceID;
	}

	public int getPcePort() {
		return pcePort;
	}

	public void setPcePort(int pcePort) {
		this.pcePort = pcePort;
	}

	public boolean isRsvpMode() {
		return rsvpMode;
	}

	public void setRsvpMode(boolean rsvpMode) {
		this.rsvpMode = rsvpMode;
	}

	public boolean isSetTraces() {
		return setTraces;
	}

	public void setSetTraces(boolean setTraces) {
		this.setTraces = setTraces;
	}

	public boolean isStatefull() {
		return isStatefull;
	}

	public void setStatefull(boolean isStatefull) {
		this.isStatefull = isStatefull;
	}

	public Inet4Address getLocalNodeAddress() {
		return localNodeAddress;
	}

	public void setLocalNodeAddress(Inet4Address localNodeAddress) {
		this.localNodeAddress = localNodeAddress;
	}

	@Override
	public String toString() {
		return "NodeInformation [id=" + id + ", localNodeAddress="
				+ localNodeAddress + ", nodeTechnology=" + nodeTechnology
				+ ", topologyName=" + topologyName + ", pceID=" + pceID
				+ ", pcePort=" + pcePort + ", rsvpMode=" + rsvpMode
				+ ", isStatefull=" + isStatefull + ", isSRCapable="
				+ isSRCapable + "]";
	}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tree.love.providers.downloads.util;
/**
* Map of {@code long} to {@code long}. Unlike a normal array of longs, there
* can be gaps in the indices. It is intended to be more memory efficient than
* using a {@code HashMap}, both because it avoids auto-boxing keys and values
* and its data structure doesn't rely on an extra entry object for each
* mapping.
* <p>
* Note that this container keeps its mappings in an array data structure, using
* a binary search to find keys. The implementation is not intended to be
* appropriate for data structures that may contain large numbers of items. It
* is generally slower than a traditional HashMap, since lookups require a
* binary search and adds and removes require inserting and deleting entries in
* the array. For containers holding up to hundreds of items, the performance
* difference is not significant, less than 50%.
* </p>
* <p>
* It is possible to iterate over the items in this container using
* {@link #keyAt(int)} and {@link #valueAt(int)}. Iterating over the keys using
* <code>keyAt(int)</code> with ascending values of the index will return the
* keys in ascending order, or the values corresponding to the keys in ascending
* order in the case of <code>valueAt(int)</code>.
* </p>
*
* @hide
*/
/**
 * A memory-lean {@code long -> long} map backed by two parallel arrays kept
 * sorted by key. Compared to a {@code HashMap<Long, Long>} it avoids boxing
 * keys/values and allocating a per-entry object, at the cost of O(log n)
 * lookups (binary search) and O(n) inserts/removes (array shifting). It is
 * intended for containers of up to a few hundred entries; above that a
 * traditional hash map is usually faster.
 * <p>
 * Iteration is supported through {@link #keyAt(int)} and {@link #valueAt(int)}:
 * ascending indices yield keys (and their associated values) in ascending key
 * order.
 * </p>
 *
 * @hide
 */
public class LongSparseLongArray implements Cloneable {
    /** Sorted keys; only the first {@code mSize} slots are meaningful. */
    private long[] mKeys;
    /** Values, parallel to {@code mKeys}. */
    private long[] mValues;
    /** Number of live mappings. */
    private int mSize;

    /**
     * Creates an empty array with a default capacity of ten mappings.
     */
    public LongSparseLongArray() {
        this(10);
    }

    /**
     * Creates an empty array pre-sized to hold {@code initialCapacity}
     * mappings without further allocation. A capacity of zero installs a
     * shared empty backing array, deferring allocation until the first insert.
     */
    public LongSparseLongArray(int initialCapacity) {
        if (initialCapacity == 0) {
            mKeys = ContainerHelpers.EMPTY_LONGS;
            mValues = ContainerHelpers.EMPTY_LONGS;
        } else {
            final int capacity = ArrayUtils.idealLongArraySize(initialCapacity);
            mKeys = new long[capacity];
            mValues = new long[capacity];
        }
        mSize = 0;
    }

    /** Returns a copy of this container; the backing arrays are cloned too. */
    @Override
    public LongSparseLongArray clone() {
        try {
            LongSparseLongArray copy = (LongSparseLongArray) super.clone();
            copy.mKeys = mKeys.clone();
            copy.mValues = mValues.clone();
            return copy;
        } catch (CloneNotSupportedException ignored) {
            // Unreachable in practice: this class implements Cloneable.
            return null;
        }
    }

    /**
     * Returns the value mapped from {@code key}, or {@code 0} if the key is
     * absent.
     */
    public long get(long key) {
        return get(key, 0);
    }

    /**
     * Returns the value mapped from {@code key}, or
     * {@code valueIfKeyNotFound} if the key is absent.
     */
    public long get(long key, long valueIfKeyNotFound) {
        final int index = ContainerHelpers.binarySearch(mKeys, mSize, key);
        return (index >= 0) ? mValues[index] : valueIfKeyNotFound;
    }

    /**
     * Removes the mapping for {@code key}, if present.
     */
    public void delete(long key) {
        final int index = ContainerHelpers.binarySearch(mKeys, mSize, key);
        if (index < 0) {
            return;
        }
        removeAt(index);
    }

    /**
     * Removes the mapping at {@code index} by shifting the tail of both
     * arrays one slot to the left.
     */
    public void removeAt(int index) {
        final int tail = mSize - (index + 1);
        System.arraycopy(mKeys, index + 1, mKeys, index, tail);
        System.arraycopy(mValues, index + 1, mValues, index, tail);
        mSize--;
    }

    /**
     * Maps {@code key} to {@code value}, replacing any previous mapping for
     * the same key. New keys are inserted at their sorted position, growing
     * the backing arrays if required.
     */
    public void put(long key, long value) {
        int index = ContainerHelpers.binarySearch(mKeys, mSize, key);
        if (index >= 0) {
            // Key already present: overwrite in place.
            mValues[index] = value;
            return;
        }
        // binarySearch returns the one's complement of the insertion point.
        index = ~index;
        if (mSize >= mKeys.length) {
            growKeyAndValueArrays(mSize + 1);
        }
        final int toShift = mSize - index;
        if (toShift != 0) {
            System.arraycopy(mKeys, index, mKeys, index + 1, toShift);
            System.arraycopy(mValues, index, mValues, index + 1, toShift);
        }
        mKeys[index] = key;
        mValues[index] = value;
        mSize++;
    }

    /**
     * Returns the number of key-value mappings currently stored.
     */
    public int size() {
        return mSize;
    }

    /**
     * Returns the key of the {@code index}th mapping
     * ({@code 0 <= index < size()}). Keys are in ascending order, so
     * {@code keyAt(0)} is the smallest key and {@code keyAt(size()-1)} the
     * largest.
     */
    public long keyAt(int index) {
        return mKeys[index];
    }

    /**
     * Returns the value of the {@code index}th mapping
     * ({@code 0 <= index < size()}); values are ordered by their associated
     * (ascending) keys.
     */
    public long valueAt(int index) {
        return mValues[index];
    }

    /**
     * Returns the index at which {@link #keyAt} would return {@code key}, or
     * a negative number if the key is not mapped.
     */
    public int indexOfKey(long key) {
        return ContainerHelpers.binarySearch(mKeys, mSize, key);
    }

    /**
     * Returns an index at which {@link #valueAt} returns {@code value}, or
     * {@code -1} if no key maps to it. This is a linear scan, and when several
     * keys share the value only the first index is reported.
     */
    public int indexOfValue(long value) {
        for (int i = 0; i < mSize; i++) {
            if (mValues[i] == value) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Removes every mapping. The backing arrays are kept for reuse.
     */
    public void clear() {
        mSize = 0;
    }

    /**
     * Adds a mapping, optimized for the case where {@code key} is larger than
     * every existing key; otherwise it falls back to {@link #put}.
     */
    public void append(long key, long value) {
        if (mSize != 0 && key <= mKeys[mSize - 1]) {
            put(key, value);
            return;
        }
        final int pos = mSize;
        if (pos >= mKeys.length) {
            growKeyAndValueArrays(pos + 1);
        }
        mKeys[pos] = key;
        mValues[pos] = value;
        mSize = pos + 1;
    }

    /** Reallocates both backing arrays to hold at least {@code minNeededSize} slots. */
    private void growKeyAndValueArrays(int minNeededSize) {
        final int newLength = ArrayUtils.idealLongArraySize(minNeededSize);
        long[] grownKeys = new long[newLength];
        long[] grownValues = new long[newLength];
        System.arraycopy(mKeys, 0, grownKeys, 0, mKeys.length);
        System.arraycopy(mValues, 0, grownValues, 0, mValues.length);
        mKeys = grownKeys;
        mValues = grownValues;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Renders the mappings as <code>{k1=v1, k2=v2, ...}</code> in key order.
     */
    @Override
    public String toString() {
        if (mSize <= 0) {
            return "{}";
        }
        StringBuilder sb = new StringBuilder(mSize * 28);
        sb.append('{');
        for (int i = 0; i < mSize; i++) {
            if (i != 0) {
                sb.append(", ");
            }
            sb.append(mKeys[i]).append('=').append(mValues[i]);
        }
        return sb.append('}').toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.groboclown.idea.p4ic.config.win;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import java.util.List;
import java.util.prefs.Preferences;
/**
* Taken from
* http://svn.apache.org/repos/asf/incubator/npanday/trunk/components/dotnet-registry/src/main/java/npanday/registry/impl/WinRegistry.java
* It's been modified to remove the external dependencies and not allow writing to the
* registry.
*
* It takes advantage of the Preferences class' ability to inspect the Windows registry.
* Note that this may not be compatible with all Java versions.
*/
/**
 * Read-only Windows registry access that piggybacks on the JDK's private
 * {@link Preferences} native methods via reflection.
 * <p>
 * Taken from
 * http://svn.apache.org/repos/asf/incubator/npanday/trunk/components/dotnet-registry/src/main/java/npanday/registry/impl/WinRegistry.java
 * It's been modified to remove the external dependencies and not allow writing to the
 * registry.
 * <p>
 * It takes advantage of the Preferences class' ability to inspect the Windows registry.
 * Note that this may not be compatible with all Java versions: the reflective lookups
 * fail quietly on non-Windows platforms (all method handles stay null), and internal
 * JDK signatures may differ between releases.
 */
public class PreferencesWinRegistry {
    public static final int HKEY_CURRENT_USER = 0x80000001;
    public static final int HKEY_LOCAL_MACHINE = 0x80000002;
    /** Status code found in {@code handles[1]} when a key opens successfully. */
    public static final int REG_SUCCESS = 0;

    // Registry access masks; only KEY_READ is used since writing is not allowed.
    private static final int KEY_ALL_ACCESS = 0xf003f;
    private static final int KEY_READ = 0x20019;

    private static Preferences userRoot = Preferences.userRoot();
    private static Preferences systemRoot = Preferences.systemRoot();
    private static Class<? extends Preferences> userClass = userRoot.getClass();

    // Handles to the Windows-only private methods of the Preferences
    // implementation; all remain null when running on other platforms.
    private static Method regOpenKey = null;
    private static Method regCloseKey = null;
    private static Method regQueryValueEx = null;
    private static Method regEnumValue = null;
    private static Method regQueryInfoKey = null;
    private static Method regEnumKeyEx = null;

    static {
        try {
            regOpenKey = userClass.getDeclaredMethod(
                    "WindowsRegOpenKey", new Class[]{int.class, byte[].class, int.class}
            );
            regOpenKey.setAccessible(true);
            regCloseKey = userClass.getDeclaredMethod("WindowsRegCloseKey", new Class[]{int.class});
            regCloseKey.setAccessible(true);
            regQueryValueEx = userClass.getDeclaredMethod(
                    "WindowsRegQueryValueEx", new Class[]{int.class, byte[].class}
            );
            regQueryValueEx.setAccessible(true);
            regEnumValue = userClass.getDeclaredMethod(
                    "WindowsRegEnumValue", new Class[]{int.class, int.class, int.class}
            );
            regEnumValue.setAccessible(true);
            regQueryInfoKey = userClass.getDeclaredMethod("WindowsRegQueryInfoKey1", new Class[]{int.class});
            regQueryInfoKey.setAccessible(true);
            regEnumKeyEx = userClass.getDeclaredMethod(
                    "WindowsRegEnumKeyEx", new Class[]{int.class, int.class, int.class}
            );
            regEnumKeyEx.setAccessible(true);
        } catch (NoSuchMethodException e) {
            // we are not on windows, then!
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Read a value from key and value name
     *
     * @param hkey HKEY_CURRENT_USER/HKEY_LOCAL_MACHINE
     * @param key registry key path
     * @param valueName name of the value under the key
     * @return the value, or {@code null} if the key could not be opened
     * @throws IllegalArgumentException if {@code hkey} is not one of the two supported roots
     * @throws IllegalAccessException
     * @throws java.lang.reflect.InvocationTargetException
     */
    public static String readString(int hkey, String key, String valueName) throws
            IllegalArgumentException,
            IllegalAccessException,
            InvocationTargetException {
        if (hkey == HKEY_LOCAL_MACHINE) {
            return readString(systemRoot, hkey, key, valueName);
        } else if (hkey == HKEY_CURRENT_USER) {
            return readString(userRoot, hkey, key, valueName);
        } else {
            throw new IllegalArgumentException("hkey=" + hkey);
        }
    }

    /**
     * Read value(s) and value name(s) form given key
     *
     * @param hkey HKEY_CURRENT_USER/HKEY_LOCAL_MACHINE
     * @param key registry key path
     * @return a map of value name to value, or {@code null} if the key could not be opened
     * @throws IllegalArgumentException if {@code hkey} is not one of the two supported roots
     * @throws IllegalAccessException
     * @throws java.lang.reflect.InvocationTargetException
     */
    public static Map<String, String> readStringValues(int hkey, String key) throws
            IllegalArgumentException,
            IllegalAccessException,
            InvocationTargetException {
        if (hkey == HKEY_LOCAL_MACHINE) {
            return readStringValues(systemRoot, hkey, key);
        } else if (hkey == HKEY_CURRENT_USER) {
            return readStringValues(userRoot, hkey, key);
        } else {
            throw new IllegalArgumentException("hkey=" + hkey);
        }
    }

    /**
     * Read the value name(s) from a given key
     *
     * @param hkey HKEY_CURRENT_USER/HKEY_LOCAL_MACHINE
     * @param key registry key path
     * @return the sub-key name(s), or {@code null} if the key could not be opened
     * @throws IllegalArgumentException if {@code hkey} is not one of the two supported roots
     * @throws IllegalAccessException
     * @throws java.lang.reflect.InvocationTargetException
     */
    public static List<String> readStringSubKeys(int hkey, String key) throws
            IllegalArgumentException,
            IllegalAccessException,
            InvocationTargetException {
        if (hkey == HKEY_LOCAL_MACHINE) {
            return readStringSubKeys(systemRoot, hkey, key);
        } else if (hkey == HKEY_CURRENT_USER) {
            return readStringSubKeys(userRoot, hkey, key);
        } else {
            throw new IllegalArgumentException("hkey=" + hkey);
        }
    }

    // =====================

    private static String readString(Preferences root, int hkey, String key, String value) throws
            IllegalArgumentException,
            IllegalAccessException,
            InvocationTargetException {
        // handles[0] is the opened key handle, handles[1] the status code.
        // Autoboxing replaces the deprecated `new Integer(...)` calls of the original.
        int[] handles = (int[]) regOpenKey.invoke(root, hkey, toCstr(key), KEY_READ);
        if (handles[1] != REG_SUCCESS) {
            return null;
        }
        try {
            byte[] valb = (byte[]) regQueryValueEx.invoke(root, handles[0], toCstr(value));
            // NOTE(review): decodes with the platform default charset, as the original did.
            return (valb != null ? new String(valb).trim() : null);
        } finally {
            // Close the handle even if the reflective query throws (original leaked it).
            regCloseKey.invoke(root, handles[0]);
        }
    }

    private static Map<String, String> readStringValues(Preferences root, int hkey, String key) throws
            IllegalArgumentException,
            IllegalAccessException,
            InvocationTargetException {
        int[] handles = (int[]) regOpenKey.invoke(root, hkey, toCstr(key), KEY_READ);
        if (handles[1] != REG_SUCCESS) {
            return null;
        }
        try {
            HashMap<String, String> results = new HashMap<String, String>();
            int[] info = (int[]) regQueryInfoKey.invoke(root, handles[0]);
            int count = info[2]; // number of values under the key
            int maxlen = info[3]; // value name length max
            for (int index = 0; index < count; index++) {
                byte[] name = (byte[]) regEnumValue.invoke(root, handles[0], index, maxlen + 1);
                String value = readString(hkey, key, new String(name));
                results.put(new String(name).trim(), value);
            }
            return results;
        } finally {
            // Close the handle even if a reflective call throws (original leaked it).
            regCloseKey.invoke(root, handles[0]);
        }
    }

    private static List<String> readStringSubKeys(Preferences root, int hkey, String key) throws
            IllegalArgumentException,
            IllegalAccessException,
            InvocationTargetException {
        int[] handles = (int[]) regOpenKey.invoke(root, hkey, toCstr(key), KEY_READ);
        if (handles[1] != REG_SUCCESS) {
            return null;
        }
        try {
            List<String> results = new ArrayList<String>();
            int[] info = (int[]) regQueryInfoKey.invoke(root, handles[0]);
            int count = info[0]; // number of sub-keys
            int maxlen = info[3]; // name length max
            for (int index = 0; index < count; index++) {
                byte[] name = (byte[]) regEnumKeyEx.invoke(root, handles[0], index, maxlen + 1);
                results.add(new String(name).trim());
            }
            return results;
        } finally {
            // Close the handle even if a reflective call throws (original leaked it).
            regCloseKey.invoke(root, handles[0]);
        }
    }

    /** Converts a Java string into a NUL-terminated byte array as expected by the native calls. */
    private static byte[] toCstr(String str) {
        byte[] result = new byte[str.length() + 1];
        for (int i = 0; i < str.length(); i++) {
            result[i] = (byte) str.charAt(i);
        }
        result[str.length()] = 0;
        return result;
    }
}
| |
/*
* Copyright 2006 Webmedia Group Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.araneaframework.http.widget;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.Serializable;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.araneaframework.InputData;
import org.araneaframework.OutputData;
import org.araneaframework.Path;
import org.araneaframework.core.Assert;
import org.araneaframework.core.BaseApplicationWidget;
import org.araneaframework.http.util.ServletUtil;
/**
* Widget that serves downloadable content.
*
* @author Alar Kvell (alar@araneaframework.org)
*/
/**
 * Widget that streams downloadable content to the client.
 *
 * @author Alar Kvell (alar@araneaframework.org)
 */
public class DownloaderWidget extends BaseApplicationWidget {

  /** Stream holding the data to send; dropped once the response is written. */
  protected InputStream dataStream;

  /** Content length to report, or -1 when unknown up-front. */
  protected int length = -1;

  /** Content type to report with the response. */
  protected String contentType;

  /** Extra HTTP headers to send with the response. */
  protected Map<String, String> headers;

  /** Optional callback supplying the download data lazily (serialization-friendly). */
  protected DownloadStreamCallback dataStreamCallback;

  /**
   * Instantiates widget with download data.
   *
   * @param data The data to be downloaded as bytes.
   * @param contentType The content type for the bytes.
   */
  public DownloaderWidget(byte[] data, String contentType) {
    this(new ByteArrayInputStream(data), data.length, contentType);
  }

  /**
   * Instantiates widget with download data.
   *
   * @param data The data to be downloaded as bytes.
   * @param headers Headers to be sent with the downloaded data.
   * @since 1.1
   */
  public DownloaderWidget(byte[] data, Map<String, String> headers) {
    this(new ByteArrayInputStream(data), data.length, null, headers);
  }

  /**
   * Instantiates widget with download data.
   *
   * @param dataStream The stream pointing to the data to be downloaded.
   * @param contentType The content type for the data stream.
   * @since 2.0
   */
  public DownloaderWidget(InputStream dataStream, String contentType) {
    this(dataStream, -1, contentType);
  }

  /**
   * Instantiates widget with download data.
   *
   * @param dataStream The stream pointing to the data to be downloaded.
   * @param length The (data stream) length information to send with the downloaded data. Specify -1 for no length.
   * @param contentType The content type for the data stream.
   * @since 2.0
   */
  public DownloaderWidget(InputStream dataStream, long length, String contentType) {
    this(dataStream, length, contentType, null);
  }

  /**
   * Instantiates widget with download data.
   *
   * @param dataStream The stream pointing to the data to be downloaded.
   * @param length The (data stream) length information to send with the downloaded data. Specify -1 for no length.
   * @param contentType The content type for the data stream.
   * @param headers Headers to be sent with the downloaded data.
   * @since 2.0
   */
  public DownloaderWidget(InputStream dataStream, long length, String contentType, Map<String, String> headers) {
    Assert.notNullParam(dataStream, "dataStream");
    this.dataStream = dataStream;
    this.length = (int) length;
    this.contentType = contentType;
    this.headers = headers;
  }

  /**
   * Instantiates widget with download data. Providing a callback is the best way for sending large files to the client!
   *
   * @param callback The callback that will be called to fetch download data.
   * @since 2.0
   */
  public DownloaderWidget(DownloadStreamCallback callback) {
    Assert.notNullParam(callback, "callback");
    this.dataStreamCallback = callback;
  }

  /**
   * Resolves the stream to send: the callback's stream when one is provided,
   * otherwise the stream given at construction time.
   */
  protected InputStream getDataStream() {
    if (this.dataStreamCallback != null) {
      InputStream fromCallback = this.dataStreamCallback.getStreamToDownload();
      if (fromCallback != null) {
        return fromCallback;
      }
    }
    return this.dataStream;
  }

  /** Resolves the content type, preferring the callback's value when present. */
  protected String getContentType() {
    String fromCallback = this.dataStreamCallback == null ? null : this.dataStreamCallback.getContentType();
    return StringUtils.defaultString(fromCallback, this.contentType);
  }

  /** Resolves the content length, preferring the callback's value when non-negative. */
  protected int getLength() {
    if (this.dataStreamCallback != null) {
      int fromCallback = this.dataStreamCallback.getLength();
      if (fromCallback >= 0) {
        return fromCallback;
      }
    }
    return this.length;
  }

  /** Resolves the response headers, preferring the callback's headers when present. */
  protected Map<String, String> getHeaders() {
    if (this.dataStreamCallback != null && this.dataStreamCallback.getHeaders() != null) {
      return this.dataStreamCallback.getHeaders();
    }
    return this.headers;
  }

  /**
   * Writes the download data to the HTTP response: headers first, then the
   * stream contents, then post-processing (length fix-up and cleanup).
   */
  @Override
  protected void action(Path path, InputData input, OutputData output) throws Exception {
    HttpServletResponse response = ServletUtil.getResponse(output);
    beforeFile(response);
    InputStream source = getDataStream();
    long copied = IOUtils.copyLarge(source, response.getOutputStream());
    IOUtils.closeQuietly(source);
    afterFile(response, copied);
  }

  /** Sets content type, custom headers, and (when known) the content length. */
  protected void beforeFile(HttpServletResponse response) {
    response.setContentType(getContentType());
    Map<String, String> responseHeaders = getHeaders();
    if (responseHeaders != null) {
      for (Map.Entry<String, String> header : responseHeaders.entrySet()) {
        response.setHeader(header.getKey(), header.getValue());
      }
    }
    int knownLength = getLength();
    if (knownLength >= 0) {
      response.setContentLength(knownLength);
    }
  }

  /**
   * After streaming: reports the actually-copied length when none was known
   * up-front, and releases stream references.
   */
  protected void afterFile(HttpServletResponse response, long length) {
    if (getLength() < 0 && length >= 0 && length < Integer.MAX_VALUE) {
      response.setContentLength((int) length);
    }
    // Streams are not serializable and must not be reused after the response.
    this.dataStream = null;
    this.dataStreamCallback = null;
  }

  /**
   * If a file download stream is given to e.g. popup context, the stream must be serializable. Since streams are not
   * serializable, this callback requests the stream only when needed to output it. Therefore, it escapes the
   * serialization step.
   *
   * @author Martti Tamm (martti@araneaframework.org)
   * @since 2.0
   */
  public abstract static class DownloadStreamCallback implements Serializable {

    /**
     * Override this to specify a stream to data to send as the download content.
     *
     * @return The stream for the response content.
     */
    public abstract InputStream getStreamToDownload();

    /**
     * Override this when the callback can give the content type of the stream.
     *
     * @return The content type of the stream.
     */
    public String getContentType() {
      return null;
    }

    /**
     * Override this when the callback can give the length of the stream.
     *
     * @return The length of the stream.
     */
    public int getLength() {
      return -1;
    }

    /**
     * Override this when the callback can give the HTTP header with the stream.
     *
     * @return The HTTP headers to send with the stream.
     */
    public Map<String, String> getHeaders() {
      return null;
    }
  }
}
| |
package com.robparrett.genericmatchinggame;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.InputMultiplexer;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Cell;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.utils.viewport.ScreenViewport;
/**
* Created by Rob on 4/25/2015.
*/
public class ScoresScreen extends InputAdapter implements Screen {
    /** The three score tabs, one per gameplay difficulty. */
    public enum Tabs {EASY, MED, HARD}
    /** Currently selected tab. */
    public Tabs tab = Tabs.EASY;
    /** Row index highlighted as the newest score in the last-played difficulty; -1 for none. */
    public int lastIndex = -1;
    private GenericMatchingGame game;
    private OrthographicCamera cam;
    private Sprite bgSprite;
    // Root layout table, plus one prebuilt score table per difficulty.
    private Table table;
    private Table easyTable;
    private Table medTable;
    private Table hardTable;
    // Tab selector buttons and the replay button.
    private EasyTabButton easyTabButton;
    private MedTabButton medTabButton;
    private HardTabButton hardTabButton;
    private PlayAgainButton playAgainButton;
    // Cell in the root table whose actor is swapped when the active tab changes.
    private Cell scoresCell;
    private InputMultiplexer inputMultiplexer;
    private Stage stage;
    /**
     * Builds the high-scores screen: camera, background, tab buttons and the
     * three per-difficulty score tables, then opens on the tab matching the
     * difficulty that was last played.
     *
     * @param g the game instance providing the texture atlas, skin and score data
     */
    public ScoresScreen(GenericMatchingGame g) {
        game = g;
        float w = Gdx.graphics.getWidth();
        float h = Gdx.graphics.getHeight();
        cam = new OrthographicCamera(w, h);
        cam.position.set(cam.viewportWidth / 2f, cam.viewportHeight / 2f, 0);
        bgSprite = new Sprite(game.atlas.findRegion("bg"));
        bgSprite.setSize(w, h);
        Label title = new Label("High Scores", game.skin, "title");
        easyTabButton = new EasyTabButton(game);
        medTabButton = new MedTabButton(game);
        hardTabButton = new HardTabButton(game);
        playAgainButton = new PlayAgainButton(game);
        table = new Table();
        table.setFillParent(true);
        // Layout rows: title, tab buttons, swappable scores cell, play-again button.
        table.add(title).colspan(3).pad(80);
        table.row();
        table.add(easyTabButton).padBottom(40);
        table.add(medTabButton).padBottom(40);
        table.add(hardTabButton).padBottom(40);
        table.row();
        scoresCell = table.add().colspan(3);
        table.row();
        table.add(playAgainButton).colspan(3).expandY();
        initTables();
        // Select the tab of the most recently played difficulty.
        switch (game.data.lastDifficulty) {
            case EASY:
                switchTab(Tabs.EASY);
                break;
            case MED:
                switchTab(Tabs.MED);
                break;
            case HARD:
                switchTab(Tabs.HARD);
                break;
        }
        stage = new Stage(new ScreenViewport());
        // Stage handles UI input first; this InputAdapter receives what is left.
        inputMultiplexer = new InputMultiplexer(stage, this);
        stage.addActor(table);
    }
public void initTables() {
easyTable = new Table();
medTable = new Table();
hardTable = new Table();
for (Tabs tab : Tabs.values()) {
Table table;
ScoresList scores;
MatchScreen.Difficulty difficulty;
switch (tab) {
default:
case EASY:
table = easyTable;
difficulty = MatchScreen.Difficulty.EASY;
scores = game.data.getScoresList(difficulty);
break;
case MED:
table = medTable;
difficulty = MatchScreen.Difficulty.MED;
scores = game.data.getScoresList(difficulty);
break;
case HARD:
table = hardTable;
difficulty = MatchScreen.Difficulty.HARD;
scores = game.data.getScoresList(difficulty);
break;
}
for (int i = 0; i < 10; i++) {
String style = "default";
if (difficulty == game.data.lastDifficulty && i == lastIndex) {
style = "newScore";
}
table.add(new Label(MiscUtil.ordinal(i + 1), game.skin, style)).padRight(40);
if (i < scores.size()) {
ScoreEntry s = scores.get(i);
table.add(new Label(s.initials, game.skin, style));
table.add(new Label(String.format("%.2f", s.score), game.skin, style)).padLeft(40);
} else {
table.add(new Label("---", game.skin, style));
table.add(new Label("??.??", game.skin, style)).padLeft(40);
}
table.row();
}
}
}
public void switchTab(MatchScreen.Difficulty d) {
switch (d) {
default:
case EASY:
switchTab(Tabs.EASY);
break;
case MED:
switchTab(Tabs.MED);
break;
case HARD:
switchTab(Tabs.HARD);
break;
}
}
public void switchTab(Tabs t) {
tab = t;
scoresCell.clearActor();
easyTabButton.setChecked(false);
medTabButton.setChecked(false);
hardTabButton.setChecked(false);
switch (t) {
case EASY:
scoresCell.setActor(easyTable);
easyTabButton.setChecked(true);
break;
case MED:
scoresCell.setActor(medTable);
medTabButton.setChecked(true);
break;
case HARD:
scoresCell.setActor(hardTable);
hardTabButton.setChecked(true);
break;
}
table.layout();
}
public void setShowPlayAgainButton(boolean show) {
playAgainButton.setVisible(show);
}
@Override
public void render(float delta) {
Gdx.gl.glClearColor(0, 0, 0.2f, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
// bg
cam.update();
game.batch.setProjectionMatrix(cam.combined);
game.batch.begin();
bgSprite.draw(game.batch);
game.batch.end();
// stage
stage.act(delta);
stage.draw();
}
@Override
public void resize(int width, int height) {
stage.getViewport().update(width, height, true);
}
@Override
public void show() {
Gdx.input.setInputProcessor(inputMultiplexer);
}
@Override
public void hide() {
}
@Override
public void pause() {
}
@Override
public void resume() {
}
@Override
public void dispose() {
stage.dispose();
}
@Override
public boolean keyDown(int keyCode) {
if (keyCode == Input.Keys.BACK) {
game.setScreen(game.mainMenuScreen);
}
return false;
}
@Override
public boolean touchDown(int screenX, int screenY, int pointer, int button) {
Gdx.app.log("DEBUG", "scoreScreen touchDown");
return false;
}
}
| |
package de.orestes;
import java.io.*;
/**
*
* @author Rolfs
*/
/**
 * Serializable value object describing a debtor (customer account).
 * Instances are either populated field-by-field via setters or through the
 * convenience constructors used by the XML parser (XMLParseDebtor).
 *
 * Notes on state flags: {@code invoiceState} and {@code ticketprintState}
 * are free-form strings whose two-letter codes are probed by the
 * {@code isXxx()} helpers (e.g. "IN", "LE", "AB", "TK", "WK", "KT").
 */
public class Debtor implements Serializable
{
    static final long serialVersionUID = 1L;

    private int clientId;
    private int debtorNo;
    private String name;
    private String salutation;
    private String name1;
    private String name2;
    private String name3;
    private String street;
    private String zIP;
    private String city;
    private String country;
    private String phone;
    private String mobile;
    private String eMail;
    private String assign1;
    private String assign2;
    private String salesClass;
    private Double creditLimit;
    private Double openValue;
    private String invoiceState;
    private String ticketprintState;
    private String ticketprintText;
    // Intentionally NOT initialized by the default constructor: stays null
    // until setPassword() is called (preserves the original behavior).
    private String password;

    //--------------------------------------------------------------------- 0
    /** Creates a new instance with all text fields empty and both amounts 0.0. */
    public Debtor()
    {
        clientId = 0;
        debtorNo = 0;
        // String literals instead of new String("") — identical behavior,
        // no needless allocations.
        name = "";
        salutation = "";
        name1 = "";
        name2 = "";
        name3 = "";
        street = "";
        zIP = "";
        city = "";
        country = "";
        phone = "";
        mobile = "";
        eMail = "";
        assign1 = "";
        assign2 = "";
        salesClass = "";
        creditLimit = 0.0;
        openValue = 0.0;
        invoiceState = "";
        ticketprintState = "";
        ticketprintText = "";
    }

    //--------------------------------------------------------------------- 1
    /**
     * Full constructor with numeric amounts, used by XMLParseDebtor.
     * String arguments are stored as-is (no copies, no validation).
     */
    public Debtor( Integer inClientId, Integer inDebtorNo, String inName, String inSalutation, String inName1, String inName2, String inName3,
                   String inStreet, String inZIP, String inCity, String inCountry, String inPhone, String inMobile, String inEMail,
                   String inAssign1, String inAssign2, String inSalesClass, Double inCreditLimit, Double inOpenValue,
                   String inInvoiceState, String inTicketprintState, String inTicketprintText)
    {
        clientId = inClientId;
        debtorNo = inDebtorNo;
        name = inName;
        salutation = inSalutation;
        name1 = inName1;
        name2 = inName2;
        name3 = inName3;
        street = inStreet;
        zIP = inZIP;
        city = inCity;
        country = inCountry;
        phone = inPhone;
        mobile = inMobile;
        eMail = inEMail;
        assign1 = inAssign1;
        assign2 = inAssign2;
        salesClass = inSalesClass;
        creditLimit = inCreditLimit;
        openValue = inOpenValue;
        invoiceState = inInvoiceState;
        ticketprintState = inTicketprintState;
        ticketprintText = inTicketprintText;
    }

    //--------------------------------------------------------------------- 2
    /**
     * Full constructor with textual amounts, used by XMLParseDebtor.
     * Credit limit and open value are parsed leniently: any parse failure
     * (including null) falls back to 0.0 — see the String setters.
     */
    public Debtor( Integer inClientId, Integer inDebtorNo, String inName, String inSalutation, String inName1, String inName2, String inName3,
                   String inStreet, String inZIP, String inCity, String inCountry, String inPhone, String inMobile, String inEMail,
                   String inAssign1, String inAssign2, String inSalesClass, String inCreditLimit, String inOpenValue,
                   String inInvoiceState, String inTicketprintState, String inTicketprintText)
    {
        clientId = inClientId;
        debtorNo = inDebtorNo;
        name = inName;
        salutation = inSalutation;
        name1 = inName1;
        name2 = inName2;
        name3 = inName3;
        street = inStreet;
        zIP = inZIP;
        city = inCity;
        country = inCountry;
        phone = inPhone;
        mobile = inMobile;
        eMail = inEMail;
        assign1 = inAssign1;
        assign2 = inAssign2;
        salesClass = inSalesClass;
        this.setCreditLimit(inCreditLimit);
        this.setOpenValue(inOpenValue);
        invoiceState = inInvoiceState;
        ticketprintState = inTicketprintState;
        ticketprintText = inTicketprintText;
    }

    //---------------------------------------------------------------------
    public int getClientId()
    {
        return clientId;
    }

    public int getDebtorNo()
    {
        return debtorNo;
    }

    public String getName()
    {
        return name;
    }

    public String getSalutation()
    {
        return salutation;
    }

    public String getName1()
    {
        return name1;
    }

    public String getName2()
    {
        return name2;
    }

    public String getName3()
    {
        return name3;
    }

    public String getStreet()
    {
        return street;
    }

    public String getZIP()
    {
        return zIP;
    }

    public String getCity()
    {
        return city;
    }

    public String getCountry()
    {
        return country;
    }

    public String getPhone()
    {
        return phone;
    }

    public String getMobile()
    {
        return mobile;
    }

    public String getEMail()
    {
        return eMail;
    }

    public String getAssign1()
    {
        return assign1;
    }

    public String getAssign2()
    {
        return assign2;
    }

    public Double getCreditLimit()
    {
        return creditLimit;
    }

    public String getInvoiceState()
    {
        return invoiceState;
    }

    /** True when the invoice state contains the "IN" (internet booking) code. */
    public Boolean isInternetBooking ()
    {
        return invoiceState.contains("IN");
    }

    /** True when the invoice state contains the "LE" (direct debit) code. */
    public Boolean isDirectDebit ()
    {
        return invoiceState.contains("LE");
    }

    /** True when the invoice state contains the "AB" (acknowledgment) code. */
    public Boolean isAcknowledgment ()
    {
        return invoiceState.contains("AB");
    }

    /** True when the invoice state contains the "TK" (theater account) code. */
    public Boolean isTheaterAccount ()
    {
        return invoiceState.contains("TK");
    }

    public Double getOpenValue()
    {
        return openValue;
    }

    public String getSalesClass()
    {
        return salesClass;
    }

    public String getTicketprintState()
    {
        return ticketprintState;
    }

    /** True when the ticket-print state contains the "WK" (reseller) code. */
    public Boolean isReseller ()
    {
        return ticketprintState.contains("WK");
    }

    /** True when the ticket-print state contains the "KT" (custom text) code. */
    public Boolean isTicketprintText ()
    {
        return ticketprintState.contains("KT");
    }

    public String getTicketprintText()
    {
        return ticketprintText;
    }

    /** May return null if {@link #setPassword(String)} was never called. */
    public String getPassword()
    {
        return password;
    }

    //---------------------------------------------------------------------
    public void setClientId(int nInClientId)
    {
        clientId = nInClientId;
    }

    /**
     * Sets the client id from its decimal string form.
     * @throws NumberFormatException if the string is not a valid integer
     *         (same behavior as the former {@code new Integer(String)}).
     */
    public void setClientId(String strInClientId)
    {
        clientId = Integer.parseInt(strInClientId);
    }

    public void setDebtorNo(int nInDebtorNo)
    {
        debtorNo = nInDebtorNo;
    }

    /**
     * Sets the debtor number from its decimal string form.
     * @throws NumberFormatException if the string is not a valid integer
     */
    public void setDebtorNo(String strInDebtorNo)
    {
        debtorNo = Integer.parseInt(strInDebtorNo);
    }

    public void setName(String strInName)
    {
        name = strInName;
    }

    public void setSalutation(String strInSalutation)
    {
        salutation = strInSalutation;
    }

    public void setName1(String strInName1)
    {
        name1 = strInName1;
    }

    public void setName2(String strInName2)
    {
        name2 = strInName2;
    }

    public void setName3(String strInName3)
    {
        name3 = strInName3;
    }

    public void setStreet(String strInStreet)
    {
        street = strInStreet;
    }

    public void setZIP(String strInZIP)
    {
        zIP = strInZIP;
    }

    public void setCity(String strInCity)
    {
        city = strInCity;
    }

    public void setCountry(String strInCountry)
    {
        country = strInCountry;
    }

    public void setPhone(String strInPhone)
    {
        phone = strInPhone;
    }

    public void setMobile(String strInMobile)
    {
        mobile = strInMobile;
    }

    public void setEMail(String strInEMail)
    {
        eMail = strInEMail;
    }

    public void setAssign1(String assign1)
    {
        this.assign1 = assign1;
    }

    public void setAssign2(String assign2)
    {
        this.assign2 = assign2;
    }

    public void setSalesClass(String salesClass)
    {
        this.salesClass = salesClass;
    }

    /**
     * Sets the credit limit from its string form. Any parse failure
     * (including a null argument) silently falls back to 0.0, matching the
     * original lenient behavior.
     */
    public void setCreditLimit(String creditLimit)
    {
        try {
            // Double.parseDouble instead of the deprecated new Double(String).
            this.creditLimit = Double.parseDouble(creditLimit);
        }
        catch (Exception ex)
        {
            this.creditLimit = 0.0;
        }
    }

    public void setCreditLimit(Double creditLimit)
    {
        this.creditLimit = creditLimit;
    }

    /**
     * Sets the open value from its string form; falls back to 0.0 on any
     * parse failure (including null).
     */
    public void setOpenValue(String openValue)
    {
        try {
            this.openValue = Double.parseDouble(openValue);
        }
        catch (Exception ex)
        {
            this.openValue = 0.0;
        }
    }

    public void setOpenValue(Double openValue)
    {
        this.openValue = openValue;
    }

    public void setInvoiceState(String invoiceState)
    {
        this.invoiceState = invoiceState;
    }

    public void setTicketprintState(String ticketprintState)
    {
        this.ticketprintState = ticketprintState;
    }

    public void setTicketprintText(String ticketprintText)
    {
        this.ticketprintText = ticketprintText;
    }

    public void setPassword(String strInPassword)
    {
        password = strInPassword;
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.introduceField;
import com.intellij.codeInsight.TestFrameworks;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.JavaRefactoringSettings;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.introduce.inplace.KeyboardComboSwitcher;
import com.intellij.refactoring.ui.TypeSelectorManager;
import com.intellij.ui.ListCellRendererWrapper;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemListener;
/**
* User: anna
* Date: 4/8/11
*/
/**
 * Compact panel used by the inplace "Introduce Field" popup. Offers a single
 * combo box for choosing where the new field is initialized (current method /
 * field declaration / constructor / setUp method) plus the occurrences
 * checkbox; unlike the dialog variant it has no visibility UI.
 */
public class IntroduceFieldPopupPanel extends IntroduceFieldCentralPanel {
    /** Combo for the initialization place; stays null when only one choice remains. */
    private @Nullable JComboBox myInitializerCombo;
    // NOTE(review): never assigned anywhere in this class — setVisibility()
    // would throw NPE if invoked; confirm whether it is ever called.
    private JComboBox myVisibilityCombo;
    /** Model backing the initialization-place combo; choices are pruned as constraints apply. */
    private DefaultComboBoxModel myInitialisersPlaceModel;

    public IntroduceFieldPopupPanel(PsiClass parentClass,
                                    PsiExpression initializerExpression,
                                    PsiLocalVariable localVariable,
                                    boolean isCurrentMethodConstructor,
                                    boolean isInvokedOnDeclaration,
                                    boolean willBeDeclaredStatic,
                                    PsiExpression[] occurrences,
                                    boolean allowInitInMethod,
                                    boolean allowInitInMethodIfAll,
                                    TypeSelectorManager typeSelectorManager) {
        super(parentClass, initializerExpression, localVariable, isCurrentMethodConstructor, isInvokedOnDeclaration, willBeDeclaredStatic,
              occurrences, allowInitInMethod, allowInitInMethodIfAll, typeSelectorManager);
    }

    /**
     * Prunes the initialization-place model to the legal choices for the given
     * initializer, then pre-selects the place the user chose last time — but
     * only when that place is still applicable (e.g. IN_SETUP_METHOD requires
     * a test class or an initializer already inside setUp()); otherwise falls
     * back to the current method / field declaration.
     */
    protected void initializeInitializerPlace(PsiExpression initializerExpression,
                                              BaseExpressionToFieldHandler.InitializationPlace ourLastInitializerPlace) {
        if (initializerExpression != null) {
            setEnabledInitializationPlaces(initializerExpression);
            if (!myAllowInitInMethod) {
                myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
            }
        } else {
            // No initializer expression: nothing to initialize anywhere.
            myInitialisersPlaceModel.removeAllElements();
        }
        final PsiMethod setUpMethod = TestFrameworks.getInstance().findSetUpMethod(myParentClass);
        final boolean setupEnabled = hasSetUpChoice();
        if (ourLastInitializerPlace == BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD &&
            setupEnabled && (myInitializerExpression != null && PsiTreeUtil.isAncestor(setUpMethod, myInitializerExpression, false) ||
                             TestFrameworks.getInstance().isTestClass(myParentClass))) {
            myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD);
        }
        else if (ourLastInitializerPlace == BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR &&
                 myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR) > -1 && myParentClass.getConstructors().length > 0) {
            myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR);
        }
        else if (ourLastInitializerPlace == BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION &&
                 myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION) > -1) {
            myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
        }
        else {
            selectInCurrentMethod();
        }
    }

    /** The popup has no extra controls to restore; intentionally a no-op override. */
    @Override
    protected void initializeControls(PsiExpression initializerExpression,
                                      BaseExpressionToFieldHandler.InitializationPlace ourLastInitializerPlace) {
    }

    /** Uses the remembered checkbox state, but only where 'final' is actually allowed. */
    @Override
    public boolean isDeclareFinal() {
        return ourLastCbFinalState && allowFinal();
    }

    /**
     * Default selection: prefer IN_CURRENT_METHOD, then IN_FIELD_DECLARATION.
     * The final else branch re-selects IN_CURRENT_METHOD even though it is not
     * in the model, which leaves the combo without a valid selection — kept
     * as-is (NOTE(review): looks intentional as a "nothing applicable" state,
     * confirm).
     */
    private void selectInCurrentMethod() {
        if (myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD) > -1) {
            myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
        }
        else if (myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION) > -1) {
            myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
        }
        else {
            myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
        }
    }

    /**
     * Returns the chosen initialization place: the combo's selection when the
     * combo was created, otherwise the single remaining model element.
     */
    public BaseExpressionToFieldHandler.InitializationPlace getInitializerPlace() {
        if (myInitializerCombo != null) {
            return (BaseExpressionToFieldHandler.InitializationPlace)myInitializerCombo.getSelectedItem();
        }
        return (BaseExpressionToFieldHandler.InitializationPlace)myInitialisersPlaceModel.getElementAt(0);
    }

    /**
     * Visibility comes from the persisted refactoring settings (there is no
     * visibility UI in this popup); defaults to private.
     */
    public String getFieldVisibility() {
        String visibility = JavaRefactoringSettings.getInstance().INTRODUCE_FIELD_VISIBILITY;
        if (visibility == null) {
            visibility = PsiModifier.PRIVATE;
        }
        return visibility;
    }

    /**
     * Builds the "Initialize in:" row. The combo is only created when more
     * than one initialization place survived pruning; otherwise the panel is
     * returned empty and getInitializerPlace() falls back to the model.
     */
    protected JComponent createInitializerPlacePanel(final ItemListener itemListener, final ItemListener finalUpdater) {
        JPanel groupPanel = new JPanel(new GridBagLayout());
        final GridBagConstraints gridBagConstraints =
            new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL,
                                   JBUI.emptyInsets(), 0, 0);
        myInitialisersPlaceModel = new DefaultComboBoxModel();
        myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
        myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
        myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR);
        // setUp() is only offered inside test classes.
        if (TestFrameworks.getInstance().isTestClass(myParentClass)) {
            myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD);
        }
        initializeInitializerPlace(myInitializerExpression, InplaceIntroduceFieldPopup.ourLastInitializerPlace);
        if (myInitialisersPlaceModel.getSize() > 1) {
            final JLabel initLabel = new JLabel(RefactoringBundle.message("initialize.in.border.title") + ":");
            initLabel.setDisplayedMnemonic('i');
            gridBagConstraints.insets.left = 5;
            gridBagConstraints.anchor = GridBagConstraints.WEST;
            groupPanel.add(initLabel, gridBagConstraints);
            JComboBox initializersCombo = new JComboBox(myInitialisersPlaceModel);
            KeyboardComboSwitcher.setupActions(initializersCombo, myParentClass.getProject());
            initLabel.setLabelFor(initializersCombo);
            initializersCombo.setRenderer(new ListCellRendererWrapper<BaseExpressionToFieldHandler.InitializationPlace>() {
                @Override
                public void customize(JList list,
                                      BaseExpressionToFieldHandler.InitializationPlace value,
                                      int index,
                                      boolean selected,
                                      boolean hasFocus) {
                    setText(getPresentableText(value));
                }
            });
            // Re-fire both listeners on every selection so dependent state
            // (type selector, 'final' checkbox) updates.
            initializersCombo.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    itemListener.itemStateChanged(null);
                    finalUpdater.itemStateChanged(null);
                }
            });
            gridBagConstraints.gridx = 1;
            gridBagConstraints.insets.top = 0;
            gridBagConstraints.insets.left = 0;
            groupPanel.add(initializersCombo, gridBagConstraints);
            myInitializerCombo = initializersCombo;
        }
        return groupPanel;
    }

    /** Human-readable label for each initialization place; null for unknown values. */
    @Nullable
    private static String getPresentableText(BaseExpressionToFieldHandler.InitializationPlace value) {
        if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD) {
            return "current method";
        } else if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR) {
            return "constructor";
        } else if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION) {
            return "field declaration";
        } else if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD){
            return "setUp";
        }
        return null;
    }

    /**
     * Removes places that are no longer valid for the current occurrence set.
     * Returns true when the setUp() choice is kept.
     */
    @Override
    protected boolean updateInitializationPlaceModel(boolean initializedInSetup, boolean initializedInConstructor) {
        myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
        if (!initializedInConstructor) {
            myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR);
        }
        if (!initializedInSetup) {
            myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD);
        } else {
            return true;
        }
        return false;
    }

    /** Whether IN_SETUP_METHOD is still present in the model. */
    @Override
    protected boolean hasSetUpChoice() {
        return myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD) > -1;
    }

    /** Programmatically selects "field declaration"; requires the combo to exist. */
    public void setInitializeInFieldDeclaration() {
        LOG.assertTrue(myInitializerCombo != null);
        myInitializerCombo.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
    }

    // NOTE(review): myVisibilityCombo is never created in this class, so this
    // would throw NPE — verify against callers.
    public void setVisibility(String visibility) {
        myVisibilityCombo.setSelectedItem(visibility);
    }

    /** No 'final' checkbox to refresh in the popup; intentionally empty. */
    @Override
    protected void updateCbFinal() {
    }

    /**
     * 'final' is allowed for field-declaration initialization, or constructor
     * initialization of a non-static field; when initializing in a current
     * method that IS a constructor, only if the class has at most one
     * constructor (otherwise other constructors would leave it unassigned).
     */
    @Override
    protected boolean allowFinal() {
        final Object selectedItem = getInitializerPlace();
        boolean allowFinal = selectedItem == BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION ||
                             (selectedItem == BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR && !myWillBeDeclaredStatic);
        if (selectedItem == BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD && myIsCurrentMethodConstructor) {
            final PsiMethod[] constructors = myParentClass.getConstructors();
            allowFinal = constructors.length <= 1;
        }
        return super.allowFinal() && allowFinal;
    }

    /**
     * Adds or removes the IN_CURRENT_METHOD choice depending on whether all
     * occurrences are being replaced and whether that is allowed.
     */
    @Override
    protected void updateInitializerSelection() {
        if (myAllowInitInMethodIfAll || !isReplaceAllOccurrences()) {
            if (myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD) == -1) {
                myInitialisersPlaceModel.insertElementAt(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD, 0);
            }
        } else {
            myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
        }
    }

    /** The popup never re-computes the type selector on selection changes. */
    @Override
    protected boolean shouldUpdateTypeSelector() {
        return false;
    }

    /** Only the "replace all occurrences" checkbox is shown in the popup. */
    @Override
    protected JPanel appendCheckboxes(ItemListener itemListener) {
        final JPanel panel = new JPanel(new GridBagLayout());
        appendOccurrences(itemListener, new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE,
                                                               JBUI.emptyInsets(), 0, 0), panel);
        return panel;
    }

    /** Stacks the initializer-place row above the checkbox row. */
    protected JPanel composeWholePanel(JComponent initializerPlacePanel, JPanel checkboxPanel) {
        final JPanel panel = new JPanel(new GridBagLayout());
        final GridBagConstraints constraints =
            new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE,
                                   JBUI.emptyInsets(), 0, 0);
        panel.add(initializerPlacePanel, constraints);
        constraints.gridy++;
        panel.add(checkboxPanel, constraints);
        return panel;
    }
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
/**
* A partial implementation of the {@link MessageLite} interface which
* implements as many methods of that interface as possible in terms of other
* methods.
*
* @author kenton@google.com Kenton Varda
*/
public abstract class AbstractMessageLite<
MessageType extends AbstractMessageLite<MessageType, BuilderType>,
BuilderType extends AbstractMessageLite.Builder<MessageType, BuilderType>>
implements MessageLite {
protected int memoizedHashCode = 0;
/**
 * Serializes this message into a {@link ByteString}, sized exactly by
 * getSerializedSize().
 */
@Override
public ByteString toByteString() {
    try {
        ByteString.CodedBuilder builder = ByteString.newCodedBuilder(getSerializedSize());
        writeTo(builder.getCodedOutput());
        return builder.build();
    } catch (IOException e) {
        // Writing to an in-memory buffer should never raise IOException.
        throw new RuntimeException(getSerializingExceptionMessage("ByteString"), e);
    }
}
/**
 * Serializes this message into a freshly allocated byte array of exactly
 * getSerializedSize() bytes.
 */
@Override
public byte[] toByteArray() {
    try {
        byte[] bytes = new byte[getSerializedSize()];
        CodedOutputStream stream = CodedOutputStream.newInstance(bytes);
        writeTo(stream);
        // Verifies the declared size matched what was actually written.
        stream.checkNoSpaceLeft();
        return bytes;
    } catch (IOException e) {
        // In-memory serialization should never raise IOException.
        throw new RuntimeException(getSerializingExceptionMessage("byte array"), e);
    }
}
/**
 * Serializes this message to {@code output}, buffering through a
 * CodedOutputStream sized from the serialized message size.
 */
@Override
public void writeTo(final OutputStream output) throws IOException {
    int preferredSize = CodedOutputStream.computePreferredBufferSize(getSerializedSize());
    CodedOutputStream wrapped = CodedOutputStream.newInstance(output, preferredSize);
    writeTo(wrapped);
    // Flush the coded wrapper so all buffered bytes reach the underlying stream.
    wrapped.flush();
}
/**
 * Writes this message to {@code output} preceded by its size as a varint,
 * so multiple messages can be concatenated on one stream and read back with
 * mergeDelimitedFrom().
 */
@Override
public void writeDelimitedTo(final OutputStream output) throws IOException {
    int messageSize = getSerializedSize();
    // Buffer for the varint length prefix plus the message body.
    int totalSize = CodedOutputStream.computeRawVarint32Size(messageSize) + messageSize;
    CodedOutputStream wrapped =
        CodedOutputStream.newInstance(output, CodedOutputStream.computePreferredBufferSize(totalSize));
    wrapped.writeRawVarint32(messageSize);
    writeTo(wrapped);
    wrapped.flush();
}
/**
 * Package private helper method for AbstractParser to create
 * UninitializedMessageException.
 *
 * @return an exception naming this message as the uninitialized one
 */
UninitializedMessageException newUninitializedMessageException() {
    return new UninitializedMessageException(this);
}
/**
 * Builds the message used when serialization to an in-memory {@code target}
 * ("ByteString", "byte array", ...) unexpectedly raised an IOException.
 */
private String getSerializingExceptionMessage(String target) {
    StringBuilder message = new StringBuilder("Serializing ");
    message.append(getClass().getName())
           .append(" to a ")
           .append(target)
           .append(" threw an IOException (should never happen).");
    return message.toString();
}
/**
 * Verifies that {@code byteString} holds valid UTF-8.
 *
 * @throws IllegalArgumentException if the bytes are not well-formed UTF-8
 */
protected static void checkByteStringIsUtf8(ByteString byteString)
    throws IllegalArgumentException {
    if (!byteString.isValidUtf8()) {
        throw new IllegalArgumentException("Byte string is not UTF-8.");
    }
}
/**
 * Adds every element of {@code values} to {@code list}; simply delegates to
 * {@code Builder.addAll}, which performs the null checks and copying.
 */
protected static <T> void addAll(final Iterable<T> values,
                                 final Collection<? super T> list) {
    Builder.addAll(values, list);
}
/**
* A partial implementation of the {@link Message.Builder} interface which
* implements as many methods of that interface as possible in terms of
* other methods.
*/
@SuppressWarnings("unchecked")
public abstract static class Builder<
MessageType extends AbstractMessageLite<MessageType, BuilderType>,
BuilderType extends Builder<MessageType, BuilderType>>
implements MessageLite.Builder {
// The compiler produces an error if this is not declared explicitly.
/** Covariant redeclaration: concrete builders must return their own type. */
@Override
public abstract BuilderType clone();
/** Merges from {@code input} using an empty extension registry (extensions ignored). */
@Override
public BuilderType mergeFrom(final CodedInputStream input) throws IOException {
    return mergeFrom(input, ExtensionRegistryLite.getEmptyRegistry());
}
// Re-defined here for return type covariance.
/** Core merge entry point; all other mergeFrom overloads funnel into this. */
@Override
public abstract BuilderType mergeFrom(
    final CodedInputStream input, final ExtensionRegistryLite extensionRegistry)
    throws IOException;
/**
 * Merges the serialized message in {@code data}, requiring that the whole
 * ByteString is consumed (checkLastTagWas(0)).
 *
 * @throws InvalidProtocolBufferException on malformed input; any other
 *         IOException from an in-memory read is unexpected and rethrown
 *         as RuntimeException.
 */
@Override
public BuilderType mergeFrom(final ByteString data) throws InvalidProtocolBufferException {
    try {
        final CodedInputStream input = data.newCodedInput();
        mergeFrom(input);
        input.checkLastTagWas(0);
        return (BuilderType) this;
    } catch (InvalidProtocolBufferException e) {
        throw e;
    } catch (IOException e) {
        throw new RuntimeException(getReadingExceptionMessage("ByteString"), e);
    }
}
/**
 * Same as {@link #mergeFrom(ByteString)} but resolves extensions through the
 * given registry.
 */
@Override
public BuilderType mergeFrom(
    final ByteString data, final ExtensionRegistryLite extensionRegistry)
    throws InvalidProtocolBufferException {
    try {
        final CodedInputStream input = data.newCodedInput();
        mergeFrom(input, extensionRegistry);
        input.checkLastTagWas(0);
        return (BuilderType) this;
    } catch (InvalidProtocolBufferException e) {
        throw e;
    } catch (IOException e) {
        // Reading from an in-memory ByteString should never raise IOException.
        throw new RuntimeException(getReadingExceptionMessage("ByteString"), e);
    }
}
/** Merges the entire byte array; convenience for the (data, 0, length) overload. */
@Override
public BuilderType mergeFrom(final byte[] data) throws InvalidProtocolBufferException {
    return mergeFrom(data, 0, data.length);
}
/**
 * Merges {@code len} bytes of {@code data} starting at {@code off},
 * requiring that the whole slice is consumed.
 *
 * @throws InvalidProtocolBufferException on malformed input
 */
@Override
public BuilderType mergeFrom(final byte[] data, final int off, final int len)
    throws InvalidProtocolBufferException {
    try {
        final CodedInputStream input =
            CodedInputStream.newInstance(data, off, len);
        mergeFrom(input);
        input.checkLastTagWas(0);
        return (BuilderType) this;
    } catch (InvalidProtocolBufferException e) {
        throw e;
    } catch (IOException e) {
        // Reading from an in-memory array should never raise IOException.
        throw new RuntimeException(getReadingExceptionMessage("byte array"), e);
    }
}
/** Merges the entire byte array, resolving extensions through {@code extensionRegistry}. */
@Override
public BuilderType mergeFrom(final byte[] data, final ExtensionRegistryLite extensionRegistry)
    throws InvalidProtocolBufferException {
    return mergeFrom(data, 0, data.length, extensionRegistry);
}
/**
 * Merges {@code len} bytes of {@code data} starting at {@code off}, resolving
 * extensions through {@code extensionRegistry}; the whole slice must be consumed.
 */
@Override
public BuilderType mergeFrom(
    final byte[] data,
    final int off,
    final int len,
    final ExtensionRegistryLite extensionRegistry)
    throws InvalidProtocolBufferException {
    try {
        final CodedInputStream input =
            CodedInputStream.newInstance(data, off, len);
        mergeFrom(input, extensionRegistry);
        input.checkLastTagWas(0);
        return (BuilderType) this;
    } catch (InvalidProtocolBufferException e) {
        throw e;
    } catch (IOException e) {
        // Reading from an in-memory array should never raise IOException.
        throw new RuntimeException(getReadingExceptionMessage("byte array"), e);
    }
}
/**
 * Merges from {@code input} until EOF with an empty extension registry.
 * Unlike the in-memory overloads, stream IOExceptions propagate to the caller.
 */
@Override
public BuilderType mergeFrom(final InputStream input) throws IOException {
    final CodedInputStream codedInput = CodedInputStream.newInstance(input);
    mergeFrom(codedInput);
    codedInput.checkLastTagWas(0);
    return (BuilderType) this;
}
/**
 * Reads a message of this type from {@code input}, resolving extensions
 * against {@code extensionRegistry}, and merges it into this builder.
 * The stream is read to its end; no length prefix is expected.
 */
@Override
public BuilderType mergeFrom(
    final InputStream input, final ExtensionRegistryLite extensionRegistry) throws IOException {
  final CodedInputStream codedInput = CodedInputStream.newInstance(input);
  mergeFrom(codedInput, extensionRegistry);
  // Tag 0 confirms parsing stopped at a clean end-of-input boundary.
  codedInput.checkLastTagWas(0);
  return (BuilderType) this;
}
/**
 * An InputStream wrapper that exposes at most a fixed number of bytes of
 * some other InputStream. Used by mergeDelimitedFrom(). This is
 * intentionally package-private so that UnknownFieldSet can share it.
 */
static final class LimitedInputStream extends FilterInputStream {
  /** Bytes that may still be consumed from the wrapped stream. */
  private int remaining;

  LimitedInputStream(InputStream in, int limit) {
    super(in);
    remaining = limit;
  }

  @Override
  public int available() throws IOException {
    // Never report more bytes than this view is allowed to deliver.
    return Math.min(super.available(), remaining);
  }

  @Override
  public int read() throws IOException {
    if (remaining <= 0) {
      return -1; // Budget exhausted: behave like end-of-stream.
    }
    final int b = super.read();
    if (b >= 0) {
      remaining--;
    }
    return b;
  }

  @Override
  public int read(final byte[] b, final int off, int len)
      throws IOException {
    if (remaining <= 0) {
      return -1; // Budget exhausted: behave like end-of-stream.
    }
    final int count = super.read(b, off, Math.min(len, remaining));
    if (count >= 0) {
      remaining -= count;
    }
    return count;
  }

  @Override
  public long skip(final long n) throws IOException {
    // Cap the skip at the remaining budget and account for what was skipped.
    final long skipped = super.skip(Math.min(n, remaining));
    if (skipped >= 0) {
      remaining -= skipped;
    }
    return skipped;
  }
}
/**
 * Reads one length-delimited message (varint size prefix followed by that
 * many payload bytes) from {@code input} and merges it into this builder.
 *
 * @return false when the stream is already at EOF (no message present)
 */
@Override
public boolean mergeDelimitedFrom(
    final InputStream input, final ExtensionRegistryLite extensionRegistry) throws IOException {
  final int firstByte = input.read();
  if (firstByte == -1) {
    // Clean EOF before any prefix bytes: there is no message to read.
    return false;
  }
  final int size = CodedInputStream.readRawVarint32(firstByte, input);
  // Restrict parsing to exactly the announced payload size so the
  // underlying stream is left positioned at the next message.
  mergeFrom(new LimitedInputStream(input, size), extensionRegistry);
  return true;
}
/**
 * Reads one length-delimited message from {@code input} using an empty
 * extension registry (unknown extensions are not resolved).
 */
@Override
public boolean mergeDelimitedFrom(final InputStream input) throws IOException {
  return mergeDelimitedFrom(input,
      ExtensionRegistryLite.getEmptyRegistry());
}
/**
 * Merges {@code other} into this builder, rejecting messages whose concrete
 * class differs from this builder's message type.
 *
 * @throws IllegalArgumentException if {@code other} is of a different type
 */
@Override
@SuppressWarnings("unchecked") // isInstance takes care of this
public BuilderType mergeFrom(final MessageLite other) {
  final Class<?> expectedType = getDefaultInstanceForType().getClass();
  if (expectedType.isInstance(other)) {
    return internalMergeFrom((MessageType) other);
  }
  // Merging across message types is undefined; fail loudly instead.
  throw new IllegalArgumentException(
      "mergeFrom(MessageLite) can only merge messages of the same type.");
}
/** Type-checked merge hook implemented by generated builder subclasses. */
protected abstract BuilderType internalMergeFrom(MessageType message);

/**
 * Builds the message used when a read from an in-memory {@code target}
 * (ByteString / byte array) "impossibly" raises an IOException.
 */
private String getReadingExceptionMessage(String target) {
  return "Reading " + getClass().getName() + " from a " + target
      + " threw an IOException (should never happen).";
}
/**
 * Construct an UninitializedMessageException reporting missing fields in
 * the given message.
 *
 * @param message the partially built message being reported on
 */
protected static UninitializedMessageException
    newUninitializedMessageException(MessageLite message) {
  return new UninitializedMessageException(message);
}
/**
 * Adds the {@code values} to the {@code list}. This is a helper method
 * used by generated code. Users should ignore it.
 *
 * @throws NullPointerException if {@code values} or any of the elements of
 * {@code values} is null. When that happens, some elements of
 * {@code values} may have already been added to the result {@code list}.
 */
protected static <T> void addAll(final Iterable<T> values,
    final Collection<? super T> list) {
  if (values == null) {
    throw new NullPointerException();
  }
  if (values instanceof LazyStringList) {
    // Inspect the raw backing elements so ByteStrings are not eagerly
    // converted to Strings just to null-check them.
    checkForNullValues(((LazyStringList) values).getUnderlyingElements());
    list.addAll((Collection<T>) values);
    return;
  }
  if (values instanceof Collection) {
    // Bulk path: validate first, then add everything in a single call.
    checkForNullValues(values);
    list.addAll((Collection<T>) values);
    return;
  }
  // Generic Iterable: validate and append element by element.
  for (final T value : values) {
    if (value == null) {
      throw new NullPointerException();
    }
    list.add(value);
  }
}
/** Throws NullPointerException if any element of {@code values} is null. */
private static void checkForNullValues(final Iterable<?> values) {
  for (final Object element : values) {
    if (element == null) {
      throw new NullPointerException();
    }
  }
}
}
}
| |
/*
* Copyright 2001-2005 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.time;
import junit.framework.Assert;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
/**
 * Unit test the parsing of ISO format datetimes
 *
 * @author Guy Allard
 * @author Stephen Colebourne
 */
public class TestParseISO extends TestCase {

    // Sentinel meaning "this field was not supplied by the parsed text".
    // Matchers substitute the corresponding field of the base datetime
    // 1972-12-03T10:32:40.205 (the value every test parses into) instead.
    private static final int DEFAULT = 99999;

    /**
     * This is the main class for this test suite.
     * @param args command line arguments.
     */
    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }

    /**
     * TestSuite suite() is a junit required method.
     * @see org.joda.test.time.BulkTest
     */
    public static TestSuite suite() {
        return new TestSuite(TestParseISO.class);
    }

    /**
     * Constructor.
     * @param name
     */
    public TestParseISO(String name) {
        super(name);
    }

    protected void setUp() throws Exception {
        super.setUp();
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }

    //-----------------------------------------------------------------------
    // Dates
    //-----------------------------------------------------------------------
    // The first matcher argument is the relevant section of the ISO 8601
    // specification the case exercises.
    public void testSpecCompleteDate() {
        new DMatcher("5.2.1.1", "1999-10-20", "19991020",
            19, 99, 10, 20, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecReducedPrecisionCYM() {
        new DMatcher("5.2.1.2", "1999-10", "199910",
            19, 99, 10, DEFAULT, DEFAULT).run();
    }
    public void testSpecReducedPrecisionCY() {
        new DMatcher("5.2.1.2", "1999", "1999",
            19, 99, DEFAULT, DEFAULT, DEFAULT).run();
    }
    public void testSpecReducedPrecisionC() {
        new DMatcher("5.2.1.2", "20", "20",
            20, DEFAULT, DEFAULT, DEFAULT, DEFAULT).run();
        new DMatcher("5.2.1.2", "19", "19",
            19, DEFAULT, DEFAULT, DEFAULT, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    // Truncated forms: leading components omitted, marked by leading hyphens.
    public void testSpecTruncatedYMD() {
        new DMatcher("5.2.1.3", "85-04-11", "850411",
            DEFAULT, 85, 4, 11, DEFAULT).run();
    }
    public void testSpecTruncatedYM() {
        new DMatcher("5.2.1.3", "-85-04", "-8504",
            DEFAULT, 85, 4, DEFAULT, DEFAULT).run();
    }
    public void testSpecTruncatedY() {
        new DMatcher("5.2.1.3", "-85", "-85",
            DEFAULT, 85, DEFAULT, DEFAULT, DEFAULT).run();
    }
    public void testSpecTruncatedMD() {
        new DMatcher("5.2.1.3", "--04-11", "--0411",
            DEFAULT, DEFAULT, 4, 11, DEFAULT).run();
    }
    public void testSpecTruncatedM() {
        new DMatcher("5.2.1.3", "--04", "--04",
            DEFAULT, DEFAULT, 4, DEFAULT, DEFAULT).run();
    }
    public void testSpecTruncatedD() {
        new DMatcher("5.2.1.3", "---11", "---11",
            DEFAULT, DEFAULT, DEFAULT, 11, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    // Expanded forms: explicitly signed, more-than-four-digit years.
    public void testSpecExpandedCYMD() {
        new DMatcher("5.2.1.4", "+001985-04-11", "+0019850411",
            19, 85, 4, 11, DEFAULT).run();
    }
    public void testSpecExpandedCYM() {
        new DMatcher("5.2.1.4", "+001985-04", "+00198504",
            19, 85, 4, DEFAULT, DEFAULT).run();
    }
    public void testSpecExpandedCY() {
        new DMatcher("5.2.1.4", "+001985", "+001985",
            19, 85, DEFAULT, DEFAULT, DEFAULT).run();
    }
    public void testSpecExpandedC() {
        // Not supported - could only tell difference from CY if you knew
        // number of digits representing year
        // new DMatcher("5.2.1.4", "+0019", "+0019",
        // 19, DEFAULT, DEFAULT, DEFAULT, DEFAULT).assert();
    }

    //-----------------------------------------------------------------------
    // Ordinal based date
    //-----------------------------------------------------------------------
    public void testSpecOrdinalComplete() {
        new DMatcher("5.2.2.1", "1985-101", "1985101",
            19, 85, 4, 11, DEFAULT).run();
        new DMatcher("5.2.2.1", "1985-021", "1985021",
            19, 85, 1, 21, DEFAULT).run();
        new DMatcher("5.2.2.1", "1985-006", "1985006",
            19, 85, 1, 6, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecOrdinalTruncatedYD() {
        new DMatcher("5.2.2.2", "85-101", "85101",
            DEFAULT, 85, 4, 11, DEFAULT).run();
    }
    public void testSpecOrdinalTruncatedD() {
        new DMatcher("5.2.2.2", "-101", "-101",
            DEFAULT, DEFAULT, 4, 11, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecOrdinalExpandedYD() {
        new DMatcher("5.2.2.3", "+001985-101", "+001985101",
            19, 85, 4, 11, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    // Week based date
    //-----------------------------------------------------------------------
    // 1985-W15 runs Mon 8th Apr through Sun 14th Apr 1985.
    public void testSpecWeekComplete() {
        new DMatcher("5.2.3.1", "1985-W15-1", "1985W151",
            19, 85, 4, 8, DEFAULT).run();
        new DMatcher("5.2.3.1", "1985-W15-2", "1985W152",
            19, 85, 4, 9, DEFAULT).run();
        new DMatcher("5.2.3.1", "1985-W15-3", "1985W153",
            19, 85, 4, 10, DEFAULT).run();
        new DMatcher("5.2.3.1", "1985-W15-4", "1985W154",
            19, 85, 4, 11, DEFAULT).run();
        new DMatcher("5.2.3.1", "1985-W15-5", "1985W155",
            19, 85, 4, 12, DEFAULT).run();
        new DMatcher("5.2.3.1", "1985-W15-6", "1985W156",
            19, 85, 4, 13, DEFAULT).run();
        new DMatcher("5.2.3.1", "1985-W15-7", "1985W157",
            19, 85, 4, 14, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecWeekReducedPrecision() {
        // test date is Sunday, which should be left alone
        new DMatcher("5.2.3.2", "1985-W15", "1985W15",
            19, 85, 4, 14, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecWeekTruncatedYWD() {
        new DMatcher("5.2.3.2", "85-W154", "85W154",
            DEFAULT, 85, 4, 11, DEFAULT).run();
    }
    public void testSpecWeekTruncatedYW() {
        // test date is Sunday, which should be left alone
        new DMatcher("5.2.3.2", "85-W15", "85W15",
            DEFAULT, 85, 4, 14, DEFAULT).run();
    }
    public void testSpecWeekTruncatedDWD() {
        // decade not supported
    }
    public void testSpecWeekTruncatedDW() {
        // decade not supported
    }
    public void testSpecWeekTruncatedWD() {
        new DMatcher("5.2.3.2", "-W154", "-W154",
            DEFAULT, DEFAULT, 4, 11, DEFAULT).run();
    }
    public void testSpecWeekTruncatedW() {
        // test date is Sunday, which should be left alone
        new DMatcher("5.2.3.2", "-W15", "-W15",
            DEFAULT, DEFAULT, 4, 14, DEFAULT).run();
    }
    public void testSpecWeekTruncatedD() {
        // test date is Sunday 3rd Dec, thus relative Thursday is 30th Nov
        new DMatcher("5.2.3.3", "-W-4", "-W-4",
            DEFAULT, DEFAULT, 11, 30, DEFAULT).run();
    }
    public void testSpecWeekExpandedYWD() {
        new DMatcher("5.2.3.4", "+001985-W15-4", "+001985W154",
            19, 85, 4, 11, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    // Times
    //-----------------------------------------------------------------------
    public void testSpecTimeComplete() {
        new TMatcher("5.3.1.1", "23:20:50", "232050",
            23, 20, 50, 0, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecTimeReducedPrecisionHM() {
        new TMatcher("5.3.1.2", "23:20", "2320",
            23, 20, DEFAULT, DEFAULT, DEFAULT).run();
    }
    public void testSpecTimeReducedPrecisionH() {
        new TMatcher("5.3.1.2", "23", "23",
            23, DEFAULT, DEFAULT, DEFAULT, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    // Fractions may use either '.' or ',' as the decimal sign.
    public void testSpecTimeFractionalHMS() {
        new TMatcher("5.3.1.3", "23:20:50.607", "232050.607",
            23, 20, 50, 607, DEFAULT).run();
        new TMatcher("5.3.1.3", "23:20:50,607", "232050,607",
            23, 20, 50, 607, DEFAULT).run();
    }
    public void testSpecTimeFractionalHM() {
        // 0.4 of a minute is 24 seconds.
        new TMatcher("5.3.1.3", "23:20.4", "2320.4",
            23, 20, 24, 0, DEFAULT).run();
        new TMatcher("5.3.1.3", "23:20,4", "2320,4",
            23, 20, 24, 0, DEFAULT).run();
    }
    public void testSpecTimeFractionalH() {
        // 0.25 of an hour is 15 minutes.
        new TMatcher("5.3.1.3", "23.25", "23.25",
            23, 15, 0, 0, DEFAULT).run();
        new TMatcher("5.3.1.3", "23.25", "23,25",
            23, 15, 0, 0, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    public void testSpecTimeTruncatedMS() {
        new TMatcher("5.3.1.4", "-20:50", "-2050",
            DEFAULT, 20, 50, 0, DEFAULT).run();
    }
    public void testSpecTimeTruncatedM() {
        new TMatcher("5.3.1.4", "-20", "-20",
            DEFAULT, 20, DEFAULT, DEFAULT, DEFAULT).run();
    }
    public void testSpecTimeTruncatedS() {
        new TMatcher("5.3.1.4", "--50", "--50",
            DEFAULT, DEFAULT, 50, 0, DEFAULT).run();
    }
    public void testSpecTimeTruncatedFractionMS() {
        new TMatcher("5.3.1.4", "-20:50.607", "-2050.607",
            DEFAULT, 20, 50, 607, DEFAULT).run();
    }
    public void testSpecTimeTruncatedFractionM() {
        new TMatcher("5.3.1.4", "-20.4", "-20.4",
            DEFAULT, 20, 24, 0, DEFAULT).run();
    }
    public void testSpecTimeTruncatedFractionS() {
        new TMatcher("5.3.1.4", "--50.607", "--50.607",
            DEFAULT, DEFAULT, 50, 607, DEFAULT).run();
    }

    //-----------------------------------------------------------------------
    //-----------------------------------------------------------------------
    //-----------------------------------------------------------------------
    /**
     * Perform test.
     * Parses the 'extended' text into a known base datetime and asserts the
     * resulting fields; the 'basic' form is recorded for reference.
     */
    protected static abstract class Matcher extends Assert {
        // Inputs: ISO spec section, extended-format text, basic-format text.
        String spec, extended, basic;
        // Expected field values after parsing (DEFAULT already resolved).
        int century, yearOfCentury, monthOfYear, dayOfMonth, hour, min, sec, milli, zone;
        // The datetime that parseInto mutates in place.
        MutableDateTime dt;

        protected Matcher(String spec, String extended, String basic) {
            this.spec = spec;
            this.extended = extended;
            this.basic = basic;
        }

        protected abstract void run();

        // Asserts every datetime field, then the time zone.
        protected void assertDate() {
            String msg = "\nSpec: " + spec + "\nParsed: " + extended + "\nTo: " + dt;
            assertEquals(msg + "\nCentury: ", century, dt.getCenturyOfEra());
            assertEquals(msg + "\nYear: ", yearOfCentury, dt.getYearOfCentury());
            assertEquals(msg + "\nMonth: ", monthOfYear, dt.getMonthOfYear());
            assertEquals(msg + "\nDay: ", dayOfMonth, dt.getDayOfMonth());
            assertEquals(msg + "\nHour: ", hour, dt.getHourOfDay());
            assertEquals(msg + "\nMinute: ", min, dt.getMinuteOfHour());
            assertEquals(msg + "\nSecond: ", sec, dt.getSecondOfMinute());
            assertEquals(msg + "\nMilli: ", milli, dt.getMillisOfSecond());
            DateTimeZone z;
            if (zone == DEFAULT) {
                z = DateTimeZone.getDefault();
            } else if (zone == 0) {
                z = DateTimeZone.UTC;
            } else {
                // Build a "+0h:00"/"-0h:00" style id from the hour offset.
                String str = "0" + Math.abs(zone) + ":00";
                str = str.substring(str.length() - 4);
                str = (zone < 0 ? "-" : "+") + str;
                z = DateTimeZone.forID(str);
            }
            assertEquals(msg + "\nZone: ", z, dt.getZone());
        }

        // parseInto returns the new position on success and the ones
        // complement of the failure position on failure.
        protected void parse(DateTimeFormatter p) {
            int result = p.parseInto(dt, extended, 0);
            assertTrue("\nSpec: " + spec + "\nParsed: " + extended + "\nTo: "
                + dt + "\nParse failed at: " + ~result,
                result >= 0);
        }
    }

    /** Matcher that checks full datetime parsing with per-field defaults. */
    protected static class DTMatcher extends Matcher {
        protected DTMatcher(String spec, String extended, String basic,
            int century, int yearOfCentury, int monthOfYear, int dayOfMonth,
            int hour, int min, int sec, int milli, int zone) {
            super(spec, extended, basic);
            // Resolve DEFAULT placeholders against the 1972-12-03T10:32:40.205 base.
            this.century = (century == DEFAULT ? 19 : century);
            this.yearOfCentury = (yearOfCentury == DEFAULT ? 72 : yearOfCentury);
            this.monthOfYear = (monthOfYear == DEFAULT ? 12 : monthOfYear);
            this.dayOfMonth = (dayOfMonth == DEFAULT ? 3 : dayOfMonth);
            this.hour = (hour == DEFAULT ? 10 : hour);
            this.min = (min == DEFAULT ? 32 : min);
            this.sec = (sec == DEFAULT ? 40 : sec);
            this.milli = (milli == DEFAULT ? 205 : milli);
            this.zone = zone;
        }
        protected void run() {
            dt = new MutableDateTime(1972, 12, 3, 10, 32, 40, 205);
            parse(ISODateTimeFormat.dateTimeParser());
            super.assertDate();
        }
    }

    /** Matcher for date-only input; time fields must stay at the base values. */
    protected static class DMatcher extends Matcher {
        protected DMatcher(String spec, String extended, String basic,
            int century, int yearOfCentury, int monthOfYear, int dayOfMonth, int zone) {
            super(spec, extended, basic);
            this.century = (century == DEFAULT ? 19 : century);
            this.yearOfCentury = (yearOfCentury == DEFAULT ? 72 : yearOfCentury);
            this.monthOfYear = (monthOfYear == DEFAULT ? 12 : monthOfYear);
            this.dayOfMonth = (dayOfMonth == DEFAULT ? 3 : dayOfMonth);
            // A pure date must leave the base time untouched.
            this.hour = 10;
            this.min = 32;
            this.sec = 40;
            this.milli = 205;
            this.zone = zone;
        }
        protected void run() {
            // Date input must parse both with the date parser...
            dt = new MutableDateTime(1972, 12, 3, 10, 32, 40, 205);
            parse(ISODateTimeFormat.dateParser());
            super.assertDate();
            // ...and with the general datetime parser.
            dt = new MutableDateTime(1972, 12, 3, 10, 32, 40, 205);
            parse(ISODateTimeFormat.dateTimeParser());
            super.assertDate();
        }
    }

    /** Matcher for time-only input; date fields must stay at the base values. */
    protected static class TMatcher extends Matcher {
        protected TMatcher(String spec, String extended, String basic,
            int hour, int min, int sec, int milli, int zone) {
            super(spec, extended, basic);
            // A pure time must leave the base date untouched.
            this.century = 19;
            this.yearOfCentury = 72;
            this.monthOfYear = 12;
            this.dayOfMonth = 3;
            this.hour = (hour == DEFAULT ? 10 : hour);
            this.min = (min == DEFAULT ? 32 : min);
            this.sec = (sec == DEFAULT ? 40 : sec);
            this.milli = (milli == DEFAULT ? 205 : milli);
            this.zone = zone;
        }
        protected void run() {
            // Time input must parse bare...
            dt = new MutableDateTime(1972, 12, 3, 10, 32, 40, 205);
            parse(ISODateTimeFormat.timeParser());
            super.assertDate();
            // ...with a leading 'T'...
            extended = "T" + extended;
            dt = new MutableDateTime(1972, 12, 3, 10, 32, 40, 205);
            parse(ISODateTimeFormat.timeParser());
            super.assertDate();
            // ...and via the general datetime parser (still 'T'-prefixed).
            dt = new MutableDateTime(1972, 12, 3, 10, 32, 40, 205);
            parse(ISODateTimeFormat.dateTimeParser());
            super.assertDate();
        }
    }
}
| |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.util.aws;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.security.SignatureException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.StringTokenizer;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
public final class AwsUtils {
// Utility class: private constructor prevents instantiation.
private AwsUtils() {
}
private static final Logger LOG = LoggerFactory.getLogger(AwsUtils.class);
// Charset name used when URL-encoding query values and hashing V2 input.
private static final String UTF8_CHARSET = "UTF-8";
// JCA MAC algorithm used by the "SignatureVersion 1" signing scheme.
private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1";
// JCA MAC algorithm used by the "SignatureVersion 2" signing scheme.
private static final String HMAC_SHA256_ALGORITHM = "HmacSHA256";
/**
 * Returns the AccountId of the user who is running the instance.
 *
 * @return String representing the AccountId or the owner-id
 * @throws java.io.IOException if failed to retrieve the AccountId using the
 * Cloud infrastructure
 */
public static String getAccountId() throws IOException {
  // The instance's MAC address keys the per-interface metadata tree.
  String macUrl = getMetadataUrl() + "/network/interfaces/macs/";
  String mac = invokeUrl(macUrl).trim();
  // The owner-id entry beneath that MAC is the AWS AccountId.
  String acctId = invokeUrl(macUrl + mac + "owner-id").trim();
  assert acctId != null;
  return acctId;
}
/**
 * Returns the URL that is used to fetch the image metadata. Based on
 * the Cloud Vendor/Provider, this will be different.
 *
 * @return String URL to be used to retrieve image metadata
 */
public static String getMetadataUrl() {
  // 169.254.169.254 is the link-local metadata endpoint, reachable only
  // from within a running cloud instance.
  return "http://169.254.169.254/latest/meta-data";
}
/**
 * Returns the region in which the instance is running.
 *
 * @return String representing the region where the instance is
 * running
 * @throws java.io.IOException if failed to retrieve the region information
 * using the Cloud infrastructure
 */
public static String getRegion() throws IOException {
  String zone = invokeUrl(getMetadataUrl() + "/placement/availability-zone").trim();
  // AWS zones carry a trailing zone letter (e.g. "us-east-1a",
  // "eu-west-1a"); dropping the final character yields the region name.
  String region = zone.substring(0, zone.length() - 1);
  assert region != null;
  return region;
}
/**
 * Returns the name of the security group from the list that is
 * obtained from the resource vendor. An instance may belong to multiple
 * security groups, and the list obtained from the vendor may not be
 * ordered; for AWS this simply returns the first group reported by the
 * metadata service.
 *
 * @return the first security group name reported for this instance
 * @throws java.io.IOException if the metadata service cannot be reached
 * @throws IllegalStateException if no security group is assigned
 */
public static String getSecurityGroupName() throws IOException {
  // The metadata service reports one group name per line.
  String groups = invokeUrl(getMetadataUrl() + "/security-groups");
  if ((groups == null) || (groups.trim().length() == 0)) {
    throw new IllegalStateException("No security-group assigned to the instance");
  }
  // We only need the first one.
  return new StringTokenizer(groups, "\n").nextToken();
}
/**
 * Returns the local address (IPv4) of the instance. The local address
 * is defined to be the public IP address if launched with direct
 * addressing; the private IP address if launched with public addressing.
 *
 * @return local IP address (IPv4) of the instance
 * @throws java.io.IOException if the metadata service cannot be reached
 * @throws IllegalStateException if no local IPv4 address is assigned
 */
public static String getLocalIPv4() throws IOException {
  String localIPv4 = invokeUrl(getMetadataUrl() + "/local-ipv4");
  if ((localIPv4 == null) || (localIPv4.trim().length() == 0)) {
    throw new IllegalStateException("No local IPv4 assigned to the instance");
  }
  return localIPv4.trim();
}
/**
 * Returns the URL that is used to fetch the instance's user-data.
 * Based on the Cloud Vendor/Provider, this will be different.
 *
 * @return String URL to be used to retrieve image metadata
 */
public static String getUserdataUrl() {
  // Link-local user-data endpoint, reachable only from the instance itself.
  return "http://169.254.169.254/latest/user-data";
}
/**
 * Creates a "SignatureVersion 1" based signed request. The uri parameter
 * already contains the protocol, endpoint and resource. This method ONLY
 * adds the "Timestamp", "AWSAccessKeyId", "SignatureVersion" and
 * "Signature" parameters; the caller is responsible for including every
 * other REST-API-specific parameter (such as "Action") in the params map.
 *
 * @param uri request target of the form protocol://endpoint/resource
 * @param params Map with parameters such as "Action", etc.; modified in
 * place to receive the signing parameters
 * @param awsAccessKeyId AccessKeyId of the caller to create a signature
 * @param awsSecretKey SecretKey of the caller to create the signature
 * @return String that is a completely signed URL based on the
 * "SignatureVersion 1" scheme
 * @throws java.security.SignatureException if the signature cannot be computed
 */
public static String getVersion1SignedRequest(String uri,
    Map<String, String> params,
    String awsAccessKeyId,
    String awsSecretKey)
    throws SignatureException {
  if ((uri == null)
      || (params == null)
      || (awsAccessKeyId == null)
      || (awsSecretKey == null)) {
    throw new IllegalArgumentException("Null parameter passed in");
  }
  // Stamp the request with the identity and signing-scheme parameters.
  params.put("AWSAccessKeyId", awsAccessKeyId);
  params.put("SignatureVersion", "1");
  params.put("Timestamp", getTimestamp());
  // Sign the canonical V1 string and attach the result as a parameter so
  // it is emitted with the rest of the query string below.
  params.put("Signature",
      createV1Signature(getV1StringToSign(params), awsSecretKey, HMAC_SHA1_ALGORITHM));
  // Build the query string, URL-encoding each parameter value.
  StringBuilder queryString = new StringBuilder();
  for (Map.Entry<String, String> entry : params.entrySet()) {
    String encValue;
    try {
      encValue = URLEncoder.encode(entry.getValue(), UTF8_CHARSET);
    } catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the JLS; fall back to the raw value.
      encValue = entry.getValue();
    }
    queryString.append(queryString.length() == 0 ? "?" : "&")
        .append(entry.getKey()).append("=").append(encValue);
  }
  return uri + queryString.toString();
}
/**
 * Returns a complete signed request using the "SignatureVersion 2" scheme
 * with an HMAC-SHA256 signature. Currently this supports ONLY the HTTP
 * "GET" method. The caller is responsible for supplying the
 * REST-API-specific parameters (such as "Action") in the params map; this
 * method adds the "SignatureVersion", "SignatureMethod", "AWSAccessKeyId",
 * "Timestamp" and "Signature" parameters.
 *
 * @param requestMethod Only "GET" at this point.
 * @param protocol scheme, e.g. "https"
 * @param endpoint endpoint or the host
 * @param requestURI path following the endpoint up until the query params
 * @param params Map of name-value pairs containing params such
 * as "Action", etc.; modified in place to receive the signing parameters
 * @param awsAccessKeyId AccessKeyId of the caller to create a signature
 * @param awsSecretKey SecretKey of the caller to create the signature
 * @return String the complete URL with proper encoding and the Signature
 * query param appended
 * @throws java.security.SignatureException if the signature cannot be computed
 */
public static String getVersion2SignedRequest(String requestMethod,
    String protocol,
    String endpoint,
    String requestURI,
    Map<String, String> params,
    String awsAccessKeyId,
    String awsSecretKey)
    throws SignatureException {
  if ((requestMethod == null)
      || (protocol == null)
      || (endpoint == null)
      || (requestURI == null)
      || (params == null)
      || (awsAccessKeyId == null)
      || (awsSecretKey == null)) {
    throw new IllegalArgumentException("Null parameter passed in");
  }
  // Stamp the request with the identity and V2 signing-scheme parameters.
  params.put("AWSAccessKeyId", awsAccessKeyId);
  params.put("SignatureMethod", HMAC_SHA256_ALGORITHM);
  params.put("SignatureVersion", "2");
  params.put("Timestamp", getTimestamp());
  // V2 signs: METHOD \n host \n path \n canonicalized-query-string.
  String canonicalQS = getV2CanonicalizedQueryString(params);
  String stringToSign = requestMethod + "\n"
      + endpoint + "\n"
      + requestURI + "\n"
      + canonicalQS;
  String signature = createSignature(stringToSign,
      awsSecretKey,
      HMAC_SHA256_ALGORITHM);
  return protocol + "://" + endpoint + requestURI
      + "?" + canonicalQS
      + "&Signature=" + signature;
}
/**
 * This is a generic method to invoke the REST API. The URL that is passed
 * in should result in a signed request with the "Signature" query parameter
 * included. It's the caller's responsibility to deal with the return value
 * which may be a simple string or a XML response.
 *
 * @param url represents a signed request with "Signature" param
 * @return String representing the outcome of the REST API call
 * @throws java.io.IOException is thrown if the connection times out or the query
 * params are incorrectly specified; the error-stream content (when
 * available) is included in the exception message
 */
public static String invokeUrl(String url) throws IOException {
  if (url == null) {
    throw new IllegalArgumentException("Null parameter passed in");
  }
  URL urlObj = null;
  InputStream inStream = null;
  String response = null;
  HttpURLConnection connection = null;
  try {
    // Create the HttpURLConnection.
    urlObj = new URL(url);
    connection = (HttpURLConnection) urlObj.openConnection();
    // Only need HTTP GET.
    connection.setRequestMethod("GET");
    // Use a 2 second timeout for both connect and read, so that off-cloud
    // callers (where the metadata endpoint is unreachable) fail quickly.
    connection.setConnectTimeout(2 * 1000);
    connection.setReadTimeout(2 * 1000);
    connection.connect();
    // Read the output from the server.
    try {
      inStream = connection.getInputStream();
    }
    catch (IOException ex) {
      // Check the error stream for additional information that can
      // be useful to address the issue. If there is nothing in the
      // response body, HttpURLConnection.getErrorStream() returns a
      // null.
      inStream = connection.getErrorStream();
      if (inStream == null) {
        // If connection.getErrorStream() is null, just use the
        // message from the exception.
        response = ex.getMessage();
      }
      else {
        // Otherwise, get the error stream content and use it as
        // a message for the new IOException instance that wraps the
        // original IOException instance.
        response = getResponse(inStream);
        inStream.close();
        inStream = null; // To deal with the check in finally.
      }
      // Wrap with the URL and body context, preserving the cause.
      throw new IOException(url + "\n" + response, ex);
    }
    response = getResponse(inStream);
  }
  finally {
    if (inStream != null) {
      try {
        inStream.close();
      }
      catch (IOException ioe) {
        // Swallow this exception: the response was already captured.
      }
    }
    if (connection != null) {
      connection.disconnect();
    }
  }
  return response;
}
/**
 * Indicates whether the Gateway is currently deployed in AWS environment
 * by returning true. Otherwise, it returns false.
 *
 * It could either be DevPay or non-DevPay license.
 *
 * @return boolean true when the EC2 metadata service is reachable
 */
public static boolean isDeployedToAWS() {
    try {
        // Probe the AWS metadata service, which is reachable only from
        // within a running EC2 instance. Use getMetadataUrl() so the
        // endpoint is defined in exactly one place instead of being
        // duplicated as a string literal here.
        invokeUrl(getMetadataUrl());
    }
    catch (IOException ex) {
        // Unreachable/timeout simply means we are not on AWS; this is the
        // expected outcome off-cloud, so no logging or rethrow.
        return false;
    }
    return true;
}
/**
 * Returns a Document(DOM) object representing the parsed XML, or null when
 * the input is null, empty, or cannot be parsed.
 *
 * The parser is hardened against XML External Entity (XXE) attacks since
 * the XML typically originates from a network response: documents carrying
 * a DOCTYPE declaration are rejected (and yield null).
 *
 * @param xmlStr XML text, typically a REST/metadata response body
 * @return Document, or null on any parse failure
 */
public static Document parseXMLResponse(String xmlStr) {
    if ((xmlStr == null) || (xmlStr.length() == 0)) {
        return null;
    }
    try {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        try {
            // Disallow DOCTYPE declarations entirely; this is the most
            // effective XXE prevention and AWS responses do not use DTDs.
            dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        } catch (ParserConfigurationException ignored) {
            // The underlying parser does not recognize this feature;
            // continue with its defaults rather than failing outright.
        }
        DocumentBuilder db = dbf.newDocumentBuilder();
        // NOTE(review): getBytes() uses the platform default charset —
        // presumably the responses are ASCII; confirm before relying on
        // non-ASCII content.
        return db.parse(new ByteArrayInputStream(xmlStr.getBytes()));
    } catch (ParserConfigurationException pcex) {
        // Misconfigured parser: treat the input as unparseable.
    } catch (SAXException saxEx) {
        // Malformed (or DOCTYPE-bearing) XML: treat as unparseable.
    } catch (IOException ioex) {
        // Cannot realistically happen for an in-memory stream, but the
        // parse API declares it.
    }
    return null;
}
// ------------------------- Private Methods -----------------------------
/**
 * Computes a request signature: the HMAC of stringToSign keyed by the AWS
 * secret key, base64-encoded and then made RFC 3986 conformant.
 *
 * @param stringToSign canonical string to sign (never null)
 * @param awsSecretKey HMAC key material (never null)
 * @param algorithm JCA MAC algorithm name, e.g. "HmacSHA256" (never null)
 * @return the encoded signature
 * @throws SignatureException if the MAC cannot be computed
 */
private static String createSignature(String stringToSign,
    String awsSecretKey,
    String algorithm)
    throws SignatureException {
    assert stringToSign != null;
    assert awsSecretKey != null;
    assert algorithm != null;
    String signature = null;
    try {
        byte[] secretyKeyBytes = awsSecretKey.getBytes(UTF8_CHARSET);
        SecretKeySpec secretKeySpec = new SecretKeySpec(secretyKeyBytes,
            algorithm);
        Mac mac = Mac.getInstance(algorithm);
        mac.init(secretKeySpec);
        byte[] data = stringToSign.getBytes(UTF8_CHARSET);
        byte[] rawHmac = mac.doFinal(data);
        // base64Encode already returns a String; the former
        // new String(...) wrapper was a redundant copy.
        signature = rfc3986Conformance(Codec.base64Encode(rawHmac));
    } catch (Exception e) {
        // Chain the original exception as the cause instead of flattening
        // it into just a message string, so the root failure is diagnosable.
        throw new SignatureException("Failed to generate HMAC : " +
            e.getMessage(), e);
    }
    return signature;
}
/**
 * Drains {@code in} into a string, one line at a time, normalizing line
 * terminators to '\n'. Returns null when the stream is null, fails to
 * read, or produces no content.
 *
 * NOTE(review): InputStreamReader here uses the platform default charset —
 * presumably the REST/metadata responses are ASCII; confirm before relying
 * on non-ASCII content.
 */
private static String getResponse(InputStream in) {
    if (in == null) {
        return null;
    }
    // reader can never be null here, so the old null-check before close()
    // was dead code and has been removed.
    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    StringBuilder strBuilder = new StringBuilder();
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            // Chained appends avoid allocating a throwaway concatenated
            // String per line.
            strBuilder.append(line).append('\n');
        }
    }
    catch (Exception ex) {
        // Best-effort read: a failure mid-stream yields null rather than a
        // partial (and possibly misleading) response body.
        return null;
    }
    finally {
        try {
            reader.close();
        }
        catch (IOException ioe) {
            // Swallow close failures; any content was already captured.
        }
    }
    return (strBuilder.length() > 0) ? strBuilder.toString() : null;
}
private static String createV1Signature(String stringToSign, String awsSecretKey, String algorithm)
throws SignatureException {
String signature = null;
if ((stringToSign == null) ||
(awsSecretKey == null) ||
(algorithm == null)) {
return null;
}
try {
byte[] secretyKeyBytes = awsSecretKey.getBytes();
SecretKeySpec secretKeySpec = new SecretKeySpec(secretyKeyBytes, algorithm);
Mac mac = Mac.getInstance(algorithm);
mac.init(secretKeySpec);
byte[] data = stringToSign.getBytes();
byte[] rawHmac = mac.doFinal(data);
signature = Codec.base64Encode(rawHmac);
} catch (Exception e) {
throw new SignatureException("Failed to generate HMAC : " + e.getMessage());
}
return signature;
}
private static String getTimestamp() {
Calendar cal = Calendar.getInstance();
DateFormat dfm = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
dfm.setTimeZone(TimeZone.getTimeZone("GMT"));
return dfm.format(cal.getTime());
}
private static String getV1StringToSign(Map<String, String> paramMap) {
assert paramMap != null;
Set<String> sortedKeys = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
sortedKeys.addAll(paramMap.keySet());
// Don't include "Signature" in the string to sign.
sortedKeys.remove("Signature");
StringBuilder stringBuilder = new StringBuilder();
for (String key : sortedKeys) {
stringBuilder.append(key);
stringBuilder.append(paramMap.get(key));
}
return stringBuilder.toString();
}
private static String getV2CanonicalizedQueryString(Map<String, String> params) {
assert params != null && !params.isEmpty();
SortedMap<String, String> sortedMap = new TreeMap<String, String>(params);
// Remove "Signature" parameter, if added.
sortedMap.remove("Signature");
StringBuffer buffer = new StringBuffer();
Iterator<Map.Entry<String, String>> iter = sortedMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, String> kvpair = iter.next();
buffer.append(rfc3986Conformance(kvpair.getKey()));
buffer.append("=");
buffer.append(rfc3986Conformance(kvpair.getValue()));
if (iter.hasNext()) {
buffer.append("&");
}
}
return buffer.toString();
}
// Based on RFC 3986 and AWS doc, further encode certain characters.
private static String rfc3986Conformance(String s) {
assert s != null;
String out = null;
if (s == null) {
return null;
}
try {
out = URLEncoder.encode(s, UTF8_CHARSET).replace("+", "%20")
.replace("*", "%2A").replace("%7E", "~");
} catch (UnsupportedEncodingException e) {
out = s;
}
return out;
}
}
| |
/*
* Copyright 2003-2016 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.psiutils;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class ControlFlowUtils {
private ControlFlowUtils() {}
public static boolean isElseIf(PsiIfStatement ifStatement) {
final PsiElement parent = ifStatement.getParent();
if (!(parent instanceof PsiIfStatement)) {
return false;
}
final PsiIfStatement parentStatement = (PsiIfStatement)parent;
final PsiStatement elseBranch = parentStatement.getElseBranch();
return ifStatement.equals(elseBranch);
}
public static boolean statementMayCompleteNormally(@Nullable PsiStatement statement) {
if (statement == null) {
return true;
}
if (statement instanceof PsiBreakStatement || statement instanceof PsiContinueStatement ||
statement instanceof PsiReturnStatement || statement instanceof PsiThrowStatement) {
return false;
}
else if (statement instanceof PsiExpressionListStatement || statement instanceof PsiEmptyStatement ||
statement instanceof PsiAssertStatement || statement instanceof PsiDeclarationStatement ||
statement instanceof PsiSwitchLabelStatement || statement instanceof PsiForeachStatement) {
return true;
}
else if (statement instanceof PsiExpressionStatement) {
final PsiExpressionStatement expressionStatement = (PsiExpressionStatement)statement;
final PsiExpression expression = expressionStatement.getExpression();
if (!(expression instanceof PsiMethodCallExpression)) {
return true;
}
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)expression;
final PsiMethod method = methodCallExpression.resolveMethod();
if (method == null) {
return true;
}
@NonNls final String methodName = method.getName();
if (!methodName.equals("exit")) {
return true;
}
final PsiClass aClass = method.getContainingClass();
if (aClass == null) {
return true;
}
final String className = aClass.getQualifiedName();
return !"java.lang.System".equals(className);
}
else if (statement instanceof PsiForStatement) {
return forStatementMayCompleteNormally((PsiForStatement)statement);
}
else if (statement instanceof PsiWhileStatement) {
return whileStatementMayCompleteNormally((PsiWhileStatement)statement);
}
else if (statement instanceof PsiDoWhileStatement) {
return doWhileStatementMayCompleteNormally((PsiDoWhileStatement)statement);
}
else if (statement instanceof PsiSynchronizedStatement) {
final PsiCodeBlock body = ((PsiSynchronizedStatement)statement).getBody();
return codeBlockMayCompleteNormally(body);
}
else if (statement instanceof PsiBlockStatement) {
final PsiCodeBlock codeBlock = ((PsiBlockStatement)statement).getCodeBlock();
return codeBlockMayCompleteNormally(codeBlock);
}
else if (statement instanceof PsiLabeledStatement) {
return labeledStatementMayCompleteNormally((PsiLabeledStatement)statement);
}
else if (statement instanceof PsiIfStatement) {
return ifStatementMayCompleteNormally((PsiIfStatement)statement);
}
else if (statement instanceof PsiTryStatement) {
return tryStatementMayCompleteNormally((PsiTryStatement)statement);
}
else if (statement instanceof PsiSwitchStatement) {
return switchStatementMayCompleteNormally((PsiSwitchStatement)statement);
}
else if (statement instanceof PsiTemplateStatement || statement instanceof PsiClassLevelDeclarationStatement) {
return true;
}
else {
assert false : "unknown statement type: " + statement.getClass();
return true;
}
}
  // A do-while completes normally when its body can complete normally and the
  // condition is not the constant true, or when control can leave the loop via
  // a break that targets it or a continue aimed at an enclosing statement.
  // Note: '&&' binds tighter than '||' below, so the constant-true check only
  // guards the first alternative.
  private static boolean doWhileStatementMayCompleteNormally(@NotNull PsiDoWhileStatement loopStatement) {
    final PsiExpression condition = loopStatement.getCondition();
    final Object value = ExpressionUtils.computeConstantExpression(condition);
    final PsiStatement body = loopStatement.getBody();
    return statementMayCompleteNormally(body) && value != Boolean.TRUE
           || statementIsBreakTarget(loopStatement) || statementContainsContinueToAncestor(loopStatement);
  }
private static boolean whileStatementMayCompleteNormally(@NotNull PsiWhileStatement loopStatement) {
final PsiExpression condition = loopStatement.getCondition();
final Object value = ExpressionUtils.computeConstantExpression(condition);
return value != Boolean.TRUE || statementIsBreakTarget(loopStatement) || statementContainsContinueToAncestor(loopStatement);
}
private static boolean forStatementMayCompleteNormally(@NotNull PsiForStatement loopStatement) {
if (statementIsBreakTarget(loopStatement)) {
return true;
}
if (statementContainsContinueToAncestor(loopStatement)) {
return true;
}
final PsiExpression condition = loopStatement.getCondition();
if (condition == null) {
return false;
}
final Object value = ExpressionUtils.computeConstantExpression(condition);
return Boolean.TRUE != value;
}
private static boolean switchStatementMayCompleteNormally(@NotNull PsiSwitchStatement switchStatement) {
if (statementIsBreakTarget(switchStatement)) {
return true;
}
final PsiCodeBlock body = switchStatement.getBody();
if (body == null) {
return true;
}
final PsiStatement[] statements = body.getStatements();
if (statements.length == 0) {
return true;
}
int numCases = 0;
boolean hasDefaultCase = false;
for (PsiStatement statement : statements) {
if (statement instanceof PsiSwitchLabelStatement) {
numCases++;
final PsiSwitchLabelStatement switchLabelStatement = (PsiSwitchLabelStatement)statement;
if (switchLabelStatement.isDefaultCase()) {
hasDefaultCase = true;
}
}
if (statement instanceof PsiBreakStatement) {
final PsiBreakStatement breakStatement = (PsiBreakStatement)statement;
if (breakStatement.getLabelIdentifier() == null) {
return true;
}
}
}
final boolean isEnum = isEnumSwitch(switchStatement);
if (!hasDefaultCase && !isEnum) {
return true;
}
if (!hasDefaultCase) {
final PsiExpression expression = switchStatement.getExpression();
if (expression == null) {
return true;
}
final PsiClassType type = (PsiClassType)expression.getType();
if (type == null) {
return true;
}
final PsiClass aClass = type.resolve();
if (aClass == null) {
return true;
}
final PsiField[] fields = aClass.getFields();
int numEnums = 0;
for (final PsiField field : fields) {
final PsiType fieldType = field.getType();
if (fieldType.equals(type)) {
numEnums++;
}
}
if (numEnums != numCases) {
return true;
}
}
return statementMayCompleteNormally(statements[statements.length - 1]);
}
private static boolean isEnumSwitch(PsiSwitchStatement statement) {
final PsiExpression expression = statement.getExpression();
if (expression == null) {
return false;
}
final PsiType type = expression.getType();
if (type == null) {
return false;
}
if (!(type instanceof PsiClassType)) {
return false;
}
final PsiClass aClass = ((PsiClassType)type).resolve();
return aClass != null && aClass.isEnum();
}
private static boolean tryStatementMayCompleteNormally(@NotNull PsiTryStatement tryStatement) {
final PsiCodeBlock finallyBlock = tryStatement.getFinallyBlock();
if (finallyBlock != null) {
if (!codeBlockMayCompleteNormally(finallyBlock)) {
return false;
}
}
final PsiCodeBlock tryBlock = tryStatement.getTryBlock();
if (codeBlockMayCompleteNormally(tryBlock)) {
return true;
}
final PsiCodeBlock[] catchBlocks = tryStatement.getCatchBlocks();
for (final PsiCodeBlock catchBlock : catchBlocks) {
if (codeBlockMayCompleteNormally(catchBlock)) {
return true;
}
}
return false;
}
private static boolean ifStatementMayCompleteNormally(@NotNull PsiIfStatement ifStatement) {
final PsiExpression condition = ifStatement.getCondition();
final Object value = ExpressionUtils.computeConstantExpression(condition);
final PsiStatement thenBranch = ifStatement.getThenBranch();
if (value == Boolean.TRUE) {
return statementMayCompleteNormally(thenBranch);
}
final PsiStatement elseBranch = ifStatement.getElseBranch();
if (value == Boolean.FALSE) {
return statementMayCompleteNormally(elseBranch);
}
// process branch with fewer statements first
PsiStatement branch1;
PsiStatement branch2;
if ((thenBranch == null ? 0 : thenBranch.getTextLength()) < (elseBranch == null ? 0 : elseBranch.getTextLength())) {
branch1 = thenBranch;
branch2 = elseBranch;
}
else {
branch2 = thenBranch;
branch1 = elseBranch;
}
return statementMayCompleteNormally(branch1) || statementMayCompleteNormally(branch2);
}
private static boolean labeledStatementMayCompleteNormally(@NotNull PsiLabeledStatement labeledStatement) {
final PsiStatement statement = labeledStatement.getStatement();
if (statement == null) {
return false;
}
return statementMayCompleteNormally(statement) || statementIsBreakTarget(statement);
}
public static boolean codeBlockMayCompleteNormally(@Nullable PsiCodeBlock block) {
if (block == null) {
return true;
}
final PsiStatement[] statements = block.getStatements();
for (final PsiStatement statement : statements) {
if (!statementMayCompleteNormally(statement)) {
return false;
}
}
return true;
}
private static boolean statementIsBreakTarget(@NotNull PsiStatement statement) {
final BreakFinder breakFinder = new BreakFinder(statement);
statement.accept(breakFinder);
return breakFinder.breakFound();
}
private static boolean statementContainsContinueToAncestor(@NotNull PsiStatement statement) {
PsiElement parent = statement.getParent();
while (parent instanceof PsiLabeledStatement) {
statement = (PsiStatement)parent;
parent = parent.getParent();
}
final ContinueToAncestorFinder continueToAncestorFinder = new ContinueToAncestorFinder(statement);
statement.accept(continueToAncestorFinder);
return continueToAncestorFinder.continueToAncestorFound();
}
public static boolean containsReturn(@NotNull PsiElement element) {
final ReturnFinder returnFinder = new ReturnFinder();
element.accept(returnFinder);
return returnFinder.returnFound();
}
public static boolean statementIsContinueTarget(@NotNull PsiStatement statement) {
final ContinueFinder continueFinder = new ContinueFinder(statement);
statement.accept(continueFinder);
return continueFinder.continueFound();
}
public static boolean containsSystemExit(@NotNull PsiElement element) {
final SystemExitFinder systemExitFinder = new SystemExitFinder();
element.accept(systemExitFinder);
return systemExitFinder.exitFound();
}
public static boolean elementContainsCallToMethod(PsiElement context, String containingClassName, PsiType returnType,
String methodName, PsiType... parameterTypes) {
final MethodCallFinder methodCallFinder = new MethodCallFinder(containingClassName, returnType, methodName, parameterTypes);
context.accept(methodCallFinder);
return methodCallFinder.containsCallToMethod();
}
public static boolean isInLoop(@NotNull PsiElement element) {
final PsiLoopStatement loopStatement = PsiTreeUtil.getParentOfType(element, PsiLoopStatement.class, true, PsiClass.class);
if (loopStatement == null) {
return false;
}
final PsiStatement body = loopStatement.getBody();
return body != null && PsiTreeUtil.isAncestor(body, element, true);
}
public static boolean isInFinallyBlock(@NotNull PsiElement element) {
PsiElement currentElement = element;
while (true) {
final PsiTryStatement tryStatement = PsiTreeUtil.getParentOfType(currentElement, PsiTryStatement.class, true, PsiClass.class, PsiLambdaExpression.class);
if (tryStatement == null) {
return false;
}
final PsiCodeBlock finallyBlock = tryStatement.getFinallyBlock();
if (finallyBlock != null) {
if (PsiTreeUtil.isAncestor(finallyBlock, currentElement, true)) {
final PsiMethod elementMethod = PsiTreeUtil.getParentOfType(currentElement, PsiMethod.class);
final PsiMethod finallyMethod = PsiTreeUtil.getParentOfType(finallyBlock, PsiMethod.class);
return elementMethod != null && elementMethod.equals(finallyMethod);
}
}
currentElement = tryStatement;
}
}
public static boolean isInCatchBlock(@NotNull PsiElement element) {
return PsiTreeUtil.getParentOfType(element, PsiCatchSection.class, true, PsiClass.class) != null;
}
public static boolean isInExitStatement(@NotNull PsiExpression expression) {
return isInReturnStatementArgument(expression) || isInThrowStatementArgument(expression);
}
private static boolean isInReturnStatementArgument(@NotNull PsiExpression expression) {
return PsiTreeUtil.getParentOfType(expression, PsiReturnStatement.class) != null;
}
public static boolean isInThrowStatementArgument(@NotNull PsiExpression expression) {
return PsiTreeUtil.getParentOfType(expression, PsiThrowStatement.class) != null;
}
@Nullable
public static PsiStatement stripBraces(@Nullable PsiStatement statement) {
if (statement instanceof PsiBlockStatement) {
final PsiBlockStatement block = (PsiBlockStatement)statement;
final PsiStatement onlyStatement = getOnlyStatementInBlock(block.getCodeBlock());
return (onlyStatement != null) ? onlyStatement : block;
}
else {
return statement;
}
}
public static boolean statementCompletesWithStatement(@NotNull PsiStatement containingStatement, @NotNull PsiStatement statement) {
PsiElement statementToCheck = statement;
while (true) {
if (statementToCheck.equals(containingStatement)) {
return true;
}
final PsiElement container = getContainingStatementOrBlock(statementToCheck);
if (container == null) {
return false;
}
if (container instanceof PsiCodeBlock) {
if (!statementIsLastInBlock((PsiCodeBlock)container, (PsiStatement)statementToCheck)) {
return false;
}
}
if (container instanceof PsiLoopStatement) {
return false;
}
statementToCheck = container;
}
}
public static boolean blockCompletesWithStatement(@NotNull PsiCodeBlock body, @NotNull PsiStatement statement) {
PsiElement statementToCheck = statement;
while (true) {
if (statementToCheck == null) {
return false;
}
final PsiElement container = getContainingStatementOrBlock(statementToCheck);
if (container == null) {
return false;
}
if (container instanceof PsiLoopStatement) {
return false;
}
if (container instanceof PsiCodeBlock) {
if (!statementIsLastInBlock((PsiCodeBlock)container, (PsiStatement)statementToCheck)) {
return false;
}
if (container.equals(body)) {
return true;
}
statementToCheck = PsiTreeUtil.getParentOfType(container, PsiStatement.class);
}
else {
statementToCheck = container;
}
}
}
@Nullable
private static PsiElement getContainingStatementOrBlock(@NotNull PsiElement statement) {
return PsiTreeUtil.getParentOfType(statement, PsiStatement.class, PsiCodeBlock.class);
}
private static boolean statementIsLastInBlock(@NotNull PsiCodeBlock block, @NotNull PsiStatement statement) {
for (PsiElement child = block.getLastChild(); child != null; child = child.getPrevSibling()) {
if (!(child instanceof PsiStatement)) {
continue;
}
final PsiStatement childStatement = (PsiStatement)child;
if (statement.equals(childStatement)) {
return true;
}
if (!(statement instanceof PsiEmptyStatement)) {
return false;
}
}
return false;
}
@Nullable
public static PsiStatement getFirstStatementInBlock(@Nullable PsiCodeBlock codeBlock) {
return PsiTreeUtil.getChildOfType(codeBlock, PsiStatement.class);
}
@Nullable
public static PsiStatement getLastStatementInBlock(@Nullable PsiCodeBlock codeBlock) {
return getLastChildOfType(codeBlock, PsiStatement.class);
}
private static <T extends PsiElement> T getLastChildOfType(@Nullable PsiElement element, @NotNull Class<T> aClass) {
if (element == null) return null;
for (PsiElement child = element.getLastChild(); child != null; child = child.getPrevSibling()) {
if (aClass.isInstance(child)) {
//noinspection unchecked
return (T)child;
}
}
return null;
}
/**
* @return null, if zero or more than one statements in the specified code block.
*/
@Nullable
public static PsiStatement getOnlyStatementInBlock(@Nullable PsiCodeBlock codeBlock) {
return getOnlyChildOfType(codeBlock, PsiStatement.class);
}
static <T extends PsiElement> T getOnlyChildOfType(@Nullable PsiElement element, @NotNull Class<T> aClass) {
if (element == null) return null;
T result = null;
for (PsiElement child = element.getFirstChild(); child != null; child = child.getNextSibling()) {
if (aClass.isInstance(child)) {
if (result == null) {
//noinspection unchecked
result = (T)child;
}
else {
return null;
}
}
}
return result;
}
public static boolean hasStatementCount(@Nullable PsiCodeBlock codeBlock, int count) {
return hasChildrenOfTypeCount(codeBlock, count, PsiStatement.class);
}
static <T extends PsiElement> boolean hasChildrenOfTypeCount(@Nullable PsiElement element, int count, @NotNull Class<T> aClass) {
if (element == null) return false;
int i = 0;
for (PsiElement child = element.getFirstChild(); child != null; child = child.getNextSibling()) {
if (aClass.isInstance(child)) {
i++;
if (i > count) return false;
}
}
return i == count;
}
public static boolean isEmptyCodeBlock(PsiCodeBlock codeBlock) {
return hasStatementCount(codeBlock, 0);
}
public static boolean methodAlwaysThrowsException(@NotNull PsiMethod method) {
final PsiCodeBlock body = method.getBody();
if (body == null) {
return true;
}
return !containsReturn(body) && !codeBlockMayCompleteNormally(body);
}
public static boolean lambdaExpressionAlwaysThrowsException(PsiLambdaExpression expression) {
final PsiElement body = expression.getBody();
if (body instanceof PsiExpression) {
return false;
}
if (!(body instanceof PsiCodeBlock)) {
return true;
}
final PsiCodeBlock codeBlock = (PsiCodeBlock)body;
return !containsReturn(codeBlock) && !codeBlockMayCompleteNormally(codeBlock);
}
public static boolean statementContainsNakedBreak(PsiStatement statement) {
if (statement == null) {
return false;
}
final NakedBreakFinder breakFinder = new NakedBreakFinder();
statement.accept(breakFinder);
return breakFinder.breakFound();
}
/**
* Checks whether the given statement effectively breaks given loop. Returns true
* if the statement is {@link PsiBreakStatement} having given loop as a target. Also may return
* true in other cases if the statement is semantically equivalent to break like this:
*
* <pre>{@code
* int myMethod(int[] data) {
* for(int val : data) {
* if(val == 5) {
* System.out.println(val);
* return 0; // this statement is semantically equivalent to break.
* }
* }
* return 0;
* }}</pre>
*
* @param statement statement which may break the loop
* @param loop a loop to break
* @return true if the statement actually breaks the loop
*/
@Contract("null, _ -> false")
public static boolean statementBreaksLoop(PsiStatement statement, PsiLoopStatement loop) {
if(statement instanceof PsiBreakStatement) {
return ((PsiBreakStatement)statement).findExitedStatement() == loop;
}
if(statement instanceof PsiReturnStatement) {
PsiExpression returnValue = ((PsiReturnStatement)statement).getReturnValue();
PsiElement cur = loop;
for(PsiElement parent = cur.getParent();;parent = cur.getParent()) {
if(parent instanceof PsiLabeledStatement) {
cur = parent;
} else if(parent instanceof PsiCodeBlock) {
PsiCodeBlock block = (PsiCodeBlock)parent;
PsiStatement[] statements = block.getStatements();
if(block.getParent() instanceof PsiBlockStatement && statements.length > 0 && statements[statements.length-1] == cur) {
cur = block.getParent();
} else break;
} else if(parent instanceof PsiIfStatement) {
if(cur == ((PsiIfStatement)parent).getThenBranch() || cur == ((PsiIfStatement)parent).getElseBranch()) {
cur = parent;
} else break;
} else break;
}
PsiElement nextElement = PsiTreeUtil.skipSiblingsForward(cur, PsiComment.class, PsiWhiteSpace.class);
if(nextElement instanceof PsiReturnStatement) {
return EquivalenceChecker.getCanonicalPsiEquivalence()
.expressionsAreEquivalent(returnValue, ((PsiReturnStatement)nextElement).getReturnValue());
}
if(nextElement == null && returnValue == null && cur.getParent() instanceof PsiMethod) {
return true;
}
}
return false;
}
/**
* Checks whether control flow after executing given statement will definitely not go into the next iteration of given loop.
*
* @param statement executed statement. It's not checked whether this statement itself breaks the loop.
* @param loop a surrounding loop. Must be parent of statement
* @return true if it can be statically defined that next loop iteration will not be executed.
*/
@Contract("null, _ -> false")
public static boolean flowBreaksLoop(PsiStatement statement, PsiLoopStatement loop) {
if(statement == null || statement == loop) return false;
for (PsiStatement sibling = nextExecutedStatement(statement); sibling != null; sibling = nextExecutedStatement(sibling)) {
if(sibling instanceof PsiContinueStatement) return false;
if(sibling instanceof PsiThrowStatement || sibling instanceof PsiReturnStatement) return true;
if(sibling instanceof PsiBreakStatement) {
PsiBreakStatement breakStatement = (PsiBreakStatement)sibling;
PsiStatement exitedStatement = breakStatement.findExitedStatement();
if(exitedStatement == loop) return true;
return flowBreaksLoop(exitedStatement, loop);
}
}
return false;
}
@Nullable
private static PsiStatement nextExecutedStatement(PsiStatement statement) {
PsiStatement next = PsiTreeUtil.getNextSiblingOfType(statement, PsiStatement.class);
while (next instanceof PsiBlockStatement) {
PsiStatement[] statements = ((PsiBlockStatement)next).getCodeBlock().getStatements();
if (statements.length == 0) break;
next = statements[0];
}
if (next == null) {
PsiElement parent = statement.getParent();
if (parent instanceof PsiCodeBlock) {
PsiElement gParent = parent.getParent();
if (gParent instanceof PsiBlockStatement || gParent instanceof PsiSwitchStatement) {
return nextExecutedStatement((PsiStatement)gParent);
}
}
else if (parent instanceof PsiLabeledStatement || parent instanceof PsiIfStatement || parent instanceof PsiSwitchLabelStatement
|| parent instanceof PsiSwitchStatement) {
return nextExecutedStatement((PsiStatement)parent);
}
}
return next;
}
private static class NakedBreakFinder extends JavaRecursiveElementWalkingVisitor {
private boolean m_found;
private boolean breakFound() {
return m_found;
}
@Override
public void visitElement(PsiElement element) {
if (m_found) {
return;
}
super.visitElement(element);
}
@Override
public void visitReferenceExpression(
PsiReferenceExpression expression) {
}
@Override
public void visitBreakStatement(PsiBreakStatement statement) {
if (statement.getLabelIdentifier() != null) {
return;
}
m_found = true;
}
@Override
public void visitDoWhileStatement(PsiDoWhileStatement statement) {
// don't drill down
}
@Override
public void visitForStatement(PsiForStatement statement) {
// don't drill down
}
@Override
public void visitForeachStatement(PsiForeachStatement statement) {
// don't drill down
}
@Override
public void visitWhileStatement(PsiWhileStatement statement) {
// don't drill down
}
@Override
public void visitSwitchStatement(PsiSwitchStatement statement) {
// don't drill down
}
}
private static class SystemExitFinder extends JavaRecursiveElementWalkingVisitor {
private boolean m_found;
private boolean exitFound() {
return m_found;
}
@Override
public void visitClass(@NotNull PsiClass aClass) {
// do nothing to keep from drilling into inner classes
}
@Override
public void visitMethodCallExpression(
@NotNull PsiMethodCallExpression expression) {
if (m_found) {
return;
}
super.visitMethodCallExpression(expression);
final PsiMethod method = expression.resolveMethod();
if (method == null) {
return;
}
@NonNls final String methodName = method.getName();
if (!methodName.equals("exit")) {
return;
}
final PsiClass aClass = method.getContainingClass();
if (aClass == null) {
return;
}
final String className = aClass.getQualifiedName();
if (!"java.lang.System".equals(className) && !"java.lang.Runtime".equals(className)) {
return;
}
m_found = true;
}
}
private static class ReturnFinder extends JavaRecursiveElementWalkingVisitor {
private boolean m_found;
private boolean returnFound() {
return m_found;
}
@Override
public void visitClass(@NotNull PsiClass psiClass) {
// do nothing, to keep drilling into inner classes
}
@Override
public void visitLambdaExpression(PsiLambdaExpression expression) {
}
@Override
public void visitReturnStatement(@NotNull PsiReturnStatement returnStatement) {
if (m_found) {
return;
}
super.visitReturnStatement(returnStatement);
m_found = true;
}
}
private static class BreakFinder extends JavaRecursiveElementWalkingVisitor {
private boolean m_found;
private final PsiStatement m_target;
private BreakFinder(@NotNull PsiStatement target) {
m_target = target;
}
private boolean breakFound() {
return m_found;
}
@Override
public void visitBreakStatement(@NotNull PsiBreakStatement statement) {
if (m_found) {
return;
}
super.visitBreakStatement(statement);
final PsiStatement exitedStatement = statement.findExitedStatement();
if (exitedStatement == null) {
return;
}
if (PsiTreeUtil.isAncestor(exitedStatement, m_target, false)) {
m_found = true;
}
}
@Override
public void visitIfStatement(PsiIfStatement statement) {
if (m_found) {
return;
}
final PsiExpression condition = statement.getCondition();
final Object value = ExpressionUtils.computeConstantExpression(condition);
if (Boolean.FALSE != value) {
final PsiStatement thenBranch = statement.getThenBranch();
if (thenBranch != null) {
thenBranch.accept(this);
}
}
if (Boolean.TRUE != value) {
final PsiStatement elseBranch = statement.getElseBranch();
if (elseBranch != null) {
elseBranch.accept(this);
}
}
}
}
private static class ContinueFinder extends JavaRecursiveElementWalkingVisitor {
private boolean m_found;
private final PsiStatement m_target;
private ContinueFinder(@NotNull PsiStatement target) {
m_target = target;
}
private boolean continueFound() {
return m_found;
}
@Override
public void visitContinueStatement(@NotNull PsiContinueStatement statement) {
if (m_found) {
return;
}
super.visitContinueStatement(statement);
final PsiStatement continuedStatement = statement.findContinuedStatement();
if (continuedStatement == null) {
return;
}
if (PsiTreeUtil.isAncestor(continuedStatement, m_target, false)) {
m_found = true;
}
}
@Override
public void visitIfStatement(PsiIfStatement statement) {
if (m_found) {
return;
}
final PsiExpression condition = statement.getCondition();
final Object value = ExpressionUtils.computeConstantExpression(condition);
if (Boolean.FALSE != value) {
final PsiStatement thenBranch = statement.getThenBranch();
if (thenBranch != null) {
thenBranch.accept(this);
}
}
if (Boolean.TRUE != value) {
final PsiStatement elseBranch = statement.getElseBranch();
if (elseBranch != null) {
elseBranch.accept(this);
}
}
}
}
private static class MethodCallFinder extends JavaRecursiveElementWalkingVisitor {
private final String containingClassName;
private final PsiType returnType;
private final String methodName;
private final PsiType[] parameterTypeNames;
private boolean containsCallToMethod;
private MethodCallFinder(String containingClassName, PsiType returnType, String methodName, PsiType... parameterTypeNames) {
this.containingClassName = containingClassName;
this.returnType = returnType;
this.methodName = methodName;
this.parameterTypeNames = parameterTypeNames;
}
@Override
public void visitElement(PsiElement element) {
if (containsCallToMethod) {
return;
}
super.visitElement(element);
}
@Override
public void visitMethodCallExpression(
PsiMethodCallExpression expression) {
if (containsCallToMethod) {
return;
}
super.visitMethodCallExpression(expression);
if (!MethodCallUtils.isCallToMethod(expression, containingClassName, returnType, methodName, parameterTypeNames)) {
return;
}
containsCallToMethod = true;
}
private boolean containsCallToMethod() {
return containsCallToMethod;
}
}
private static class ContinueToAncestorFinder extends JavaRecursiveElementWalkingVisitor {
private final PsiStatement statement;
private boolean found;
private ContinueToAncestorFinder(PsiStatement statement) {
this.statement = statement;
}
@Override
public void visitElement(PsiElement element) {
if (found) {
return;
}
super.visitElement(element);
}
@Override
public void visitContinueStatement(
PsiContinueStatement continueStatement) {
if (found) {
return;
}
super.visitContinueStatement(continueStatement);
final PsiIdentifier labelIdentifier = continueStatement.getLabelIdentifier();
if (labelIdentifier == null) {
return;
}
final PsiStatement continuedStatement = continueStatement.findContinuedStatement();
if (continuedStatement == null) {
return;
}
if (PsiTreeUtil.isAncestor(continuedStatement, statement, true)) {
found = true;
}
}
private boolean continueToAncestorFound() {
return found;
}
}
}
| |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.gridfs;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.MongoException;
import org.bson.types.ObjectId;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
/**
* <p>This class represents a GridFS file to be written to the database. Operations include:</p>
*
* <ul>
* <li>Writing data obtained from an InputStream</li>
* <li>Getting an OutputStream to stream the data out to</li>
* </ul>
*
* @mongodb.driver.manual core/gridfs/ GridFS
*/
public class GridFSInputFile extends GridFSFile {
    private final InputStream inputStream;
    private final boolean closeStreamOnPersist;
    // Becomes true once chunk data has been fully persisted; guards against double saves.
    private boolean savedChunks = false;
    // Staging area holding at most one chunk's worth of data before it is flushed.
    private byte[] buffer = null;
    // Index ("n") of the next chunk document to be written.
    private int currentChunkNumber = 0;
    // Number of bytes currently staged in {@code buffer}.
    private int currentBufferPosition = 0;
    // Running total of bytes persisted; becomes the file length when data is finished.
    private long totalBytes = 0;
    // Lazily created by getOutputStream(); mutually exclusive with save()/saveChunks().
    private OutputStream outputStream = null;

    /**
     * Default constructor setting the GridFS file name and providing an input stream containing data to be written to the file.
     *
     * @param gridFS               The GridFS connection handle.
     * @param inputStream          Stream used for reading data from.
     * @param filename             Name of the file to be created.
     * @param closeStreamOnPersist indicate the passed in input stream should be closed once the data chunk persisted
     */
    protected GridFSInputFile(final GridFS gridFS, final InputStream inputStream, final String filename,
                              final boolean closeStreamOnPersist) {
        this.fs = gridFS;
        this.inputStream = inputStream;
        this.filename = filename;
        this.closeStreamOnPersist = closeStreamOnPersist;
        this.id = new ObjectId();
        this.chunkSize = GridFS.DEFAULT_CHUNKSIZE;
        this.uploadDate = new Date();
        this.buffer = new byte[(int) chunkSize];
    }

    /**
     * Default constructor setting the GridFS file name and providing an input stream containing data to be written to the file.
     *
     * @param gridFS      The GridFS connection handle.
     * @param inputStream Stream used for reading data from.
     * @param filename    Name of the file to be created.
     */
    protected GridFSInputFile(final GridFS gridFS, final InputStream inputStream, final String filename) {
        this(gridFS, inputStream, filename, false);
    }

    /**
     * Constructor that only provides a file name, but does not rely on the presence of an {@link java.io.InputStream}. An {@link
     * java.io.OutputStream} can later be obtained for writing using the {@link #getOutputStream()} method.
     *
     * @param gridFS   The GridFS connection handle.
     * @param filename Name of the file to be created.
     */
    protected GridFSInputFile(final GridFS gridFS, final String filename) {
        this(gridFS, null, filename);
    }

    /**
     * Minimal constructor that does not rely on the presence of an {@link java.io.InputStream}. An {@link java.io.OutputStream} can later
     * be obtained for writing using the {@link #getOutputStream()} method.
     *
     * @param gridFS The GridFS connection handle.
     */
    protected GridFSInputFile(final GridFS gridFS) {
        this(gridFS, null, null);
    }

    /**
     * Sets the ID of this GridFS file.
     *
     * @param id the file's ID.
     */
    public void setId(final Object id) {
        this.id = id;
    }

    /**
     * Sets the file name on the GridFS entry.
     *
     * @param filename File name.
     */
    public void setFilename(final String filename) {
        this.filename = filename;
    }

    /**
     * Sets the content type (MIME type) on the GridFS entry.
     *
     * @param contentType Content type.
     */
    public void setContentType(final String contentType) {
        this.contentType = contentType;
    }

    /**
     * Set the chunk size. This must be called before saving any data. Silently ignored once an
     * OutputStream has been handed out or chunks have already been saved, because changing the
     * chunk size at that point would corrupt the file/chunk relationship.
     *
     * @param chunkSize The size in bytes.
     */
    public void setChunkSize(final long chunkSize) {
        if (outputStream != null || savedChunks) {
            return;
        }
        this.chunkSize = chunkSize;
        buffer = new byte[(int) this.chunkSize];
    }

    /**
     * Calls {@link GridFSInputFile#save(long)} with the existing chunk size.
     *
     * @throws MongoException if there's a problem saving the file.
     */
    @Override
    public void save() {
        save(chunkSize);
    }

    /**
     * This method first calls saveChunks(long) if the file data has not been saved yet. Then it persists the file entry to GridFS.
     *
     * @param chunkSize Size of chunks for file in bytes.
     * @throws MongoException if there's a problem saving the file.
     */
    public void save(final long chunkSize) {
        if (outputStream != null) {
            throw new MongoException("cannot mix OutputStream and regular save()");
        }
        // note that chunkSize only changes chunkSize in case we actually save chunks
        // otherwise there is a risk file and chunks are not compatible
        if (!savedChunks) {
            try {
                saveChunks(chunkSize);
            } catch (IOException ioe) {
                throw new MongoException("couldn't save chunks", ioe);
            }
        }
        super.save();
    }

    /**
     * Saves all data into chunks from configured {@link java.io.InputStream} input stream to GridFS.
     *
     * @return Number of the next chunk.
     * @throws IOException on problems reading the new entry's {@link java.io.InputStream}.
     * @throws MongoException if there's a failure
     * @see com.mongodb.gridfs.GridFSInputFile#saveChunks(long)
     */
    public int saveChunks() throws IOException {
        return saveChunks(chunkSize);
    }

    /**
     * Saves all data into chunks from configured {@link java.io.InputStream} input stream to GridFS. A non-default chunk size can be
     * specified. This method does NOT save the file object itself, one must call save() to do so.
     *
     * @param chunkSize Size of chunks for file in bytes.
     * @return Number of the next chunk.
     * @throws IOException on problems reading the new entry's {@link java.io.InputStream}.
     * @throws MongoException if there's a failure
     */
    public int saveChunks(final long chunkSize) throws IOException {
        if (outputStream != null) {
            throw new MongoException("Cannot mix OutputStream and regular save()");
        }
        if (savedChunks) {
            throw new MongoException("Chunks already saved!");
        }
        if (chunkSize <= 0) {
            throw new MongoException("chunkSize must be greater than zero");
        }
        // Re-allocate the staging buffer only when the requested size actually differs.
        if (this.chunkSize != chunkSize) {
            this.chunkSize = chunkSize;
            buffer = new byte[(int) this.chunkSize];
        }
        int bytesRead = 0;
        // Read until end-of-stream; each pass fills the buffer and flushes it as one chunk.
        while (bytesRead >= 0) {
            currentBufferPosition = 0;
            bytesRead = _readStream2Buffer();
            dumpBuffer(true);
        }
        // only finish data, do not write file, in case one wants to change metadata
        finishData();
        return currentChunkNumber;
    }

    /**
     * After retrieving this {@link java.io.OutputStream}, this object will be capable of accepting successively written data to the output
     * stream. To completely persist this GridFS object, you must finally call the {@link java.io.OutputStream#close()} method on the output
     * stream. Note that calling the save() and saveChunks() methods will throw Exceptions once you obtained the OutputStream.
     *
     * @return Writable stream object.
     */
    public OutputStream getOutputStream() {
        if (outputStream == null) {
            outputStream = new GridFSOutputStream();
        }
        return outputStream;
    }

    /**
     * Dumps a new chunk into the chunks collection. Depending on the flag, also partial buffers (at the end) are going to be written
     * immediately.
     *
     * @param writePartial Write also partial buffers full.
     * @throws MongoException if there's a failure
     */
    private void dumpBuffer(final boolean writePartial) {
        if ((currentBufferPosition < chunkSize) && !writePartial) {
            // Bail out, chunk not complete yet
            return;
        }
        if (currentBufferPosition == 0) {
            // chunk is empty, may be last chunk
            return;
        }
        byte[] writeBuffer = buffer;
        // A trailing partial chunk must be trimmed to its actual length before persisting.
        if (currentBufferPosition != chunkSize) {
            writeBuffer = new byte[currentBufferPosition];
            System.arraycopy(buffer, 0, writeBuffer, 0, currentBufferPosition);
        }
        DBObject chunk = createChunk(id, currentChunkNumber, writeBuffer);
        fs.getChunksCollection().save(chunk);
        currentChunkNumber++;
        totalBytes += writeBuffer.length;
        currentBufferPosition = 0;
    }

    /**
     * Creates a new chunk of this file. Can be over-ridden, if input files need to be split into chunks using a different mechanism.
     *
     * @param id                 the file ID
     * @param currentChunkNumber the unique id for this chunk
     * @param writeBuffer        the byte array containing the data for this chunk
     * @return a DBObject representing this chunk.
     */
    protected DBObject createChunk(final Object id, final int currentChunkNumber, final byte[] writeBuffer) {
        return new BasicDBObject("files_id", id)
               .append("n", currentChunkNumber)
               .append("data", writeBuffer);
    }

    /**
     * Reads a buffer full from the {@link java.io.InputStream}.
     *
     * @return Number of bytes read from stream; negative once end-of-stream is reached.
     * @throws IOException if the reading from the stream fails.
     */
    private int _readStream2Buffer() throws IOException {
        int bytesRead = 0;
        while (currentBufferPosition < chunkSize && bytesRead >= 0) {
            bytesRead = inputStream.read(buffer, currentBufferPosition, (int) chunkSize - currentBufferPosition);
            if (bytesRead > 0) {
                currentBufferPosition += bytesRead;
            } else if (bytesRead == 0) {
                // A zero-byte read for a non-zero requested length would loop forever here;
                // fail fast with a descriptive error instead of spinning.
                throw new MongoException("unexpected: input stream returned 0 bytes for a non-empty read request");
            }
        }
        return bytesRead;
    }

    /**
     * Marks the data as fully written. This needs to be called before super.save()
     */
    private void finishData() {
        if (!savedChunks) {
            length = totalBytes;
            savedChunks = true;
            try {
                // Honor the caller's request to close the source stream once persisted.
                if (inputStream != null && closeStreamOnPersist) {
                    inputStream.close();
                }
            } catch (IOException e) {
                // ignore: the data is already persisted, a close failure is not actionable
            }
        }
    }

    /**
     * An output stream implementation that can be used to successively write to a GridFS file.
     */
    private class GridFSOutputStream extends OutputStream {
        @Override
        public void write(final int b) throws IOException {
            // Per the OutputStream contract only the low 8 bits are written.
            write(new byte[]{(byte) b}, 0, 1);
        }

        @Override
        public void write(final byte[] b, final int off, final int len) throws IOException {
            int offset = off;
            int length = len;
            int toCopy = 0;
            while (length > 0) {
                // Copy at most up to the end of the current chunk buffer.
                toCopy = length;
                if (toCopy > chunkSize - currentBufferPosition) {
                    toCopy = (int) chunkSize - currentBufferPosition;
                }
                System.arraycopy(b, offset, buffer, currentBufferPosition, toCopy);
                currentBufferPosition += toCopy;
                offset += toCopy;
                length -= toCopy;
                // Flush only full chunks; partial data waits for more writes or close().
                if (currentBufferPosition == chunkSize) {
                    dumpBuffer(false);
                }
            }
        }

        /**
         * Processes/saves all data from {@link java.io.InputStream} and closes the potentially present {@link java.io.OutputStream}. The
         * GridFS file will be persisted afterwards.
         */
        @Override
        public void close() {
            // write last buffer if needed
            dumpBuffer(true);
            // finish stream
            finishData();
            // save file obj
            GridFSInputFile.super.save();
        }
    }
}
| |
package org.hisp.dhis.user.action;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.opensymphony.xwork2.Action;
import org.hisp.dhis.i18n.I18n;
import org.hisp.dhis.user.CredentialsInfo;
import org.hisp.dhis.user.PasswordValidationResult;
import org.hisp.dhis.user.PasswordValidationService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserCredentials;
import org.hisp.dhis.user.UserService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author Torgeir Lorange Ostby
*/
/**
 * Struts action validating user form input: uniqueness of username, OpenID,
 * LDAP id and invite username, plus password strength. Returns {@code ERROR}
 * with an i18n message on the first failed check, {@code SUCCESS} otherwise.
 *
 * @author Torgeir Lorange Ostby
 */
public class ValidateUserAction
    implements Action
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private UserService userService;

    public void setUserService( UserService userService )
    {
        this.userService = userService;
    }

    private I18n i18n;

    public void setI18n( I18n i18n )
    {
        this.i18n = i18n;
    }

    @Autowired
    private PasswordValidationService passwordValidationService;

    // -------------------------------------------------------------------------
    // Input
    // -------------------------------------------------------------------------

    // Id of the user being edited; null when creating a new user.
    private Integer id;

    public void setId( Integer id )
    {
        this.id = id;
    }

    private String username;

    public void setUsername( String username )
    {
        this.username = username;
    }

    private String openId;

    public void setOpenId( String openId )
    {
        this.openId = openId;
    }

    private String ldapId;

    public void setLdapId( String ldapId )
    {
        this.ldapId = ldapId;
    }

    private String inviteUsername;

    public void setInviteUsername( String inviteUsername )
    {
        this.inviteUsername = inviteUsername;
    }

    private String rawPassword;

    public void setRawPassword( String rawPassword )
    {
        this.rawPassword = rawPassword;
    }

    private String email;

    public void setEmail( String email )
    {
        this.email = email;
    }

    // -------------------------------------------------------------------------
    // Output
    // -------------------------------------------------------------------------

    private String message;

    public String getMessage()
    {
        return message;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    @Override
    public String execute()
        throws Exception
    {
        if ( username != null && isTakenByOtherUser( userService.getUserCredentialsByUsername( username ) ) )
        {
            message = i18n.getString( "username_in_use" );
            return ERROR;
        }

        if ( openId != null && isTakenByOtherUser( userService.getUserCredentialsByOpenId( openId ) ) )
        {
            message = i18n.getString( "openid_in_use" );
            return ERROR;
        }

        if ( ldapId != null && isTakenByOtherUser( userService.getUserCredentialsByLdapId( ldapId ) ) )
        {
            message = i18n.getString( "ldap_in_use" );
            return ERROR;
        }

        if ( inviteUsername != null && isTakenByOtherUser( userService.getUserCredentialsByUsername( inviteUsername ) ) )
        {
            message = i18n.getString( "username_in_use" );
            return ERROR;
        }

        if ( rawPassword != null && !rawPassword.isEmpty() )
        {
            PasswordValidationResult result;

            CredentialsInfo credentialsInfo = new CredentialsInfo( username, rawPassword, email, true );

            // For an existing user, validate against the stored username/email instead.
            if ( id != null )
            {
                User user = userService.getUser( id );

                if ( user != null )
                {
                    credentialsInfo = new CredentialsInfo( user.getUsername(), rawPassword, user.getEmail(), false );
                }
            }

            result = passwordValidationService.validate( credentialsInfo );

            if ( !result.isValid() )
            {
                message = i18n.getString( result.getI18ErrorMessage() );
                return ERROR;
            }
        }

        message = i18n.getString( "everything_is_ok" );
        return SUCCESS;
    }

    /**
     * Returns true when the given credentials exist and belong to a user other
     * than the one currently being edited (or when creating a new user, i.e.
     * id is null).
     */
    private boolean isTakenByOtherUser( UserCredentials match )
    {
        return match != null && (id == null || match.getId() != id);
    }
}
| |
package com.viesis.viescraft.network;
import com.viesis.viescraft.client.gui.GuiTileEntityAirshipWorkbench;
import com.viesis.viescraft.client.gui.GuiTileEntityBatteryCharger;
import com.viesis.viescraft.client.gui.v1.GuiEntityAirshipV1Default;
import com.viesis.viescraft.client.gui.v1.GuiEntityAirshipV1Module;
import com.viesis.viescraft.client.gui.v1.GuiEntityAirshipV1ModuleInventoryLarge;
import com.viesis.viescraft.client.gui.v1.GuiEntityAirshipV1ModuleInventorySmall;
import com.viesis.viescraft.client.gui.v2.GuiEntityAirshipV2Default;
import com.viesis.viescraft.client.gui.v2.GuiEntityAirshipV2Module;
import com.viesis.viescraft.client.gui.v2.GuiEntityAirshipV2ModuleInventoryLarge;
import com.viesis.viescraft.client.gui.v2.GuiEntityAirshipV2ModuleInventorySmall;
import com.viesis.viescraft.client.gui.v3.GuiEntityAirshipV3Default;
import com.viesis.viescraft.client.gui.v3.GuiEntityAirshipV3Module;
import com.viesis.viescraft.client.gui.v3.GuiEntityAirshipV3ModuleInventoryLarge;
import com.viesis.viescraft.client.gui.v3.GuiEntityAirshipV3ModuleInventorySmall;
import com.viesis.viescraft.client.gui.v4.GuiEntityAirshipV4Default;
import com.viesis.viescraft.client.gui.v4.GuiEntityAirshipV4Module;
import com.viesis.viescraft.client.gui.v4.GuiEntityAirshipV4ModuleInventoryLarge;
import com.viesis.viescraft.client.gui.v4.GuiEntityAirshipV4ModuleInventorySmall;
import com.viesis.viescraft.common.entity.airshipcolors.EntityAirshipV1Core;
import com.viesis.viescraft.common.entity.airshipcolors.EntityAirshipV2Core;
import com.viesis.viescraft.common.entity.airshipcolors.EntityAirshipV3Core;
import com.viesis.viescraft.common.entity.airshipcolors.EntityAirshipV4Core;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v1.ContainerAirshipV1Default;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v1.ContainerAirshipV1Module;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v1.ContainerAirshipV1ModuleInvLarge;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v1.ContainerAirshipV1ModuleInvSmall;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v2.ContainerAirshipV2Default;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v2.ContainerAirshipV2Module;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v2.ContainerAirshipV2ModuleInvLarge;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v2.ContainerAirshipV2ModuleInvSmall;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v3.ContainerAirshipV3Default;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v3.ContainerAirshipV3Module;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v3.ContainerAirshipV3ModuleInvLarge;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v3.ContainerAirshipV3ModuleInvSmall;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v4.ContainerAirshipV4Default;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v4.ContainerAirshipV4Module;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v4.ContainerAirshipV4ModuleInvLarge;
import com.viesis.viescraft.common.entity.airshipcolors.containers.v4.ContainerAirshipV4ModuleInvSmall;
import com.viesis.viescraft.common.tileentity.ContainerAirshipWorkbench;
import com.viesis.viescraft.common.tileentity.ContainerBatteryCharger;
import com.viesis.viescraft.common.tileentity.TileEntityAirshipWorkbench;
import com.viesis.viescraft.common.tileentity.TileEntityBatteryCharger;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.network.IGuiHandler;
/**
 * Central Forge GUI handler. Maps a numeric GUI id to either the server-side
 * {@code Container} or the client-side {@code GuiScreen} for the airship
 * workbench, battery charger and the airship variants (V1-V4, with and
 * without inventory modules).
 */
public class GuiHandler implements IGuiHandler {

    public static GuiHandler instance;

    public static final int GUI_AIRSHIP_WORKBENCH = 50;
    public static final int GUI_BATTERY_CHARGER = 51;
    public static final int GUI_V1_DEFAULT = 1;
    public static final int GUI_V2_DEFAULT = 2;
    public static final int GUI_V3_DEFAULT = 3;
    public static final int GUI_V4_DEFAULT = 4;
    public static final int GUI_V1_MODULE = 5;
    public static final int GUI_V2_MODULE = 6;
    public static final int GUI_V3_MODULE = 7;
    public static final int GUI_V4_MODULE = 8;
    public static final int GUI_V1_MODULE_INVENTORY_SMALL = 9;
    public static final int GUI_V2_MODULE_INVENTORY_SMALL = 10;
    public static final int GUI_V3_MODULE_INVENTORY_SMALL = 11;
    public static final int GUI_V4_MODULE_INVENTORY_SMALL = 12;
    public static final int GUI_V1_MODULE_INVENTORY_LARGE = 13;
    public static final int GUI_V2_MODULE_INVENTORY_LARGE = 14;
    public static final int GUI_V3_MODULE_INVENTORY_LARGE = 15;
    public static final int GUI_V4_MODULE_INVENTORY_LARGE = 16;

    public GuiHandler()
    {
        instance = this;
    }

    @Override
    public Object getServerGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z)
    {
        switch (ID) {
            // Tile-entity backed containers use the clicked block position.
            case GUI_AIRSHIP_WORKBENCH:
                return new ContainerAirshipWorkbench(player.inventory, world, (TileEntityAirshipWorkbench)world.getTileEntity(new BlockPos(x, y, z)));
            case GUI_BATTERY_CHARGER:
                return new ContainerBatteryCharger(player.inventory, world, (TileEntityBatteryCharger)world.getTileEntity(new BlockPos(x, y, z)));
            // Airship containers are backed by the entity the player is riding.
            case GUI_V1_DEFAULT:
                return new ContainerAirshipV1Default(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_DEFAULT:
                return new ContainerAirshipV2Default(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_DEFAULT:
                return new ContainerAirshipV3Default(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_DEFAULT:
                return new ContainerAirshipV4Default(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            case GUI_V1_MODULE:
                return new ContainerAirshipV1Module(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_MODULE:
                return new ContainerAirshipV2Module(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_MODULE:
                return new ContainerAirshipV3Module(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_MODULE:
                return new ContainerAirshipV4Module(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            case GUI_V1_MODULE_INVENTORY_SMALL:
                return new ContainerAirshipV1ModuleInvSmall(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_MODULE_INVENTORY_SMALL:
                return new ContainerAirshipV2ModuleInvSmall(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_MODULE_INVENTORY_SMALL:
                return new ContainerAirshipV3ModuleInvSmall(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_MODULE_INVENTORY_SMALL:
                return new ContainerAirshipV4ModuleInvSmall(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            case GUI_V1_MODULE_INVENTORY_LARGE:
                return new ContainerAirshipV1ModuleInvLarge(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_MODULE_INVENTORY_LARGE:
                return new ContainerAirshipV2ModuleInvLarge(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_MODULE_INVENTORY_LARGE:
                return new ContainerAirshipV3ModuleInvLarge(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_MODULE_INVENTORY_LARGE:
                return new ContainerAirshipV4ModuleInvLarge(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            default:
                return null;
        }
    }

    @Override
    public Object getClientGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z)
    {
        switch (ID) {
            // Tile-entity backed screens use the clicked block position.
            case GUI_AIRSHIP_WORKBENCH:
                return new GuiTileEntityAirshipWorkbench(player.inventory, world, (TileEntityAirshipWorkbench)world.getTileEntity(new BlockPos(x, y, z)));
            case GUI_BATTERY_CHARGER:
                return new GuiTileEntityBatteryCharger(player.inventory, world, (TileEntityBatteryCharger)world.getTileEntity(new BlockPos(x, y, z)));
            // Airship screens are backed by the entity the player is riding.
            case GUI_V1_DEFAULT:
                return new GuiEntityAirshipV1Default(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_DEFAULT:
                return new GuiEntityAirshipV2Default(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_DEFAULT:
                return new GuiEntityAirshipV3Default(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_DEFAULT:
                return new GuiEntityAirshipV4Default(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            case GUI_V1_MODULE:
                return new GuiEntityAirshipV1Module(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_MODULE:
                return new GuiEntityAirshipV2Module(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_MODULE:
                return new GuiEntityAirshipV3Module(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_MODULE:
                return new GuiEntityAirshipV4Module(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            case GUI_V1_MODULE_INVENTORY_SMALL:
                return new GuiEntityAirshipV1ModuleInventorySmall(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_MODULE_INVENTORY_SMALL:
                return new GuiEntityAirshipV2ModuleInventorySmall(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_MODULE_INVENTORY_SMALL:
                return new GuiEntityAirshipV3ModuleInventorySmall(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_MODULE_INVENTORY_SMALL:
                return new GuiEntityAirshipV4ModuleInventorySmall(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            case GUI_V1_MODULE_INVENTORY_LARGE:
                return new GuiEntityAirshipV1ModuleInventoryLarge(player.inventory, (EntityAirshipV1Core)player.getRidingEntity());
            case GUI_V2_MODULE_INVENTORY_LARGE:
                return new GuiEntityAirshipV2ModuleInventoryLarge(player.inventory, (EntityAirshipV2Core)player.getRidingEntity());
            case GUI_V3_MODULE_INVENTORY_LARGE:
                return new GuiEntityAirshipV3ModuleInventoryLarge(player.inventory, (EntityAirshipV3Core)player.getRidingEntity());
            case GUI_V4_MODULE_INVENTORY_LARGE:
                return new GuiEntityAirshipV4ModuleInventoryLarge(player.inventory, (EntityAirshipV4Core)player.getRidingEntity());
            default:
                return null;
        }
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util.newProjectWizard;
import com.google.common.annotations.VisibleForTesting;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.ide.util.projectWizard.ModuleWizardStep;
import com.intellij.ide.util.projectWizard.ProjectBuilder;
import com.intellij.ide.util.projectWizard.WizardContext;
import com.intellij.ide.wizard.AbstractWizard;
import com.intellij.ide.wizard.CommitStepException;
import com.intellij.ide.wizard.StepWithSubSteps;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.components.StorageScheme;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.DumbModePermission;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.IdeBorderFactory;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.io.File;
/**
* @author Dmitry Avdeev
* Date: 19.09.13
*/
public abstract class AbstractProjectWizard extends AbstractWizard<ModuleWizardStep> {
protected final WizardContext myWizardContext;
@Nullable
private WizardDelegate myDelegate;
  /**
   * Creates a wizard owned by {@code project}; when {@code defaultPath} is given,
   * the wizard context's project directory and name are pre-filled from it.
   */
  public AbstractProjectWizard(String title, Project project, String defaultPath) {
    super(title, project);
    myWizardContext = initContext(project, defaultPath, getDisposable());
    myWizardContext.setWizard(this);
  }
  /**
   * Creates a wizard parented to the given dialog component, with no default
   * project path.
   */
  public AbstractProjectWizard(String title, Project project, Component dialogParent) {
    super(title, dialogParent);
    myWizardContext = initContext(project, null, getDisposable());
    myWizardContext.setWizard(this);
  }
@Override
protected String addStepComponent(Component component) {
if (component instanceof JComponent) {
((JComponent)component).setBorder(IdeBorderFactory.createEmptyBorder(0, 0, 0, 0));
}
return super.addStepComponent(component);
}
public abstract StepSequence getSequence();
private static WizardContext initContext(@Nullable Project project, @Nullable String defaultPath, Disposable parentDisposable) {
WizardContext context = new WizardContext(project, parentDisposable);
if (defaultPath != null) {
context.setProjectFileDirectory(defaultPath, true);
context.setProjectName(defaultPath.substring(FileUtil.toSystemIndependentName(defaultPath).lastIndexOf("/") + 1));
}
return context;
}
@Nullable
public static Sdk getNewProjectJdk(WizardContext context) {
if (context.getProjectJdk() != null) {
return context.getProjectJdk();
}
return getProjectSdkByDefault(context);
}
public static Sdk getProjectSdkByDefault(WizardContext context) {
final Project project = context.getProject() == null ? ProjectManager.getInstance().getDefaultProject() : context.getProject();
final Sdk projectJdk = ProjectRootManager.getInstance(project).getProjectSdk();
if (projectJdk != null) {
return projectJdk;
}
return null;
}
@NotNull
public String getNewProjectFilePath() {
if (myWizardContext.getProjectStorageFormat() == StorageScheme.DEFAULT) {
return myWizardContext.getProjectFileDirectory() + File.separator + myWizardContext.getProjectName() + ProjectFileType.DOT_DEFAULT_EXTENSION;
}
else {
return myWizardContext.getProjectFileDirectory();
}
}
  /** Returns the storage scheme (file-based or directory-based) selected for the new project. */
  @NotNull
  public StorageScheme getStorageScheme() {
    return myWizardContext.getProjectStorageFormat();
  }
  /** Returns the builder configured in the wizard context for creating the project. */
  public ProjectBuilder getProjectBuilder() {
    return myWizardContext.getProjectBuilder();
  }
  /** Returns the project name currently held by the wizard context. */
  public String getProjectName() {
    return myWizardContext.getProjectName();
  }
  /** Convenience overload of {@link #getNewProjectJdk(WizardContext)} for this wizard's context. */
  @Nullable
  public Sdk getNewProjectJdk() {
    return getNewProjectJdk(myWizardContext);
  }
@NotNull
public String getNewCompileOutput() {
final String projectFilePath = myWizardContext.getProjectFileDirectory();
@NonNls String path = myWizardContext.getCompilerOutputDirectory();
if (path == null) {
path = StringUtil.endsWithChar(projectFilePath, '/') ? projectFilePath + "out" : projectFilePath + "/out";
}
return path;
}
  @Override
  protected void updateStep() {
    // Let the active step refresh its own UI before the wizard-level update runs.
    if (!mySteps.isEmpty()) {
      getCurrentStepObject().updateStep();
    }
    super.updateStep();
    // Project wizard steps show no sidebar icon.
    myIcon.setIcon(null);
  }
  @Override
  protected void dispose() {
    // Release UI resources of every step before the dialog itself is torn down.
    StepSequence sequence = getSequence();
    if (sequence != null) {
      for (ModuleWizardStep step : sequence.getAllSteps()) {
        step.disposeUIResources();
      }
    }
    super.dispose();
  }
  @Override
  protected final void doOKAction() {
    final Ref<Boolean> result = Ref.create(false);
    // Finishing may start background dumb mode; declare that explicitly to DumbService.
    DumbService.allowStartingDumbModeInside(DumbModePermission.MAY_START_BACKGROUND, new Runnable() {
      @Override
      public void run() {
        result.set(doFinishAction());
      }
    });
    // Keep the dialog open when any step failed to commit.
    if (!result.get()) return;
    super.doOKAction();
  }
  /**
   * Commits every remaining step in order, starting from the current one, then
   * notifies all steps that the wizard finished. Returns false (keeping the
   * wizard open) as soon as any step fails to validate or commit. The current
   * step index and UI are restored in all exit paths via the finally block.
   */
  @VisibleForTesting
  public boolean doFinishAction() {
    // A delegate wizard fully owns the finish behavior.
    if (myDelegate != null) {
      myDelegate.doFinishAction();
      return true;
    }
    int idx = getCurrentStep();
    try {
      do {
        final ModuleWizardStep step = mySteps.get(idx);
        // Steps after the current one have not been shown yet; refresh them first.
        if (step != getCurrentStepObject()) {
          step.updateStep();
        }
        if (!commitStepData(step)) {
          return false;
        }
        step.onStepLeaving();
        try {
          step._commit(true);
        }
        catch (CommitStepException e) {
          handleCommitException(e);
          return false;
        }
        if (!isLastStep(idx)) {
          idx = getNextStep(idx);
        }
        else {
          // All steps committed; give each a chance to finalize (and to veto).
          for (ModuleWizardStep wizardStep : mySteps) {
            try {
              wizardStep.onWizardFinished();
            }
            catch (CommitStepException e) {
              handleCommitException(e);
              return false;
            }
          }
          break;
        }
      } while (true);
    }
    finally {
      // Leave the wizard pointing at the step we stopped on, success or failure.
      myCurrentStep = idx;
      updateStep();
    }
    return true;
  }
/** Shows the message of a failed step commit to the user (if there is one). */
private void handleCommitException(CommitStepException e) {
  final String errorText = e.getMessage();
  if (errorText == null) {
    return;
  }
  Messages.showErrorDialog(getCurrentStepComponent(), errorText);
}
/**
 * Validates a wizard step and, on success, transfers its UI state into the
 * data model.
 *
 * @return {@code true} if the step validated and its data was committed
 */
protected boolean commitStepData(final ModuleWizardStep step) {
  boolean valid;
  try {
    valid = step.validate();
  }
  catch (ConfigurationException e) {
    Messages.showErrorDialog(myContentPanel, e.getMessage(), e.getTitle());
    return false;
  }
  if (!valid) {
    return false;
  }
  step.updateDataModel();
  return true;
}
/** Commits the current step and advances to the next one if it validated. */
@Override
public void doNextAction() {
  // An installed delegate takes over navigation completely.
  if (myDelegate != null) {
    myDelegate.doNextAction();
    return;
  }
  final ModuleWizardStep current = getCurrentStepObject();
  if (commitStepData(current)) {
    current.onStepLeaving();
    super.doNextAction();
  }
}
/** Returns the help topic of the current step, or {@code null} if there is none. */
@Override
protected String getHelpID() {
  final ModuleWizardStep current = getCurrentStepObject();
  return current == null ? null : current.getHelpId();
}
/** Test-only accessor: whether the wizard currently shows its last step. */
@TestOnly
public boolean isLast() {
return isLastStep();
}
/**
 * Returns the path of the module file to create: the project file directory
 * plus the project name with the default module file extension.
 */
@NonNls
public String getModuleFilePath() {
return myWizardContext.getProjectFileDirectory() + File.separator + myWizardContext.getProjectName() + ModuleFileType.DOT_DEFAULT_EXTENSION;
}
/** Navigates back, first within a step's own sub-steps if it has any. */
@Override
protected void doPreviousAction() {
// An installed delegate takes over navigation completely.
if (myDelegate != null) {
myDelegate.doPreviousAction();
return;
}
final ModuleWizardStep step = getCurrentStepObject();
step.onStepLeaving();
// Steps with sub-steps navigate within their own sub-step list first.
if (step instanceof StepWithSubSteps) {
((StepWithSubSteps)step).doPreviousAction();
}
super.doPreviousAction();
}
/** Gives the current step a chance to clean up before the wizard closes. */
@Override
public void doCancelAction() {
final ModuleWizardStep step = getCurrentStepObject();
step.onStepLeaving();
super.doCancelAction();
}
/** Returns whether the currently shown step is the wizard's last step. */
protected boolean isLastStep() {
return isLastStep(getCurrentStep());
}
// A step is last if no further step follows it and all of its sub-steps
// (if any) have been completed.
private boolean isLastStep(int step) {
return getNextStep(step) == step && !isStepWithNotCompletedSubSteps(mySteps.get(step));
}
/**
 * Returns the index of the next visible step after {@code step}, or
 * {@code step} itself if there is no further step or the current step still
 * has unfinished sub-steps.
 */
@Override
protected final int getNextStep(final int step) {
ModuleWizardStep nextStep = null;
final StepSequence stepSequence = getSequence();
if (stepSequence != null) {
ModuleWizardStep current = mySteps.get(step);
// Stay on the current step while it has unfinished sub-steps.
if (isStepWithNotCompletedSubSteps(current)) {
return step;
}
nextStep = stepSequence.getNextStep(current);
// Skip over steps that are currently invisible.
while (nextStep != null && !nextStep.isStepVisible()) {
nextStep = stepSequence.getNextStep(nextStep);
}
}
return nextStep == null ? step : mySteps.indexOf(nextStep);
}
/**
 * Returns the index of the closest visible step before {@code step}, the
 * same index while the step is not on its first sub-step, or 0 (the first
 * step) if no previous step exists.
 */
@Override
protected final int getPreviousStep(final int step) {
ModuleWizardStep previousStep = null;
final StepSequence stepSequence = getSequence();
if (stepSequence != null) {
final ModuleWizardStep current = mySteps.get(step);
// Stay on the current step while it can still go back within sub-steps.
if (isNotFirstSubStepInStep(current)) {
return step;
}
previousStep = stepSequence.getPreviousStep(current);
// Skip over steps that are currently invisible.
while (previousStep != null && !previousStep.isStepVisible()) {
previousStep = stepSequence.getPreviousStep(previousStep);
}
}
return previousStep == null ? 0 : mySteps.indexOf(previousStep);
}
/** Returns whether the step has sub-steps and is not yet on the last of them. */
private static boolean isStepWithNotCompletedSubSteps(ModuleWizardStep current) {
  if (!(current instanceof StepWithSubSteps)) {
    return false;
  }
  return !((StepWithSubSteps)current).isLast();
}
/** Returns whether the step has sub-steps and is not on the first of them. */
private static boolean isNotFirstSubStepInStep(ModuleWizardStep current) {
  if (!(current instanceof StepWithSubSteps)) {
    return false;
  }
  return !((StepWithSubSteps)current).isFirst();
}
/**
 * Installs a delegate that takes over navigation and finish handling, or
 * removes it when {@code null} is given.
 */
public void setDelegate(@Nullable WizardDelegate delegate) {
myDelegate = delegate;
}
}
| |
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-lib' project.
// Copyright 2019 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.lib.service;
import de.esoco.lib.app.Application;
import de.esoco.lib.app.CommandLine;
import de.esoco.lib.app.Service;
import de.esoco.lib.collection.CollectionUtil;
import de.esoco.lib.comm.Endpoint;
import de.esoco.lib.comm.EndpointFunction;
import de.esoco.lib.expression.monad.Option;
import de.esoco.lib.json.Json;
import de.esoco.lib.json.JsonObject;
import de.esoco.lib.logging.Log;
import de.esoco.lib.logging.LogExtent;
import de.esoco.lib.logging.LogLevel;
import de.esoco.lib.security.Security;
import de.esoco.lib.service.ModificationSyncEndpoint.SyncData;
import java.util.LinkedHashMap;
import java.util.Map;
import static de.esoco.lib.comm.CommunicationRelationTypes.ENDPOINT_ADDRESS;
import static de.esoco.lib.service.ModificationSyncEndpoint.getCurrentLocks;
import static de.esoco.lib.service.ModificationSyncEndpoint.releaseLock;
import static de.esoco.lib.service.ModificationSyncEndpoint.requestLock;
/********************************************************************
* Tests the functionality of {@link ModificationSyncEndpoint}.
*
* @author eso
*/
/********************************************************************
 * Tests the functionality of {@link ModificationSyncEndpoint}.
 *
 * @author eso
 */
public class ModificationSyncServiceTool extends Application
{
    //~ Enums ------------------------------------------------------------------

    /********************************************************************
     * Enumeration of available sync service commands.
     */
    enum Command
    {
        LIST("Lists either all locks or only in a given context"),
        LOCK("Locks a target in a context"),
        UNLOCK("Removes a target lock in a context"),
        RESET("Resets either all locks or only in the given context"),
        LOGLEVEL("Queries or updates the log level of the target service");

        //~ Instance fields ----------------------------------------------------

        private final String sHelpText;

        //~ Constructors -------------------------------------------------------

        /***************************************
         * Creates a new instance.
         *
         * @param sHelp The help text for the command
         */
        private Command(String sHelp)
        {
            sHelpText = sHelp;
        }

        //~ Methods ------------------------------------------------------------

        /***************************************
         * Returns the command's help text.
         *
         * @return The help text
         */
        public final String getHelpText()
        {
            return sHelpText;
        }
    }

    //~ Instance fields --------------------------------------------------------

    // the endpoint of the sync service; initialized in runApp()
    private Endpoint aSyncService;

    private EndpointFunction<SyncData, String> fReleaseLock;
    private EndpointFunction<SyncData, String> fRequestLock;

    // lazily built in getCommandLineOptions()
    private Map<String, String> aCommandLineOptions = null;

    //~ Static methods ---------------------------------------------------------

    /***************************************
     * Main method.
     *
     * @param rArgs The arguments
     */
    public static void main(String[] rArgs)
    {
        try
        {
            new ModificationSyncServiceTool().run(rArgs);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }

    //~ Methods ----------------------------------------------------------------

    /***************************************
     * {@inheritDoc}
     */
    @Override
    protected String getAppDescription()
    {
        return "Sends a command to a ModificationSyncService running at an " +
               "URL that must be set with -url. Use -h or --help for help.";
    }

    /***************************************
     * {@inheritDoc}
     */
    @Override
    protected Map<String, String> getCommandLineOptions()
    {
        if (aCommandLineOptions == null)
        {
            aCommandLineOptions = new LinkedHashMap<>();

            String sHelpInfo =
                "Display this help or information about a certain command";

            aCommandLineOptions.put("h", sHelpInfo);
            aCommandLineOptions.put("-help", sHelpInfo);
            aCommandLineOptions.put(
                "url",
                "The URL of the sync service (mandatory)");
            aCommandLineOptions.put(
                "context",
                "The context to which to apply a command");
            aCommandLineOptions.put(
                "target",
                "The target to which to apply a command (in a certain context)");

            for (Command eCommand : Command.values())
            {
                aCommandLineOptions.put(
                    eCommand.name().toLowerCase(),
                    eCommand.getHelpText());
            }
        }

        return aCommandLineOptions;
    }

    /***************************************
     * Handles all commands provided on the command line.
     *
     * @param rCommandLine The command line
     * @param rContext     The optional command context
     * @param rTarget      The optional command target
     */
    protected void handleCommands(CommandLine rCommandLine,
                                  String      rContext,
                                  String      rTarget)
    {
        // every command given on the command line is executed, in enum order
        for (Command eCommand : Command.values())
        {
            String sCommand = eCommand.name().toLowerCase();

            if (rCommandLine.hasOption(sCommand))
            {
                System.out.printf(
                    "Applying %s to %s\n",
                    eCommand,
                    aSyncService.get(ENDPOINT_ADDRESS));

                switch (eCommand)
                {
                    case LIST:
                    case RESET:
                        handleListAndReset(eCommand, rContext);
                        break;

                    case LOGLEVEL:
                        handleGetAndSetLogLevel(
                            rCommandLine.getString(sCommand));
                        break;

                    case LOCK:
                    case UNLOCK:
                        handleLockAndUnlock(eCommand, rContext, rTarget);
                        break;

                    default:
                        assert false : "Unhandled command " + eCommand;
                }
            }
        }
    }

    /***************************************
     * {@inheritDoc}
     */
    @Override
    protected void runApp() throws Exception
    {
        CommandLine rCommandLine = getCommandLine();

        aSyncService = Endpoint.at(rCommandLine.requireString("url"));
        fRequestLock = requestLock().from(aSyncService);
        fReleaseLock = releaseLock().from(aSyncService);

        // handle errors on application level
        aSyncService.set(Log.LOG_EXTENT, LogExtent.NOTHING);

        String aContext = rCommandLine.getString("context");
        String aTarget  = rCommandLine.getString("target");

        handleCommands(rCommandLine, aContext, aTarget);
    }

    /***************************************
     * Returns the client ID to be used for identification to the sync service.
     *
     * @return The client ID string
     */
    private String getClientId()
    {
        return getClass().getSimpleName() +
               Security.generateSha256Id().substring(0, 8);
    }

    /***************************************
     * Queries the locks from the sync service and returns the result as parsed
     * JSON data.
     *
     * @return A mapping from lock contexts to mappings from target IDs to lock
     *         holders
     */
    private JsonObject getLocks()
    {
        return Json.parseObject(getCurrentLocks().from(aSyncService).receive());
    }

    /***************************************
     * Handles the querying and setting of the sync service's log level. If no
     * level (or an unknown level name) is given the current log level of the
     * service is queried and printed instead.
     *
     * @param rNewLevel The new log level or NULL to only query the level
     */
    private void handleGetAndSetLogLevel(String rNewLevel)
    {
        if (rNewLevel != null && isValidLogLevel(rNewLevel))
        {
            Service.SET_LOG_LEVEL.on(aSyncService).send(Json.toJson(rNewLevel));
        }
        else
        {
            System.out.printf(
                "Current log level: %s\n",
                Service.GET_LOG_LEVEL.from(aSyncService).receive());
        }
    }

    /***************************************
     * Checks whether a string is the name of an existing {@link LogLevel}.
     *
     * @param  sLevel The log level name to check
     *
     * @return TRUE if the name denotes a valid log level
     */
    private boolean isValidLogLevel(String sLevel)
    {
        try
        {
            // Enum.valueOf() never returns NULL but throws for unknown names,
            // so the previous check "valueOf(...) != null" could never detect
            // invalid input and crashed with an IllegalArgumentException
            LogLevel.valueOf(sLevel);

            return true;
        }
        catch (IllegalArgumentException e)
        {
            return false;
        }
    }

    /***************************************
     * Handles {@link Command#LIST} and {@link Command#RESET}.
     *
     * @param eCommand The command to handle
     * @param rContext The context to apply the command to or none for all
     */
    private void handleListAndReset(Command eCommand, String rContext)
    {
        JsonObject aLocks = getLocks();

        if (aLocks.isEmpty())
        {
            System.out.printf("No lock contexts defined\n");
        }
        else if (rContext != null)
        {
            String sContext = rContext;

            if (eCommand == Command.RESET)
            {
                unlockAll(sContext, aLocks.getObject(sContext));
            }
            else
            {
                printLocks(sContext, aLocks.getObject(sContext));
            }
        }
        else
        {
            // no context given: apply the command to all contexts
            if (eCommand == Command.RESET)
            {
                for (String sContext : aLocks.getProperties().keySet())
                {
                    unlockAll(sContext, aLocks.getObject(sContext));
                }
            }
            else
            {
                for (String sContext : aLocks.getPropertyNames())
                {
                    printLocks(sContext, aLocks.getObject(sContext));
                }
            }
        }
    }

    /***************************************
     * Handles {@link Command#LOCK} and {@link Command#UNLOCK}.
     *
     * @param eCommand The command to handle
     * @param rContext The context to apply the command to
     * @param rTarget  The target to apply the command to
     */
    private void handleLockAndUnlock(Command eCommand,
                                     String  rContext,
                                     String  rTarget)
    {
        if (rContext == null)
        {
            System.out.printf(
                "Sync context must be provided (-context <context>)\n");
        }
        else if (rTarget == null)
        {
            System.out.printf(
                "Sync target must be provided (-target <target>)\n");
        }
        else
        {
            SyncData aSyncData =
                new SyncData(getClientId(), rContext, rTarget, true);

            if (eCommand == Command.LOCK)
            {
                fRequestLock.send(aSyncData);
            }
            else
            {
                fReleaseLock.send(aSyncData);
            }
        }
    }

    /***************************************
     * Prints out the locks of a certain context.
     *
     * @param sContext      The name of the lock context
     * @param oContextLocks The optional locks of the context (a mapping from
     *                      target IDs to lock holders)
     */
    private void printLocks(String sContext, Option<JsonObject> oContextLocks)
    {
        oContextLocks.ifExists(
            rLocks ->
            {
                if (rLocks.isEmpty())
                {
                    System.out.printf("No locks in existing context %s\n", sContext);
                }
                else
                {
                    System.out.printf(
                        "Locks for context %s:\n    %s\n",
                        sContext,
                        CollectionUtil.toString(
                            rLocks.getProperties(),
                            ": ",
                            "\n    "));
                }
            });
    }

    /***************************************
     * Unlocks all targets in a certain modification context.
     *
     * @param sContext      The modification context
     * @param oContextLocks The mapping from target IDs to lock holders
     */
    private void unlockAll(String sContext, Option<JsonObject> oContextLocks)
    {
        String sClientId = getClientId();

        oContextLocks.ifExists(
            rLocks ->
            {
                for (String sTarget : rLocks.getProperties().keySet())
                {
                    fReleaseLock.send(
                        new SyncData(sClientId, sContext, sTarget, true));

                    System.out.printf(
                        "Removed lock on %s from context %s (acquired by %s)\n",
                        sTarget,
                        sContext,
                        rLocks.getString(sTarget).orUse("unknown"));
                }
            });
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.ActionTestUtils;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.support.WriteResponse;
import org.elasticsearch.action.support.replication.ReplicationOperation.ReplicaResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.test.ClusterServiceUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.mockito.ArgumentCaptor;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TransportWriteActionTests extends ESTestCase {
private static ThreadPool threadPool;
private ClusterService clusterService;
private IndexShard indexShard;
private Translog.Location location;
@BeforeClass
public static void beforeClass() {
// One shared thread pool for all tests of this class; torn down in afterClass().
threadPool = new TestThreadPool("ShardReplicationTests");
}
// Fresh mocks and a fresh cluster service for every test method.
@Before
public void initCommonMocks() {
indexShard = mock(IndexShard.class);
location = mock(Translog.Location.class);
clusterService = createClusterService(threadPool);
}
// Closes the per-test cluster service created in initCommonMocks().
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
clusterService.close();
}
@AfterClass
public static void afterClass() {
// Shut down the shared pool and drop the reference so a leak is detectable.
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
threadPool = null;
}
/**
 * Asserts that resolving the given listener fails with a cause of the given
 * type; fails the test with {@code msg} if the listener completes normally.
 */
<T> void assertListenerThrows(String msg, PlainActionFuture<T> listener, Class<?> klass) throws InterruptedException {
    try {
        listener.get();
    } catch (ExecutionException ex) {
        assertThat(ex.getCause(), instanceOf(klass));
        return;
    }
    fail(msg);
}
// RefreshPolicy.NONE on the primary: the write responds immediately and must
// trigger neither a refresh nor the registration of a refresh listener.
public void testPrimaryNoRefreshCall() throws Exception {
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.NONE); // The default, but we'll set it anyway just to be explicit
TestAction testAction = new TestAction();
testAction.shardOperationOnPrimary(request, indexShard,
ActionTestUtils.assertNoFailureListener(result -> {
CapturingActionListener<TestResponse> listener = new CapturingActionListener<>();
result.respond(listener);
assertNotNull(listener.response);
assertNull(listener.failure);
verify(indexShard, never()).refresh(any());
verify(indexShard, never()).addRefreshListener(any(), any());
}));
}
// RefreshPolicy.NONE on the replica: same expectation as on the primary --
// immediate response, no refresh, no refresh listener.
public void testReplicaNoRefreshCall() throws Exception {
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.NONE); // The default, but we'll set it anyway just to be explicit
TestAction testAction = new TestAction();
TransportWriteAction.WriteReplicaResult<TestRequest> result =
testAction.shardOperationOnReplica(request, indexShard);
CapturingActionListener<TransportResponse.Empty> listener = new CapturingActionListener<>();
result.respond(listener);
assertNotNull(listener.response);
assertNull(listener.failure);
verify(indexShard, never()).refresh(any());
verify(indexShard, never()).addRefreshListener(any(), any());
}
// RefreshPolicy.IMMEDIATE on the primary: the shard must be refreshed before
// responding, and the response must report a forced refresh.
public void testPrimaryImmediateRefresh() throws Exception {
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
TestAction testAction = new TestAction();
testAction.shardOperationOnPrimary(request, indexShard,
ActionTestUtils.assertNoFailureListener(result -> {
CapturingActionListener<TestResponse> listener = new CapturingActionListener<>();
result.respond(listener);
assertNotNull(listener.response);
assertNull(listener.failure);
assertTrue(listener.response.forcedRefresh);
verify(indexShard).refresh("refresh_flag_index");
verify(indexShard, never()).addRefreshListener(any(), any());
}));
}
// RefreshPolicy.IMMEDIATE on the replica: the shard must be refreshed before
// responding (the empty replica response carries no forced-refresh flag).
public void testReplicaImmediateRefresh() throws Exception {
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
TestAction testAction = new TestAction();
TransportWriteAction.WriteReplicaResult<TestRequest> result =
testAction.shardOperationOnReplica(request, indexShard);
CapturingActionListener<TransportResponse.Empty> listener = new CapturingActionListener<>();
result.respond(listener);
assertNotNull(listener.response);
assertNull(listener.failure);
verify(indexShard).refresh("refresh_flag_index");
verify(indexShard, never()).addRefreshListener(any(), any());
}
// RefreshPolicy.WAIT_UNTIL on the primary: the response is deferred until the
// registered refresh listener fires, and it reports whether the refresh was forced.
public void testPrimaryWaitForRefresh() throws Exception {
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
TestAction testAction = new TestAction();
testAction.shardOperationOnPrimary(request, indexShard,
ActionTestUtils.assertNoFailureListener(result -> {
CapturingActionListener<TestResponse> listener = new CapturingActionListener<>();
result.respond(listener);
assertNull(listener.response); // Haven't really responded yet
@SuppressWarnings({"unchecked", "rawtypes"})
ArgumentCaptor<Consumer<Boolean>> refreshListener = ArgumentCaptor.forClass((Class) Consumer.class);
verify(indexShard, never()).refresh(any());
verify(indexShard).addRefreshListener(any(), refreshListener.capture());
// Now we can fire the listener manually and we'll get a response
boolean forcedRefresh = randomBoolean();
refreshListener.getValue().accept(forcedRefresh);
assertNotNull(listener.response);
assertNull(listener.failure);
assertEquals(forcedRefresh, listener.response.forcedRefresh);
}));
}
// RefreshPolicy.WAIT_UNTIL on the replica: the (empty) response is deferred
// until the registered refresh listener fires.
public void testReplicaWaitForRefresh() throws Exception {
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
TestAction testAction = new TestAction();
TransportWriteAction.WriteReplicaResult<TestRequest> result = testAction.shardOperationOnReplica(request, indexShard);
CapturingActionListener<TransportResponse.Empty> listener = new CapturingActionListener<>();
result.respond(listener);
assertNull(listener.response); // Haven't responded yet
@SuppressWarnings({ "unchecked", "rawtypes" })
ArgumentCaptor<Consumer<Boolean>> refreshListener = ArgumentCaptor.forClass((Class) Consumer.class);
verify(indexShard, never()).refresh(any());
verify(indexShard).addRefreshListener(any(), refreshListener.capture());
// Now we can fire the listener manually and we'll get a response
boolean forcedRefresh = randomBoolean();
refreshListener.getValue().accept(forcedRefresh);
assertNotNull(listener.response);
assertNull(listener.failure);
}
// A document-level failure on the primary must surface as a listener failure,
// not as a response.
public void testDocumentFailureInShardOperationOnPrimary() throws Exception {
TestRequest request = new TestRequest();
TestAction testAction = new TestAction(true, true);
testAction.shardOperationOnPrimary(request, indexShard,
ActionTestUtils.assertNoFailureListener(writePrimaryResult -> {
CapturingActionListener<TestResponse> listener = new CapturingActionListener<>();
writePrimaryResult.respond(listener);
assertNull(listener.response);
assertNotNull(listener.failure);
}));
}
// A document-level failure on the replica must surface as a listener failure,
// not as a response.
public void testDocumentFailureInShardOperationOnReplica() throws Exception {
TestRequest request = new TestRequest();
TestAction testAction = new TestAction(randomBoolean(), true);
TransportWriteAction.WriteReplicaResult<TestRequest> writeReplicaResult =
testAction.shardOperationOnReplica(request, indexShard);
CapturingActionListener<TransportResponse.Empty> listener = new CapturingActionListener<>();
writeReplicaResult.respond(listener);
assertNull(listener.response);
assertNotNull(listener.failure);
}
/**
 * Exercises the write action's replicas proxy end to end over a capturing
 * transport: sending to an unknown node fails fast, sending to an assigned
 * replica round-trips response/remote-error/transport-error, and
 * failShardIfNeeded() issues a shard-failed request whose outcome is
 * propagated to the caller.
 */
public void testReplicaProxy() throws InterruptedException, ExecutionException {
CapturingTransport transport = new CapturingTransport();
TransportService transportService = transport.createTransportService(clusterService.getSettings(), threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet());
transportService.start();
transportService.acceptIncomingRequests();
ShardStateAction shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool);
TestAction action = new TestAction(Settings.EMPTY, "internal:testAction", transportService,
clusterService, shardStateAction, threadPool);
final String index = "test";
final ShardId shardId = new ShardId(index, "_na_", 0);
ClusterState state = ClusterStateCreationUtils.stateWithActivePrimary(index, true, 1 + randomInt(3), randomInt(2));
logger.info("using state: {}", state);
ClusterServiceUtils.setState(clusterService, state);
final long primaryTerm = state.metaData().index(index).primaryTerm(0);
ReplicationOperation.Replicas<TestRequest> proxy = action.newReplicasProxy();
// check that at unknown node fails
PlainActionFuture<ReplicaResponse> listener = new PlainActionFuture<>();
ShardRoutingState routingState = randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.STARTED,
ShardRoutingState.RELOCATING);
proxy.performOn(
TestShardRouting.newShardRouting(shardId, "NOT THERE",
routingState == ShardRoutingState.RELOCATING ? state.nodes().iterator().next().getId() : null, false, routingState),
new TestRequest(),
primaryTerm, randomNonNegativeLong(), randomNonNegativeLong(), listener);
assertTrue(listener.isDone());
assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class);
// pick a replica that is actually assigned to a node
final IndexShardRoutingTable shardRoutings = state.routingTable().shardRoutingTable(shardId);
final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream()
.filter(ShardRouting::assignedToNode).collect(Collectors.toList()));
listener = new PlainActionFuture<>();
proxy.performOn(replica, new TestRequest(), primaryTerm, randomNonNegativeLong(), randomNonNegativeLong(), listener);
assertFalse(listener.isDone());
CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear();
assertThat(captures, arrayWithSize(1));
// randomly complete the captured request with success, a remote error or a transport error
if (randomBoolean()) {
final TransportReplicationAction.ReplicaResponse response =
new TransportReplicationAction.ReplicaResponse(randomLong(), randomLong());
transport.handleResponse(captures[0].requestId, response);
assertTrue(listener.isDone());
assertThat(listener.get(), equalTo(response));
} else if (randomBoolean()) {
transport.handleRemoteError(captures[0].requestId, new ElasticsearchException("simulated"));
assertTrue(listener.isDone());
assertListenerThrows("listener should reflect remote error", listener, ElasticsearchException.class);
} else {
transport.handleError(captures[0].requestId, new TransportException("simulated"));
assertTrue(listener.isDone());
assertListenerThrows("listener should reflect remote error", listener, TransportException.class);
}
AtomicReference<Object> failure = new AtomicReference<>();
AtomicBoolean success = new AtomicBoolean();
proxy.failShardIfNeeded(replica, primaryTerm, "test", new ElasticsearchException("simulated"),
ActionListener.wrap(r -> success.set(true), failure::set));
CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear();
// A write replication action proxy should fail the shard
assertEquals(1, shardFailedRequests.length);
CapturingTransport.CapturedRequest shardFailedRequest = shardFailedRequests[0];
ShardStateAction.FailedShardEntry shardEntry = (ShardStateAction.FailedShardEntry) shardFailedRequest.request;
// the shard the request was sent to and the shard to be failed should be the same
assertEquals(shardEntry.getShardId(), replica.shardId());
assertEquals(shardEntry.getAllocationId(), replica.allocationId().getId());
// randomly complete the shard-failed request with success, a demotion or a node-closed error
if (randomBoolean()) {
// simulate success
transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE);
assertTrue(success.get());
assertNull(failure.get());
} else if (randomBoolean()) {
// simulate the primary has been demoted
transport.handleRemoteError(shardFailedRequest.requestId,
new ShardStateAction.NoLongerPrimaryShardException(replica.shardId(),
"shard-failed-test"));
assertFalse(success.get());
assertNotNull(failure.get());
} else {
// simulated a node closing exception
transport.handleRemoteError(shardFailedRequest.requestId,
new NodeClosedException(state.nodes().getLocalNode()));
assertFalse(success.get());
assertNotNull(failure.get());
}
}
/**
 * Races respond() against onFailure()/onSuccess() on a WriteReplicaResult and
 * verifies the listener is completed exactly once regardless of ordering.
 */
public void testConcurrentWriteReplicaResultCompletion() throws InterruptedException {
IndexShard replica = mock(IndexShard.class);
when(replica.getTranslogDurability()).thenReturn(Translog.Durability.ASYNC);
TestRequest request = new TestRequest();
request.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
TransportWriteAction.WriteReplicaResult<TestRequest> replicaResult = new TransportWriteAction.WriteReplicaResult<>(
request, new Translog.Location(0, 0, 0), null, replica, logger);
// Both threads wait on the barrier so respond() and the completion call start
// as close to simultaneously as possible.
CyclicBarrier barrier = new CyclicBarrier(2);
Runnable waitForBarrier = () -> {
try {
barrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
throw new AssertionError(e);
}
};
CountDownLatch completionLatch = new CountDownLatch(1);
threadPool.generic().execute(() -> {
waitForBarrier.run();
replicaResult.respond(ActionListener.wrap(completionLatch::countDown));
});
if (randomBoolean()) {
threadPool.generic().execute(() -> {
waitForBarrier.run();
replicaResult.onFailure(null);
});
} else {
threadPool.generic().execute(() -> {
waitForBarrier.run();
replicaResult.onSuccess(false);
});
}
// The listener must complete either way; a hang here means double/no completion.
assertTrue(completionLatch.await(30, TimeUnit.SECONDS));
}
/**
 * Minimal concrete TransportWriteAction for the tests above. The flags make
 * the primary/replica operations simulate a document-level failure instead of
 * producing a normal result.
 */
private class TestAction extends TransportWriteAction<TestRequest, TestRequest, TestResponse> {
private final boolean withDocumentFailureOnPrimary;
private final boolean withDocumentFailureOnReplica;
protected TestAction() {
this(false, false);
}
// Constructor for tests that never dispatch over the transport: all cluster
// plumbing is mocked or null.
protected TestAction(boolean withDocumentFailureOnPrimary, boolean withDocumentFailureOnReplica) {
super(Settings.EMPTY, "internal:test",
new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> null, null, Collections.emptySet()), null, null, null, null,
new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), TestRequest::new,
TestRequest::new, ThreadPool.Names.SAME, false);
this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary;
this.withDocumentFailureOnReplica = withDocumentFailureOnReplica;
}
// Constructor for tests that exercise the replicas proxy with a real
// (capturing) transport service; document failures are disabled.
protected TestAction(Settings settings, String actionName, TransportService transportService,
ClusterService clusterService, ShardStateAction shardStateAction, ThreadPool threadPool) {
super(settings, actionName, transportService, clusterService,
mockIndicesService(clusterService), threadPool, shardStateAction,
new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(),
TestRequest::new, TestRequest::new, ThreadPool.Names.SAME, false);
this.withDocumentFailureOnPrimary = false;
this.withDocumentFailureOnReplica = false;
}
@Override
protected TestResponse newResponseInstance(StreamInput in) throws IOException {
return new TestResponse();
}
@Override
protected void shardOperationOnPrimary(
TestRequest request, IndexShard primary, ActionListener<PrimaryResult<TestRequest, TestResponse>> listener) {
ActionListener.completeWith(listener, () -> {
if (withDocumentFailureOnPrimary) {
return new WritePrimaryResult<>(request, null, null, new RuntimeException("simulated"), primary, logger);
} else {
return new WritePrimaryResult<>(request, new TestResponse(), location, null, primary, logger);
}
});
}
@Override
protected WriteReplicaResult<TestRequest> shardOperationOnReplica(TestRequest request, IndexShard replica) throws Exception {
final WriteReplicaResult<TestRequest> replicaResult;
if (withDocumentFailureOnReplica) {
replicaResult = new WriteReplicaResult<>(request, null, new RuntimeException("simulated"), replica, logger);
} else {
replicaResult = new WriteReplicaResult<>(request, location, null, replica, logger);
}
return replicaResult;
}
}
final IndexService mockIndexService(final IndexMetaData indexMetaData, ClusterService clusterService) {
final IndexService indexService = mock(IndexService.class);
when(indexService.getShard(anyInt())).then(invocation -> {
int shard = (Integer) invocation.getArguments()[0];
final ShardId shardId = new ShardId(indexMetaData.getIndex(), shard);
if (shard > indexMetaData.getNumberOfShards()) {
throw new ShardNotFoundException(shardId);
}
return mockIndexShard(shardId, clusterService);
});
return indexService;
}
final IndicesService mockIndicesService(ClusterService clusterService) {
final IndicesService indicesService = mock(IndicesService.class);
when(indicesService.indexServiceSafe(any(Index.class))).then(invocation -> {
Index index = (Index)invocation.getArguments()[0];
final ClusterState state = clusterService.state();
final IndexMetaData indexSafe = state.metaData().getIndexSafe(index);
return mockIndexService(indexSafe, clusterService);
});
when(indicesService.indexService(any(Index.class))).then(invocation -> {
Index index = (Index) invocation.getArguments()[0];
final ClusterState state = clusterService.state();
if (state.metaData().hasIndex(index.getName())) {
return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService);
} else {
return null;
}
});
return indicesService;
}
    // Number of operation permits currently held on the mocked shards: incremented
    // when a permit is acquired, decremented by the Releasable handed to the listener.
    private final AtomicInteger count = new AtomicInteger(0);
    // When set, the mocked primary reports itself as relocated via isRelocatedPrimary().
    private final AtomicBoolean isRelocated = new AtomicBoolean(false);
    /**
     * Creates a Mockito-backed {@link IndexShard} for the given shard id.
     * Permit acquisition always succeeds (tracked via {@link #count}), except
     * that replica permits with a stale primary term are rejected. The routing
     * entry and pending primary term are resolved live from the cluster state,
     * and any call to {@code failShard} trips an {@link AssertionError}.
     */
    private IndexShard mockIndexShard(ShardId shardId, ClusterService clusterService) {
        final IndexShard indexShard = mock(IndexShard.class);
        // Primary permit: always granted; the Releasable returned to the listener
        // releases the permit by decrementing 'count'.
        doAnswer(invocation -> {
            ActionListener<Releasable> callback = (ActionListener<Releasable>) invocation.getArguments()[0];
            count.incrementAndGet();
            callback.onResponse(count::decrementAndGet);
            return null;
        }).when(indexShard).acquirePrimaryOperationPermit(any(ActionListener.class), anyString(), anyObject());
        // Replica permit: reject operations carrying a primary term older than the
        // shard's current pending primary term.
        doAnswer(invocation -> {
            long term = (Long)invocation.getArguments()[0];
            ActionListener<Releasable> callback = (ActionListener<Releasable>) invocation.getArguments()[1];
            final long primaryTerm = indexShard.getPendingPrimaryTerm();
            if (term < primaryTerm) {
                throw new IllegalArgumentException(String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])",
                    shardId, term, primaryTerm));
            }
            count.incrementAndGet();
            callback.onResponse(count::decrementAndGet);
            return null;
        }).when(indexShard)
            .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject());
        // Routing entry is looked up from the local node's routing table on every call,
        // so tests that mutate cluster state see the change immediately.
        when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> {
            final ClusterState state = clusterService.state();
            final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId());
            final ShardRouting routing = node.getByShardId(shardId);
            if (routing == null) {
                throw new ShardNotFoundException(shardId, "shard is no longer assigned to current node");
            }
            return routing;
        });
        when(indexShard.isRelocatedPrimary()).thenAnswer(invocationOnMock -> isRelocated.get());
        // Tests in this class never expect a shard to be failed.
        doThrow(new AssertionError("failed shard is not supported")).when(indexShard).failShard(anyString(), any(Exception.class));
        when(indexShard.getPendingPrimaryTerm()).thenAnswer(i ->
            clusterService.state().metaData().getIndexSafe(shardId.getIndex()).primaryTerm(shardId.id()));
        return indexShard;
    }
    /** Payload-less replicated write request, pinned to shard [test][1]. */
    private static class TestRequest extends ReplicatedWriteRequest<TestRequest> {
        TestRequest(StreamInput in) throws IOException {
            super(in);
        }
        TestRequest() {
            super(new ShardId("test", "test", 1));
        }
        @Override
        public String toString() {
            return "TestRequest{}";
        }
    }
    /** Replication response that records whether a forced refresh was requested. */
    private static class TestResponse extends ReplicationResponse implements WriteResponse {
        // set by the write action when the request forced a refresh
        boolean forcedRefresh;
        @Override
        public void setForcedRefresh(boolean forcedRefresh) {
            this.forcedRefresh = forcedRefresh;
        }
    }
    /** {@link ActionListener} that captures the response or failure for later assertions. */
    private static class CapturingActionListener<R> implements ActionListener<R> {
        private R response;
        private Exception failure;
        @Override
        public void onResponse(R response) {
            this.response = response;
        }
        @Override
        public void onFailure(Exception failure) {
            this.failure = failure;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.spi.security.authentication.external.impl.principal;
import java.security.Principal;
import java.security.acl.Group;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Service;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.commons.PropertiesUtil;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.spi.commit.MoveTracker;
import org.apache.jackrabbit.oak.spi.commit.ValidatorProvider;
import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer;
import org.apache.jackrabbit.oak.spi.security.ConfigurationBase;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityConfiguration;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.DefaultSyncConfigImpl;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.ExternalLoginModuleFactory;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.SyncHandlerMapping;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalManagerImpl;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider;
import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
import org.apache.jackrabbit.oak.spi.xml.ProtectedItemImporter;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.util.tracker.ServiceTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of the {@code PrincipalConfiguration} interface that provides
* principal management for {@link Group principals} associated with
* {@link org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentity external identities}
* managed outside of the scope of the repository by an
* {@link org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProvider}.
*
* @since Oak 1.5.3
* @see <a href="https://issues.apache.org/jira/browse/OAK-4101">OAK-4101</a>
*/
@Component(
        metatype = true,
        label = "Apache Jackrabbit Oak External PrincipalConfiguration",
        immediate = true
)
@Service({PrincipalConfiguration.class, SecurityConfiguration.class})
public class ExternalPrincipalConfiguration extends ConfigurationBase implements PrincipalConfiguration {
    private static final Logger log = LoggerFactory.getLogger(ExternalPrincipalConfiguration.class);
    // Tracks registered SyncHandler services; reports whether any of them has
    // dynamic membership enabled and supplies their auto-membership configuration.
    private SyncConfigTracker syncConfigTracker;
    // Tracks SyncHandlerMapping services (sync-handler name -> IDP names).
    private SyncHandlerMappingTracker syncHandlerMappingTracker;
    // Default constructor used by SCR; parameters are injected via activate().
    @SuppressWarnings("UnusedDeclaration")
    public ExternalPrincipalConfiguration() {
        super();
    }
    public ExternalPrincipalConfiguration(SecurityProvider securityProvider) {
        super(securityProvider, securityProvider.getParameters(NAME));
    }
    //---------------------------------------------< PrincipalConfiguration >---
    @Nonnull
    @Override
    public PrincipalManager getPrincipalManager(Root root, NamePathMapper namePathMapper) {
        return new PrincipalManagerImpl(getPrincipalProvider(root, namePathMapper));
    }
    @Nonnull
    @Override
    public PrincipalProvider getPrincipalProvider(Root root, NamePathMapper namePathMapper) {
        // External group principals are only exposed while at least one tracked
        // SyncHandler has dynamic membership enabled; otherwise this configuration
        // contributes no principals at all.
        if (dynamicMembershipEnabled()) {
            UserConfiguration uc = getSecurityProvider().getConfiguration(UserConfiguration.class);
            return new ExternalGroupPrincipalProvider(root, uc, namePathMapper, syncConfigTracker.getAutoMembership());
        } else {
            return EmptyPrincipalProvider.INSTANCE;
        }
    }
    //----------------------------------------------< SecurityConfiguration >---
    @Nonnull
    @Override
    public String getName() {
        return NAME;
    }
    @Nonnull
    @Override
    public RepositoryInitializer getRepositoryInitializer() {
        return new ExternalIdentityRepositoryInitializer();
    }
    @Nonnull
    @Override
    public List<? extends ValidatorProvider> getValidators(@Nonnull String workspaceName, @Nonnull Set<Principal> principals, @Nonnull MoveTracker moveTracker) {
        return ImmutableList.of(new ExternalIdentityValidatorProvider(principals));
    }
    @Nonnull
    @Override
    public List<ProtectedItemImporter> getProtectedItemImporters() {
        return ImmutableList.<ProtectedItemImporter>of(new ExternalIdentityImporter());
    }
    //----------------------------------------------------< SCR integration >---
    @SuppressWarnings("UnusedDeclaration")
    @Activate
    private void activate(BundleContext bundleContext, Map<String, Object> properties) {
        setParameters(ConfigurationParameters.of(properties));
        // Open the mapping tracker first: the config tracker consults it when
        // computing the auto-membership per IDP.
        syncHandlerMappingTracker = new SyncHandlerMappingTracker(bundleContext);
        syncHandlerMappingTracker.open();
        syncConfigTracker = new SyncConfigTracker(bundleContext, syncHandlerMappingTracker);
        syncConfigTracker.open();
    }
    @SuppressWarnings("UnusedDeclaration")
    @Deactivate
    private void deactivate() {
        if (syncConfigTracker != null) {
            syncConfigTracker.close();
        }
        if (syncHandlerMappingTracker != null) {
            syncHandlerMappingTracker.close();
        }
    }
    //------------------------------------------------------------< private >---
    // true as soon as at least one tracked SyncHandler has dynamic membership enabled
    private boolean dynamicMembershipEnabled() {
        return syncConfigTracker != null && syncConfigTracker.isEnabled;
    }
    /**
     * Implementation of the {@code PrincipalProvider} interface that never
     * returns any principals.
     */
    private static final class EmptyPrincipalProvider implements PrincipalProvider {
        private static final PrincipalProvider INSTANCE = new EmptyPrincipalProvider();
        private EmptyPrincipalProvider() {}
        @Override
        public Principal getPrincipal(@Nonnull String principalName) {
            return null;
        }
        @Nonnull
        @Override
        public Set<Group> getGroupMembership(@Nonnull Principal principal) {
            return ImmutableSet.of();
        }
        @Nonnull
        @Override
        public Set<? extends Principal> getPrincipals(@Nonnull String userID) {
            return ImmutableSet.of();
        }
        @Nonnull
        @Override
        public Iterator<? extends Principal> findPrincipals(@Nullable String nameHint, int searchType) {
            return Iterators.emptyIterator();
        }
        @Nonnull
        @Override
        public Iterator<? extends Principal> findPrincipals(int searchType) {
            return Iterators.emptyIterator();
        }
    }
    /**
     * {@code ServiceTracker} to detect any {@link SyncHandler} that has
     * dynamic membership enabled.
     */
    private static final class SyncConfigTracker extends ServiceTracker {
        private final SyncHandlerMappingTracker mappingTracker;
        // references of the SyncHandlers that currently have dynamic membership enabled
        private Set<ServiceReference> enablingRefs = new HashSet<ServiceReference>();
        // true while 'enablingRefs' is non-empty
        private boolean isEnabled = false;
        public SyncConfigTracker(@Nonnull BundleContext context, @Nonnull SyncHandlerMappingTracker mappingTracker) {
            super(context, SyncHandler.class.getName(), null);
            this.mappingTracker = mappingTracker;
        }
        @Override
        public Object addingService(ServiceReference reference) {
            if (hasDynamicMembership(reference)) {
                enablingRefs.add(reference);
                isEnabled = true;
            }
            return super.addingService(reference);
        }
        @Override
        public void modifiedService(ServiceReference reference, Object service) {
            if (hasDynamicMembership(reference)) {
                enablingRefs.add(reference);
                isEnabled = true;
            } else {
                // the handler may have had dynamic membership before this modification
                enablingRefs.remove(reference);
                isEnabled = !enablingRefs.isEmpty();
            }
            super.modifiedService(reference, service);
        }
        @Override
        public void removedService(ServiceReference reference, Object service) {
            enablingRefs.remove(reference);
            isEnabled = !enablingRefs.isEmpty();
            super.removedService(reference, service);
        }
        private static boolean hasDynamicMembership(ServiceReference reference) {
            return PropertiesUtil.toBoolean(reference.getProperty(DefaultSyncConfigImpl.PARAM_USER_DYNAMIC_MEMBERSHIP), DefaultSyncConfigImpl.PARAM_USER_DYNAMIC_MEMBERSHIP_DEFAULT);
        }
        // Computes IDP-name -> auto-membership group ids across all enabling
        // SyncHandlers, resolving each handler's IDPs through the mapping tracker.
        private Map<String, String[]> getAutoMembership() {
            Map<String, String[]> autoMembership = new HashMap<String, String[]>();
            for (ServiceReference ref : enablingRefs) {
                String syncHandlerName = PropertiesUtil.toString(ref.getProperty(DefaultSyncConfigImpl.PARAM_NAME), DefaultSyncConfigImpl.PARAM_NAME_DEFAULT);
                String[] membership = PropertiesUtil.toStringArray(ref.getProperty(DefaultSyncConfigImpl.PARAM_GROUP_AUTO_MEMBERSHIP), new String[0]);
                for (String idpName : mappingTracker.getIdpNames(syncHandlerName)) {
                    // several sync handlers may map to the same IDP; the last one wins
                    String[] previous = autoMembership.put(idpName, membership);
                    if (previous != null) {
                        String msg = (Arrays.equals(previous, membership)) ? "Duplicate" : "Colliding";
                        log.debug(msg + " auto-membership configuration for IDP '{}'; replacing previous values {} by {} defined by SyncHandler '{}'",
                                idpName, Arrays.toString(previous), Arrays.toString(membership), syncHandlerName);
                    }
                }
            }
            return autoMembership;
        }
    }
    /**
     * {@code ServiceTracker} keeping track of {@link SyncHandlerMapping} services
     * in order to resolve, for a given sync-handler name, the names of the IDPs
     * it is mapped to.
     */
    private static final class SyncHandlerMappingTracker extends ServiceTracker {
        // maps each tracked service reference to [syncHandlerName, idpName]
        private Map<ServiceReference, String[]> referenceMap = new HashMap<ServiceReference, String[]>();
        public SyncHandlerMappingTracker(@Nonnull BundleContext context) {
            super(context, SyncHandlerMapping.class.getName(), null);
        }
        @Override
        public Object addingService(ServiceReference reference) {
            addMapping(reference);
            return super.addingService(reference);
        }
        @Override
        public void modifiedService(ServiceReference reference, Object service) {
            addMapping(reference);
            super.modifiedService(reference, service);
        }
        @Override
        public void removedService(ServiceReference reference, Object service) {
            referenceMap.remove(reference);
            super.removedService(reference, service);
        }
        // Records the mapping for the given reference; entries missing either the
        // IDP name or the sync-handler name are ignored with a warning.
        private void addMapping(ServiceReference reference) {
            String idpName = PropertiesUtil.toString(reference.getProperty(ExternalLoginModuleFactory.PARAM_IDP_NAME), null);
            String syncHandlerName = PropertiesUtil.toString(reference.getProperty(ExternalLoginModuleFactory.PARAM_SYNC_HANDLER_NAME), null);
            if (idpName != null && syncHandlerName != null) {
                referenceMap.put(reference, new String[]{syncHandlerName, idpName});
            } else {
                log.warn("Ignoring SyncHandlerMapping with incomplete mapping of IDP '{}' and SyncHandler '{}'", idpName, syncHandlerName);
            }
        }
        private Iterable<String> getIdpNames(@Nonnull final String syncHandlerName) {
            return Iterables.filter(Iterables.transform(referenceMap.values(), new Function<String[], String>() {
                @Nullable
                @Override
                public String apply(@Nullable String[] input) {
                    if (input != null && input.length == 2) {
                        if (syncHandlerName.equals(input[0])) {
                            return input[1];
                        } // else: different sync-handler
                    } else {
                        log.warn("Unexpected value of reference map. Expected String[] with length = 2");
                    }
                    return null;
                }
            }
            ), Predicates.notNull());
        }
    }
}
| |
/*
* Copyright 2009-2010 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.provider;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Dialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.List;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.configure.ui.XPathSelectorDialog;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.validator.XpathValidator;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.Activator;
/**
* A SWT based editor dialog to be used for editing namespaced properties.
*/
public class NamespacedPropertyEditorDialog extends Dialog {
/**
* Dialog shell.
*/
private Shell dialogShell;
/**
* Group box for separating property value edit area.
*/
private Group propertyGroupBox;
/**
* Text field used for manipulating property value.
*/
private Text propertyTextField;
/**
* Button used to fire up the xpath editor.
*/
private Button selectXpathButton;
/**
* Group box for separating namespaces edit area.
*/
private Group namespacesGroupBox;
/**
* Namespace 'Prefix' label.
*/
private Label nsPrefixLabel;
/**
* Namespace prefix text field.
*/
private Text nsPrefixTextField;
/**
* Namespace 'URI' label.
*/
private Label nsUriLabel;
/**
* Namespace URI text field.
*/
private Text nsUriTextField;
/**
* Namespace list box.
*/
private List nsListBox;
/**
* Add namespace button.
*/
private Button addButton;
/**
* Edit namespace button.
*/
private Button editButton;
/**
* Remove namespace button.
*/
private Button removeButton;
/**
* Cancel button.
*/
private Button cancelButton;
/**
* Ok button.
*/
private Button okButton;
/**
* {@link NamespacedProperty} being edited.
*/
private NamespacedProperty nsProperty;
/**
* Namespaces collected.
*/
private Map<String, String> collectedNamespaces;
/**
* Format string used for displaying namespaces.
*/
private static final String namespaceDisplayFormat = "xmlns:%s=\"%s\"";
/**
* Regex pattern used to identify a namespace string.
*/
private static final Pattern namespaceDisplayPattern = Pattern.compile("xmlns:([^=]++)=.*+");
/**
* Status indicating whether this dialog was saved or cancelled.
*/
private boolean saved;
private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);
/**
* Constructs a new dialog.
*
* @param parent parent shell.
* @param style style.
* @param property namespaced property to be manipulated.
*/
public NamespacedPropertyEditorDialog(Shell parent, int style, NamespacedProperty property) {
super(parent, style);
this.nsProperty = property;
this.collectedNamespaces = new HashMap<String, String>();
}
/**
* Main function used for testing purposes.
*
* @param args arguments.
*/
public static void main(String[] args) {
Display display = Display.getDefault();
Shell shell = new Shell(display);
NamespacedPropertyEditorDialog dialog = new NamespacedPropertyEditorDialog(shell, SWT.NULL,
EsbFactory.eINSTANCE.createNamespacedProperty());
dialog.open();
}
/**
* Creates ui components and opens the dialog.
*/
public void open() {
Shell parentShell = getParent();
dialogShell = new Shell(parentShell, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
// Configure dialog shell internal layout.
FormLayout dialogShellLayout = new FormLayout();
dialogShellLayout.marginHeight = 5;
dialogShellLayout.marginWidth = 5;
dialogShell.setLayout(dialogShellLayout);
// Construct and layout property edit box.
propertyGroupBox = new Group(dialogShell, SWT.NONE);
{
propertyGroupBox.setText("Property");
FormData groupBoxLayoutData = new FormData();
groupBoxLayoutData.top = new FormAttachment(0);
groupBoxLayoutData.left = new FormAttachment(0);
groupBoxLayoutData.right = new FormAttachment(100);
propertyGroupBox.setLayoutData(groupBoxLayoutData);
// Configure group box internal layout.
FormLayout groupBoxLayout = new FormLayout();
groupBoxLayout.marginWidth = 5;
groupBoxLayout.marginHeight = 5;
propertyGroupBox.setLayout(groupBoxLayout);
// Xpath editor launch button.
selectXpathButton = new Button(propertyGroupBox, SWT.NONE);
selectXpathButton.setText("Select XPath");
FormData selectXpathButtonLayoutData = new FormData();
selectXpathButtonLayoutData.right = new FormAttachment(100);
selectXpathButtonLayoutData.top = new FormAttachment(0);
selectXpathButton.setLayoutData(selectXpathButtonLayoutData);
// Property editor text field.
propertyTextField = new Text(propertyGroupBox, SWT.BORDER);
FormData textFieldLayoutData = new FormData();
textFieldLayoutData.right = new FormAttachment(selectXpathButton, -5);
textFieldLayoutData.top = new FormAttachment(selectXpathButton, 0, SWT.CENTER);
textFieldLayoutData.left = new FormAttachment(0);
propertyTextField.setLayoutData(textFieldLayoutData);
}
// OK button.
okButton = new Button(dialogShell, SWT.NONE);
{
okButton.setText("OK");
FormData okButtonLayoutData = new FormData();
okButtonLayoutData.right = new FormAttachment(100);
okButtonLayoutData.bottom = new FormAttachment(100);
okButtonLayoutData.width = 80;
okButton.setLayoutData(okButtonLayoutData);
}
// Cancel button.
cancelButton = new Button(dialogShell, SWT.NONE);
{
cancelButton.setText("Cancel");
FormData cancelButtonLayoutData = new FormData();
cancelButtonLayoutData.top = new FormAttachment(okButton, 0, SWT.CENTER);
cancelButtonLayoutData.right = new FormAttachment(okButton, -5);
cancelButtonLayoutData.width = 80;
cancelButton.setLayoutData(cancelButtonLayoutData);
}
// Construct and layout namespace edit box.
namespacesGroupBox = new Group(dialogShell, SWT.NONE);
{
namespacesGroupBox.setText("Namespaces");
FormData groupBoxLayoutData = new FormData();
groupBoxLayoutData.top = new FormAttachment(propertyGroupBox, 5);
groupBoxLayoutData.left = new FormAttachment(0);
groupBoxLayoutData.right = new FormAttachment(100);
groupBoxLayoutData.bottom = new FormAttachment(okButton, -10);
namespacesGroupBox.setLayoutData(groupBoxLayoutData);
// Configure group box internal layout.
FormLayout groupBoxLayout = new FormLayout();
groupBoxLayout.marginWidth = 5;
groupBoxLayout.marginHeight = 5;
namespacesGroupBox.setLayout(groupBoxLayout);
// Namespace prefix label.
nsPrefixLabel = new Label(namespacesGroupBox, SWT.NONE);
{
nsPrefixLabel.setText("Prefix:");
FormData nsPrefixLabelLayoutData = new FormData();
nsPrefixLabelLayoutData.top = new FormAttachment(0);
nsPrefixLabelLayoutData.left = new FormAttachment(0);
nsPrefixLabel.setLayoutData(nsPrefixLabelLayoutData);
}
// Namespace prefix text field.
nsPrefixTextField = new Text(namespacesGroupBox, SWT.BORDER);
{
FormData nsPrefixTextFieldLayoutData = new FormData();
nsPrefixTextFieldLayoutData.top = new FormAttachment(nsPrefixLabel, 0, SWT.CENTER);
nsPrefixTextFieldLayoutData.left = new FormAttachment(nsPrefixLabel, 5);
nsPrefixTextFieldLayoutData.width = 100;
nsPrefixTextField.setLayoutData(nsPrefixTextFieldLayoutData);
}
// Namespace URI label.
nsUriLabel = new Label(namespacesGroupBox, SWT.NONE);
{
nsUriLabel.setText("URI:");
FormData nsUriLabelLayoutData = new FormData();
nsUriLabelLayoutData.top = new FormAttachment(nsPrefixTextField, 0, SWT.CENTER);
nsUriLabelLayoutData.left = new FormAttachment(nsPrefixTextField, 5);
nsUriLabel.setLayoutData(nsUriLabelLayoutData);
}
// Add namespace button.
addButton = new Button(namespacesGroupBox, SWT.NONE);
{
addButton.setText("Add");
FormData addButtonLayoutData = new FormData();
addButtonLayoutData.top = new FormAttachment(nsUriLabel, 0, SWT.CENTER);
addButtonLayoutData.right = new FormAttachment(100);
addButtonLayoutData.width = 80;
addButton.setLayoutData(addButtonLayoutData);
}
// Namespace URI input text field.
nsUriTextField = new Text(namespacesGroupBox, SWT.BORDER);
{
FormData nsUriTextFieldLayoutData = new FormData();
nsUriTextFieldLayoutData.top = new FormAttachment(nsUriLabel, 0, SWT.CENTER);
nsUriTextFieldLayoutData.left = new FormAttachment(nsUriLabel, 5);
nsUriTextFieldLayoutData.right = new FormAttachment(addButton, -5);
nsUriTextField.setLayoutData(nsUriTextFieldLayoutData);
}
// Edit namespace button.
editButton = new Button(namespacesGroupBox, SWT.NONE);
{
editButton.setText("Edit");
FormData editButtonLayoutData = new FormData();
editButtonLayoutData.top = new FormAttachment(addButton, 10);
editButtonLayoutData.right = new FormAttachment(100);
editButtonLayoutData.left = new FormAttachment(addButton, 0, SWT.LEFT);
editButton.setLayoutData(editButtonLayoutData);
}
// Remove namespace button.
removeButton = new Button(namespacesGroupBox, SWT.NONE);
{
removeButton.setText("Remove");
FormData removeButtonLayoutData = new FormData();
removeButtonLayoutData.top = new FormAttachment(editButton, 5);
removeButtonLayoutData.right = new FormAttachment(100);
removeButtonLayoutData.left = new FormAttachment(editButton, 0, SWT.LEFT);
removeButton.setLayoutData(removeButtonLayoutData);
}
// Namespaces list box.
nsListBox = new List(namespacesGroupBox, SWT.BORDER);
{
FormData nsListBoxFormData = new FormData();
nsListBoxFormData.top = new FormAttachment(editButton, 0, SWT.TOP);
nsListBoxFormData.left = new FormAttachment(0);
nsListBoxFormData.right = new FormAttachment(addButton, -5);
nsListBoxFormData.bottom = new FormAttachment(100);
nsListBox.setLayoutData(nsListBoxFormData);
}
}
loadConfiguration();
initActions();
setTabOrder();
// Open dialog.
dialogShell.layout();
dialogShell.pack();
dialogShell.setSize(640, 415);
centerDialog();
dialogShell.open();
Display display = dialogShell.getDisplay();
while (!dialogShell.isDisposed()) {
if (!display.readAndDispatch())
display.sleep();
}
}
private void loadConfiguration() {
dialogShell.setText(String.format("Namespaced Property Editor", nsProperty.getPrettyName()));
propertyGroupBox.setText(nsProperty.getPrettyName());
if (!StringUtils.isBlank(nsProperty.getPropertyValue())) {
propertyTextField.setText(nsProperty.getPropertyValue());
}
// Load namespaces.
for (Entry<String, String> nsEntry : nsProperty.getNamespaces().entrySet()) {
addNamespace(nsEntry.getKey(), nsEntry.getValue());
}
}
private void initActions() {
selectXpathButton.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
XPathSelectorDialog xpathEditorDialog = new XPathSelectorDialog(dialogShell);
xpathEditorDialog.open();
if (!StringUtils.isBlank(xpathEditorDialog.getSelectedXpath())) {
propertyTextField.setText(xpathEditorDialog.getSelectedXpath());
}
collectedNamespaces.clear();
nsListBox.removeAll();
for (Entry<String, String> nsEntry: xpathEditorDialog.getNameSpaces().entrySet()) {
addNamespace(nsEntry.getKey(), nsEntry.getValue());
}
}
});
addButton.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
String prefix = nsPrefixTextField.getText();
String uri = nsUriTextField.getText();
if (XpathValidator.isValidNamespace(dialogShell, collectedNamespaces, prefix, uri)) {
addNamespace(prefix, uri);
nsPrefixTextField.setText("");
nsUriTextField.setText("");
nsPrefixTextField.setFocus();
}
}
});
removeButton.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
for (String selection : nsListBox.getSelection()) {
removeNamespace(selection);
}
}
});
editButton.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
String[] selection = nsListBox.getSelection();
if (selection.length > 0) {
String selectedNamespace = selection[0];
String prefix = extractPrefix(selectedNamespace);
String uri = collectedNamespaces.get(prefix);
if (null != uri) {
collectedNamespaces.remove(prefix);
nsPrefixTextField.setText(prefix);
nsUriTextField.setText(uri);
nsPrefixTextField.setFocus();
}
nsListBox.remove(selectedNamespace);
}
}
});
okButton.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
if (XpathValidator.isValidConfiguration(dialogShell, propertyTextField.getText(), collectedNamespaces)) {
try {
saveConfiguration();
setSaved(true);
} catch (Exception ex) {
log.error("Error while saving namespace property",ex);
}
dialogShell.dispose();
}
}
});
cancelButton.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
setSaved(false);
dialogShell.dispose();
}
});
}
private void addNamespace(String prefix, String uri) {
collectedNamespaces.put(prefix, uri);
String namespaceDisplayValue = String.format(namespaceDisplayFormat,
prefix, uri);
nsListBox.add(namespaceDisplayValue);
}
private void removeNamespace(String namespace) {
nsListBox.remove(namespace);
String prefix = extractPrefix(namespace);
if (!StringUtils.isBlank(prefix)) {
collectedNamespaces.remove(prefix);
}
}
private String extractPrefix(String namespace) {
Matcher matcher = namespaceDisplayPattern.matcher(namespace);
if (matcher.find()) {
return matcher.group(1);
}
return null;
}
private void centerDialog() {
Rectangle parentBounds = getParent().getBounds();
Rectangle dialogBounds = dialogShell.getBounds();
int centerX, centerY;
centerX = (parentBounds.width - dialogBounds.width)/2 + parentBounds.x;
centerY = (parentBounds.height - dialogBounds.height)/2 + parentBounds.y;
dialogShell.setLocation(new Point(centerX, centerY));
}
private void setTabOrder() {
Control[] tabOrder = new Control[] { propertyTextField, selectXpathButton };
propertyGroupBox.setTabList(tabOrder);
tabOrder = new Control[] { nsPrefixTextField, nsUriTextField, addButton };
namespacesGroupBox.setTabList(tabOrder);
tabOrder = new Control[] { propertyGroupBox, namespacesGroupBox, okButton };
dialogShell.setTabList(tabOrder);
}
private void saveConfiguration() throws Exception {
nsProperty.setPropertyValue(propertyTextField.getText());
nsProperty.getNamespaces().clear();
nsProperty.getNamespaces().putAll(collectedNamespaces);
}
private void setSaved(boolean saved) {
this.saved = saved;
}
public boolean isSaved() {
return saved;
}
}
| |
/*L
* Copyright SAIC
* Copyright SAIC-Frederick
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cananolab/LICENSE.txt for details.
*/
package gov.nih.nci.cananolab.restful.sample;
import gov.nih.nci.cananolab.domain.common.Organization;
import gov.nih.nci.cananolab.dto.common.DataReviewStatusBean;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.particle.DataAvailabilityBean;
import gov.nih.nci.cananolab.dto.particle.SampleBean;
import gov.nih.nci.cananolab.exception.DuplicateEntriesException;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.exception.NotExistException;
import gov.nih.nci.cananolab.exception.SampleException;
import gov.nih.nci.cananolab.restful.core.BaseAnnotationBO;
import gov.nih.nci.cananolab.restful.sample.InitSampleSetup;
import gov.nih.nci.cananolab.restful.util.InputValidationUtil;
import gov.nih.nci.cananolab.restful.util.PropertyUtil;
import gov.nih.nci.cananolab.restful.view.SimpleSampleBean;
import gov.nih.nci.cananolab.restful.view.edit.SampleEditGeneralBean;
import gov.nih.nci.cananolab.restful.view.edit.SimpleAccessBean;
import gov.nih.nci.cananolab.restful.view.edit.SimpleAddressBean;
import gov.nih.nci.cananolab.restful.view.edit.SimpleOrganizationBean;
import gov.nih.nci.cananolab.restful.view.edit.SimplePointOfContactBean;
import gov.nih.nci.cananolab.security.AccessControlInfo;
import gov.nih.nci.cananolab.security.CananoUserDetails;
import gov.nih.nci.cananolab.security.enums.AccessTypeEnum;
import gov.nih.nci.cananolab.security.enums.CaNanoRoleEnum;
import gov.nih.nci.cananolab.security.enums.SecureClassesEnum;
import gov.nih.nci.cananolab.security.service.SpringSecurityAclService;
import gov.nih.nci.cananolab.security.service.UserService;
import gov.nih.nci.cananolab.security.utils.SpringSecurityUtil;
import gov.nih.nci.cananolab.service.curation.CurationService;
import gov.nih.nci.cananolab.service.sample.DataAvailabilityService;
import gov.nih.nci.cananolab.service.sample.SampleService;
import gov.nih.nci.cananolab.ui.form.SampleForm;
import gov.nih.nci.cananolab.util.Comparators;
import gov.nih.nci.cananolab.util.StringUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.validator.EmailValidator;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
/**
* Class migrated from SampleAction, to support sample related rest services.
*
* @author yangs8
*
*/
@Transactional(readOnly=false, propagation=Propagation.REQUIRED)
@Component("sampleBO")
public class SampleBO extends BaseAnnotationBO {
private static Logger logger = Logger.getLogger(SampleBO.class);
// Service computing/persisting per-sample data-availability metrics.
@Autowired
private DataAvailabilityService dataAvailabilityServiceDAO;
// Curation workflow service (data review status transitions).
@Autowired
private CurationService curationServiceDAO;
// Core persistence service for samples and points of contact.
@Autowired
private SampleService sampleService;
// ACL service: public/retract status and access-control info for secured objects.
@Autowired
private SpringSecurityAclService springSecurityAclService;
// User/group lookup service used for access assignment and group name caching.
@Autowired
private UserService userService;
@Autowired
private UserDetailsService userDetailsService;
/**
*
* Method to support the Update button on Update Sample page. Given the current implementation, when it gets here,
* a saveAccess or savePOC has been called and the sample's id has been generated.
* <br><br>
* Revisit if the above workflow changes.
*
* @param simpleEditBean
* @param request
* @return
* @throws Exception
*/
/**
 * Supports the Update button on the Update Sample page. By the time this runs,
 * a saveAccess or savePOC call has already generated the sample's id; revisit
 * if that workflow changes.
 *
 * @param simpleEditBean edit bean carrying the sample id, name and keywords
 * @param request        current HTTP request; the sample bean is read from session
 * @return the refreshed edit bean, or an edit bean wrapping an error message
 * @throws Exception when no valid sample id is supplied or persistence fails
 */
public SampleEditGeneralBean update(SampleEditGeneralBean simpleEditBean, HttpServletRequest request) throws Exception
{
	long sampleId = simpleEditBean.getSampleId();
	if (sampleId <= 0)
		throw new Exception("No valid sample id found. Unable to update sample");
	SampleBean sampleBean = (SampleBean) this.findMatchSampleInSession(request, sampleId);
	if (sampleBean == null) {
		// was System.out.println; route diagnostics through the class logger
		logger.warn("No sample in session matching id " + sampleId);
		return wrapErrorInEditBean("No valid sample in session matching given sample id. Unable to update the sample.");
	}
	// transfer keyword and sample name from the simple edit bean
	simpleEditBean.populateDataForSavingSample(sampleBean);
	saveSample(request, sampleBean);
	// retract from public when a non-curator updates an existing public record
	CananoUserDetails userDetails = SpringSecurityUtil.getPrincipal();
	if (!userDetails.isCurator() && springSecurityAclService.checkObjectPublic(Long.valueOf(sampleId), SecureClassesEnum.SAMPLE.getClazz()))
	{
		retractFromPublic(request, sampleBean.getDomain().getId(), sampleBean.getDomain().getName(), "sample", SecureClassesEnum.SAMPLE.getClazz());
		springSecurityAclService.retractObjectFromPublic(Long.valueOf(sampleId), SecureClassesEnum.SAMPLE.getClazz());
		return wrapErrorInEditBean(PropertyUtil.getProperty("sample", "message.updateSample.retractFromPublic"));
	}
	return summaryEdit(String.valueOf(sampleBean.getDomain().getId()), request);
}
/**
* This is to support the "Submit" button in new sample submission page.
* <br><br>
* 2 scenarios when we get here: 1) new sample has been created successfully when saving a primary POC
* from new sample submission page, in such case, the sample in session has an id. 2) new sample failed to be
* created when saving a primary POC from new sample submission page. In such case, the sample in session
* doesn't have an id.
*
*
* @param simpleEditBean
* @param request
* @return
* @throws Exception
*/
/**
 * Supports the "Submit" button on the new-sample submission page.
 * <p>
 * Two scenarios reach here: (1) the sample was created when a primary POC was
 * saved, so the session sample already has an id; (2) that creation failed and
 * the session sample has no id. Either way the session copy is re-saved with
 * the submitted name/keywords.
 *
 * @param simpleEditBean edit bean carrying the sample name and keywords
 * @param request        current HTTP request; the sample bean is read from session
 * @return the refreshed edit bean, or an edit bean wrapping an error message
 * @throws Exception on persistence failure
 */
public SampleEditGeneralBean submit(SampleEditGeneralBean simpleEditBean, HttpServletRequest request) throws Exception
{
	long sampleId = simpleEditBean.getSampleId();
	// NOTE: the original computed an unused "newSample" flag here; removed.
	SampleBean sampleBean = (SampleBean) this.findMatchSampleInSession(request, sampleId);
	if (sampleBean == null) {
		// was System.out.println; route diagnostics through the class logger
		logger.warn("No sample in session matching id " + sampleId);
		return wrapErrorInEditBean("No valid sample in session matching given sample id. Unable to update the sample.");
	}
	// transfer keyword and sample name from the simple edit bean
	simpleEditBean.populateDataForSavingSample(sampleBean);
	saveSample(request, sampleBean);
	request.getSession().setAttribute("updateSample", "true");
	return summaryEdit(String.valueOf(sampleBean.getDomain().getId()), request);
}
/**
 * Sets up domain objects for the logged-in user and persists the sample.
 * <p>
 * The original read the "updateSample" session attribute to drive a Struts
 * ActionMessage whose body was entirely commented out; that dead branch has
 * been removed. The request parameter is retained for interface compatibility.
 *
 * @param request    current HTTP request (unused after dead-code removal)
 * @param sampleBean sample to persist
 * @throws Exception on persistence failure (e.g. NoAccessException, DuplicateEntriesException)
 */
private void saveSample(HttpServletRequest request, SampleBean sampleBean) throws Exception {
	sampleBean.setupDomain(SpringSecurityUtil.getLoggedInUserName());
	// persist in the database
	sampleService.saveSample(sampleBean);
}
/**
* Handle view sample request on sample search result page (read-only view).
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Handles a read-only view request from the sample search result page.
 *
 * @param sampleId id of the sample to display
 * @param request  current HTTP request; the sample bean is cached in session
 * @return a populated view bean, or one carrying errors when the sample has no primary POC
 * @throws Exception on lookup failure
 */
public SimpleSampleBean summaryView(String sampleId, HttpServletRequest request) throws Exception
{
	SampleForm form = new SampleForm();
	form.setSampleId(sampleId);
	SimpleSampleBean view = new SimpleSampleBean();
	SampleBean sampleBean = setupSampleById(sampleId, request);
	// hasNullPOC deletes an invalid sample and records errors on the bean
	if (hasNullPOC(request, sampleBean, view.getErrors())) {
		return view;
	}
	form.setSampleBean(sampleBean);
	request.getSession().setAttribute("theSample", sampleBean);
	view.transferSampleBeanForSummaryView(sampleBean);
	return view;
}
/**
 * Flags in session whether the POC sub-form should render open after a
 * validation round-trip, then delegates the access-form check to the base class.
 *
 * @param theForm current sample form (unused here, kept for interface parity)
 * @param request current HTTP request; "dispatch" is read as a request parameter
 * @throws Exception propagated from the base-class access-form check
 */
private void checkOpenForms(SampleForm theForm, HttpServletRequest request) throws Exception {
	String dispatch = request.getParameter("dispatch");
	String browserDispatch = getBrowserDispatch(request);
	HttpSession session = request.getSession();
	// Constant-first equals: getParameter returns null when the parameter is
	// absent, and the original dispatch.equals(...) would then throw NPE.
	Boolean openPOC = "input".equals(dispatch) && "savePointOfContact".equals(browserDispatch);
	session.setAttribute("openPOC", openPOC);
	super.checkOpenAccessForm(request);
}
/**
*
* @param request
* @param sampleBean
* @param errors
* @return
* @throws Exception
*/
/**
 * Treats a sample without a primary point of contact as invalid: deletes it
 * and records a user-facing error.
 *
 * @param request    current HTTP request (unused; kept for interface parity)
 * @param sampleBean sample to validate
 * @param errors     error list to append the deletion message to
 * @return true when the sample had no primary POC and was deleted
 * @throws Exception on deletion failure
 */
private Boolean hasNullPOC(HttpServletRequest request, SampleBean sampleBean, List<String> errors) throws Exception {
	if (sampleBean.getPrimaryPOCBean().getDomain() == null) {
		sampleService.deleteSample(sampleBean.getDomain().getName());
		// The original re-tested the identical condition here, making its
		// "Sample invalid" else-branch unreachable dead code; only the
		// deletion message can ever be emitted.
		errors.add(PropertyUtil.getProperty("sample", "message.sample.null.POC.delete"));
		return true;
	}
	return false;
}
/**
* Handle edit sample request on sample search result page (curator view).
*
* After a savePOC, saveAccess or updateDataAvailability operation, this method will
* be called again to retrieve the updated sample data
*
*
* @param sampleId
* @param request
* @return
* @throws Exception
*/
/**
 * Handles an edit-sample request (curator view) from the search result page.
 * Also re-invoked after savePOC, saveAccess or updateDataAvailability to
 * refresh the edit bean with the latest sample state.
 *
 * @param sampleId id of the sample to edit
 * @param request  current HTTP request; lookups and the sample bean are cached in session
 * @return a populated edit bean, or one carrying errors when the sample has no primary POC
 * @throws Exception on lookup or persistence failure
 */
public SampleEditGeneralBean summaryEdit(String sampleId, HttpServletRequest request) throws Exception {
	SampleEditGeneralBean sampleEdit = new SampleEditGeneralBean();
	SampleBean sampleBean = setupSampleById(sampleId, request);
	if (hasNullPOC(request, sampleBean, sampleEdit.getErrors())) {
		return sampleEdit;
	}
	InitSampleSetup.getInstance().setPOCDropdowns(request, sampleService);
	SortedSet<String> organizationNames = sampleService.getAllOrganizationNames();
	request.getSession().setAttribute("allOrganizationNames", organizationNames);
	sampleEdit.setOrganizationNamesForUser(new ArrayList<String>(organizationNames));
	// "contactRoles" is placed in session by setPOCDropdowns() above
	SortedSet<String> roles = (SortedSet<String>)request.getSession().getAttribute("contactRoles");
	sampleEdit.setContactRoles(new ArrayList<String>(roles));
	Set<DataAvailabilityBean> selectedSampleDataAvailability = dataAvailabilityServiceDAO.findDataAvailabilityBySampleId(sampleBean.getDomain().getId().toString());
	String[] availableEntityNames = null;
	// NOTE: original also checked size() > 0, redundant with !isEmpty()
	if (selectedSampleDataAvailability != null && !selectedSampleDataAvailability.isEmpty()) {
		sampleBean.setHasDataAvailability(true);
		sampleBean.setDataAvailability(selectedSampleDataAvailability);
		calculateDataAvailabilityScore(sampleBean, selectedSampleDataAvailability, request);
		int idx = 0;
		availableEntityNames = new String[selectedSampleDataAvailability.size()];
		for (DataAvailabilityBean bean : selectedSampleDataAvailability) {
			availableEntityNames[idx++] = bean.getAvailableEntityName().toLowerCase();
		}
	}
	// Cache collaboration group names in session for later use; the list only
	// changes when a curator adds a new group.
	if (request.getSession().getAttribute("allGroupNames") == null) {
		List<String> availGroupNames = userService.getGroupsAccessibleToUser("");
		request.getSession().setAttribute("allGroupNames", availGroupNames);
	}
	transferSampleBeanData(request, curationServiceDAO, sampleBean, availableEntityNames, sampleEdit);
	// keep the sample bean in session for subsequent edit operations (rest workflow)
	request.getSession().setAttribute("theSample", sampleBean);
	return sampleEdit;
}
/**
 * Copies sample domain data, POCs, keywords, accessibility and data-availability
 * info into the edit bean, and prepares lookups, group names, filtered users,
 * review-button state and the role-name map for the edit page.
 * <p>
 * The original's debug statements were copy-pasted ("Transferming POC" logged
 * for both POC and keyword transfer) and misspelled; the messages now describe
 * the step actually being performed.
 *
 * @param request              current HTTP request
 * @param curatorService       curation service driving the review button state
 * @param sampleBean           source sample
 * @param availableEntityNames lower-cased names of available data entities, or null
 * @param sampleEdit           destination edit bean
 * @throws Exception on lookup failure
 */
public void transferSampleBeanData(HttpServletRequest request,
		CurationService curatorService, SampleBean sampleBean, String[] availableEntityNames, SampleEditGeneralBean sampleEdit)
		throws Exception {
	logger.debug("Start transferring data to simple bean");
	sampleEdit.setSampleName(sampleBean.getDomain().getName());
	sampleEdit.setSampleId(sampleBean.getDomain().getId());
	sampleEdit.setIsPublic(springSecurityAclService.checkObjectPublic(sampleBean.getDomain().getId(), SecureClassesEnum.SAMPLE.getClazz()));
	logger.debug("Transferring POC");
	sampleEdit.transferPointOfContactData(sampleBean);
	logger.debug("Transferring keywords");
	sampleEdit.setKeywords(new ArrayList<String>(sampleBean.getKeywordSet()));
	logger.debug("Transferring access");
	sampleEdit.transferAccessibilityData(sampleBean);
	logger.debug("Transferring data availability");
	sampleEdit.transferDataAvailability(request, sampleBean, availableEntityNames);
	logger.debug("Transferring lookups");
	setupLookups(request);
	logger.debug("Transferring group names");
	sampleEdit.setupGroupNamesForNewAccess(request, userService);
	logger.debug("Transferring filtered users for new access");
	setupFilteredUsersParamForNewAccess(request, sampleBean.getDomain().getCreatedBy(), sampleEdit);
	logger.debug("Transferring review button state");
	sampleEdit.setupReviewButton(request, curatorService, sampleBean, springSecurityAclService);
	logger.debug("Transferring role name map");
	sampleEdit.setupRoleNameMap();
	logger.debug("Done transferring data to simple bean");
}
/**
* Logic for DWRAccessibilityManager.getMatchedUsers()
*
* @param request
* @param dataOwner
*/
/**
 * Populates the edit bean's filtered-users map (username -> display name) for
 * the new-access dialog; mirrors DWRAccessibilityManager.getMatchedUsers().
 * Failures are logged and swallowed so the edit page still renders.
 *
 * @param request   current HTTP request (unused here)
 * @param dataOwner username of the sample's creator, excluded from the list
 * @param sampleEdit edit bean receiving the filtered user map
 */
private void setupFilteredUsersParamForNewAccess(HttpServletRequest request, String dataOwner, SampleEditGeneralBean sampleEdit)
{
try {
String loggedInUserName = SpringSecurityUtil.getLoggedInUserName();
List<CananoUserDetails> matchedUsers = userService.loadUsers("");
List<CananoUserDetails> updatedUsers = new ArrayList<CananoUserDetails>();
// Keep a user only if they are not the current user, ARE a curator, and are
// not the data owner.
// NOTE(review): the original comment said "remove current user and curators",
// but isCurator() here KEEPS only curators — confirm which behavior is intended.
for (CananoUserDetails userDetail: matchedUsers)
{
if (!loggedInUserName.equals(userDetail.getUsername()) && userDetail.isCurator() && !userDetail.getUsername().equalsIgnoreCase(dataOwner))
{
updatedUsers.add(userDetail);
}
}
sampleEdit.setFilteredUsers(new HashMap<String, String>());
for (CananoUserDetails u :updatedUsers) {
sampleEdit.getFilteredUsers().put(u.getUsername(), u.getDisplayName());
}
} catch (Exception e) {
logger.error("Got error while setting up params for adding access", e);
}
}
// Loads ACL/access-control info for the sample into the bean; for new samples
// this is what establishes the default curator access.
private void setAccesses(HttpServletRequest request, SampleBean sampleBean) throws Exception
{
springSecurityAclService.loadAccessControlInfoForObject(sampleBean.getDomain().getId(), SecureClassesEnum.SAMPLE.getClazz(),
sampleBean);
}
/**
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Prepares an empty edit bean for the new-sample submission page: clears any
 * sample lingering in session and loads POC/organization/contact-role lookups.
 * Lookup failures are logged but do not block the page.
 *
 * @param request current HTTP request
 * @return a fresh edit bean with lookup data populated (best effort)
 * @throws Exception propagated from the edit bean's own lookup setup
 */
public SampleEditGeneralBean setupNew(HttpServletRequest request) throws Exception
{
	SampleEditGeneralBean sampleEdit = new SampleEditGeneralBean();
	request.getSession().removeAttribute("theSample");
	sampleEdit.setupLookups(request, sampleService);
	try {
		InitSampleSetup.getInstance().setPOCDropdowns(request, sampleService);
		SortedSet<String> organizationNames = sampleService.getAllOrganizationNames();
		request.getSession().setAttribute("allOrganizationNames", organizationNames);
		sampleEdit.setOrganizationNamesForUser(new ArrayList<String>(organizationNames));
		SortedSet<String> roles = (SortedSet<String>)request.getSession().getAttribute("contactRoles");
		sampleEdit.setContactRoles(new ArrayList<String>(roles));
	} catch (Exception e) {
		// Log the cause too — the original dropped the exception object,
		// discarding the stack trace.
		logger.error("Got error while setting up POC lookup for sample edit", e);
	}
	return sampleEdit;
}
/**
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Prepares the form for cloning a sample: records which sample is being cloned
 * (null when the "cloningSample" request parameter is absent) and clears the
 * domain name so a new one must be entered.
 *
 * @param form     form holding the sample bean to prepare
 * @param request  current HTTP request; "cloningSample" is read as a parameter
 * @param response unused (kept for interface parity with the Struts signature)
 * @throws Exception never thrown directly; declared for interface parity
 */
public void setupClone(SampleForm form, HttpServletRequest request, HttpServletResponse response) throws Exception
{
	SampleBean sampleBean = (SampleBean) form.getSampleBean();
	// getParameter returns null when absent, which matches the original's
	// else-branch; both branches cleared the domain name, so that is hoisted.
	sampleBean.setCloningSampleName(request.getParameter("cloningSample"));
	sampleBean.getDomain().setName(null);
}
/**
* Retrieve all POCs and Groups for POC drop-down on sample edit page.
*
* @param request
* @param sampleOrg
* @throws Exception
*/
/**
 * Refreshes the POC drop-downs and the organization-name list in session for
 * the sample edit page.
 *
 * @param request current HTTP request
 * @throws Exception on lookup failure
 */
private void setupLookups(HttpServletRequest request) throws Exception {
	InitSampleSetup.getInstance().setPOCDropdowns(request, sampleService);
	request.getSession().setAttribute("allOrganizationNames", sampleService.getAllOrganizationNames());
}
/**
* Get the POC list from simpleSampleBean, find the dirty one to do an add/update.
*
* @param simpleSampleBean
* @param request
* @return
* @throws Exception
*/
/**
 * Locates the single modified ("dirty") POC in the submitted list and saves it.
 *
 * @param simpleSampleBean edit bean carrying the POC list
 * @param request          current HTTP request
 * @return the refreshed edit bean, or an error-wrapping bean when no dirty POC exists
 * @throws Exception on persistence failure
 */
public SampleEditGeneralBean savePointOfContactList(SampleEditGeneralBean simpleSampleBean, HttpServletRequest request)
		throws Exception
{
	List<SimplePointOfContactBean> pocList = simpleSampleBean.getPointOfContacts();
	if (pocList == null || pocList.isEmpty())
		return this.wrapErrorInEditBean("POC list is empty. Unable to update POC");
	SimplePointOfContactBean dirtyPOC = findDirtyPOC(pocList);
	if (dirtyPOC == null)
		return this.wrapErrorInEditBean("Unable to find the dirty POC to update");
	return savePointOfContact(simpleSampleBean, dirtyPOC, request);
}
/**
* Find the "dirty" SimplePointOfContactBean from a list
* @param pocList
* @return
*/
/**
 * Returns the first POC in the list flagged as modified by the client,
 * or null when the list is null or contains no dirty entry.
 *
 * @param pocList submitted POC list, may be null
 * @return the dirty POC, or null
 */
protected SimplePointOfContactBean findDirtyPOC(List<SimplePointOfContactBean> pocList)
{
	if (pocList == null) {
		return null;
	}
	for (SimplePointOfContactBean candidate : pocList) {
		if (candidate.isDirty()) {
			return candidate;
		}
	}
	return null;
}
/**
* Find the "dirty" SimpleAccessBean from a list
* @param accessList
* @return
*/
/**
 * Returns the first access bean flagged as modified by the client across all
 * lists in the map, or null when the map is null or contains no dirty entry.
 *
 * @param accessMap access beans grouped by key, may be null
 * @return the dirty access bean, or null
 */
protected SimpleAccessBean findDirtyAccess(Map<String, List<SimpleAccessBean>> accessMap) {
	if (accessMap == null)
		return null;
	// Iterate values() directly instead of keySet()+get(): same order,
	// one map lookup fewer per entry.
	for (List<SimpleAccessBean> accesses : accessMap.values()) {
		for (SimpleAccessBean access : accesses) {
			if (access.isDirty())
				return access;
		}
	}
	return null;
}
/**
 * Removes from the sample the non-primary POC whose id matches the submitted one.
 * <p>
 * The original compared ids with {@code ==}; {@code getDomain().getId()} is a
 * boxed {@code Long} (it is compared with {@code equals} elsewhere in this
 * class), so reference equality only holds inside the small-value cache.
 * {@code equals} is used instead, with a null guard on unsaved POCs.
 *
 * @param sample    sample whose other-POC list is searched
 * @param simplePOC submitted POC carrying the id to remove
 */
protected void removeMatchingPOC(SampleBean sample, SimplePointOfContactBean simplePOC) {
	List<PointOfContactBean> otherPOCs = sample.getOtherPOCBeans();
	if (otherPOCs == null) {
		return;
	}
	for (PointOfContactBean poc : otherPOCs) {
		Long pocId = poc.getDomain().getId();
		if (pocId != null && pocId.equals(simplePOC.getId())) {
			logger.debug("Removing poc " + poc.getDisplayName() + " from sample " + sample.getDomain().getName());
			sample.removePointOfContact(poc);
			logger.debug("POC removed");
			break;
		}
	}
}
/**
* Save a new or existing POC with updates.
*
* For Rest call: 1. Update Sample: when add POC and save are clicked
* 2. Update Sample: when edit POC and save are clicked
* 3. Submit Sample: when add POC and save are clicked. In this case, sample only has a name.
*
* Updating an existing POC with a new organization name is the equivalent of creating new and deleting old
*
* @param simplePOC
* @param request
* @return
* @throws Exception
*/
/**
 * Saves a new or existing POC with updates, then re-persists the sample.
 * <p>
 * Rest-call scenarios: (1) Update Sample: add-POC + save; (2) Update Sample:
 * edit-POC + save; (3) Submit Sample: add-POC + save, where the sample only
 * has a name yet. Updating an existing POC with a new organization is the
 * equivalent of creating a new POC and deleting the old one.
 * <p>
 * The three identical catch-recovery blocks of the original (each ended with a
 * stray double semicolon) are consolidated into one private helper.
 *
 * @param simpleSampleBean edit bean carrying sample id/name and the POC list
 * @param simplePOC        the POC to save
 * @param request          current HTTP request; the sample bean lives in session
 * @return the refreshed edit bean, or an edit bean wrapping validation/save errors
 * @throws Exception on unexpected persistence failure
 */
public SampleEditGeneralBean savePointOfContact(SampleEditGeneralBean simpleSampleBean,
		SimplePointOfContactBean simplePOC, HttpServletRequest request) throws Exception
{
	logger.debug("========== Start saving POC");
	List<String> errors = validatePointOfContactInput(simplePOC);
	if (errors.size() > 0) {
		return wrapErrorsInEditBean(errors, "POC");
	}
	SampleBean sample = (SampleBean)request.getSession().getAttribute("theSample");
	long sampleId = simpleSampleBean.getSampleId();
	String sampleName = simpleSampleBean.getSampleName();
	boolean newSample = false;
	if (sample == null) {
		if (sampleName == null || sampleName.length() == 0)
			return this.wrapErrorInEditBean("Sample object in session is not valid for sample update operation");
		else { // add POC in the submit-new-sample workflow
			sample = new SampleBean();
			sample.getDomain().setName(sampleName);
			newSample = true;
		}
	} else if (sampleId <= 0) {
		sample.getDomain().setName(sampleName);
		newSample = true;
	} else {
		if (sample.getDomain().getId() != sampleId)
			return this.wrapErrorInEditBean("Current sample id doesn't match sample id in session");
	}
	logger.debug("========== Resolving Input");
	PointOfContactBean thePOC = resolveThePOCToSaveFromInput(sample, simplePOC, SpringSecurityUtil.getLoggedInUserName());
	Long oldPOCId = thePOC.getDomain().getId();
	determinePrimaryPOC(thePOC, sample, newSample);
	// POC must be saved separately: the same organizations cannot be saved in
	// the same session as the sample.
	sampleService.savePointOfContact(thePOC);
	sample.addPointOfContact(thePOC, oldPOCId);
	logger.debug("========== Done saving POC");
	// if saving assigned a new id, re-point characterization associations at it
	if (oldPOCId != null && !oldPOCId.equals(thePOC.getDomain().getId())) {
		sampleService.updatePOCAssociatedWithCharacterizations(sample.getDomain().getName(), oldPOCId, thePOC.getDomain().getId());
	}
	try {
		logger.debug("========== Saving Sample with POC");
		saveSample(request, sample);
		logger.debug("========== Done Saving Sample with POC");
	} catch (NoAccessException e) {
		return recoverFromSampleSaveFailure(simpleSampleBean, sample, newSample,
				"User has no access to edit this sample", request);
	} catch (DuplicateEntriesException e) {
		return recoverFromSampleSaveFailure(simpleSampleBean, sample, newSample,
				PropertyUtil.getProperty("sample", "error.duplicateSample"), request);
	} catch (Exception e) {
		return recoverFromSampleSaveFailure(simpleSampleBean, sample, newSample,
				e.getMessage(), request);
	}
	if (newSample)
		this.setAccesses(request, sample); // assigns default curator access to the new sample
	InitSampleSetup.getInstance().persistPOCDropdowns(request, sample, sampleService);
	logger.debug("========== Populating UpdateSample data");
	return summaryEdit(sample.getDomain().getId().toString(), request);
}

/**
 * Restores session/bean state after a failed sample save and records the
 * user-facing error message; shared by all catch branches above.
 */
private SampleEditGeneralBean recoverFromSampleSaveFailure(SampleEditGeneralBean simpleSampleBean,
		SampleBean sample, boolean newSample, String error, HttpServletRequest request)
{
	if (newSample)
		simpleSampleBean.getPointOfContacts().clear();
	request.getSession().setAttribute("theSample", sample);
	simpleSampleBean.getErrors().add(error);
	simpleSampleBean.transferPointOfContactData(sample);
	return simpleSampleBean;
}
/**
 * For a brand-new sample that has no primary contact yet, the first POC saved
 * becomes the primary one; otherwise the POC's status is left untouched.
 *
 * @param thePOC    POC being saved
 * @param sample    sample the POC belongs to
 * @param newSample whether the sample is being created in this workflow
 */
protected void determinePrimaryPOC(PointOfContactBean thePOC, SampleBean sample, boolean newSample)
{
	// collapsed the original's "newSample == true" + nested-if into one condition
	if (newSample && sample.getDomain().getPrimaryPointOfContact() == null) {
		thePOC.setPrimaryStatus(true);
	}
}
/**
* Delete a non-primary POC from a sample
*
* @param simpleEditBean
* @param request
* @return
* @throws Exception
*/
/**
 * Deletes a non-primary POC from the sample held in session and re-saves it.
 * The primary contact may never be removed.
 *
 * @param simplePOC POC to delete, carrying the owning sample's id
 * @param request   current HTTP request; the sample bean is read from session
 * @return the refreshed edit bean, or an edit bean wrapping an error message
 * @throws Exception on persistence failure
 */
public SampleEditGeneralBean deletePointOfContact(SimplePointOfContactBean simplePOC, HttpServletRequest request)
		throws Exception {
	long sampleId = simplePOC.getSampleId();
	SampleBean sample = (SampleBean) this.findMatchSampleInSession(request, sampleId);
	if (sample == null) {
		// was System.out.println; route diagnostics through the class logger
		logger.warn("No sample in session matching id " + sampleId);
		return wrapErrorInEditBean("No valid sample in session matching given sample id. Unable to update delete POC to the sample.");
	}
	if (simplePOC.isPrimaryContact())
		return wrapErrorInEditBean(PropertyUtil.getProperty("sample", "message.deletePrimaryPOC"));
	removeMatchingPOC(sample, simplePOC);
	saveSample(request, sample);
	return summaryEdit(String.valueOf(sample.getDomain().getId()), request);
}
/**
* Make a copy of an existing sample based on sample name.
*
* @param simpleEditBean
* @param request
* @return
* @throws Exception
*/
/**
 * Makes a copy of an existing sample under a new name.
 *
 * @param simpleEditBean carries the original sample name and the new clone name
 * @param request        current HTTP request
 * @return the edit bean for the cloned sample, or an error-wrapping bean on failure
 * @throws Exception on unexpected persistence failure
 */
public SampleEditGeneralBean clone(SampleEditGeneralBean simpleEditBean, HttpServletRequest request) throws Exception
{
	String cloneName = simpleEditBean.getNewSampleName();
	String sourceName = simpleEditBean.getSampleName();
	String validationError = validateSampleName(cloneName);
	if (validationError.length() > 0)
		return this.wrapErrorInEditBean(validationError);
	SampleBean clonedSampleBean = null;
	try {
		clonedSampleBean = sampleService.cloneSample(sourceName, cloneName);
	} catch (NotExistException e) {
		return wrapErrorInEditBean(PropertyUtil.getPropertyReplacingToken("sample", "error.cloneSample.noOriginalSample", "0", sourceName));
	} catch (DuplicateEntriesException e) {
		return wrapErrorInEditBean(PropertyUtil.getProperty("sample", "error.cloneSample.duplicateSample"));
	} catch (SampleException e) {
		return wrapErrorInEditBean(PropertyUtil.getProperty("sample", "error.cloneSample"));
	}
	// retained from the Struts implementation; summaryEdit is handed the id directly
	request.setAttribute("sampleId", clonedSampleBean.getDomain().getId().toString());
	return summaryEdit(String.valueOf(clonedSampleBean.getDomain().getId()), request);
}
/**
 * Deletes a sample: flips its review status to DELETED, removes its data
 * availability, deletes the domain record and its ACL entries, and clears the
 * session copy.
 *
 * @param sampleId id of the sample to delete
 * @param request  current HTTP request; the sample bean is read from session
 * @return a user-facing confirmation or error message
 * @throws Exception on persistence failure
 */
public String delete(String sampleId, HttpServletRequest request) throws Exception {
	long id = Long.parseLong(sampleId);
	SampleBean sampleBean = findMatchSampleInSession(request, id);
	if (sampleBean == null)
		return "Error: unable to find a valid sample in session with id . Sample deletion failed";
	String sampleName = sampleBean.getDomain().getName();
	// Removing all access entries for a sample is slow, so a scheduler job
	// handles that; here the review status is simply marked DELETED.
	updateReviewStatusTo(DataReviewStatusBean.DELETED_STATUS, request,
			sampleBean.getDomain().getId().toString(), sampleBean.getDomain().getName(), "sample");
	if (sampleBean.getHasDataAvailability()) {
		dataAvailabilityServiceDAO.deleteDataAvailability(sampleBean.getDomain().getId().toString());
	}
	sampleService.deleteSample(sampleBean.getDomain().getName());
	springSecurityAclService.deleteAccessObject(id, SecureClassesEnum.SAMPLE.getClazz());
	request.getSession().removeAttribute("theSample");
	return PropertyUtil.getPropertyReplacingToken("sample", "message.deleteSample", "0", sampleName);
}
/**
* generate data availability for the sample
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Generates data-availability entries for the sample on the form, recalculates
 * its completeness score, and sets the onload JavaScript that switches the UI
 * to the data-availability view.
 *
 * @param form     form holding the sample bean
 * @param request  current HTTP request
 * @param response unused (kept for interface parity with the Struts signature)
 * @throws Exception on persistence failure
 */
public void generateDataAvailability(SampleForm form, HttpServletRequest request, HttpServletResponse response) throws Exception
{
// DynaValidatorForm theForm = (DynaValidatorForm) form;
SampleBean sampleBean = (SampleBean) form.getSampleBean();
Set<DataAvailabilityBean> dataAvailability = dataAvailabilityServiceDAO.saveDataAvailability(sampleBean);
sampleBean.setDataAvailability(dataAvailability);
sampleBean.setHasDataAvailability(true);
// recompute the completeness score from the freshly generated entries
calculateDataAvailabilityScore(sampleBean, dataAvailability, request);
/*
 * Map<String, List<DataAvailabilityBean>> dataAvailabilityMapPerPage =
 * (Map<String, List<DataAvailabilityBean>>) request
 * .getSession().getAttribute("dataAvailabilityMapPerPage");
 *
 * if (dataAvailabilityMapPerPage != null) {
 * dataAvailabilityMapPerPage.remove(sampleBean.getDomain().getId()
 * .toString());
 * dataAvailabilityMapPerPage.put(sampleBean.getDomain().getId()
 * .toString(), dataAvailability);
 *
 * request.getSession().setAttribute("dataAvailabilityMapPerPage",
 * dataAvailabilityMapPerPage); }
 */
// tell the page to open the data-availability view for this sample on load
request.setAttribute("onloadJavascript", "manageDataAvailability('"
+ sampleBean.getDomain().getId()
+ "', 'sample', 'dataAvailabilityView')");
// return mapping.findForward("summaryEdit");
}
/**
* update data availability for the sample. This is to support the "Regenerate" button
* on Sample Edit page.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Regenerates data availability for the sample; backs the "Regenerate" button
 * on the Sample Edit page.
 *
 * @param sampleId id of the sample whose data availability is regenerated
 * @param request  current HTTP request; the sample bean is read from session
 * @return the refreshed edit bean, or an edit bean wrapping an error message
 * @throws Exception on persistence failure
 */
public SampleEditGeneralBean updateDataAvailability(String sampleId, HttpServletRequest request) throws Exception
{
	SampleBean sampleBean = findMatchSampleInSession(request, Long.parseLong(sampleId));
	if (sampleBean == null) {
		SampleEditGeneralBean errorBean = new SampleEditGeneralBean();
		errorBean.getErrors().add("No valid sample in session matching given sample id. Unable to update data availabilty.");
		return errorBean;
	}
	Set<DataAvailabilityBean> regenerated = dataAvailabilityServiceDAO.saveDataAvailability(sampleBean);
	sampleBean.setDataAvailability(regenerated);
	// recompute the completeness score from the regenerated entries
	calculateDataAvailabilityScore(sampleBean, regenerated, request);
	return this.summaryEdit(sampleId, request);
}
/**
* delete data availability for the sample
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Deletes all data-availability entries for the sample held in session and
 * clears the related flags on the bean.
 *
 * @param simpleEditBean edit bean carrying the sample id
 * @param request        current HTTP request; the sample bean is read from session
 * @return the refreshed edit bean, or an edit bean wrapping an error message
 * @throws Exception on persistence failure
 */
public SampleEditGeneralBean deleteDataAvailability(SampleEditGeneralBean simpleEditBean, HttpServletRequest request)
		throws Exception {
	long sampleId = simpleEditBean.getSampleId();
	SampleBean sampleBean = (SampleBean) this.findMatchSampleInSession(request, sampleId);
	if (sampleBean == null) {
		// was System.out.println; route diagnostics through the class logger
		logger.warn("No sample in session matching id " + sampleId);
		return wrapErrorInEditBean("No valid sample in session matching given sample id. Unable to delete Data Availability to sample.");
	}
	dataAvailabilityServiceDAO.deleteDataAvailability(sampleBean.getDomain().getId().toString());
	sampleBean.setHasDataAvailability(false);
	sampleBean.setDataAvailability(new HashSet<DataAvailabilityBean>());
	return summaryEdit(String.valueOf(sampleBean.getDomain().getId()), request);
}
/**
* Support viewDataAvailability rest service
*
* @param sampleId
* @param request
* @return
* @throws Exception
*/
/**
 * Supports the viewDataAvailability rest service: loads the sample, its
 * data-availability entries and score, and wraps them for the view layer.
 *
 * @param sampleId id of the sample to view
 * @param request  current HTTP request
 * @return a view bean carrying the sample and its available entity names
 * @throws Exception on lookup failure
 */
public SimpleSampleBean dataAvailabilityView(String sampleId, HttpServletRequest request) throws Exception
{
	SampleBean sampleBean = setupSampleById(sampleId, request);
	Set<DataAvailabilityBean> dataAvailability = dataAvailabilityServiceDAO.findDataAvailabilityBySampleId(sampleBean.getDomain().getId().toString());
	String[] availEntityNames = null;
	// Null guard added for parity with summaryEdit, which checks the same
	// service result for null; the original's size() > 0 was redundant with
	// !isEmpty().
	if (dataAvailability != null && !dataAvailability.isEmpty()) {
		sampleBean.setHasDataAvailability(true);
		calculateDataAvailabilityScore(sampleBean, dataAvailability, request);
		availEntityNames = new String[dataAvailability.size()];
		int i = 0;
		for (DataAvailabilityBean bean : dataAvailability) {
			availEntityNames[i++] = bean.getAvailableEntityName().toLowerCase();
		}
	}
	return transferDataAvailabilityToSimpleSampleBean(sampleBean, request, availEntityNames);
}
/**
 * Wraps the sample's data-availability state and entity names into a
 * SimpleSampleBean for the view layer.
 *
 * @param sampleBean       source sample
 * @param request          current HTTP request
 * @param availEntityNames lower-cased available entity names, or null
 * @return the populated view bean
 */
protected SimpleSampleBean transferDataAvailabilityToSimpleSampleBean(SampleBean sampleBean, HttpServletRequest request, String[] availEntityNames)
{
	SimpleSampleBean viewBean = new SimpleSampleBean();
	viewBean.transferSampleBeanForDataAvailability(sampleBean, request);
	viewBean.setAvailableEntityNames(availEntityNames);
	return viewBean;
}
/**
 * Recomputes the sample's data-availability completeness score from the MINChar
 * reference data held in the servlet context.
 * <p>
 * NOTE(review): the casts assume the web app initializer stores "MINChar" as a
 * SortedSet&lt;String&gt; and "caNano2MINChar" as a Map&lt;String,String&gt; —
 * not verifiable from this file; confirm against the context listener.
 *
 * @param sampleBean       sample whose score is recalculated
 * @param dataAvailability current data-availability entries
 * @param request          current HTTP request, used to reach the servlet context
 */
private void calculateDataAvailabilityScore(SampleBean sampleBean, Set<DataAvailabilityBean> dataAvailability, HttpServletRequest request)
{
ServletContext appContext = request.getSession().getServletContext();
SortedSet<String> minchar = (SortedSet<String>) appContext.getAttribute("MINChar");
Map<String, String> attributes = (Map<String, String>) appContext.getAttribute("caNano2MINChar");
sampleBean.calculateDataAvailabilityScore(dataAvailability, minchar, attributes);
}
/**
* Save access info for a sample
* @param simpleAccess
* @param request
* @return
* @throws Exception
*/
/**
 * Saves (or updates) one access-control entry for the sample held in session,
 * adjusting the sample's public/review status to stay consistent with the new
 * access, then re-saves the sample and refreshes the edit bean.
 *
 * @param simpleEditBean edit bean carrying the access entry and sample data
 * @param request        current HTTP request; the sample bean lives in session
 * @return the refreshed edit bean, or an edit bean wrapping validation errors
 * @throws Exception when no valid sample is in session or persistence fails
 */
public SampleEditGeneralBean saveAccess(SampleEditGeneralBean simpleEditBean, HttpServletRequest request) throws Exception
{
SampleBean sample = (SampleBean)request.getSession().getAttribute("theSample");
if (sample == null) {
throw new Exception("Sample object is not valid in session for saving /updating access");
}
AccessControlInfo theAccess = simpleEditBean.getTheAccess();
List<String> errors = validateAccess(request, theAccess);
if (errors.size() > 0) {
return this.wrapErrorsInEditBean(errors);
}
// if sample is public but the new access is not public, retracting the public
// privilege is handled inside the service method
sampleService.assignAccessibility(theAccess, sample.getDomain());
// non-public access on a currently-public sample: mark the sample RETRACTED
// and pull it from public visibility
if (!CaNanoRoleEnum.ROLE_ANONYMOUS.toString().equalsIgnoreCase(theAccess.getRecipient()) &&
springSecurityAclService.checkObjectPublic(sample.getDomain().getId(), SecureClassesEnum.SAMPLE.getClazz()))
{
updateReviewStatusTo(DataReviewStatusBean.RETRACTED_STATUS, request, sample.getDomain().getId().toString(), sample
.getDomain().getName(), "sample");
springSecurityAclService.retractObjectFromPublic(sample.getDomain().getId(), SecureClassesEnum.SAMPLE.getClazz());
}
// public access granted: promote a pending-review status to public
if (CaNanoRoleEnum.ROLE_ANONYMOUS.toString().equalsIgnoreCase(theAccess.getRecipient())) {
this.switchPendingReviewToPublic(request, sample.getDomain().getId().toString());
}
simpleEditBean.populateDataForSavingSample(sample);
saveSample(request, sample);
// refresh the cached group-name list in case the new access targeted a group
// not yet present in it
if (AccessTypeEnum.GROUP.getAccessType().equalsIgnoreCase(theAccess.getAccessType()))
{
List<String> groupNames = (List<String>) request.getSession().getAttribute("allGroupNames");
if (groupNames == null || !groupNames.contains(theAccess.getRecipient())) {
List<String> availGroupNames = userService.getGroupsAccessibleToUser("");
request.getSession().setAttribute("allGroupNames", availGroupNames);
}
}
return summaryEdit(sample.getDomain().getId().toString(), request);
}
/**
 * Removes an access-control entry from the sample currently held in session.
 *
 * @param simpleEditBean edit bean carrying the sample id and the access entry to remove
 * @param request current HTTP request; its session must hold the matching sample
 * @return the refreshed edit bean, or an edit bean wrapping an error message when
 *         no matching sample is in session
 * @throws Exception propagated from the underlying service calls
 */
public SampleEditGeneralBean deleteAccess(SampleEditGeneralBean simpleEditBean, HttpServletRequest request)
        throws Exception
{
    long sampleId = simpleEditBean.getSampleId();
    SampleBean sample = this.findMatchSampleInSession(request, sampleId);
    if (sample == null) {
        // Fix: was System.out.println — use the class logger like the rest of this class.
        logger.error("No Sample in session");
        // Fix: corrected the garbled/typo'd message ("update delete accecc").
        return wrapErrorInEditBean("No valid sample in session matching given sample id. Unable to delete access to the sample.");
    }
    AccessControlInfo theAccess = simpleEditBean.getTheAccess();
    sample.setTheAccess(theAccess);
    sampleService.removeAccessibility(theAccess, sample.getDomain());
    return summaryEdit(String.valueOf(sample.getDomain().getId()), request);
}
/**
 * Retracts the sample held by the given form from public visibility.
 *
 * @param theForm form whose sample bean is retracted
 * @param request current HTTP request (unused here, kept for signature consistency)
 * @throws Exception propagated from the ACL service
 */
protected void removePublicAccess(SampleForm theForm, HttpServletRequest request) throws Exception
{
    SampleBean sampleBean = (SampleBean) theForm.getSampleBean();
    Long sampleId = sampleBean.getDomain().getId();
    springSecurityAclService.retractObjectFromPublic(sampleId, SecureClassesEnum.SAMPLE.getClazz());
}
/**
 * Validates a point-of-contact submission and collects all error messages.
 * Organization name is required; every other field is validated only when present.
 *
 * @param simplePOC the submitted point-of-contact data, may be null
 * @return list of error messages; empty when the input is valid
 */
protected List<String> validatePointOfContactInput(SimplePointOfContactBean simplePOC)
{
    List<String> errors = new ArrayList<String>();
    if (simplePOC == null) {
        errors.add("Input point of contact object invalid"); //shouldn't happen
        return errors;
    }
    SimpleOrganizationBean simpleOrg = simplePOC.getOrganization();
    if (simpleOrg != null) {
        String orgName = simpleOrg.getName();
        if (orgName == null || !InputValidationUtil.isTextFieldWhiteList(orgName))
            errors.add(PropertyUtil.getProperty("sample", "organization.name.invalid"));
    } else
        errors.add("Organization Name is a required field");
    SimpleAddressBean addrBean = simplePOC.getAddress();
    if (addrBean != null) {
        String val = addrBean.getLine1();
        if (val != null && val.length() > 0 && !InputValidationUtil.isTextFieldWhiteList(val))
            errors.add(PropertyUtil.getProperty("sample", "organization.address1.invalid"));
        val = addrBean.getLine2();
        if (val != null && val.length() > 0 && !InputValidationUtil.isTextFieldWhiteList(val))
            errors.add(PropertyUtil.getProperty("sample", "organization.address2.invalid"));
        val = addrBean.getCity();
        if (val != null && val.length() > 0 && !InputValidationUtil.isRelaxedAlphabetic(val))
            errors.add(PropertyUtil.getProperty("sample", "organization.city.invalid"));
        val = addrBean.getStateProvince();
        if (val != null && val.length() > 0 && !InputValidationUtil.isRelaxedAlphabetic(val))
            errors.add(PropertyUtil.getProperty("sample", "organization.state.invalid"));
        val = addrBean.getCountry();
        if (val != null && val.length() > 0 && !InputValidationUtil.isRelaxedAlphabetic(val))
            errors.add(PropertyUtil.getProperty("sample", "organization.country.invalid"));
        val = addrBean.getZip();
        // Consistency fix: validate the already-fetched value instead of re-reading the bean.
        if (val != null && val.length() > 0 && !InputValidationUtil.isZipValid(val))
            errors.add(PropertyUtil.getProperty("sample", "postalCode.invalid"));
    }
    String name = simplePOC.getFirstName();
    if (name != null && name.length() > 0 && !InputValidationUtil.isRelaxedAlphabetic(name))
        errors.add(PropertyUtil.getProperty("sample", "firstName.invalid"));
    name = simplePOC.getLastName();
    if (name != null && name.length() > 0 && !InputValidationUtil.isRelaxedAlphabetic(name))
        errors.add(PropertyUtil.getProperty("sample", "lastName.invalid"));
    name = simplePOC.getMiddleInitial();
    if (name != null && name.length() > 0 && !InputValidationUtil.isRelaxedAlphabetic(name))
        errors.add(PropertyUtil.getProperty("sample", "middleInitial.invalid"));
    // Bug fix: phone was dereferenced without a null check while every other
    // optional field is null-guarded — a missing phone caused an NPE.
    String phone = simplePOC.getPhoneNumber();
    if (phone != null && phone.length() > 0 && !InputValidationUtil.isPhoneValid(phone))
        errors.add(PropertyUtil.getProperty("sample", "phone.invalid"));
    String email = simplePOC.getEmail();
    EmailValidator emailValidator = EmailValidator.getInstance();
    if (email != null && email.length() > 0 && !emailValidator.isValid(email))
        errors.add("Email is invalid");
    return errors;
}
/**
 * Validates a (cloned) sample name against the text whitelist.
 *
 * @param sampleName candidate sample name
 * @return empty string when valid, otherwise the configured error message
 */
protected String validateSampleName(String sampleName) {
    if (InputValidationUtil.isTextFieldWhiteList(sampleName)) {
        return "";
    }
    return PropertyUtil.getProperty("sample", "cloningSample.name.invalid");
}
/**
 * Resolves which PointOfContactBean should be persisted for the submitted POC
 * data, preserving the original creator when an existing POC is being edited.
 *
 * @param sample sample whose primary/other POCs are consulted
 * @param simplePOC submitted POC data; id 0 denotes a brand-new POC
 * @param createdBy login name used when a creator must be assigned
 * @return the bean to persist, or null when the submitted id matches no POC on the sample
 */
protected PointOfContactBean resolveThePOCToSaveFromInput(SampleBean sample, SimplePointOfContactBean simplePOC, String createdBy) {
    PointOfContactBean target = new PointOfContactBean();
    PointOfContactBean primary = sample.getPrimaryPOCBean();
    long pocId = simplePOC.getId();
    // New sample under submission, or a newly added POC: nothing to carry over.
    if (primary == null || pocId == 0) {
        return getPointOfContactBeanFromInput(target, simplePOC, createdBy);
    }
    // Editing the primary contact: keep its original creator.
    if (primary.getDomain().getId().longValue() == pocId) {
        target.getDomain().setCreatedBy(primary.getDomain().getCreatedBy());
        return getPointOfContactBeanFromInput(target, simplePOC, createdBy);
    }
    // Editing one of the non-primary contacts: keep that contact's creator.
    List<PointOfContactBean> others = sample.getOtherPOCBeans();
    if (others != null) {
        for (PointOfContactBean candidate : others) {
            if (candidate.getDomain().getId().longValue() == pocId) {
                target.getDomain().setCreatedBy(candidate.getDomain().getCreatedBy());
                return getPointOfContactBeanFromInput(target, simplePOC, createdBy);
            }
        }
    }
    // The submitted id matched neither the primary nor any other POC.
    return null;
}
/**
 * Copies the values of a SimplePointOfContactBean (UI-facing) into a
 * PointOfContactBean (domain-facing), reusing existing organization/POC ids
 * when present so updates hit the right rows.
 *
 * @param pocBean target bean whose domain object is populated; also the return value
 * @param simplePOC source of the organization, address, name, and contact fields
 * @param createdBy login name recorded as the creator on the domain object
 * @return the populated pocBean
 */
protected PointOfContactBean getPointOfContactBeanFromInput(PointOfContactBean pocBean, SimplePointOfContactBean simplePOC, String createdBy)
{
// Must run first: initializes the domain object the setters below write into.
pocBean.setupDomain(createdBy);
Organization org = pocBean.getDomain().getOrganization();
if (org == null)
org = new Organization();
SimpleOrganizationBean simpleOrg = simplePOC.getOrganization();
org.setName(simpleOrg.getName());
// Only carry over an existing organization id; 0 means a new organization.
if (simpleOrg.getId() > 0)
org.setId(simpleOrg.getId());
SimpleAddressBean addrBean = simplePOC.getAddress();
if (addrBean == null) {
// No address supplied: use an empty bean so the setters below null out the fields.
addrBean = new SimpleAddressBean();
}
org.setCity(addrBean.getCity());
org.setCountry(addrBean.getCountry());
org.setPostalCode(addrBean.getZip());
org.setStreetAddress1(addrBean.getLine1());
org.setStreetAddress2(addrBean.getLine2());
org.setState(addrBean.getStateProvince());
pocBean.getDomain().setOrganization(org);
pocBean.getDomain().setRole(simplePOC.getRole());
// Only carry over an existing POC id; 0 means a new point of contact.
if (simplePOC.getId() > 0)
pocBean.getDomain().setId(simplePOC.getId());
//pocBean.setupDomain(createdBy);
pocBean.getDomain().setFirstName(simplePOC.getFirstName());
pocBean.getDomain().setLastName(simplePOC.getLastName());
pocBean.getDomain().setMiddleInitial(simplePOC.getMiddleInitial());
pocBean.getDomain().setPhone(simplePOC.getPhoneNumber());
pocBean.getDomain().setEmail(simplePOC.getEmail());
pocBean.setPrimaryStatus(simplePOC.isPrimaryContact());
return pocBean;
}
/**
 * Fetches the sample bean from session and verifies it matches the given id.
 *
 * @param request current HTTP request holding the session
 * @param sampleId expected sample id; 0 accepts an incomplete bean from a failed save
 * @return the matching sample bean, or null when absent or mismatched (logged)
 */
protected SampleBean findMatchSampleInSession(HttpServletRequest request, long sampleId)
{
    SampleBean candidate = (SampleBean) request.getSession().getAttribute("theSample");
    if (candidate == null) {
        logger.error("No sample in session"); //should not happen
        return null;
    }
    Long sessionId = candidate.getDomain().getId();
    if (sessionId != null) {
        if (sessionId.longValue() == sampleId) {
            return candidate;
        }
        logger.error("The given sample id doesn't match the sample id in session");
        return null;
    }
    // A null domain id paired with sampleId == 0 indicates a failed save (incomplete bean).
    if (sampleId == 0) {
        return candidate;
    }
    logger.error("Sample in session doesn't seem to be valid");
    return null;
}
/**
 * Wraps a single error message in an otherwise empty edit bean.
 *
 * @param error message to surface to the caller
 * @return edit bean whose only content is the given error
 */
protected SampleEditGeneralBean wrapErrorInEditBean(String error)
{
    SampleEditGeneralBean errorBean = new SampleEditGeneralBean();
    errorBean.getErrors().add(error);
    return errorBean;
}
/**
 * Wraps a list of error messages in an otherwise empty edit bean.
 *
 * @param errors messages to surface to the caller
 * @return edit bean carrying the given errors
 */
protected SampleEditGeneralBean wrapErrorsInEditBean(List<String> errors)
{
    SampleEditGeneralBean errorBean = new SampleEditGeneralBean();
    errorBean.setErrors(errors);
    return errorBean;
}
/**
 * Wraps a list of error messages plus an error-type tag in an edit bean.
 *
 * @param errors messages to surface to the caller
 * @param errorType classification tag attached to the errors
 * @return edit bean carrying the given errors and type
 */
protected SampleEditGeneralBean wrapErrorsInEditBean(List<String> errors, String errorType)
{
    SampleEditGeneralBean errorBean = new SampleEditGeneralBean();
    errorBean.setErrors(errors);
    errorBean.setErrorType(errorType);
    return errorBean;
}
/**
 * Returns sample names matching the given search string, sorted by the
 * project's sortable-name ordering.
 *
 * @param request current HTTP request (unused; kept for signature consistency)
 * @param searchStr search expression passed to the sample service
 * @return sorted matching names; a single empty string when nothing matches or
 *         the lookup fails (legacy contract preserved)
 * @throws Exception declared for interface compatibility; lookup failures are logged, not thrown
 */
public List<String> getMatchedSampleNames(HttpServletRequest request, String searchStr) throws Exception
{
    // Fix: the original built a raw-typed ArrayList via a pointless
    // list -> array -> list round trip; build the result list directly.
    List<String> matched = new ArrayList<String>();
    try {
        List<String> names = sampleService.findSampleNamesBy(searchStr);
        Collections.sort(names, new Comparators.SortableNameComparator());
        matched.addAll(names);
    } catch (Exception e) {
        logger.error("Problem getting matched sample names", e);
        // Discard any partial results so the failure path matches the old behavior.
        matched.clear();
    }
    if (matched.isEmpty()) {
        matched.add("");
    }
    return matched;
}
/**
 * Submits the sample for curator review and maps the base-class status string
 * to a user-facing message.
 */
@Override
public String submitForReview(HttpServletRequest request, DataReviewStatusBean dataReviewStatusBean) throws Exception
{
    String outcome = super.submitForReview(request, dataReviewStatusBean);
    if (outcome.equals("success")) {
        return PropertyUtil.getProperty("sample", "message.submitReview");
    }
    return "Error while submitting your sample for review";
}
/**
 * Returns the name of the sample currently in session, after verifying the
 * session sample matches the requested id.
 *
 * @param request current HTTP request holding the session
 * @param sampleId id the session sample must match (numeric string)
 * @return the session sample's name
 * @throws Exception when the id is null, no sample is in session, or the ids disagree
 */
public String getCurrentSampleNameInSession(HttpServletRequest request, String sampleId) throws Exception
{
    if (sampleId == null) {
        throw new Exception("Input sample id is null");
    }
    SampleBean sessionSample = (SampleBean) request.getSession().getAttribute("theSample");
    if (sessionSample == null) {
        throw new Exception("No sample in session matching sample id: " + sampleId);
    }
    Long sessionId = sessionSample.getDomain().getId();
    if (sessionId == null) {
        throw new Exception("Sample in session has null id");
    }
    if (sessionId.longValue() != Long.parseLong(sampleId)) {
        throw new Exception("Sample in session doesn't match input sample id");
    }
    return sessionSample.getDomain().getName();
}
/**
 * Reports whether the logged-in user may edit the given sample.
 *
 * @param request current HTTP request (unused; kept for signature consistency)
 * @param sampleId id of the sample to check (numeric string)
 * @return true only when a user is logged in, the id is non-blank, and the
 *         ACL grants write permission
 * @throws Exception propagated from the ACL service
 */
public boolean isSampleEditableByCurrentUser(HttpServletRequest request, String sampleId) throws Exception
{
    // Anonymous users can never edit, and a blank id cannot identify a sample.
    if (!SpringSecurityUtil.isUserLoggedIn() || sampleId == null || sampleId.isEmpty()) {
        return false;
    }
    return springSecurityAclService.currentUserHasWritePermission(Long.valueOf(sampleId), SecureClassesEnum.SAMPLE.getClazz());
}
/**
 * Deletes a sample and its associated bookkeeping: ACL entries, review status
 * (set to DELETED), and any data-availability records.
 *
 * @param sampleId id of the sample to delete (numeric string)
 * @param request current HTTP request; the session's "theSample" attribute is cleared
 * @return user-facing success message, or an error string when the sample cannot be found
 * @throws Exception propagated from the underlying services
 */
public String deleteSampleById(String sampleId, HttpServletRequest request) throws Exception
{
    SampleBean sampleBean = sampleService.findSampleById(sampleId, true);
    if (sampleBean == null) {
        // Fix: the message read "with id . " — the sample id was never interpolated.
        return "Error: unable to find a valid sample in session with id " + sampleId + ". Sample deletion failed";
    }
    String sampleName = sampleBean.getDomain().getName();
    // remove all access associated with sample takes too long. Set up the
    // delete job in scheduler
    springSecurityAclService.deleteAccessObject(Long.parseLong(sampleId), SecureClassesEnum.SAMPLE.getClazz());
    // update data review status to "DELETED"
    updateReviewStatusTo(DataReviewStatusBean.DELETED_STATUS, request,
            sampleBean.getDomain().getId().toString(), sampleBean.getDomain().getName(), "sample");
    if (sampleBean.getHasDataAvailability()) {
        dataAvailabilityServiceDAO.deleteDataAvailability(sampleBean.getDomain().getId().toString());
    }
    sampleService.deleteSample(sampleBean.getDomain().getName());
    // Drop the (now stale) sample from the session.
    request.getSession().removeAttribute("theSample");
    return PropertyUtil.getPropertyReplacingToken("sample", "message.deleteSample", "0", sampleName);
}
/** Exposes the injected curation service DAO to the base class. */
@Override
public CurationService getCurationServiceDAO() {
return this.curationServiceDAO;
}
/** Exposes the injected sample service to the base class. */
@Override
public SampleService getSampleService() {
return this.sampleService;
}
/** Exposes the injected Spring Security ACL service to the base class. */
@Override
public SpringSecurityAclService getSpringSecurityAclService() {
return springSecurityAclService;
}
/** Exposes the injected user details service to the base class. */
@Override
public UserDetailsService getUserDetailsService() {
return userDetailsService;
}
}
| |
/*
* Copyright (c) 2011-2015, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.core.encoding;
import boofcv.struct.image.*;
/**
* Used to convert NV21 image format used in Android into BoofCV standard image types. NV21 is an encoding of a
* YUV image [1] (more specifically YUV 4:2:0) where Y is encoded in the first block and UV are interlaced together.
* The UV planes are at 1/2 resolution.
*
* @author Peter Abeles
*/
public class ConvertNV21 {

	/**
	 * Converts a NV21 encoded byte array into a BoofCV formatted image. The concrete
	 * conversion is selected from the runtime type of the output image.
	 *
	 * @param data (input) NV21 byte array
	 * @param width (input) image width
	 * @param height (input) image height
	 * @param output (output) BoofCV image
	 */
	public static void nv21ToBoof(byte[] data, int width, int height, ImageBase output) {
		if( output instanceof MultiSpectral ) {
			MultiSpectral ms = (MultiSpectral) output;
			if( ms.getBandType() == ImageUInt8.class ) {
				nv21ToMsRgb_U8(data, width, height, ms);
			} else if( ms.getBandType() == ImageFloat32.class ) {
				nv21ToMsRgb_F32(data, width, height, ms);
			} else {
				throw new IllegalArgumentException("Unsupported output band format");
			}
		} else if( output instanceof ImageSingleBand ) {
			if( output.getClass() == ImageUInt8.class ) {
				nv21ToGray(data, width, height, (ImageUInt8) output);
			} else if( output.getClass() == ImageFloat32.class ) {
				nv21ToGray(data, width, height, (ImageFloat32) output);
			} else {
				throw new IllegalArgumentException("Unsupported output type");
			}
		} else if( output instanceof ImageInterleaved ) {
			if( output.getClass() == InterleavedU8.class ) {
				nv21ToInterleaved(data, width, height, (InterleavedU8) output);
			} else if( output.getClass() == InterleavedF32.class ) {
				nv21ToInterleaved(data, width, height, (InterleavedF32) output);
			} else {
				throw new IllegalArgumentException("Unsupported output type");
			}
		} else {
			throw new IllegalArgumentException("Boofcv image type not yet supported");
		}
	}

	/**
	 * Converts an NV21 image into a gray scale image. Image type is determined at runtime.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 * @param outputType Output: Type of output image
	 * @param <T> Output image type
	 * @return Gray scale image
	 */
	public static <T extends ImageSingleBand>
	T nv21ToGray( byte[] data , int width , int height ,
				  T output , Class<T> outputType ) {
		if( outputType == ImageUInt8.class ) {
			return (T)nv21ToGray(data,width,height,(ImageUInt8)output);
		} else if( outputType == ImageFloat32.class ) {
			return (T)nv21ToGray(data,width,height,(ImageFloat32)output);
		} else {
			throw new IllegalArgumentException("Unsupported BoofCV Image Type "+outputType.getSimpleName());
		}
	}

	/**
	 * Converts an NV21 image into a gray scale U8 image.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 * @return Gray scale image
	 */
	public static ImageUInt8 nv21ToGray( byte[] data , int width , int height , ImageUInt8 output ) {
		if( output == null ) {
			output = new ImageUInt8(width,height);
		} else {
			checkSize(output, width, height);
		}
		ImplConvertNV21.nv21ToGray(data, output);
		return output;
	}

	/**
	 * Converts an NV21 image into a gray scale F32 image.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 * @return Gray scale image
	 */
	public static ImageFloat32 nv21ToGray( byte[] data , int width , int height , ImageFloat32 output ) {
		if( output == null ) {
			output = new ImageFloat32(width,height);
		} else {
			checkSize(output, width, height);
		}
		ImplConvertNV21.nv21ToGray(data, output);
		return output;
	}

	/**
	 * Converts an NV21 image into a {@link MultiSpectral} YUV image.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 * @param outputType Output: Type of output image
	 * @param <T> Output image type
	 */
	public static <T extends ImageSingleBand>
	MultiSpectral<T> nv21ToMsYuv( byte[] data , int width , int height ,
								  MultiSpectral<T> output , Class<T> outputType ) {
		if( outputType == ImageUInt8.class ) {
			return (MultiSpectral)nv21ToMsYuv_U8(data,width,height,(MultiSpectral)output);
		} else if( outputType == ImageFloat32.class ) {
			return (MultiSpectral)nv21ToMsYuv_F32(data,width,height,(MultiSpectral)output);
		} else {
			throw new IllegalArgumentException("Unsupported BoofCV Image Type "+outputType.getSimpleName());
		}
	}

	/**
	 * Converts an NV21 image into a {@link MultiSpectral} YUV image with U8 bands.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 */
	public static MultiSpectral<ImageUInt8> nv21ToMsYuv_U8( byte[] data , int width , int height ,
															MultiSpectral<ImageUInt8> output ) {
		if( output == null ) {
			output = new MultiSpectral<ImageUInt8>(ImageUInt8.class,width,height,3);
		} else {
			checkSize(output, width, height);
			if( output.getNumBands() != 3 )
				throw new IllegalArgumentException("three bands expected");
		}
		ImplConvertNV21.nv21ToMultiYuv_U8(data,output);
		return output;
	}

	/**
	 * Converts an NV21 image into a {@link MultiSpectral} RGB image with U8 bands.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 */
	public static MultiSpectral<ImageUInt8> nv21ToMsRgb_U8( byte[] data , int width , int height ,
															MultiSpectral<ImageUInt8> output ) {
		if( output == null ) {
			output = new MultiSpectral<ImageUInt8>(ImageUInt8.class,width,height,3);
		} else {
			checkSize(output, width, height);
			if( output.getNumBands() != 3 )
				throw new IllegalArgumentException("three bands expected");
		}
		ImplConvertNV21.nv21ToMultiRgb_U8(data, output);
		return output;
	}

	/**
	 * Converts an NV21 image into a {@link InterleavedU8} RGB image.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 */
	public static InterleavedU8 nv21ToInterleaved( byte[] data , int width , int height ,
												   InterleavedU8 output ) {
		if( output == null ) {
			output = new InterleavedU8(width,height,3);
		} else {
			checkSize(output, width, height);
			if( output.getNumBands() != 3 )
				throw new IllegalArgumentException("three bands expected");
		}
		ImplConvertNV21.nv21ToInterleaved_U8(data, output);
		return output;
	}

	/**
	 * Converts an NV21 image into a {@link MultiSpectral} YUV image with F32 bands.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 */
	public static MultiSpectral<ImageFloat32> nv21ToMsYuv_F32( byte[] data , int width , int height ,
															   MultiSpectral<ImageFloat32> output ) {
		if( output == null ) {
			output = new MultiSpectral<ImageFloat32>(ImageFloat32.class,width,height,3);
		} else {
			checkSize(output, width, height);
			if( output.getNumBands() != 3 )
				throw new IllegalArgumentException("three bands expected");
		}
		ImplConvertNV21.nv21ToMultiYuv_F32(data, output);
		return output;
	}

	/**
	 * Converts an NV21 image into a {@link MultiSpectral} RGB image with F32 bands.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 */
	public static MultiSpectral<ImageFloat32> nv21ToMsRgb_F32( byte[] data , int width , int height ,
															   MultiSpectral<ImageFloat32> output ) {
		if( output == null ) {
			output = new MultiSpectral<ImageFloat32>(ImageFloat32.class,width,height,3);
		} else {
			checkSize(output, width, height);
			if( output.getNumBands() != 3 )
				throw new IllegalArgumentException("three bands expected");
		}
		ImplConvertNV21.nv21ToMultiRgb_F32(data, output);
		return output;
	}

	/**
	 * Converts an NV21 image into a {@link InterleavedF32} RGB image.
	 *
	 * @param data Input: NV21 image data
	 * @param width Input: NV21 image width
	 * @param height Input: NV21 image height
	 * @param output Output: Optional storage for output image. Can be null.
	 */
	public static InterleavedF32 nv21ToInterleaved( byte[] data , int width , int height ,
													InterleavedF32 output ) {
		if( output == null ) {
			output = new InterleavedF32(width,height,3);
		} else {
			checkSize(output, width, height);
			if( output.getNumBands() != 3 )
				throw new IllegalArgumentException("three bands expected");
		}
		ImplConvertNV21.nv21ToInterleaved_F32(data, output);
		return output;
	}

	/**
	 * Fails fast when a caller-supplied output image does not match the expected size.
	 * Factored out of the conversion routines above, where this check was duplicated
	 * verbatim in every method. Message text is unchanged from the original.
	 */
	private static void checkSize( ImageBase image , int width , int height ) {
		if( image.width != width || image.height != height )
			throw new IllegalArgumentException("output width and height must be "+width+" "+height);
	}
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.contextualsearch;
import android.util.Pair;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.chrome.browser.compositor.bottombar.OverlayPanel.PanelState;
import org.chromium.chrome.browser.compositor.bottombar.OverlayPanel.StateChangeReason;
import org.chromium.chrome.browser.preferences.PrefServiceBridge;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* Centralizes UMA data collection for Contextual Search. All calls must be made from the UI thread.
*/
public class ContextualSearchUma {
// Constants to use for the original selection gesture
private static final boolean LONG_PRESS = false;
private static final boolean TAP = true;
// Constants used to log UMA "enum" histograms about the Contextual Search's preference state.
private static final int PREFERENCE_UNINITIALIZED = 0;
private static final int PREFERENCE_ENABLED = 1;
private static final int PREFERENCE_DISABLED = 2;
private static final int PREFERENCE_HISTOGRAM_BOUNDARY = 3;
// Constants used to log UMA "enum" histograms about whether search results were seen.
private static final int RESULTS_SEEN = 0;
private static final int RESULTS_NOT_SEEN = 1;
private static final int RESULTS_SEEN_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms about whether the selection is valid.
private static final int SELECTION_VALID = 0;
private static final int SELECTION_INVALID = 1;
private static final int SELECTION_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms about a request's outcome.
private static final int REQUEST_NOT_FAILED = 0;
private static final int REQUEST_FAILED = 1;
private static final int REQUEST_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms about the panel's state transitions.
// Entry code: first entry into CLOSED.
private static final int ENTER_CLOSED_FROM_OTHER = 0;
private static final int ENTER_CLOSED_FROM_PEEKED_BACK_PRESS = 1;
private static final int ENTER_CLOSED_FROM_PEEKED_BASE_PAGE_SCROLL = 2;
private static final int ENTER_CLOSED_FROM_PEEKED_TEXT_SELECT_TAP = 3;
private static final int ENTER_CLOSED_FROM_EXPANDED_BACK_PRESS = 4;
private static final int ENTER_CLOSED_FROM_EXPANDED_BASE_PAGE_TAP = 5;
private static final int ENTER_CLOSED_FROM_EXPANDED_FLING = 6;
private static final int ENTER_CLOSED_FROM_MAXIMIZED_BACK_PRESS = 7;
private static final int ENTER_CLOSED_FROM_MAXIMIZED_FLING = 8;
private static final int ENTER_CLOSED_FROM_MAXIMIZED_TAB_PROMOTION = 9;
private static final int ENTER_CLOSED_FROM_MAXIMIZED_SERP_NAVIGATION = 10;
private static final int ENTER_CLOSED_FROM_BOUNDARY = 11;
// Entry code: first entry into PEEKED.
private static final int ENTER_PEEKED_FROM_OTHER = 0;
private static final int ENTER_PEEKED_FROM_CLOSED_TEXT_SELECT_TAP = 1;
private static final int ENTER_PEEKED_FROM_CLOSED_EXT_SELECT_LONG_PRESS = 2;
private static final int ENTER_PEEKED_FROM_PEEKED_TEXT_SELECT_TAP = 3;
private static final int ENTER_PEEKED_FROM_PEEKED_TEXT_SELECT_LONG_PRESS = 4;
private static final int ENTER_PEEKED_FROM_EXPANDED_SEARCH_BAR_TAP = 5;
private static final int ENTER_PEEKED_FROM_EXPANDED_SWIPE = 6;
private static final int ENTER_PEEKED_FROM_EXPANDED_FLING = 7;
private static final int ENTER_PEEKED_FROM_MAXIMIZED_SWIPE = 8;
private static final int ENTER_PEEKED_FROM_MAXIMIZED_FLING = 9;
private static final int ENTER_PEEKED_FROM_BOUNDARY = 10;
// Entry code: first entry into EXPANDED.
private static final int ENTER_EXPANDED_FROM_OTHER = 0;
private static final int ENTER_EXPANDED_FROM_PEEKED_SEARCH_BAR_TAP = 1;
private static final int ENTER_EXPANDED_FROM_PEEKED_SWIPE = 2;
private static final int ENTER_EXPANDED_FROM_PEEKED_FLING = 3;
private static final int ENTER_EXPANDED_FROM_MAXIMIZED_SWIPE = 4;
private static final int ENTER_EXPANDED_FROM_MAXIMIZED_FLING = 5;
private static final int ENTER_EXPANDED_FROM_BOUNDARY = 6;
// Entry code: first entry into MAXIMIZED.
private static final int ENTER_MAXIMIZED_FROM_OTHER = 0;
private static final int ENTER_MAXIMIZED_FROM_PEEKED_SWIPE = 1;
private static final int ENTER_MAXIMIZED_FROM_PEEKED_FLING = 2;
private static final int ENTER_MAXIMIZED_FROM_EXPANDED_SWIPE = 3;
private static final int ENTER_MAXIMIZED_FROM_EXPANDED_FLING = 4;
private static final int ENTER_MAXIMIZED_FROM_EXPANDED_SERP_NAVIGATION = 5;
private static final int ENTER_MAXIMIZED_FROM_BOUNDARY = 6;
// Exit code: first exit from CLOSED (or UNDEFINED).
private static final int EXIT_CLOSED_TO_OTHER = 0;
private static final int EXIT_CLOSED_TO_PEEKED_TEXT_SELECT_TAP = 1;
private static final int EXIT_CLOSED_TO_PEEKED_TEXT_SELECT_LONG_PRESS = 2;
private static final int EXIT_CLOSED_TO_BOUNDARY = 3;
// Exit code: first exit from PEEKED.
private static final int EXIT_PEEKED_TO_OTHER = 0;
private static final int EXIT_PEEKED_TO_CLOSED_BACK_PRESS = 1;
private static final int EXIT_PEEKED_TO_CLOSED_BASE_PAGE_SCROLL = 2;
private static final int EXIT_PEEKED_TO_CLOSED_TEXT_SELECT_TAP = 3;
private static final int EXIT_PEEKED_TO_PEEKED_TEXT_SELECT_TAP = 4;
private static final int EXIT_PEEKED_TO_PEEKED_TEXT_SELECT_LONG_PRESS = 5;
private static final int EXIT_PEEKED_TO_EXPANDED_SEARCH_BAR_TAP = 6;
private static final int EXIT_PEEKED_TO_EXPANDED_SWIPE = 7;
private static final int EXIT_PEEKED_TO_EXPANDED_FLING = 8;
private static final int EXIT_PEEKED_TO_MAXIMIZED_SWIPE = 9;
private static final int EXIT_PEEKED_TO_MAXIMIZED_FLING = 10;
private static final int EXIT_PEEKED_TO_BOUNDARY = 11;
// Exit code: first exit from EXPANDED.
private static final int EXIT_EXPANDED_TO_OTHER = 0;
private static final int EXIT_EXPANDED_TO_CLOSED_BACK_PRESS = 1;
private static final int EXIT_EXPANDED_TO_CLOSED_BASE_PAGE_TAP = 2;
private static final int EXIT_EXPANDED_TO_CLOSED_FLING = 3;
private static final int EXIT_EXPANDED_TO_PEEKED_SEARCH_BAR_TAP = 4;
private static final int EXIT_EXPANDED_TO_PEEKED_SWIPE = 5;
private static final int EXIT_EXPANDED_TO_PEEKED_FLING = 6;
private static final int EXIT_EXPANDED_TO_MAXIMIZED_SWIPE = 7;
private static final int EXIT_EXPANDED_TO_MAXIMIZED_FLING = 8;
private static final int EXIT_EXPANDED_TO_MAXIMIZED_SERP_NAVIGATION = 9;
private static final int EXIT_EXPANDED_TO_BOUNDARY = 10;
// Exit code: first exit from MAXIMIZED.
private static final int EXIT_MAXIMIZED_TO_OTHER = 0;
private static final int EXIT_MAXIMIZED_TO_CLOSED_BACK_PRESS = 1;
private static final int EXIT_MAXIMIZED_TO_CLOSED_FLING = 2;
private static final int EXIT_MAXIMIZED_TO_CLOSED_TAB_PROMOTION = 3;
private static final int EXIT_MAXIMIZED_TO_CLOSED_SERP_NAVIGATION = 4;
private static final int EXIT_MAXIMIZED_TO_PEEKED_SWIPE = 5;
private static final int EXIT_MAXIMIZED_TO_PEEKED_FLING = 6;
private static final int EXIT_MAXIMIZED_TO_EXPANDED_SWIPE = 7;
private static final int EXIT_MAXIMIZED_TO_EXPANDED_FLING = 8;
private static final int EXIT_MAXIMIZED_TO_BOUNDARY = 9;
// Constants used to log UMA "enum" histograms with details about whether search results
// were seen, and what the original triggering gesture was.
private static final int RESULTS_SEEN_FROM_TAP = 0;
private static final int RESULTS_NOT_SEEN_FROM_TAP = 1;
private static final int RESULTS_SEEN_FROM_LONG_PRESS = 2;
private static final int RESULTS_NOT_SEEN_FROM_LONG_PRESS = 3;
private static final int RESULTS_BY_GESTURE_BOUNDARY = 4;
// Constants used to log UMA "enum" histograms with details about the Peek Promo Outcome.
private static final int PEEK_PROMO_OUTCOME_SEEN_OPENED = 0;
private static final int PEEK_PROMO_OUTCOME_SEEN_NOT_OPENED = 1;
private static final int PEEK_PROMO_OUTCOME_NOT_SEEN_OPENED = 2;
private static final int PEEK_PROMO_OUTCOME_NOT_SEEN_NOT_OPENED = 3;
private static final int PEEK_PROMO_OUTCOME_BOUNDARY = 4;
    // Constants used to log UMA "enum" histograms with details about the promo state
    // (enabled / disabled / undecided), and what the original triggering gesture was.
private static final int PROMO_ENABLED_FROM_TAP = 0;
private static final int PROMO_DISABLED_FROM_TAP = 1;
private static final int PROMO_UNDECIDED_FROM_TAP = 2;
private static final int PROMO_ENABLED_FROM_LONG_PRESS = 3;
private static final int PROMO_DISABLED_FROM_LONG_PRESS = 4;
private static final int PROMO_UNDECIDED_FROM_LONG_PRESS = 5;
private static final int PROMO_BY_GESTURE_BOUNDARY = 6;
// Constants used to log UMA "enum" histograms with summary counts for SERP loading times.
private static final int PREFETCHED_PARIALLY_LOADED = 0;
private static final int PREFETCHED_FULLY_LOADED = 1;
private static final int NOT_PREFETCHED = 2;
private static final int PREFETCH_BOUNDARY = 3;
// Constants used to log UMA "enum" histograms for HTTP / HTTPS.
private static final int PROTOCOL_IS_HTTP = 0;
private static final int PROTOCOL_NOT_HTTP = 1;
private static final int PROTOCOL_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms for single / multi-word.
private static final int RESOLVED_SINGLE_WORD = 0;
private static final int RESOLVED_MULTI_WORD = 1;
private static final int RESOLVED_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms for partially / fully loaded.
private static final int PARTIALLY_LOADED = 0;
private static final int FULLY_LOADED = 1;
private static final int LOADED_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms for triggering the Translate Onebox.
private static final int DID_FORCE_TRANSLATE = 0;
private static final int WOULD_FORCE_TRANSLATE = 1;
private static final int FORCE_TRANSLATE_BOUNDARY = 2;
// Constants used to log UMA "enum" histograms with details about whether the search
// provider sprite icon was animated, whether search results were seen and the triggering
// gesture. All new values should be inserted right before ICON_SPRITE_BOUNDARY.
private static final int ICON_SPRITE_ANIMATED_RESULTS_SEEN_FROM_TAP = 0;
private static final int ICON_SPRITE_ANIMATED_RESULTS_NOT_SEEN_FROM_TAP = 1;
private static final int ICON_SPRITE_NOT_ANIMATED_RESULTS_SEEN_FROM_TAP = 2;
private static final int ICON_SPRITE_NOT_ANIMATED_RESULTS_NOT_SEEN_FROM_TAP = 3;
private static final int ICON_SPRITE_ANIMATED_RESULTS_SEEN_FROM_LONG_PRESS = 4;
private static final int ICON_SPRITE_ANIMATED_RESULTS_NOT_SEEN_FROM_LONG_PRESS = 5;
private static final int ICON_SPRITE_NOT_ANIMATED_RESULTS_SEEN_FROM_LONG_PRESS = 6;
private static final int ICON_SPRITE_NOT_ANIMATED_RESULTS_NOT_SEEN_FROM_LONG_PRESS = 7;
private static final int ICON_SPRITE_BOUNDARY = 8;
/**
* Key used in maps from {state, reason} to state entry (exit) logging code.
*/
static class StateChangeKey {
final PanelState mState;
final StateChangeReason mReason;
final int mHashCode;
StateChangeKey(PanelState state, StateChangeReason reason) {
mState = state;
mReason = reason;
mHashCode = 31 * state.hashCode() + reason.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof StateChangeKey)) {
return false;
}
if (obj == this) {
return true;
}
StateChangeKey other = (StateChangeKey) obj;
return mState.equals(other.mState) && mReason.equals(other.mReason);
}
@Override
public int hashCode() {
return mHashCode;
}
}
static class IconSpriteAnimationKey {
final boolean mWasIconSpriteAnimated;
final boolean mWasPanelSeen;
final boolean mWasTap;
final int mHashCode;
IconSpriteAnimationKey(boolean wasIconSpriteAnimated, boolean wasPanelSeen,
boolean wasTap) {
mWasIconSpriteAnimated = wasIconSpriteAnimated;
mWasPanelSeen = wasPanelSeen;
mWasTap = wasTap;
// HashCode logic generated by Eclipse.
final int prime = 31;
int result = 1;
result = prime * result + (mWasIconSpriteAnimated ? 1231 : 1237);
result = prime * result + (mWasPanelSeen ? 1231 : 1237);
result = prime * result + (mWasTap ? 1231 : 1237);
mHashCode = result;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof IconSpriteAnimationKey)) {
return false;
}
if (obj == this) {
return true;
}
IconSpriteAnimationKey other = (IconSpriteAnimationKey) obj;
return other.mWasIconSpriteAnimated == mWasIconSpriteAnimated
&& other.mWasPanelSeen == mWasPanelSeen
&& other.mWasTap == mWasTap;
}
@Override
public int hashCode() {
return mHashCode;
}
}
    // TODO(donnd): switch from using Maps to some method that does not require creation of a key.
    // Entry code map: first entry into CLOSED.
    // Maps {from-state, reason} to the UMA code recorded the first time the panel enters
    // CLOSED. Pairs absent from the map fall back to a generic "from other" code at lookup
    // time (see getStateChangeCode()).
    private static final Map<StateChangeKey, Integer> ENTER_CLOSED_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.BACK_PRESS),
                ENTER_CLOSED_FROM_PEEKED_BACK_PRESS);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.BASE_PAGE_SCROLL),
                ENTER_CLOSED_FROM_PEEKED_BASE_PAGE_SCROLL);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_TAP),
                ENTER_CLOSED_FROM_PEEKED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.BACK_PRESS),
                ENTER_CLOSED_FROM_EXPANDED_BACK_PRESS);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.BASE_PAGE_TAP),
                ENTER_CLOSED_FROM_EXPANDED_BASE_PAGE_TAP);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.FLING),
                ENTER_CLOSED_FROM_EXPANDED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.BACK_PRESS),
                ENTER_CLOSED_FROM_MAXIMIZED_BACK_PRESS);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.FLING),
                ENTER_CLOSED_FROM_MAXIMIZED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.TAB_PROMOTION),
                ENTER_CLOSED_FROM_MAXIMIZED_TAB_PROMOTION);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.SERP_NAVIGATION),
                ENTER_CLOSED_FROM_MAXIMIZED_SERP_NAVIGATION);
        ENTER_CLOSED_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Entry code map: first entry into PEEKED.
    private static final Map<StateChangeKey, Integer> ENTER_PEEKED_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        // Note: we don't distinguish entering PEEKED from UNDEFINED / CLOSED, so the
        // UNDEFINED and CLOSED rows below deliberately share the same codes.
        // NOTE(review): "EXT_SELECT" in the constant names below looks like a typo for
        // "TEXT_SELECT" (constant is declared elsewhere; rename there and here together).
        codes.put(new StateChangeKey(PanelState.UNDEFINED, StateChangeReason.TEXT_SELECT_TAP),
                ENTER_PEEKED_FROM_CLOSED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.UNDEFINED,
                StateChangeReason.TEXT_SELECT_LONG_PRESS),
                ENTER_PEEKED_FROM_CLOSED_EXT_SELECT_LONG_PRESS);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.TEXT_SELECT_TAP),
                ENTER_PEEKED_FROM_CLOSED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.TEXT_SELECT_LONG_PRESS),
                ENTER_PEEKED_FROM_CLOSED_EXT_SELECT_LONG_PRESS);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_TAP),
                ENTER_PEEKED_FROM_PEEKED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_LONG_PRESS),
                ENTER_PEEKED_FROM_PEEKED_TEXT_SELECT_LONG_PRESS);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SEARCH_BAR_TAP),
                ENTER_PEEKED_FROM_EXPANDED_SEARCH_BAR_TAP);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SWIPE),
                ENTER_PEEKED_FROM_EXPANDED_SWIPE);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.FLING),
                ENTER_PEEKED_FROM_EXPANDED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.SWIPE),
                ENTER_PEEKED_FROM_MAXIMIZED_SWIPE);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.FLING),
                ENTER_PEEKED_FROM_MAXIMIZED_FLING);
        ENTER_PEEKED_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Entry code map: first entry into EXPANDED.
    private static final Map<StateChangeKey, Integer> ENTER_EXPANDED_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.SEARCH_BAR_TAP),
                ENTER_EXPANDED_FROM_PEEKED_SEARCH_BAR_TAP);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.SWIPE),
                ENTER_EXPANDED_FROM_PEEKED_SWIPE);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.FLING),
                ENTER_EXPANDED_FROM_PEEKED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.SWIPE),
                ENTER_EXPANDED_FROM_MAXIMIZED_SWIPE);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.FLING),
                ENTER_EXPANDED_FROM_MAXIMIZED_FLING);
        ENTER_EXPANDED_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Entry code map: first entry into MAXIMIZED.
    private static final Map<StateChangeKey, Integer> ENTER_MAXIMIZED_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.SWIPE),
                ENTER_MAXIMIZED_FROM_PEEKED_SWIPE);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.FLING),
                ENTER_MAXIMIZED_FROM_PEEKED_FLING);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SWIPE),
                ENTER_MAXIMIZED_FROM_EXPANDED_SWIPE);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.FLING),
                ENTER_MAXIMIZED_FROM_EXPANDED_FLING);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SERP_NAVIGATION),
                ENTER_MAXIMIZED_FROM_EXPANDED_SERP_NAVIGATION);
        ENTER_MAXIMIZED_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Exit code map: first exit from CLOSED.
    // Maps {to-state, reason} to the UMA code recorded the first time the panel exits the
    // given state. Pairs absent from the map fall back to a generic "to other" code at lookup
    // time (see getStateChangeCode()).
    private static final Map<StateChangeKey, Integer> EXIT_CLOSED_TO_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_TAP),
                EXIT_CLOSED_TO_PEEKED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_LONG_PRESS),
                EXIT_CLOSED_TO_PEEKED_TEXT_SELECT_LONG_PRESS);
        EXIT_CLOSED_TO_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Exit code map: first exit from PEEKED.
    private static final Map<StateChangeKey, Integer> EXIT_PEEKED_TO_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.BACK_PRESS),
                EXIT_PEEKED_TO_CLOSED_BACK_PRESS);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.BASE_PAGE_SCROLL),
                EXIT_PEEKED_TO_CLOSED_BASE_PAGE_SCROLL);
        // NOTE(review): a BASE_PAGE_TAP reason mapping to a TEXT_SELECT_TAP exit code looks
        // like a naming mismatch — confirm against the histogram enum definition.
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.BASE_PAGE_TAP),
                EXIT_PEEKED_TO_CLOSED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_TAP),
                EXIT_PEEKED_TO_PEEKED_TEXT_SELECT_TAP);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.TEXT_SELECT_LONG_PRESS),
                EXIT_PEEKED_TO_PEEKED_TEXT_SELECT_LONG_PRESS);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SEARCH_BAR_TAP),
                EXIT_PEEKED_TO_EXPANDED_SEARCH_BAR_TAP);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SWIPE),
                EXIT_PEEKED_TO_EXPANDED_SWIPE);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.FLING),
                EXIT_PEEKED_TO_EXPANDED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.SWIPE),
                EXIT_PEEKED_TO_MAXIMIZED_SWIPE);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.FLING),
                EXIT_PEEKED_TO_MAXIMIZED_FLING);
        EXIT_PEEKED_TO_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Exit code map: first exit from EXPANDED.
    private static final Map<StateChangeKey, Integer> EXIT_EXPANDED_TO_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.BACK_PRESS),
                EXIT_EXPANDED_TO_CLOSED_BACK_PRESS);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.BASE_PAGE_TAP),
                EXIT_EXPANDED_TO_CLOSED_BASE_PAGE_TAP);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.FLING),
                EXIT_EXPANDED_TO_CLOSED_FLING);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.SEARCH_BAR_TAP),
                EXIT_EXPANDED_TO_PEEKED_SEARCH_BAR_TAP);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.SWIPE),
                EXIT_EXPANDED_TO_PEEKED_SWIPE);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.FLING),
                EXIT_EXPANDED_TO_PEEKED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.SWIPE),
                EXIT_EXPANDED_TO_MAXIMIZED_SWIPE);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.FLING),
                EXIT_EXPANDED_TO_MAXIMIZED_FLING);
        codes.put(new StateChangeKey(PanelState.MAXIMIZED, StateChangeReason.SERP_NAVIGATION),
                EXIT_EXPANDED_TO_MAXIMIZED_SERP_NAVIGATION);
        EXIT_EXPANDED_TO_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // Exit code map: first exit from MAXIMIZED.
    private static final Map<StateChangeKey, Integer> EXIT_MAXIMIZED_TO_STATE_CHANGE_CODES;
    static {
        Map<StateChangeKey, Integer> codes = new HashMap<StateChangeKey, Integer>();
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.BACK_PRESS),
                EXIT_MAXIMIZED_TO_CLOSED_BACK_PRESS);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.FLING),
                EXIT_MAXIMIZED_TO_CLOSED_FLING);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.TAB_PROMOTION),
                EXIT_MAXIMIZED_TO_CLOSED_TAB_PROMOTION);
        codes.put(new StateChangeKey(PanelState.CLOSED, StateChangeReason.SERP_NAVIGATION),
                EXIT_MAXIMIZED_TO_CLOSED_SERP_NAVIGATION);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.SWIPE),
                EXIT_MAXIMIZED_TO_PEEKED_SWIPE);
        codes.put(new StateChangeKey(PanelState.PEEKED, StateChangeReason.FLING),
                EXIT_MAXIMIZED_TO_PEEKED_FLING);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.SWIPE),
                EXIT_MAXIMIZED_TO_EXPANDED_SWIPE);
        codes.put(new StateChangeKey(PanelState.EXPANDED, StateChangeReason.FLING),
                EXIT_MAXIMIZED_TO_EXPANDED_FLING);
        EXIT_MAXIMIZED_TO_STATE_CHANGE_CODES = Collections.unmodifiableMap(codes);
    }
    // "Seen by gesture" code map: logged on first exit from expanded panel, or promo,
    // broken down by gesture. Key is (wasPanelSeen, wasTap); TAP / LONG_PRESS are boolean
    // constants declared elsewhere in this class.
    private static final Map<Pair<Boolean, Boolean>, Integer> SEEN_BY_GESTURE_CODES;
    static {
        final boolean unseen = false;
        final boolean seen = true;
        Map<Pair<Boolean, Boolean>, Integer> codes = new HashMap<Pair<Boolean, Boolean>, Integer>();
        codes.put(new Pair<Boolean, Boolean>(seen, TAP), RESULTS_SEEN_FROM_TAP);
        codes.put(new Pair<Boolean, Boolean>(unseen, TAP), RESULTS_NOT_SEEN_FROM_TAP);
        codes.put(new Pair<Boolean, Boolean>(seen, LONG_PRESS), RESULTS_SEEN_FROM_LONG_PRESS);
        codes.put(new Pair<Boolean, Boolean>(unseen, LONG_PRESS), RESULTS_NOT_SEEN_FROM_LONG_PRESS);
        SEEN_BY_GESTURE_CODES = Collections.unmodifiableMap(codes);
    }
    // "Promo outcome by gesture" code map: logged on exit from promo, broken down by gesture.
    // Key is (preference value code, wasTap).
    private static final Map<Pair<Integer, Boolean>, Integer> PROMO_BY_GESTURE_CODES;
    static {
        Map<Pair<Integer, Boolean>, Integer> codes =
                new HashMap<Pair<Integer, Boolean>, Integer>();
        codes.put(new Pair<Integer, Boolean>(PREFERENCE_ENABLED, TAP), PROMO_ENABLED_FROM_TAP);
        codes.put(new Pair<Integer, Boolean>(PREFERENCE_DISABLED, TAP), PROMO_DISABLED_FROM_TAP);
        codes.put(new Pair<Integer, Boolean>(PREFERENCE_UNINITIALIZED, TAP),
                PROMO_UNDECIDED_FROM_TAP);
        codes.put(new Pair<Integer, Boolean>(PREFERENCE_ENABLED, LONG_PRESS),
                PROMO_ENABLED_FROM_LONG_PRESS);
        codes.put(new Pair<Integer, Boolean>(PREFERENCE_DISABLED, LONG_PRESS),
                PROMO_DISABLED_FROM_LONG_PRESS);
        codes.put(new Pair<Integer, Boolean>(PREFERENCE_UNINITIALIZED, LONG_PRESS),
                PROMO_UNDECIDED_FROM_LONG_PRESS);
        PROMO_BY_GESTURE_CODES = Collections.unmodifiableMap(codes);
    }
    // Icon sprite animation code mapped: logged when ending a contextual search.
    // Covers all 8 combinations of (animated, seen, tap); see IconSpriteAnimationKey.
    private static final Map<IconSpriteAnimationKey, Integer> ICON_SPRITE_ANIMATION_CODES;
    static {
        Map<IconSpriteAnimationKey, Integer> codes = new HashMap<IconSpriteAnimationKey, Integer>();
        codes.put(new IconSpriteAnimationKey(true, true, true),
                ICON_SPRITE_ANIMATED_RESULTS_SEEN_FROM_TAP);
        codes.put(new IconSpriteAnimationKey(true, false, true),
                ICON_SPRITE_ANIMATED_RESULTS_NOT_SEEN_FROM_TAP);
        codes.put(new IconSpriteAnimationKey(false, true, true),
                ICON_SPRITE_NOT_ANIMATED_RESULTS_SEEN_FROM_TAP);
        codes.put(new IconSpriteAnimationKey(false, false, true),
                ICON_SPRITE_NOT_ANIMATED_RESULTS_NOT_SEEN_FROM_TAP);
        codes.put(new IconSpriteAnimationKey(true, true, false),
                ICON_SPRITE_ANIMATED_RESULTS_SEEN_FROM_LONG_PRESS);
        codes.put(new IconSpriteAnimationKey(true, false, false),
                ICON_SPRITE_ANIMATED_RESULTS_NOT_SEEN_FROM_LONG_PRESS);
        codes.put(new IconSpriteAnimationKey(false, true, false),
                ICON_SPRITE_NOT_ANIMATED_RESULTS_SEEN_FROM_LONG_PRESS);
        codes.put(new IconSpriteAnimationKey(false, false, false),
                ICON_SPRITE_NOT_ANIMATED_RESULTS_NOT_SEEN_FROM_LONG_PRESS);
        ICON_SPRITE_ANIMATION_CODES = Collections.unmodifiableMap(codes);
    }
/**
* Logs the state of the Contextual Search preference. This function should be called if the
* Contextual Search feature is active, and will track the different preference settings
* (disabled, enabled or uninitialized). Calling more than once is fine.
*/
public static void logPreferenceState() {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchPreferenceState",
getPreferenceValue(), PREFERENCE_HISTOGRAM_BOUNDARY);
}
/**
* Logs the given number of promo taps remaining. Should be called only for users that
* are still undecided.
* @param promoTapsRemaining The number of taps remaining (should not be negative).
*/
public static void logPromoTapsRemaining(int promoTapsRemaining) {
if (promoTapsRemaining >= 0) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchPromoTapsRemaining",
promoTapsRemaining);
}
}
/**
* Logs the historic number of times that a Tap gesture triggered the peeking promo
* for users that have never opened the panel. This should be called periodically for
* undecided users only.
* @param promoTaps The historic number of taps that have caused the peeking bar for the promo,
* for users that have never opened the panel.
*/
public static void logPromoTapsForNeverOpened(int promoTaps) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchPromoTapsForNeverOpened",
promoTaps);
}
/**
* Logs the historic number of times that a Tap gesture triggered the peeking promo before
* the user ever opened the panel. This should be called periodically for all users.
* @param promoTaps The historic number of taps that have caused the peeking bar for the promo
* before the first open of the panel, for all users that have ever opened the panel.
*/
public static void logPromoTapsBeforeFirstOpen(int promoTaps) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchPromoTapsBeforeFirstOpen",
promoTaps);
}
/**
* Records the total count of times the promo panel has *ever* been opened. This should only
* be called when the user is still undecided.
* @param count The total historic count of times the panel has ever been opened for the
* current user.
*/
public static void logPromoOpenCount(int count) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchPromoOpenCount", count);
}
/**
* Logs the number of taps that have been counted since the user last opened the panel, for
* undecided users.
* @param tapsSinceOpen The number of taps to log.
*/
public static void logTapsSinceOpenForUndecided(int tapsSinceOpen) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchTapsSinceOpenUndecided",
tapsSinceOpen);
}
/**
* Logs the number of taps that have been counted since the user last opened the panel, for
* decided users.
* @param tapsSinceOpen The number of taps to log.
*/
public static void logTapsSinceOpenForDecided(int tapsSinceOpen) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchTapsSinceOpenDecided",
tapsSinceOpen);
}
/**
* Logs whether the Search Term was single or multiword.
* @param isSingleWord Whether the resolved search term is a single word or not.
*/
public static void logSearchTermResolvedWords(boolean isSingleWord) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchResolvedTermWords",
isSingleWord ? RESOLVED_SINGLE_WORD : RESOLVED_MULTI_WORD, RESOLVED_BOUNDARY);
}
/**
* Logs whether the base page was using the HTTP protocol or not.
* @param isHttpBasePage Whether the base page was using the HTTP protocol or not (should
* be false for HTTPS or other URIs).
*/
public static void logBasePageProtocol(boolean isHttpBasePage) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchBasePageProtocol",
isHttpBasePage ? PROTOCOL_IS_HTTP : PROTOCOL_NOT_HTTP, PROTOCOL_BOUNDARY);
}
/**
* Logs changes to the Contextual Search preference, aside from those resulting from the first
* run flow.
* @param enabled Whether the preference is being enabled or disabled.
*/
public static void logPreferenceChange(boolean enabled) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchPreferenceStateChange",
enabled ? PREFERENCE_ENABLED : PREFERENCE_DISABLED, PREFERENCE_HISTOGRAM_BOUNDARY);
}
/**
* Logs the number of times the Peek Promo was seen.
* @param count Number of times the Peek Promo was seen.
* @param hasOpenedPanel Whether the Panel was opened.
*/
public static void logPeekPromoShowCount(int count, boolean hasOpenedPanel) {
RecordHistogram.recordCountHistogram("Search.ContextualSearchPeekPromoCount", count);
if (hasOpenedPanel) {
RecordHistogram.recordCountHistogram(
"Search.ContextualSearchPeekPromoCountUntilOpened", count);
}
}
/**
* Logs the Peek Promo Outcome.
* @param wasPromoSeen Whether the Peek Promo was seen.
* @param wouldHaveShownPromo Whether the Promo would have shown.
* @param hasOpenedPanel Whether the Panel was opened.
*/
public static void logPeekPromoOutcome(boolean wasPromoSeen, boolean wouldHaveShownPromo,
boolean hasOpenedPanel) {
int outcome = -1;
if (wasPromoSeen) {
outcome = hasOpenedPanel
? PEEK_PROMO_OUTCOME_SEEN_OPENED : PEEK_PROMO_OUTCOME_SEEN_NOT_OPENED;
} else if (wouldHaveShownPromo) {
outcome = hasOpenedPanel
? PEEK_PROMO_OUTCOME_NOT_SEEN_OPENED : PEEK_PROMO_OUTCOME_NOT_SEEN_NOT_OPENED;
}
if (outcome != -1) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchPeekPromoOutcome",
outcome, PEEK_PROMO_OUTCOME_BOUNDARY);
}
}
/**
* Logs the outcome of the promo (first run flow).
* Logs multiple histograms; with and without the originating gesture.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
*/
public static void logPromoOutcome(boolean wasTap) {
int preferenceCode = getPreferenceValue();
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchFirstRunFlowOutcome",
preferenceCode, PREFERENCE_HISTOGRAM_BOUNDARY);
int preferenceByGestureCode = getPromoByGestureStateCode(preferenceCode, wasTap);
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchPromoOutcomeByGesture",
preferenceByGestureCode, PROMO_BY_GESTURE_BOUNDARY);
}
/**
* Logs the duration of a Contextual Search panel being viewed by the user.
* @param wereResultsSeen Whether search results were seen.
* @param isChained Whether the Contextual Search ended with the start of another.
* @param durationMs The duration of the contextual search in milliseconds.
*/
public static void logDuration(boolean wereResultsSeen, boolean isChained, long durationMs) {
if (wereResultsSeen) {
RecordHistogram.recordTimesHistogram("Search.ContextualSearchDurationSeen",
durationMs, TimeUnit.MILLISECONDS);
} else if (isChained) {
RecordHistogram.recordTimesHistogram("Search.ContextualSearchDurationUnseenChained",
durationMs, TimeUnit.MILLISECONDS);
} else {
RecordHistogram.recordTimesHistogram("Search.ContextualSearchDurationUnseen",
durationMs, TimeUnit.MILLISECONDS);
}
}
/**
* Log the duration of finishing loading the SERP after the panel is opened.
* @param wasPrefetch Whether the request was prefetch-enabled or not.
* @param durationMs The duration of loading the SERP till completely loaded, in milliseconds.
* Note that this value will be 0 when the SERP is prefetched and the user waits a
* while before opening the panel.
*/
public static void logSearchPanelLoadDuration(boolean wasPrefetch, long durationMs) {
if (wasPrefetch) {
RecordHistogram.recordMediumTimesHistogram("Search.ContextualSearchDurationPrefetched",
durationMs, TimeUnit.MILLISECONDS);
} else {
RecordHistogram.recordMediumTimesHistogram(
"Search.ContextualSearchDurationNonPrefetched", durationMs,
TimeUnit.MILLISECONDS);
}
// Also record a summary histogram with counts for each possibility.
int code = !wasPrefetch ? NOT_PREFETCHED
: (durationMs == 0 ? PREFETCHED_FULLY_LOADED : PREFETCHED_PARIALLY_LOADED);
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchPrefetchSummary",
code, PREFETCH_BOUNDARY);
}
/**
* Logs the duration from starting a search until the Search Term is resolved.
* @param durationMs The duration to record.
*/
public static void logSearchTermResolutionDuration(long durationMs) {
RecordHistogram.recordMediumTimesHistogram(
"Search.ContextualSearchResolutionDuration", durationMs, TimeUnit.MILLISECONDS);
}
/**
* Logs the duration from starting a prefetched search until the panel navigates to the results
* and they start becoming viewable. Should be called only for searches that are prefetched.
* @param durationMs The duration to record.
* @param didResolve Whether a Search Term resolution was required as part of the loading.
*/
public static void logPrefetchedSearchNavigatedDuration(long durationMs, boolean didResolve) {
String histogramName = didResolve ? "Search.ContextualSearchResolvedSearchDuration"
: "Search.ContextualSearchLiteralSearchDuration";
RecordHistogram.recordMediumTimesHistogram(
histogramName, durationMs, TimeUnit.MILLISECONDS);
}
/**
* Logs whether the promo was seen.
* Logs multiple histograms, with and without the original triggering gesture.
* @param wasPanelSeen Whether the panel was seen.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
*/
public static void logPromoSeen(boolean wasPanelSeen, boolean wasTap) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchFirstRunPanelSeen",
wasPanelSeen ? RESULTS_SEEN : RESULTS_NOT_SEEN, RESULTS_SEEN_BOUNDARY);
logHistogramByGesture(wasPanelSeen, wasTap, "Search.ContextualSearchPromoSeenByGesture");
}
/**
* Logs whether search results were seen.
* Logs multiple histograms; with and without the original triggering gesture.
* @param wasPanelSeen Whether the panel was seen.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
*/
public static void logResultsSeen(boolean wasPanelSeen, boolean wasTap) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchResultsSeen",
wasPanelSeen ? RESULTS_SEEN : RESULTS_NOT_SEEN, RESULTS_SEEN_BOUNDARY);
logHistogramByGesture(wasPanelSeen, wasTap, "Search.ContextualSearchResultsSeenByGesture");
}
/**
* Logs whether search results were seen, whether the search provider icon sprite was animated
* when the panel first appeared, and the triggering gesture.
* @param wasIconSpriteAnimated Whether the search provider icon sprite was animated when the
* the panel first appeared.
* @param wasPanelSeen Whether the panel was seen.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
*/
public static void logIconSpriteAnimated(boolean wasIconSpriteAnimated, boolean wasPanelSeen,
boolean wasTap) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchIconSpriteAnimated",
ICON_SPRITE_ANIMATION_CODES.get(new IconSpriteAnimationKey(wasIconSpriteAnimated,
wasPanelSeen, wasTap)),
ICON_SPRITE_BOUNDARY);
}
/**
* Logs whether a selection is valid.
* @param isSelectionValid Whether the selection is valid.
*/
public static void logSelectionIsValid(boolean isSelectionValid) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchSelectionValid",
isSelectionValid ? SELECTION_VALID : SELECTION_INVALID, SELECTION_BOUNDARY);
}
/**
* Logs whether a normal priority search request failed.
* @param isFailure Whether the request failed.
*/
public static void logNormalPrioritySearchRequestOutcome(boolean isFailure) {
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchNormalPrioritySearchRequestStatus",
isFailure ? REQUEST_FAILED : REQUEST_NOT_FAILED, REQUEST_BOUNDARY);
}
/**
* Logs whether a low priority search request failed.
* @param isFailure Whether the request failed.
*/
public static void logLowPrioritySearchRequestOutcome(boolean isFailure) {
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchLowPrioritySearchRequestStatus",
isFailure ? REQUEST_FAILED : REQUEST_NOT_FAILED, REQUEST_BOUNDARY);
}
/**
* Logs whether a fallback search request failed.
* @param isFailure Whether the request failed.
*/
public static void logFallbackSearchRequestOutcome(boolean isFailure) {
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchFallbackSearchRequestStatus",
isFailure ? REQUEST_FAILED : REQUEST_NOT_FAILED, REQUEST_BOUNDARY);
}
/**
* Logs whether the SERP was fully loaded when an opened panel was closed.
* @param fullyLoaded Whether the SERP had finished loading before the panel was closed.
*/
public static void logSerpLoadedOnClose(boolean fullyLoaded) {
RecordHistogram.recordEnumeratedHistogram("Search.ContextualSearchSerpLoadedOnClose",
fullyLoaded ? FULLY_LOADED : PARTIALLY_LOADED, LOADED_BOUNDARY);
}
/**
* Logs how a state was entered for the first time within a Contextual Search.
* @param fromState The state to transition from.
* @param toState The state to transition to.
* @param reason The reason for the state transition.
*/
public static void logFirstStateEntry(PanelState fromState, PanelState toState,
StateChangeReason reason) {
int code;
switch (toState) {
case CLOSED:
code = getStateChangeCode(fromState, reason,
ENTER_CLOSED_STATE_CHANGE_CODES, ENTER_CLOSED_FROM_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchEnterClosed",
code, ENTER_CLOSED_FROM_BOUNDARY);
break;
case PEEKED:
code = getStateChangeCode(fromState, reason,
ENTER_PEEKED_STATE_CHANGE_CODES, ENTER_PEEKED_FROM_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchEnterPeeked",
code, ENTER_PEEKED_FROM_BOUNDARY);
break;
case EXPANDED:
code = getStateChangeCode(fromState, reason,
ENTER_EXPANDED_STATE_CHANGE_CODES, ENTER_EXPANDED_FROM_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchEnterExpanded",
code, ENTER_EXPANDED_FROM_BOUNDARY);
break;
case MAXIMIZED:
code = getStateChangeCode(fromState, reason,
ENTER_MAXIMIZED_STATE_CHANGE_CODES, ENTER_MAXIMIZED_FROM_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchEnterMaximized",
code, ENTER_MAXIMIZED_FROM_BOUNDARY);
break;
default:
break;
}
}
/**
* Logs how a state was exited for the first time within a Contextual Search.
* @param fromState The state to transition from.
* @param toState The state to transition to.
* @param reason The reason for the state transition.
*/
public static void logFirstStateExit(PanelState fromState, PanelState toState,
StateChangeReason reason) {
int code;
switch (fromState) {
case UNDEFINED:
case CLOSED:
code = getStateChangeCode(toState, reason,
EXIT_CLOSED_TO_STATE_CHANGE_CODES, EXIT_CLOSED_TO_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchExitClosed", code, EXIT_CLOSED_TO_BOUNDARY);
break;
case PEEKED:
code = getStateChangeCode(toState, reason,
EXIT_PEEKED_TO_STATE_CHANGE_CODES, EXIT_PEEKED_TO_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchExitPeeked", code, EXIT_PEEKED_TO_BOUNDARY);
break;
case EXPANDED:
code = getStateChangeCode(toState, reason,
EXIT_EXPANDED_TO_STATE_CHANGE_CODES, EXIT_EXPANDED_TO_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchExitExpanded", code, EXIT_EXPANDED_TO_BOUNDARY);
break;
case MAXIMIZED:
code = getStateChangeCode(toState, reason,
EXIT_MAXIMIZED_TO_STATE_CHANGE_CODES, EXIT_MAXIMIZED_TO_OTHER);
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchExitMaximized", code, EXIT_MAXIMIZED_TO_BOUNDARY);
break;
default:
break;
}
}
/**
* Logs that the conditions are right to force the translation one-box, and whether it
* was actually forced or not.
* @param didForceTranslate Whether the translation onebox was forced.
*/
public static void logTranslateOnebox(boolean didForceTranslate) {
int code = didForceTranslate ? DID_FORCE_TRANSLATE : WOULD_FORCE_TRANSLATE;
RecordHistogram.recordEnumeratedHistogram(
"Search.ContextualSearchShouldTranslate", code, FORCE_TRANSLATE_BOUNDARY);
}
/**
* Gets the state-change code for the given parameters by doing a lookup in the given map.
* @param state The panel state.
* @param reason The reason the state changed.
* @param stateChangeCodes The map of state and reason to code.
* @param defaultCode The code to return if the given values are not found in the map.
* @return The code to write into an enum histogram, based on the given map.
*/
private static int getStateChangeCode(PanelState state, StateChangeReason reason,
Map<StateChangeKey, Integer> stateChangeCodes, int defaultCode) {
Integer code = stateChangeCodes.get(new StateChangeKey(state, reason));
if (code != null) {
return code;
}
return defaultCode;
}
/**
* Gets the panel-seen code for the given parameters by doing a lookup in the seen-by-gesture
* map.
* @param wasPanelSeen Whether the panel was seen.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
* @return The code to write into a panel-seen histogram.
*/
private static int getPanelSeenByGestureStateCode(boolean wasPanelSeen, boolean wasTap) {
return SEEN_BY_GESTURE_CODES.get(new Pair<Boolean, Boolean>(wasPanelSeen, wasTap));
}
/**
* Gets the promo-outcome code for the given parameter by doing a lookup in the
* promo-by-gesture map.
* @param preferenceValue The code for the current preference value.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
* @return The code to write into a promo-outcome histogram.
*/
private static int getPromoByGestureStateCode(int preferenceValue, boolean wasTap) {
return PROMO_BY_GESTURE_CODES.get(new Pair<Integer, Boolean>(preferenceValue, wasTap));
}
/**
* @return The code for the Contextual Search preference.
*/
private static int getPreferenceValue() {
PrefServiceBridge preferences = PrefServiceBridge.getInstance();
if (preferences.isContextualSearchUninitialized()) {
return PREFERENCE_UNINITIALIZED;
} else if (preferences.isContextualSearchDisabled()) {
return PREFERENCE_DISABLED;
}
return PREFERENCE_ENABLED;
}
/**
* Logs to a seen-by-gesture histogram of the given name.
* @param wasPanelSeen Whether the panel was seen.
* @param wasTap Whether the gesture that originally caused the panel to show was a Tap.
* @param histogramName The full name of the histogram to log to.
*/
private static void logHistogramByGesture(boolean wasPanelSeen, boolean wasTap,
String histogramName) {
RecordHistogram.recordEnumeratedHistogram(histogramName,
getPanelSeenByGestureStateCode(wasPanelSeen, wasTap),
RESULTS_BY_GESTURE_BOUNDARY);
}
}
| |
package hu.rycus.google.parser.csv;
import com.google.api.client.util.ObjectParser;
import hu.rycus.google.parser.csv.reflect.ClassParser;
import hu.rycus.google.parser.csv.reflect.ParseException;
import java.io.*;
import java.lang.reflect.Array;
import java.lang.reflect.Type;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * An {@link ObjectParser} that maps a CSV stream onto an array of beans.
 * The first line of the input is treated as the header naming the columns;
 * each following line is reflected into one instance of the array's component
 * type via a cached {@link ClassParser}.
 */
public class CsvParser implements ObjectParser {
    /** Process-wide cache of reflection-based row parsers, keyed by element class. */
    private static final ParserRegistry registry = new ParserRegistry();

    /** Character separating fields within a record (',' by default). */
    private final char separator;
    /** Character quoting a field ('"' by default); doubled inside a quoted field to escape it. */
    private final char delimiter;

    /** Creates a parser for conventional comma-separated, double-quote-delimited CSV. */
    public CsvParser() {
        this(',', '"');
    }

    /**
     * Creates a parser with custom characters.
     *
     * @param separator the field separator character
     * @param delimiter the field quote character
     */
    public CsvParser(final char separator, final char delimiter) {
        this.separator = separator;
        this.delimiter = delimiter;
    }

    @Override
    public <T> T parseAndClose(final InputStream inputStream, final Charset charset, final Class<T> aClass) throws IOException {
        try {
            return parseAndClose(new InputStreamReader(inputStream, charset), aClass);
        } finally {
            inputStream.close();
        }
    }

    @Override
    public Object parseAndClose(final InputStream inputStream, final Charset charset, final Type type) throws IOException {
        try {
            return parseAndClose(new InputStreamReader(inputStream, charset), type);
        } finally {
            inputStream.close();
        }
    }

    @Override
    public Object parseAndClose(final Reader reader, final Type type) throws IOException {
        try {
            // Only concrete Class targets can be reflected into; generic types are rejected.
            if (!(type instanceof Class)) {
                throw new IOException("Class argument expected for Type");
            }
            return parseAndClose(reader, (Class) type);
        } finally {
            reader.close();
        }
    }

    /**
     * Parses the whole CSV stream into an array of the component type of {@code aClass}.
     * The reader is always closed, even on failure.
     *
     * @param source the character stream to read
     * @param aClass the array type to produce
     * @return a (possibly empty) array with one element per data row
     * @throws IOException if {@code aClass} is not an array type, the header or a row
     *         cannot be parsed, or reading fails
     */
    @Override
    public <T> T parseAndClose(final Reader source, final Class<T> aClass) throws IOException {
        if (!aClass.isArray()) {
            throw new IOException(String.format("Array type required for parsing CSV files (%s found)", aClass));
        }
        try {
            final ClassParser parser;
            try {
                parser = registry.get(aClass.getComponentType());
            } catch (ParseException ex) {
                throw new IOException(ex);
            }
            final BufferedReader reader = new BufferedReader(source);
            final Session session = new Session();
            final String header = trimHeader(reader.readLine());
            if (header == null) {
                // FIX: completely empty input used to NPE on the null header line;
                // no header means no records, so return an empty array instead.
                @SuppressWarnings("unchecked")
                final T empty = (T) Array.newInstance(aClass.getComponentType(), 0);
                return empty;
            }
            try {
                session.init(header);
            } catch (ParseException ex) {
                throw new IOException("Failed to parse CSV header: " + header, ex);
            }
            final List<Object> items = new LinkedList<>();
            String line;
            while ((line = reader.readLine()) != null) {
                try {
                    final Object parsed = parser.parseObject(session.toValues(line));
                    items.add(parsed);
                } catch (ParseException ex) {
                    throw new IOException(ex);
                }
            }
            final int length = items.size();
            @SuppressWarnings("unchecked")
            final T array = (T) Array.newInstance(aClass.getComponentType(), length);
            // Iterate instead of items.get(i): indexed access on a LinkedList is O(n^2).
            int index = 0;
            for (final Object item : items) {
                Array.set(array, index++, item);
            }
            return array;
        } finally {
            source.close();
        }
    }

    /**
     * Strips a leading byte-order mark (U+FEFF) from the header line, if present.
     * Null and empty inputs are returned unchanged.
     */
    private static String trimHeader(final String original) {
        // FIX: guard against the empty string — charAt(0) on "" previously threw
        // StringIndexOutOfBoundsException when the first line was blank.
        if (original != null && !original.isEmpty() && original.charAt(0) == 0xFEFF) {
            return original.substring(1);
        }
        return original;
    }

    /**
     * Per-parse state: the column names from the header and a reusable token buffer.
     * Not thread-safe; one Session is used per {@code parseAndClose} call.
     */
    class Session {
        private final StringBuilder buffer = new StringBuilder();
        // Column names from the header line, in file order.
        private String[] headers;

        /** Tokenizes the (already BOM-trimmed) header line into column names. */
        void init(final String header) throws ParseException {
            this.headers = getTokens(trimHeader(header));
        }

        /**
         * Maps one data row to a column-name → raw-value map. Rows shorter or longer
         * than the header are truncated to the shorter of the two lengths.
         */
        Map<String, String> toValues(final String line) throws ParseException {
            final String[] params = getTokens(line);
            final Map<String, String> values = new HashMap<>();
            for (int idx = 0; idx < Math.min(headers.length, params.length); idx++) {
                values.put(headers[idx], params[idx]);
            }
            return values;
        }

        /**
         * Splits one CSV record into raw field values, honoring quoting: separators
         * inside a quoted field are literal, and a doubled quote inside a quoted
         * field yields a single quote character.
         */
        String[] getTokens(final String source) throws ParseException {
            final List<String> tokens = new LinkedList<>();
            buffer.setLength(0);
            final int length = source.length();
            boolean inString = false;     // currently inside a quoted field
            boolean wasDelimiter = false; // previous char was the quote character
            for (int index = 0; index < length; index++) {
                final char ch = source.charAt(index);
                if (ch == delimiter) {
                    // A quote right after a closing quote is an escaped literal quote.
                    if (wasDelimiter && !inString) {
                        buffer.append(ch);
                    }
                    inString = !inString;
                    wasDelimiter = true;
                } else {
                    if (ch == separator) {
                        if (inString) {
                            buffer.append(ch);
                        } else {
                            tokens.add(buffer.toString());
                            buffer.setLength(0);
                        }
                    } else {
                        // A lone quote followed by ordinary text: emit the quote literally.
                        if (wasDelimiter && !inString) {
                            buffer.append(delimiter);
                        }
                        buffer.append(ch);
                    }
                    wasDelimiter = false;
                }
            }
            // Flush the trailing field (records are not separator-terminated).
            tokens.add(buffer.toString());
            buffer.setLength(0);
            return tokens.toArray(new String[tokens.size()]);
        }
    }

    /** Thread-safe cache of {@link ClassParser} instances, one per target class. */
    private static class ParserRegistry {
        private final ConcurrentHashMap<Class<?>, ClassParser> classParsers = new ConcurrentHashMap<>();

        /**
         * Returns the cached parser for {@code aClass}, creating and caching one on
         * first use. Metadata parsing happens before publication so callers never
         * observe a half-initialized parser.
         */
        public ClassParser get(final Class<?> aClass) throws ParseException {
            final ClassParser parser = classParsers.get(aClass);
            if (parser != null) {
                return parser;
            }
            final ClassParser newParser = new ClassParser(aClass);
            newParser.parseMetadata();
            // FIX: if another thread won the race, return the cached winner so all
            // callers share one instance (previously the loser was returned).
            final ClassParser existing = classParsers.putIfAbsent(aClass, newParser);
            return existing != null ? existing : newParser;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.hpl.jena.rdf.arp;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import junit.framework.Assert;
import junit.framework.TestSuite;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.apache.jena.iri.IRI;
import com.hp.hpl.jena.rdf.arp.ALiteral;
import com.hp.hpl.jena.rdf.arp.ARPEventHandler;
import com.hp.hpl.jena.rdf.arp.AResource;
import com.hp.hpl.jena.rdf.arp.NTriple;
import com.hp.hpl.jena.rdf.arp.impl.ARPResource;
import com.hp.hpl.jena.rdf.arp.impl.ARPSaxErrorHandler;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFErrorHandler;
import com.hp.hpl.jena.shared.wg.TestInputStreamFactory;
/**
 * A version of the test suite which uses the
 * ARP internal N-triple writer, and not the
 * Jena N-triple writer.
 * @author Jeremy Carroll
 *
 *
 */
class NTripleTestSuite extends WGTestSuite {
    NTripleTestSuite(TestInputStreamFactory fact, String name, boolean b) {
        super(fact, name, b);
    }
    // Factory: test directory given as a relative path string.
    static TestSuite suite(IRI testDir, String d, String nm) {
        return new NTripleTestSuite(
            new TestInputStreamFactory(testDir, d),
            nm,
            true);
    }
    // Factory: test directory given as an IRI.
    static TestSuite suite(IRI testDir, IRI d, String nm) {
        return new NTripleTestSuite(
            new TestInputStreamFactory(testDir, d),
            nm,
            true);
    }
    // Thrown deliberately from TestHandler.statement() to simulate a client
    // aborting the parse partway through ("sudden death" in loadRDFx).
    static class SimulatedException extends RuntimeException {
        /**
         *
         */
        private static final long serialVersionUID = -4804213791508445759L;
    }
    // ARP/SAX event handler that validates bnode scoping and the
    // startRDF/endRDF state machine during a parse, and can abort after a
    // fixed number of statements by throwing SimulatedException.
    static class TestHandler
        extends ARPSaxErrorHandler
        implements ARPEventHandler, org.xml.sax.ErrorHandler {
        TestHandler(RDFErrorHandler eh) {
            this(eh, 0);
        }
        // cnt: statements to accept before throwing SimulatedException; 0 = never.
        TestHandler(RDFErrorHandler eh, int cnt) {
            super(eh);
            countDown = cnt;
            xCountDown = cnt;
        }
        // Original countdown value, kept for the assertion message in atEndOfFile().
        final int xCountDown;
        // Bnodes currently in scope (seen but scope not yet ended).
        Set<AResource> anon = new HashSet<AResource>();
        // Bnodes whose scope has already ended; reuse of these is a failure.
        Set<AResource> oldAnon = new HashSet<AResource>();
        int state = 1; // 1 begin, 2 in RDF, 3 after RDF, 4 at end-of-file.
        int countDown;
        @Override
        public void statement(AResource subj, AResource pred, AResource obj) {
            // Statements may only arrive between startRDF and endRDF.
            Assert.assertEquals(state, 2);
            seeing(subj);
            seeing(obj);
            if (--countDown == 0)
                throw new SimulatedException();
        }
        /**
         * Records that a resource was used in a statement; a bnode whose scope
         * already ended must never reappear.
         * @param subj
         */
        private void seeing(AResource subj) {
            if (subj.isAnonymous())
                anon.add(subj);
            Assert.assertFalse("bnode reuse?", oldAnon.contains(subj));
        }
        /**
         * Records the end of a bnode's scope, checking it was in scope and that
         * end-scope is not reported twice.
         * @param subj
         */
        private void seen(AResource subj) {
            if (!anon.contains(subj)) {
                if (ARPResource.DEBUG) {
                    // In debug builds the creation stack trace is stashed in user data.
                    ((RuntimeException)subj.getUserData()).printStackTrace();
                }
                Assert.assertFalse(
                    "end-scope called twice for a bnode: "
                        + subj.getAnonymousID(),
                    oldAnon.contains(subj));
                Assert.assertTrue(
                    "end-scope for a bnode that had not been used "
                        + subj.getAnonymousID(),
                    anon.contains(subj));
            }
            anon.remove(subj);
            oldAnon.add(subj);
        }
        @Override
        public void statement(AResource subj, AResource pred, ALiteral lit) {
            Assert.assertEquals("no start RDF seen", state, 2);
            seeing(subj);
            if (--countDown == 0)
                throw new SimulatedException();
        }
        @Override
        public void endBNodeScope(AResource bnode) {
            Assert.assertTrue(bnode.isAnonymous());
            switch (state) {
                case 1 :
                    // Scope events before startRDF are illegal.
                    Assert.fail("Missing startRDF"); return ;
                case 2 :
                    // During the parse only plain (non-nodeID) bnodes end scope.
                    Assert.assertFalse(bnode.hasNodeID());
                    seen(bnode);
                    break;
                case 3 :
                case 4 :
                    // After endRDF, remaining nodeID-carrying bnodes are flushed.
                    Assert.assertTrue(bnode.hasNodeID());
                    seen(bnode);
                    state = 4;
                    break;
                default :
                    Assert.fail("impossible - test logic error");
            }
        }
        @Override
        public void startRDF() {
            // startRDF is only valid before the document body or after an abort reset.
            switch (state) {
                case 2 :
                case 4 :
                    Assert.fail("Bad state for startRDF " + state);
            }
            state = 2;
        }
        @Override
        public void endRDF() {
            Assert.assertEquals(state, 2);
            state = 3;
        }
        @Override
        public void startPrefixMapping(String prefix, String uri) {
            // Prefix events are irrelevant to these checks.
        }
        @Override
        public void endPrefixMapping(String prefix) {
            // Prefix events are irrelevant to these checks.
        }
        /**
         * Final consistency check: every bnode's scope must have ended and the
         * state machine must have passed through endRDF.
         */
        public void atEndOfFile() {
            if (!anon.isEmpty()) {
                // Report the bnodes still in scope before failing.
                Iterator<AResource> it = anon.iterator();
                while (it.hasNext()) {
                    AResource a =
                        it.next();
                    System.err.print(a.getAnonymousID() + ", ");
                    if (ARPResource.DEBUG) {
                        RuntimeException rte = (RuntimeException)a.getUserData();
                        // throw rte;
                        rte.printStackTrace();
                    }
                }
            }
            Assert.assertTrue("("+xCountDown+") some bnode still in scope ", //hasErrors||
                anon.isEmpty());
            switch (state) {
                case 1 :
                    Assert.fail("end-of-file before anything"); return ;
                case 2 :
                    Assert.fail("did not see endRDF"); return ;
                case 3 :
                case 4 :
                    break;
                default :
                    Assert.fail("impossible logic error in test");
            }
        }
        // Set when any SAX error/fatalError is reported; retained for diagnosis.
        boolean hasErrors = false;
        /* (non-Javadoc)
         * @see org.xml.sax.ErrorHandler#error(org.xml.sax.SAXParseException)
         */
        @Override
        public void error(SAXParseException exception) throws SAXException {
            hasErrors = true;
            super.error(exception);
        }
        /* (non-Javadoc)
         * @see org.xml.sax.ErrorHandler#fatalError(org.xml.sax.SAXParseException)
         */
        @Override
        public void fatalError(SAXParseException exception)
            throws SAXException {
            hasErrors = true;
            super.fatalError(exception);
        }
        /**
         * @return the number of statements seen (countDown counts downward from 0
         *         when no abort threshold was set, so its negation is the count)
         */
        public int getCount() {
            return -countDown;
        }
        /* (non-Javadoc)
         * @see com.hp.hpl.jena.rdf.arp.ExtendedHandler#discardNodesWithNodeID()
         */
        @Override
        public boolean discardNodesWithNodeID() {
            return false;
        }
    }
    @Override
    Model loadRDF(InFactoryX in, RDFErrorHandler eh, String base)
        throws IOException {
        return loadRDFx(in, eh, base, true, 0);
    }
    // Runs the ARP NTriple command-line driver over the input, temporarily
    // redirecting System.in/System.out. When wantModel is true the emitted
    // N-triples are captured to a temp file and reloaded as a Model; otherwise
    // output is discarded and the parse is aborted after cnt statements to
    // exercise recovery ("sudden death"). After a full run (cnt == 0) every
    // possible abort point is replayed once.
    static Model loadRDFx(
        InFactoryX in,
        RDFErrorHandler eh,
        String base,
        boolean wantModel,
        int cnt)
        throws IOException {
        InputStream oldIn = System.in;
        InputStream ntIn = null;
        File ntriples = null;
        PrintStream out;
        TestHandler th;
        if (wantModel) {
            // Capture the N-triple output so it can be re-read as a Model.
            ntriples = File.createTempFile("arp", ".nt");
            out = new PrintStream(new FileOutputStream(ntriples));
            th = new TestHandler(eh);
        } else {
            // Discard output; this run only checks event-handler invariants.
            out = new PrintStream(new OutputStream() {
                @Override
                public void write(int b) throws IOException {
                }
            });
            th = new TestHandler(eh, cnt);
        }
        PrintStream oldOut = System.out;
        try {
            System.setIn(in.open());
            System.setOut(out);
            try {
                NTriple.mainEh(new String[] { "-b", base, "-s" }, th, th);
            } catch (SimulatedException e) {
                // Expected only on deliberate-abort runs.
                if (wantModel)
                    throw e;
            }
            out.close();
            th.atEndOfFile();
            if (cnt == 0) {
                // retry with sudden death
                for (int i = th.getCount(); i >= 1; i--)
                    loadRDFx(in, TestScope.suppress, base, false, i);
            }
            if (wantModel) {
                ntIn = new FileInputStream(ntriples);
                return loadNT(ntIn, base);
            }
            return null;
        } finally {
            // Restore the redirected streams and clean up the temp file.
            System.in.close();
            System.setIn(oldIn);
            System.setOut(oldOut);
            if (ntIn != null)
                ntIn.close();
            if (ntriples != null)
                ntriples.delete();
        }
    }
}
| |
/*
* Copyright (C) 2010-2014 The Project Lombok Authors.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package lombok.eclipse.handlers;
import static lombok.core.handlers.HandlerUtil.*;
import static lombok.eclipse.Eclipse.fromQualifiedName;
import static lombok.eclipse.handlers.EclipseHandlerUtil.*;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import lombok.ConfigurationKeys;
import lombok.core.AnnotationValues;
import lombok.eclipse.EclipseAnnotationHandler;
import lombok.eclipse.EclipseNode;
import lombok.eclipse.handlers.EclipseHandlerUtil.MemberExistsResult;
import org.eclipse.jdt.internal.compiler.ast.Annotation;
import org.eclipse.jdt.internal.compiler.ast.ClassLiteralAccess;
import org.eclipse.jdt.internal.compiler.ast.Expression;
import org.eclipse.jdt.internal.compiler.ast.FieldDeclaration;
import org.eclipse.jdt.internal.compiler.ast.MessageSend;
import org.eclipse.jdt.internal.compiler.ast.QualifiedTypeReference;
import org.eclipse.jdt.internal.compiler.ast.SingleTypeReference;
import org.eclipse.jdt.internal.compiler.ast.StringLiteral;
import org.eclipse.jdt.internal.compiler.ast.TypeDeclaration;
import org.eclipse.jdt.internal.compiler.ast.TypeReference;
import org.eclipse.jdt.internal.compiler.classfmt.ClassFileConstants;
import org.mangosdk.spi.ProviderFor;
/**
 * Shared machinery plus the per-framework Eclipse handlers for lombok's
 * {@code @Log}-style annotations. Each handler injects a
 * {@code private [static] final} logger field into the annotated class.
 */
public class HandleLog {
	private HandleLog() {
		throw new UnsupportedOperationException();
	}
	
	/**
	 * Common implementation for all logger annotations: validates placement,
	 * then creates and injects the logger field.
	 *
	 * @param framework the logging framework selected by the annotation
	 * @param annotation the parsed annotation values
	 * @param source the annotation AST node, used for source positions
	 * @param annotationNode the lombok node wrapping the annotation
	 * @param loggerTopic explicit logger topic; null or blank means "use the annotated type"
	 */
	public static void processAnnotation(LoggingFramework framework, AnnotationValues<? extends java.lang.annotation.Annotation> annotation, Annotation source, EclipseNode annotationNode, String loggerTopic) {
		EclipseNode owner = annotationNode.up();
		
		switch (owner.getKind()) {
		case TYPE:
			// Field name is configurable; defaults to "log".
			String logFieldName = annotationNode.getAst().readConfiguration(ConfigurationKeys.LOG_ANY_FIELD_NAME);
			if (logFieldName == null) logFieldName = "log";
			
			// Static unless the configuration explicitly says otherwise.
			boolean useStatic = !Boolean.FALSE.equals(annotationNode.getAst().readConfiguration(ConfigurationKeys.LOG_ANY_FIELD_IS_STATIC));
			
			TypeDeclaration typeDecl = null;
			if (owner.get() instanceof TypeDeclaration) typeDecl = (TypeDeclaration) owner.get();
			int modifiers = typeDecl == null ? 0 : typeDecl.modifiers;
			// Interfaces and annotation types cannot carry an injected field.
			boolean notAClass = (modifiers &
					(ClassFileConstants.AccInterface | ClassFileConstants.AccAnnotation)) != 0;
			
			if (typeDecl == null || notAClass) {
				annotationNode.addError(framework.getAnnotationAsString() + " is legal only on classes and enums.");
				return;
			}
			
			if (fieldExists(logFieldName, owner) != MemberExistsResult.NOT_EXISTS) {
				annotationNode.addWarning("Field '" + logFieldName + "' already exists.");
				return;
			}
			
			ClassLiteralAccess loggingType = selfType(owner, source);
			FieldDeclaration fieldDeclaration = createField(framework, source, loggingType, logFieldName, useStatic, loggerTopic);
			fieldDeclaration.traverse(new SetGeneratedByVisitor(source), typeDecl.staticInitializerScope);
			// TODO temporary workaround for issue 217. http://code.google.com/p/projectlombok/issues/detail?id=217
			// injectFieldSuppressWarnings(owner, fieldDeclaration);
			injectField(owner, fieldDeclaration);
			owner.rebuild();
			break;
		default:
			// Annotation sits on something other than a type; nothing to do.
			break;
		}
	}
	
	/**
	 * Builds a {@code TargetType.class} literal for the type the annotation sits on,
	 * stamped with the annotation's source positions.
	 */
	public static ClassLiteralAccess selfType(EclipseNode type, Annotation source) {
		int pS = source.sourceStart, pE = source.sourceEnd;
		long p = (long) pS << 32 | pE;
		
		TypeDeclaration typeDeclaration = (TypeDeclaration) type.get();
		TypeReference typeReference = new SingleTypeReference(typeDeclaration.name, p);
		setGeneratedBy(typeReference, source);
		
		ClassLiteralAccess result = new ClassLiteralAccess(source.sourceEnd, typeReference);
		setGeneratedBy(result, source);
		return result;
	}
	
	/**
	 * Creates the logger field declaration:
	 * {@code private [static] final <loggerType> <logFieldName> = <factoryMethod>(<parameter>);}
	 */
	private static FieldDeclaration createField(LoggingFramework framework, Annotation source, ClassLiteralAccess loggingType, String logFieldName, boolean useStatic, String loggerTopic) {
		int pS = source.sourceStart, pE = source.sourceEnd;
		long p = (long) pS << 32 | pE;
		
		// private static final <loggerType> log = <factoryMethod>(<parameter>);
		FieldDeclaration fieldDecl = new FieldDeclaration(logFieldName.toCharArray(), 0, -1);
		setGeneratedBy(fieldDecl, source);
		fieldDecl.declarationSourceEnd = -1;
		fieldDecl.modifiers = Modifier.PRIVATE | (useStatic ? Modifier.STATIC : 0) | Modifier.FINAL;
		fieldDecl.type = createTypeReference(framework.getLoggerTypeName(), source);
		
		MessageSend factoryMethodCall = new MessageSend();
		setGeneratedBy(factoryMethodCall, source);
		
		factoryMethodCall.receiver = createNameReference(framework.getLoggerFactoryTypeName(), source);
		factoryMethodCall.selector = framework.getLoggerFactoryMethodName().toCharArray();
		
		Expression parameter;
		if (!framework.passTypeName) {
			// e.g. Flogger's forEnclosingClass() takes no argument at all.
			parameter = null;
		} else if (loggerTopic == null || loggerTopic.trim().length() == 0) {
			// Default: pass the annotated type (TargetType.class, possibly .getName()).
			parameter = framework.createFactoryParameter(loggingType, source);
		} else {
			// Explicit topic string wins over the type-based parameter.
			parameter = new StringLiteral(loggerTopic.toCharArray(), pS, pE, 0);
		}
		
		factoryMethodCall.arguments = parameter != null ? new Expression[] { parameter } : null;
		factoryMethodCall.nameSourcePosition = p;
		factoryMethodCall.sourceStart = pS;
		factoryMethodCall.sourceEnd = factoryMethodCall.statementEnd = pE;
		
		fieldDecl.initialization = factoryMethodCall;
		
		return fieldDecl;
	}
	
	/**
	 * Creates a type reference for the given (possibly dot-qualified) type name,
	 * stamped with the annotation's source positions.
	 */
	public static TypeReference createTypeReference(String typeName, Annotation source) {
		int pS = source.sourceStart, pE = source.sourceEnd;
		long p = (long) pS << 32 | pE;
		
		TypeReference typeReference;
		if (typeName.contains(".")) {
			char[][] typeNameTokens = fromQualifiedName(typeName);
			long[] pos = new long[typeNameTokens.length];
			Arrays.fill(pos, p);
			typeReference = new QualifiedTypeReference(typeNameTokens, pos);
		} else {
			// BUGFIX: an unqualified name must still yield a reference; this branch
			// previously assigned null, making setGeneratedBy below throw an NPE.
			typeReference = new SingleTypeReference(typeName.toCharArray(), p);
		}
		
		setGeneratedBy(typeReference, source);
		return typeReference;
	}
	
	/**
	 * Handles the {@link lombok.extern.apachecommons.CommonsLog} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleCommonsLog extends EclipseAnnotationHandler<lombok.extern.apachecommons.CommonsLog> {
		@Override public void handle(AnnotationValues<lombok.extern.apachecommons.CommonsLog> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_COMMONS_FLAG_USAGE, "@apachecommons.CommonsLog", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.COMMONS, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.java.Log} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleJulLog extends EclipseAnnotationHandler<lombok.extern.java.Log> {
		@Override public void handle(AnnotationValues<lombok.extern.java.Log> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_JUL_FLAG_USAGE, "@java.Log", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.JUL, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.log4j.Log4j} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleLog4jLog extends EclipseAnnotationHandler<lombok.extern.log4j.Log4j> {
		@Override public void handle(AnnotationValues<lombok.extern.log4j.Log4j> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_LOG4J_FLAG_USAGE, "@Log4j", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.LOG4J, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.log4j.Log4j2} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleLog4j2Log extends EclipseAnnotationHandler<lombok.extern.log4j.Log4j2> {
		@Override public void handle(AnnotationValues<lombok.extern.log4j.Log4j2> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_LOG4J2_FLAG_USAGE, "@Log4j2", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.LOG4J2, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.slf4j.Slf4j} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleSlf4jLog extends EclipseAnnotationHandler<lombok.extern.slf4j.Slf4j> {
		@Override public void handle(AnnotationValues<lombok.extern.slf4j.Slf4j> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_SLF4J_FLAG_USAGE, "@Slf4j", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.SLF4J, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.slf4j.XSlf4j} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleXSlf4jLog extends EclipseAnnotationHandler<lombok.extern.slf4j.XSlf4j> {
		@Override public void handle(AnnotationValues<lombok.extern.slf4j.XSlf4j> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_XSLF4J_FLAG_USAGE, "@XSlf4j", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.XSLF4J, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.jbosslog.JBossLog} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleJBossLog extends EclipseAnnotationHandler<lombok.extern.jbosslog.JBossLog> {
		@Override public void handle(AnnotationValues<lombok.extern.jbosslog.JBossLog> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_JBOSSLOG_FLAG_USAGE, "@JBossLog", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			processAnnotation(LoggingFramework.JBOSSLOG, annotation, source, annotationNode, annotation.getInstance().topic());
		}
	}
	
	/**
	 * Handles the {@link lombok.extern.flogger.Flogger} annotation for Eclipse.
	 */
	@ProviderFor(EclipseAnnotationHandler.class)
	public static class HandleFloggerLog extends EclipseAnnotationHandler<lombok.extern.flogger.Flogger> {
		@Override public void handle(AnnotationValues<lombok.extern.flogger.Flogger> annotation, Annotation source, EclipseNode annotationNode) {
			handleFlagUsage(annotationNode, ConfigurationKeys.LOG_FLOGGER_FLAG_USAGE, "@Flogger", ConfigurationKeys.LOG_ANY_FLAG_USAGE, "any @Log");
			// Flogger has no topic; its factory method takes no arguments.
			processAnnotation(LoggingFramework.FLOGGER, annotation, source, annotationNode, "");
		}
	}
	
	/**
	 * Describes one supported logging framework: the logger field's type, the
	 * factory type/method used to initialize it, and whether the factory takes
	 * the annotated type as a parameter.
	 */
	enum LoggingFramework {
		// private static final org.apache.commons.logging.Log log = org.apache.commons.logging.LogFactory.getLog(TargetType.class);
		COMMONS("org.apache.commons.logging.Log", "org.apache.commons.logging.LogFactory", "getLog", "@CommonsLog"),
		
		// private static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(TargetType.class.getName());
		JUL("java.util.logging.Logger", "java.util.logging.Logger", "getLogger", "@Log") {
			// JUL takes the class *name*, so wrap the class literal in .getName().
			@Override public Expression createFactoryParameter(ClassLiteralAccess type, Annotation source) {
				int pS = source.sourceStart, pE = source.sourceEnd;
				long p = (long) pS << 32 | pE;
				
				MessageSend factoryParameterCall = new MessageSend();
				setGeneratedBy(factoryParameterCall, source);
				
				factoryParameterCall.receiver = super.createFactoryParameter(type, source);
				factoryParameterCall.selector = "getName".toCharArray();
				
				factoryParameterCall.nameSourcePosition = p;
				factoryParameterCall.sourceStart = pS;
				factoryParameterCall.sourceEnd = factoryParameterCall.statementEnd = pE;
				
				return factoryParameterCall;
			}
		},
		
		// private static final org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger(TargetType.class);
		LOG4J("org.apache.log4j.Logger", "org.apache.log4j.Logger", "getLogger", "@Log4j"),
		
		// private static final org.apache.logging.log4j.Logger log = org.apache.logging.log4j.LogManager.getLogger(TargetType.class);
		LOG4J2("org.apache.logging.log4j.Logger", "org.apache.logging.log4j.LogManager", "getLogger", "@Log4j2"),
		
		// private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(TargetType.class);
		SLF4J("org.slf4j.Logger", "org.slf4j.LoggerFactory", "getLogger", "@Slf4j"),
		
		// private static final org.slf4j.ext.XLogger log = org.slf4j.ext.XLoggerFactory.getXLogger(TargetType.class);
		XSLF4J("org.slf4j.ext.XLogger", "org.slf4j.ext.XLoggerFactory", "getXLogger", "@XSlf4j"),
		
		// private static final org.jboss.logging.Logger log = org.jboss.logging.Logger.getLogger(TargetType.class);
		JBOSSLOG("org.jboss.logging.Logger", "org.jboss.logging.Logger", "getLogger", "@JBossLog"),
		
		// private static final com.google.common.flogger.FluentLogger log = com.google.common.flogger.FluentLogger.forEnclosingClass();
		FLOGGER("com.google.common.flogger.FluentLogger", "com.google.common.flogger.FluentLogger", "forEnclosingClass", "@Flogger", false),
		;
		
		/** Fully qualified type of the injected logger field. */
		private final String loggerTypeName;
		/** Fully qualified type holding the factory method. */
		private final String loggerFactoryTypeName;
		/** Name of the factory method invoked to create the logger. */
		private final String loggerFactoryMethodName;
		/** Annotation name used in user-facing error messages. */
		private final String annotationAsString;
		/** Whether the factory method receives the annotated type (or topic) as its argument. */
		private final boolean passTypeName;
		
		LoggingFramework(String loggerTypeName, String loggerFactoryTypeName, String loggerFactoryMethodName, String annotationAsString, boolean passTypeName) {
			this.loggerTypeName = loggerTypeName;
			this.loggerFactoryTypeName = loggerFactoryTypeName;
			this.loggerFactoryMethodName = loggerFactoryMethodName;
			this.annotationAsString = annotationAsString;
			this.passTypeName = passTypeName;
		}
		
		LoggingFramework(String loggerTypeName, String loggerFactoryTypeName, String loggerFactoryMethodName, String annotationAsString) {
			this(loggerTypeName, loggerFactoryTypeName, loggerFactoryMethodName, annotationAsString, true);
		}
		
		final String getAnnotationAsString() {
			return annotationAsString;
		}
		
		final String getLoggerTypeName() {
			return loggerTypeName;
		}
		
		final String getLoggerFactoryTypeName() {
			return loggerFactoryTypeName;
		}
		
		final String getLoggerFactoryMethodName() {
			return loggerFactoryMethodName;
		}
		
		/** Default factory argument: a copy of the {@code TargetType.class} literal. */
		Expression createFactoryParameter(ClassLiteralAccess loggingType, Annotation source) {
			TypeReference copy = copyType(loggingType.type, source);
			ClassLiteralAccess result = new ClassLiteralAccess(source.sourceEnd, copy);
			setGeneratedBy(result, source);
			return result;
		}
	}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.lookup.impl;
import com.intellij.codeInsight.completion.CodeCompletionFeatures;
import com.intellij.codeInsight.completion.CompletionProgressIndicator;
import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.lookup.CharFilter;
import com.intellij.codeInsight.lookup.Lookup;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.ui.ListScrollingUtil;
import org.jetbrains.annotations.Nullable;
/**
* @author yole
*/
public abstract class LookupActionHandler extends EditorActionHandler {
  // The editor handler this lookup-aware handler wraps; used whenever no
  // suitable lookup is showing.
  protected final EditorActionHandler myOriginalHandler;
  // When true, an unfocused lookup is treated as absent and the action falls
  // through to the original handler.
  private final boolean myRequireFocusedLookup;

  /**
   * @param originalHandler the handler to delegate to when no suitable lookup is active
   * @param requireFocusedLookup whether the lookup must be focused to receive the action
   */
  public LookupActionHandler(EditorActionHandler originalHandler, boolean requireFocusedLookup) {
    myOriginalHandler = originalHandler;
    myRequireFocusedLookup = requireFocusedLookup;
  }
  @Override
  public boolean executeInCommand(Editor editor, DataContext dataContext) {
    // Only wrap execution in an undoable command when no lookup is active;
    // lookup-internal navigation should not create undo steps.
    return LookupManager.getActiveLookup(editor) == null;
  }
@Override
public void doExecute(Editor editor, Caret caret, DataContext dataContext){
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(editor);
if (lookup == null || !lookup.isAvailableToUser() || myRequireFocusedLookup && !lookup.isFocused()) {
Project project = editor.getProject();
if (project != null) {
LookupManager.getInstance(project).hideActiveLookup();
}
myOriginalHandler.execute(editor, caret, dataContext);
return;
}
lookup.markSelectionTouched();
executeInLookup(lookup, dataContext, caret);
}
  /**
   * Performs this action inside the given active lookup.
   * @param lookup the active lookup receiving the action
   * @param context the invoking action's data context
   * @param caret the caret the action was invoked for, if any
   */
  protected abstract void executeInLookup(LookupImpl lookup, DataContext context, @Nullable Caret caret);
@Override
public boolean isEnabled(Editor editor, DataContext dataContext) {
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(editor);
return lookup != null || myOriginalHandler.isEnabled(editor, dataContext);
}
private static void executeUpOrDown(LookupImpl lookup, boolean up) {
if (!lookup.isFocused()) {
boolean semiFocused = lookup.getFocusDegree() == LookupImpl.FocusDegree.SEMI_FOCUSED;
lookup.setFocusDegree(LookupImpl.FocusDegree.FOCUSED);
if (!up && !semiFocused) {
return;
}
}
if (up) {
ListScrollingUtil.moveUp(lookup.getList(), 0);
} else {
ListScrollingUtil.moveDown(lookup.getList(), 0);
}
lookup.markSelectionTouched();
lookup.refreshUi(false, true);
}
public static class DownHandler extends LookupActionHandler {
public DownHandler(EditorActionHandler originalHandler){
super(originalHandler, false);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
executeUpOrDown(lookup, false);
}
}
public static class UpAction extends DumbAwareAction {
@Override
public void actionPerformed(AnActionEvent e) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CONTROL_ARROWS);
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(CommonDataKeys.EDITOR.getData(e.getDataContext()));
assert lookup != null;
lookup.hide();
ActionManager.getInstance().getAction(IdeActions.ACTION_EDITOR_MOVE_CARET_UP).actionPerformed(e);
}
@Override
public void update(AnActionEvent e) {
Lookup lookup = LookupManager.getActiveLookup(CommonDataKeys.EDITOR.getData(e.getDataContext()));
e.getPresentation().setEnabled(lookup != null);
}
}
public static class DownAction extends DumbAwareAction {
@Override
public void actionPerformed(AnActionEvent e) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CONTROL_ARROWS);
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(CommonDataKeys.EDITOR.getData(e.getDataContext()));
assert lookup != null;
lookup.hide();
ActionManager.getInstance().getAction(IdeActions.ACTION_EDITOR_MOVE_CARET_DOWN).actionPerformed(e);
}
@Override
public void update(AnActionEvent e) {
Lookup lookup = LookupManager.getActiveLookup(CommonDataKeys.EDITOR.getData(e.getDataContext()));
e.getPresentation().setEnabled(lookup != null);
}
}
public static class UpHandler extends LookupActionHandler {
public UpHandler(EditorActionHandler originalHandler){
super(originalHandler, false);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
if (!UISettings.getInstance().CYCLE_SCROLLING && !lookup.isFocused() && lookup.getList().getSelectedIndex() == 0) {
myOriginalHandler.execute(lookup.getEditor(), caret, context);
return;
}
executeUpOrDown(lookup, true);
}
}
public static class PageDownHandler extends LookupActionHandler {
public PageDownHandler(final EditorActionHandler originalHandler) {
super(originalHandler, false);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
lookup.setFocusDegree(LookupImpl.FocusDegree.FOCUSED);
ListScrollingUtil.movePageDown(lookup.getList());
}
}
public static class PageUpHandler extends LookupActionHandler {
public PageUpHandler(EditorActionHandler originalHandler){
super(originalHandler, false);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
lookup.setFocusDegree(LookupImpl.FocusDegree.FOCUSED);
ListScrollingUtil.movePageUp(lookup.getList());
}
}
public static class LeftHandler extends LookupActionHandler {
public LeftHandler(EditorActionHandler originalHandler) {
super(originalHandler, false);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
if (!lookup.isCompletion()) {
myOriginalHandler.execute(lookup.getEditor(), caret, context);
return;
}
if (!lookup.performGuardedChange(new Runnable() {
@Override
public void run() {
lookup.getEditor().getSelectionModel().removeSelection();
}
})) {
return;
}
BackspaceHandler.truncatePrefix(context, lookup, myOriginalHandler, lookup.getLookupStart() - 1, caret);
}
}
public static class RightHandler extends LookupActionHandler {
public RightHandler(EditorActionHandler originalHandler) {
super(originalHandler, false);
}
@Override
protected void executeInLookup(LookupImpl lookup, DataContext context, Caret caret) {
final Editor editor = lookup.getEditor();
final int offset = editor.getCaretModel().getOffset();
CharSequence seq = editor.getDocument().getCharsSequence();
if (seq.length() <= offset || !lookup.isCompletion()) {
myOriginalHandler.execute(editor, caret, context);
return;
}
char c = seq.charAt(offset);
CharFilter.Result lookupAction = LookupTypedHandler.getLookupAction(c, lookup);
if (lookupAction != CharFilter.Result.ADD_TO_PREFIX || Character.isWhitespace(c)) {
myOriginalHandler.execute(editor, caret, context);
return;
}
if (!lookup.performGuardedChange(new Runnable() {
@Override
public void run() {
editor.getSelectionModel().removeSelection();
editor.getCaretModel().moveToOffset(offset + 1);
}
})) {
return;
}
lookup.appendPrefix(c);
final CompletionProgressIndicator completion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
if (completion != null) {
completion.prefixUpdated();
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
/*
* Created on Jul 27, 2004
*/
package org.apache.jmeter.save.converters;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
import org.apache.jmeter.save.SaveService;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.util.NameUpdater;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
/**
 * Conversion utilities shared by the XStream converters that read and write
 * JMeter test plans and result (JTL) files.
 *
 * Handles version-dependent URL encoding of strings, CDATA embedding, and the
 * mapping between "special" TestElement properties and XML attributes.
 */
public class ConversionHelp {
    private static final Logger log = LoggingManager.getLoggerForClass();

    private static final String CHAR_SET = "UTF-8"; //$NON-NLS-1$

    // Attribute names used on TestElement and TestElementProperty elements.
    // They must all be distinct.
    public static final String ATT_CLASS = "class"; //$NON-NLS-1$

    // Also used by PropertyConverter classes
    public static final String ATT_NAME = "name"; // $NON-NLS-1$

    public static final String ATT_ELEMENT_TYPE = "elementType"; // $NON-NLS-1$

    private static final String ATT_TE_ENABLED = "enabled"; //$NON-NLS-1$

    private static final String ATT_TE_TESTCLASS = "testclass"; //$NON-NLS-1$

    static final String ATT_TE_GUICLASS = "guiclass"; //$NON-NLS-1$

    private static final String ATT_TE_NAME = "testname"; //$NON-NLS-1$

    /*
     * File-format versions in play for the current read/write operation.
     * These must be set before the XML is processed; storing them statically is
     * a hack, but it avoids threading an extra parameter through every call.
     * They are expected to be accessed from a single thread, so no
     * synchronization is applied.
     */
    private static String inVersion;

    private static String outVersion = "1.1"; // Default for writing//$NON-NLS-1$

    /** Set the file-format version used when reading. */
    public static void setInVersion(String v) {
        inVersion = v;
    }

    /** Set the file-format version used when writing. */
    public static void setOutVersion(String v) {
        outVersion = v;
    }

    /**
     * Encode a string (if necessary) for output to a JTL file.
     * Strings are only encoded if the output version is 1.0,
     * but nulls are always converted to the empty string.
     *
     * @param p string to encode
     * @return encoded string (will never be null)
     */
    public static String encode(String p) {
        // Nulls cannot be written by PrettyPrintWriter - they cause an NPE.
        if (p == null) {
            return ""; // $NON-NLS-1$
        }
        // URL-encoding only applies to the legacy 1.0 format.
        if (!"1.0".equals(outVersion)) {//$NON-NLS-1$
            return p;
        }
        try {
            return URLEncoder.encode(p, CHAR_SET);
        } catch (UnsupportedEncodingException e) {
            log.warn("System doesn't support " + CHAR_SET, e);
            return p;
        }
    }

    /**
     * Decode a string if {@link #inVersion} equals <code>1.0</code>
     *
     * @param p
     *            the string to be decoded
     * @return the newly decoded string
     */
    public static String decode(String p) {
        // Only the legacy 1.0 format URL-encodes its strings.
        if (!"1.0".equals(inVersion)) {//$NON-NLS-1$
            return p;
        }
        if (p == null) {
            return null;
        }
        try {
            return URLDecoder.decode(p, CHAR_SET);
        } catch (UnsupportedEncodingException e) {
            log.warn("System doesn't support " + CHAR_SET, e);
            return p;
        }
    }

    /**
     * Embed an array of bytes as a string with <code>encoding</code> in a
     * xml-cdata section
     *
     * @param chars
     *            bytes to be encoded and embedded
     * @param encoding
     *            the encoding to be used
     * @return the encoded string embedded in a xml-cdata section
     * @throws UnsupportedEncodingException
     *             when the bytes can not be encoded using <code>encoding</code>
     */
    public static String cdata(byte[] chars, String encoding) throws UnsupportedEncodingException {
        return "<![CDATA[" + new String(chars, encoding) + "]]>";
    }

    /**
     * Names of properties that are handled specially, mapped to the XML
     * attribute each is stored in.
     */
    private static final Map<String, String> propertyToAttribute = new HashMap<String, String>();

    private static void mapentry(String prop, String att) {
        propertyToAttribute.put(prop, att);
    }

    static {
        mapentry(TestElement.NAME, ATT_TE_NAME);
        mapentry(TestElement.GUI_CLASS, ATT_TE_GUICLASS);//$NON-NLS-1$
        mapentry(TestElement.TEST_CLASS, ATT_TE_TESTCLASS);//$NON-NLS-1$
        mapentry(TestElement.ENABLED, ATT_TE_ENABLED);
    }

    // Write a class-valued property as an attribute, converting the class name
    // to its save-file alias. Empty values are skipped entirely.
    private static void saveClass(TestElement el, HierarchicalStreamWriter writer, String prop) {
        String className = el.getPropertyAsString(prop);
        if (className.length() > 0) {
            writer.addAttribute(propertyToAttribute.get(prop), SaveService.classToAlias(className));
        }
    }

    // Read a class-valued attribute back into a property, expanding the alias
    // to the full class name. A missing attribute leaves the property unset.
    private static void restoreClass(TestElement el, HierarchicalStreamReader reader, String prop) {
        String attName = propertyToAttribute.get(prop);
        String value = reader.getAttribute(attName);
        if (value == null) {
            return;
        }
        String className = SaveService.aliasToClass(value);
        if (TestElement.GUI_CLASS.equals(prop)) { // mainly for TestElementConverter
            className = NameUpdater.getCurrentName(className);
        }
        el.setProperty(prop, className);
    }

    // Write a plain property as an attribute, optionally version-encoding it.
    // Empty values are skipped entirely.
    private static void saveItem(TestElement el, HierarchicalStreamWriter writer, String prop,
            boolean encode) {
        String value = el.getPropertyAsString(prop);
        if (value.length() == 0) {
            return;
        }
        writer.addAttribute(propertyToAttribute.get(prop), encode ? ConversionHelp.encode(value) : value);
    }

    // Read a plain attribute back into a property, optionally version-decoding it.
    // A missing attribute leaves the property unset.
    private static void restoreItem(TestElement el, HierarchicalStreamReader reader, String prop,
            boolean decode) {
        String attName = propertyToAttribute.get(prop);
        String value = reader.getAttribute(attName);
        if (value == null) {
            return;
        }
        el.setProperty(prop, decode ? ConversionHelp.decode(value) : value);
    }

    /**
     * Check whether <code>name</code> specifies a <em>special</em> property
     *
     * @param name
     *            the name of the property to be checked
     * @return <code>true</code> if <code>name</code> is the name of a special
     *         property
     */
    public static boolean isSpecialProperty(String name) {
        return propertyToAttribute.containsKey(name);
    }

    /**
     * Get the property name, updating it if necessary using {@link NameUpdater}.
     * @param reader where to read the name attribute
     * @param context the unmarshalling context
     *
     * @return the property name, may be null if the property has been deleted.
     * @see #getUpgradePropertyName(String, UnmarshallingContext)
     */
    public static String getPropertyName(HierarchicalStreamReader reader, UnmarshallingContext context) {
        return getUpgradePropertyName(ConversionHelp.decode(reader.getAttribute(ATT_NAME)), context);
    }

    /**
     * Get the property value, updating it if necessary using {@link NameUpdater}.
     *
     * Do not use for GUI_CLASS or TEST_CLASS.
     *
     * @param reader where to read the value
     * @param context the unmarshalling context
     * @param name the name of the property
     *
     * @return the property value, updated if necessary.
     * @see #getUpgradePropertyValue(String, String, UnmarshallingContext)
     */
    public static String getPropertyValue(HierarchicalStreamReader reader, UnmarshallingContext context, String name) {
        return getUpgradePropertyValue(name, ConversionHelp.decode(reader.getValue()), context);
    }

    /**
     * Update a property name using {@link NameUpdater}.
     * @param name the original property name
     * @param context the unmarshalling context
     *
     * @return the property name, may be null if the property has been deleted.
     */
    public static String getUpgradePropertyName(String name, UnmarshallingContext context) {
        String testClass = (String) context.get(SaveService.TEST_CLASS_NAME);
        String updated = NameUpdater.getCurrentName(name, testClass);
        // A non-empty name that maps to the empty string marks a deleted property.
        if (name.length() != 0 && updated.length() == 0) {
            return null;
        }
        return updated;
    }

    /**
     * Update a property value using {@link NameUpdater#getCurrentName(String, String, String)}.
     *
     * Do not use for GUI_CLASS or TEST_CLASS.
     *
     * @param name the original property name
     * @param value the original property value
     * @param context the unmarshalling context
     *
     * @return the property value, updated if necessary
     */
    public static String getUpgradePropertyValue(String name, String value, UnmarshallingContext context) {
        String testClass = (String) context.get(SaveService.TEST_CLASS_NAME);
        return NameUpdater.getCurrentName(value, name, testClass);
    }

    /**
     * Save the special properties:
     * <ul>
     * <li>TestElement.GUI_CLASS</li>
     * <li>TestElement.TEST_CLASS</li>
     * <li>TestElement.NAME</li>
     * <li>TestElement.ENABLED</li>
     * </ul>
     *
     * @param testElement
     *            element for which the special properties should be saved
     * @param writer
     *            {@link HierarchicalStreamWriter} in which the special
     *            properties should be saved
     */
    public static void saveSpecialProperties(TestElement testElement, HierarchicalStreamWriter writer) {
        saveClass(testElement, writer, TestElement.GUI_CLASS);
        saveClass(testElement, writer, TestElement.TEST_CLASS);
        saveItem(testElement, writer, TestElement.NAME, true);
        saveItem(testElement, writer, TestElement.ENABLED, false);
    }

    /**
     * Restore the special properties:
     * <ul>
     * <li>TestElement.GUI_CLASS</li>
     * <li>TestElement.TEST_CLASS</li>
     * <li>TestElement.NAME</li>
     * <li>TestElement.ENABLED</li>
     * </ul>
     *
     * @param testElement
     *            in which the special properties should be restored
     * @param reader
     *            {@link HierarchicalStreamReader} from which the special
     *            properties should be restored
     */
    public static void restoreSpecialProperties(TestElement testElement, HierarchicalStreamReader reader) {
        restoreClass(testElement, reader, TestElement.GUI_CLASS);
        restoreClass(testElement, reader, TestElement.TEST_CLASS);
        restoreItem(testElement, reader, TestElement.NAME, true);
        restoreItem(testElement, reader, TestElement.ENABLED, false);
    }
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.index;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Change.Status;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.query.AndPredicate;
import com.google.gerrit.server.query.NotPredicate;
import com.google.gerrit.server.query.OrPredicate;
import com.google.gerrit.server.query.Predicate;
import com.google.gerrit.server.query.QueryParseException;
import com.google.gerrit.server.query.QueryRewriter;
import com.google.gerrit.server.query.change.AndSource;
import com.google.gerrit.server.query.change.BasicChangeRewrites;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.ChangeQueryBuilder;
import com.google.gerrit.server.query.change.ChangeQueryRewriter;
import com.google.gerrit.server.query.change.ChangeStatusPredicate;
import com.google.gerrit.server.query.change.OrSource;
import com.google.gerrit.server.query.change.SqlRewriterImpl;
import com.google.inject.Inject;
import com.google.inject.Provider;
import java.util.BitSet;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
/** Rewriter that pushes boolean logic into the secondary index. */
public class IndexRewriteImpl implements ChangeQueryRewriter {
  /** Set of all open change statuses. */
  public static final Set<Change.Status> OPEN_STATUSES;

  /** Set of all closed change statuses. */
  public static final Set<Change.Status> CLOSED_STATUSES;

  static {
    // Partition every status into open/closed once, at class-load time.
    EnumSet<Change.Status> open = EnumSet.noneOf(Change.Status.class);
    EnumSet<Change.Status> closed = EnumSet.noneOf(Change.Status.class);
    for (Change.Status s : Change.Status.values()) {
      if (s.isOpen()) {
        open.add(s);
      } else {
        closed.add(s);
      }
    }
    OPEN_STATUSES = Sets.immutableEnumSet(open);
    CLOSED_STATUSES = Sets.immutableEnumSet(closed);
  }

  // Hard cap on index results when the query carries no explicit limit.
  @VisibleForTesting
  static final int MAX_LIMIT = 1000;

  /**
   * Get the set of statuses that changes matching the given predicate may have.
   *
   * @param in predicate
   * @return the maximal set of statuses that any changes matching the input
   *     predicates may have, based on examining boolean and
   *     {@link ChangeStatusPredicate}s.
   */
  public static EnumSet<Change.Status> getPossibleStatus(Predicate<ChangeData> in) {
    EnumSet<Change.Status> s = extractStatus(in);
    // null means "no status constraint found", so all statuses are possible.
    return s != null ? s : EnumSet.allOf(Change.Status.class);
  }

  // Recursively derive the status constraint implied by a predicate tree.
  // Returns null when the subtree places no constraint on status.
  private static EnumSet<Change.Status> extractStatus(Predicate<ChangeData> in) {
    if (in instanceof ChangeStatusPredicate) {
      return EnumSet.of(((ChangeStatusPredicate) in).getStatus());
    } else if (in instanceof NotPredicate) {
      // NOT inverts the constraint; NOT of "unconstrained" stays unconstrained.
      EnumSet<Status> s = extractStatus(in.getChild(0));
      return s != null ? EnumSet.complementOf(s) : null;
    } else if (in instanceof OrPredicate) {
      // OR: union of the children's constraints.
      EnumSet<Change.Status> r = null;
      int childrenWithStatus = 0;
      for (int i = 0; i < in.getChildCount(); i++) {
        EnumSet<Status> c = extractStatus(in.getChild(i));
        if (c != null) {
          if (r == null) {
            r = EnumSet.noneOf(Change.Status.class);
          }
          r.addAll(c);
          childrenWithStatus++;
        }
      }
      if (r != null && childrenWithStatus < in.getChildCount()) {
        // At least one child supplied a status but another did not.
        // Assume all statuses for the children that did not feed a
        // status at this part of the tree. This matches behavior if
        // the child was used at the root of a query.
        return EnumSet.allOf(Change.Status.class);
      }
      return r;
    } else if (in instanceof AndPredicate) {
      // AND: intersection of the children's constraints; unconstrained
      // children do not narrow the result.
      EnumSet<Change.Status> r = null;
      for (int i = 0; i < in.getChildCount(); i++) {
        EnumSet<Change.Status> c = extractStatus(in.getChild(i));
        if (c != null) {
          if (r == null) {
            r = EnumSet.allOf(Change.Status.class);
          }
          r.retainAll(c);
        }
      }
      return r;
    }
    return null;
  }

  private final IndexCollection indexes;
  private final Provider<ReviewDb> db;
  private final BasicRewritesImpl basicRewrites;
  // Fallback rewriter used when no search index is available.
  private final SqlRewriterImpl sqlRewriter;

  @Inject
  IndexRewriteImpl(IndexCollection indexes,
      Provider<ReviewDb> db,
      BasicRewritesImpl basicRewrites,
      SqlRewriterImpl sqlRewriter) {
    this.indexes = indexes;
    this.db = db;
    this.basicRewrites = basicRewrites;
    this.sqlRewriter = sqlRewriter;
  }

  @Override
  public Predicate<ChangeData> rewrite(Predicate<ChangeData> in)
      throws QueryParseException {
    ChangeIndex index = indexes.getSearchIndex();
    if (index == null) {
      // No index configured: fall back entirely to the SQL rewriter.
      return sqlRewriter.rewrite(in);
    }
    in = basicRewrites.rewrite(in);
    // Add 1 to specified limit to match behavior of QueryProcessor.
    int limit = ChangeQueryBuilder.hasLimit(in)
        ? ChangeQueryBuilder.getLimit(in) + 1
        : MAX_LIMIT;
    Predicate<ChangeData> out = rewriteImpl(in, index, limit);
    if (in == out || out instanceof IndexPredicate) {
      // Entire tree is index-capable: wrap it in a single indexed query.
      return new IndexedChangeQuery(db, index, out, limit);
    } else if (out == null /* cannot rewrite */) {
      return in;
    } else {
      return out;
    }
  }

  /**
   * Rewrite a single predicate subtree.
   *
   * @param in predicate to rewrite.
   * @param index index whose schema determines which fields are indexed.
   * @param limit maximum number of results to return.
   * @return {@code null} if no part of this subtree can be queried in the
   *     index directly. {@code in} if this subtree and all its children can be
   *     queried directly in the index. Otherwise, a predicate that is
   *     semantically equivalent, with some of its subtrees wrapped to query the
   *     index directly.
   * @throws QueryParseException if the underlying index implementation does not
   *     support this predicate.
   */
  private Predicate<ChangeData> rewriteImpl(Predicate<ChangeData> in,
      ChangeIndex index, int limit) throws QueryParseException {
    if (isIndexPredicate(in, index)) {
      return in;
    } else if (!isRewritePossible(in)) {
      return null; // magic to indicate "in" cannot be rewritten
    }
    // Classify each child into exactly one of three disjoint sets.
    int n = in.getChildCount();
    BitSet isIndexed = new BitSet(n);
    BitSet notIndexed = new BitSet(n);
    BitSet rewritten = new BitSet(n);
    List<Predicate<ChangeData>> newChildren = Lists.newArrayListWithCapacity(n);
    for (int i = 0; i < n; i++) {
      Predicate<ChangeData> c = in.getChild(i);
      Predicate<ChangeData> nc = rewriteImpl(c, index, limit);
      if (nc == c) {
        isIndexed.set(i);
        newChildren.add(c);
      } else if (nc == null /* cannot rewrite c */) {
        notIndexed.set(i);
        newChildren.add(c);
      } else {
        rewritten.set(i);
        newChildren.add(nc);
      }
    }
    if (isIndexed.cardinality() == n) {
      return in; // All children are indexed, leave as-is for parent.
    } else if (notIndexed.cardinality() == n) {
      return null; // Can't rewrite any children, so cannot rewrite in.
    } else if (rewritten.cardinality() == n) {
      return in.copy(newChildren); // All children were rewritten.
    }
    // Mixed case: some children indexed, some not.
    return partitionChildren(in, newChildren, isIndexed, index, limit);
  }

  // True when the predicate is an index field predicate supported by the
  // schema of the given index.
  private boolean isIndexPredicate(Predicate<ChangeData> in, ChangeIndex index) {
    if (!(in instanceof IndexPredicate)) {
      return false;
    }
    IndexPredicate<ChangeData> p = (IndexPredicate<ChangeData>) in;
    return index.getSchema().getFields().containsKey(p.getField().getName());
  }

  // Split a node's children into index-capable and non-index-capable groups,
  // wrapping the index-capable ones in a single IndexedChangeQuery subtree.
  private Predicate<ChangeData> partitionChildren(
      Predicate<ChangeData> in,
      List<Predicate<ChangeData>> newChildren,
      BitSet isIndexed,
      ChangeIndex index,
      int limit) throws QueryParseException {
    if (isIndexed.cardinality() == 1) {
      // Only one indexed child: wrap it alone and move it to the front so the
      // index query drives the evaluation.
      int i = isIndexed.nextSetBit(0);
      newChildren.add(
          0, new IndexedChangeQuery(db, index, newChildren.remove(i), limit));
      return copy(in, newChildren);
    }

    // Group all indexed predicates into a wrapped subtree.
    List<Predicate<ChangeData>> indexed =
        Lists.newArrayListWithCapacity(isIndexed.cardinality());
    List<Predicate<ChangeData>> all =
        Lists.newArrayListWithCapacity(
            newChildren.size() - isIndexed.cardinality() + 1);
    for (int i = 0; i < newChildren.size(); i++) {
      Predicate<ChangeData> c = newChildren.get(i);
      if (isIndexed.get(i)) {
        indexed.add(c);
      } else {
        all.add(c);
      }
    }
    all.add(0, new IndexedChangeQuery(db, index, in.copy(indexed), limit));
    return copy(in, all);
  }

  // Copy a boolean node over new children, preferring the source-backed
  // AndSource/OrSource variants that can stream from the database.
  private Predicate<ChangeData> copy(
      Predicate<ChangeData> in,
      List<Predicate<ChangeData>> all) {
    if (in instanceof AndPredicate) {
      return new AndSource(db, all);
    } else if (in instanceof OrPredicate) {
      return new OrSource(all);
    }
    return in.copy(all);
  }

  // Only boolean combinators with at least one child can be decomposed.
  private static boolean isRewritePossible(Predicate<ChangeData> p) {
    return p.getChildCount() > 0 && (
        p instanceof AndPredicate
        || p instanceof OrPredicate
        || p instanceof NotPredicate);
  }

  /** Basic (index-independent) rewrites applied before index decomposition. */
  static class BasicRewritesImpl extends BasicChangeRewrites {
    private static final QueryRewriter.Definition<ChangeData, BasicRewritesImpl> mydef =
        new QueryRewriter.Definition<ChangeData, BasicRewritesImpl>(
            BasicRewritesImpl.class, SqlRewriterImpl.BUILDER);

    @Inject
    BasicRewritesImpl(Provider<ReviewDb> db, IndexCollection indexes) {
      super(mydef, db, indexes);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.dboe.transaction.txn.journal;
import static org.apache.jena.dboe.sys.SysDB.SizeOfInt;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.zip.Adler32;
import org.apache.jena.atlas.iterator.IteratorSlotted;
import org.apache.jena.atlas.lib.ByteBufferLib;
import org.apache.jena.atlas.lib.Closeable;
import org.apache.jena.atlas.lib.FileOps;
import org.apache.jena.atlas.lib.Sync;
import org.apache.jena.dboe.base.file.BufferChannel;
import org.apache.jena.dboe.base.file.BufferChannelFile;
import org.apache.jena.dboe.base.file.BufferChannelMem;
import org.apache.jena.dboe.base.file.Location;
import org.apache.jena.dboe.sys.Names;
import org.apache.jena.dboe.transaction.txn.ComponentId;
import org.apache.jena.dboe.transaction.txn.PrepareState;
import org.apache.jena.dboe.transaction.txn.TransactionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A transaction journal.
* The journal is append-only for writes, with truncation of the file
* every so often. It is read during recovery.
* The size of entries depends on per-component redo/undo records;
* the control records like COMMIT are quite small.
* Entries have a CRC to ensure that part-entries are not acted on.
*/
public final
class Journal implements Sync, Closeable
{
// Compile-time switch for very verbose journal tracing.
private static final boolean LOGGING = false;
private static Logger log = LoggerFactory.getLogger(Journal.class);

// Logging is active only when both the compile-time flag and the logger allow it.
private static boolean logging() {
    return LOGGING && log.isInfoEnabled();
}

// printf-style trace helper; no-op unless logging() is on.
private static void log(String fmt, Object...args) {
    if ( ! logging() )
        return;
    log.info(String.format(fmt, args));
}

private BufferChannel channel;      // Underlying storage (file or in-memory channel).
private long position;              // Current append position: the end of the journal.
private final Location location;    // Where the journal lives; null for a raw-channel journal.

// Write-cycle state (see startWrite/commitWrite/abortWrite/endWrite):
// position at which the current write cycle began (-1 = no cycle active),
// and whether the cycle has been ended (committed or aborted).
private long journalWriteStart = -1;
private boolean journalWriteEnded = false;

// Header: fixed, inc CRC
// length of data    4 bytes
// CRC of whole entry. 4 bytes
// entry type        4 bytes (1 byte and 3 alignment)
// component        16 bytes (fixed??)
// Data area : variable
//  Bytes

// Byte offsets of the header fields within an entry.
private static final int posnLength = 0;
private static final int posnCRC = posnLength + SizeOfInt;
private static final int posnEntry = posnCRC + SizeOfInt;
private static final int posnComponent = posnEntry + SizeOfInt;
// Start of the component data area.
private static final int posnData = posnComponent + ComponentId.SIZE;
// Currently, the header is fixed size so this is the size.
private static int HeaderLen = posnData-posnLength;

// Reusable scratch buffer for reading/writing entry headers.
// NOTE(review): sharing this buffer assumes single-threaded use per the
// synchronized read/write methods - confirm all access paths are covered.
private ByteBuffer header = ByteBuffer.allocate(HeaderLen);
/**
 * Test whether a journal already exists at the given location.
 * Unique in-memory locations never have a pre-existing journal.
 */
public static boolean exists(Location location) {
    if ( location.isMemUnique() )
        return false;
    return location.isMem()
        ? location.exists(Names.journalFile)
        : FileOps.exists(journalFilename(location));
}
/** Create a journal over an already-open channel (no location; cannot be reopened). */
public static Journal create(BufferChannel chan) {
    return new Journal(chan, null);
}

/** Create (or open) the journal file for the given location. */
public static Journal create(Location location) {
    return new Journal(location);
}

// Absolute path of the journal file within a location.
private static String journalFilename(Location location) {
    return location.absolute(Names.journalFile);
}
// Location-based constructor: open the backing channel from the location.
private Journal(Location location) {
    this(openFromLocation(location), location);
}

// Base constructor; position starts at 0 (callers position/truncate as needed).
private Journal(BufferChannel chan, Location location) {
    this.channel = chan;
    this.position = 0;
    this.location = location;
}
/**
 * Forced reopen - Thread.interrupt causes java to close file.
 * Attempt to close, open, and position.
 */
public void reopen() {
    if ( location == null )
        // Can't reopen.
        return;
    if ( channel != null ) {
        // Best-effort close of the (possibly already broken) channel.
        try { channel.close(); }
        catch (Exception ex) { /*ignore*/ }
    }
    channel = openFromLocation(location);
    long posn = writeStartPosn();
    if ( posn >= 0 ) {
        // A write cycle was in progress: discard its partial output and
        // resume appending from where the cycle started.
        truncate(posn);
        position = posn;
        sync();
    } else {
        // No write cycle active: append at the end of the existing file.
        position = channel.size();
    }
    writeReset();
}
// Open the backing channel for a location: in-memory channel for mem
// locations, file channel otherwise.
private static BufferChannel openFromLocation(Location location) {
    String name = journalFilename(location);
    return location.isMem()
        ? BufferChannelMem.create(name)
        : BufferChannelFile.create(name);
}
// synchronized : excessive?
// Given the calling context, we know it's thread safe.
/**
 * Append a prepared entry to the journal, recording its start/end positions
 * on the entry (only if the entry does not already carry a position).
 *
 * @return the start position of the entry in the journal
 */
synchronized public long writeJournal(JournalEntry entry) {
    long posn = write(entry.getType(), entry.getComponentId(), entry.getByteBuffer());
    if ( entry.getPosition() < 0 ) {
        entry.setPosition(posn);
        entry.setEndPosition(position);
    }
    return posn;
}
// /** Write an entry and return its location in the journal */
// synchronized public void write(List<PrepareState> prepareStates) {
//     prepareStates.forEach(this::write);
// }

/**
 * Append the redo data of one {@link PrepareState} as a REDO entry.
 *
 * @return the start position of the entry in the journal
 */
public long write(PrepareState prepareState) {
    return write(JournalEntryType.REDO, prepareState.getComponent(), prepareState.getData());
}
/**
 * Write an entry and return its location in the journal.
 * <p>
 * Layout: fixed header (length/4, crc/4, entry-type/4, component/16) followed
 * by the variable-length data area. The CRC (Adler32) covers the whole header
 * (with the CRC field itself zeroed) plus the data. The caller's buffer
 * position/limit are restored before returning.
 *
 * @param type        kind of journal entry
 * @param componentId owning transactional component; must not be null
 * @param buffer      entry payload, may be null for control records
 * @return the start position of the entry in the journal
 */
synchronized public long write(JournalEntryType type, ComponentId componentId, ByteBuffer buffer) {
    // Check buffer set right.
    if ( LOGGING ) {
        log("write@%-3d >> %s %s %s", position, type.name(),
            componentId == null ? "<null>" : componentId.label(),
            buffer == null ? "<null>" : ByteBufferLib.details(buffer));
    }

    long posn = position;
    int len = -1;
    int bufferLimit = -1;
    int bufferPosition = -1;
    if ( buffer != null ) {
        // Remember caller's view so it can be restored after writing.
        bufferLimit = buffer.limit();
        bufferPosition = buffer.position();
        buffer.rewind();
        len = buffer.remaining();
    }

    // Header: (length/4, crc/4, entry/4, component/16)
    header.clear();
    header.putInt(len);
    header.putInt(0); // Set CRC to zero - the checksum is computed over a zeroed CRC field.
    header.putInt(type.id);
    header.put(componentId.getBytes());
    header.flip();

    // Need to put CRC in before writing.
    Adler32 adler = new Adler32();
    adler.update(header.array());
    if ( len > 0 ) {
        adler.update(buffer);
        buffer.rewind();
    }
    int crc = (int)adler.getValue();
    header.putInt(posnCRC, crc);
    if ( LOGGING )
        log("write@ -- crc = %s", Integer.toHexString(crc) );

    channel.write(header);
    if ( len > 0 ) {
        channel.write(buffer);
        // Restore the caller's buffer view. (The original code repeated this
        // restore a second time after advancing 'position'; the duplicate was
        // a no-op and has been removed.)
        buffer.position(bufferPosition);
        buffer.limit(bufferLimit);
    }
    position += HeaderLen + len;
    if ( LOGGING )
        log("write@%-3d << %s", position, componentId.label());
    return posn;
}
/** Read the entry starting at position {@code id}, preserving the channel position. */
synchronized public JournalEntry readJournal(long id) {
    return _readJournal(id);
}

// Read an entry at an arbitrary position: seek, read, record the entry's
// start/end positions, then restore the original channel position.
private JournalEntry _readJournal(long id) {
    long x = channel.position();
    if ( x != id )
        channel.position(id);
    JournalEntry entry = _read();
    long x2 = channel.position();
    entry.setPosition(id);
    entry.setEndPosition(x2);
    if ( x != id )
        channel.position(x);
    return entry;
}
// -- Journal write cycle used during Transaction.writerPrepareCommit.

/** Begin a write cycle: remember the current end-of-journal position for possible rollback. */
public void startWrite() {
    journalWriteStart = this.position;
    journalWriteEnded = false;
}

/** Start position recorded by {@link #startWrite}; -1 when no write cycle is active. */
public long writeStartPosn() { return journalWriteStart; }

/** Complete the current write cycle and force the journal contents to stable storage. */
public void commitWrite() {
    journalWriteStart = -1;
    journalWriteEnded = true;
    channel.sync();
}
// Idempotent. Safe to call multiple times and after commit (when it has no effect).
public void abortWrite() {
if ( !journalWriteEnded && journalWriteStart > 0 ) {
truncate(journalWriteStart);
sync();
}
journalWriteEnded = true;
}
public void endWrite() {
if ( ! journalWriteEnded )
abortWrite();
writeReset();
}
private void writeReset() {
journalWriteStart = -1;
journalWriteEnded = false;
}
    // -- Journal write cycle.
    // read one entry at the channel position.
    // Move position to end of read.
    private JournalEntry _read() {
        if ( LOGGING ) {
            log("read@%-3d >>", channel.position());
        }
        header.clear();
        int lenRead = channel.read(header);
        if ( lenRead == -1 ) {
            // probably broken file.
            throw new TransactionException("Read off the end of a journal file");
            // return null;
        }
        if ( lenRead != header.capacity() )
            throw new TransactionException("Partial read of journal file");
        header.rewind();
        // Header: (length/4, crc/4, entry/4, component/16)
        int len = header.getInt();
        int checksum = header.getInt();
        // Zero the CRC slot so the recomputed checksum matches what write()
        // produced - write() checksums the header with the CRC field zeroed.
        header.putInt(posnCRC, 0);
        int entryType = header.getInt();
        byte[] bytes = new byte[ComponentId.SIZE];
        header.get(bytes);
        ComponentId component = ComponentId.create(null, bytes);
        Adler32 adler = new Adler32();
        adler.update(header.array());
        ByteBuffer bb = null;
        if ( len > 0 ) {
            bb = ByteBuffer.allocate(len);
            lenRead = channel.read(bb);
            if ( lenRead != len )
                throw new TransactionException("Failed to read the journal entry data: wanted " + len + " bytes, got " + lenRead);
            bb.rewind();
            // update(ByteBuffer) consumes the buffer's position ...
            adler.update(bb);
            // ... so rewind again before handing it to the JournalEntry.
            bb.rewind();
        }
        int crc = (int)adler.getValue();
        if ( checksum != crc )
            throw new TransactionException("Checksum error reading from the Journal. "+Integer.toHexString(checksum)+" / "+Integer.toHexString(crc));
        JournalEntryType type = JournalEntryType.type(entryType);
        JournalEntry entry = new JournalEntry(type, component, bb);
        if ( LOGGING )
            log("read@%-3d >> %s", channel.position(), entry);
        return entry;
    }
/**
* Iterator of entries from current point in Journal, going forward. Must be
* JournalEntry aligned at start.
*/
private class IteratorEntries extends IteratorSlotted<JournalEntry> {
JournalEntry slot = null;
final long endPoint;
long iterPosn;
public IteratorEntries(long startPosition) {
iterPosn = startPosition;
endPoint = channel.size();
}
@Override
protected JournalEntry moveToNext() {
// synchronized necessary? Outer policy is single thread?
synchronized (Journal.this) {
if ( iterPosn >= endPoint )
return null;
JournalEntry e = _readJournal(iterPosn);
iterPosn = e.getEndPosition();
return e;
}
}
@Override
protected boolean hasMore() {
return iterPosn < endPoint;
}
}
public Iterator<JournalEntry> entries() {
return new IteratorEntries(0);
}
synchronized public Iterator<JournalEntry> entries(long startPosition) {
return new IteratorEntries(startPosition);
}
@Override
public void sync() { channel.sync(); }
@Override
public void close() { channel.close(); }
public long size() { return channel.size(); }
public boolean isEmpty() { return channel.size() == 0; }
public void truncate(long size) { channel.truncate(size); }
public void reset() {
truncate(0);
sync();
}
public long position() { return channel.position(); }
// public void position(long posn) { channel.position(posn); }
// public void append() { position(size()); }
public Location getLocation() { return location; }
public String getFilename() { return channel.getFilename(); }
}
| |
package com.tngtech.archunit.junit;
import java.lang.annotation.Retention;
import com.tngtech.archunit.ArchConfiguration;
import com.tngtech.archunit.core.domain.JavaClasses;
import com.tngtech.archunit.junit.ArchUnitRunner.SharedCache;
import com.tngtech.archunit.lang.ArchRule;
import com.tngtech.archunit.testutil.ArchConfigurationRule;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.Description;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunNotifier;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
import static com.tngtech.archunit.core.domain.TestUtils.importClassesWithContext;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.ArchTestWithPrivateInstanceField.PRIVATE_RULE_FIELD_NAME;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.IgnoredArchTest.RULE_ONE_IN_IGNORED_TEST;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.IgnoredArchTest.RULE_TWO_IN_IGNORED_TEST;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.SomeArchTest.FAILING_FIELD_NAME;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.SomeArchTest.IGNORED_FIELD_NAME;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.SomeArchTest.SATISFIED_FIELD_NAME;
import static com.tngtech.archunit.junit.ArchUnitRunnerRunsRuleFieldsTest.WrongArchTestWrongFieldType.NO_RULE_AT_ALL_FIELD_NAME;
import static com.tngtech.archunit.junit.ArchUnitRunnerTestUtils.BE_SATISFIED;
import static com.tngtech.archunit.junit.ArchUnitRunnerTestUtils.NEVER_BE_SATISFIED;
import static com.tngtech.archunit.junit.ArchUnitRunnerTestUtils.newRunnerFor;
import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.classes;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests that {@code ArchUnitRunner} correctly discovers and runs rules declared
 * as {@code @ArchTest} fields: satisfied, failing and ignored rules, fields of
 * any visibility (including inherited from an abstract base class), wrongly
 * typed fields, annotation pass-through and display-name handling.
 */
public class ArchUnitRunnerRunsRuleFieldsTest {
    @Rule
    public final ExpectedException thrown = ExpectedException.none();
    @Rule
    public final MockitoRule mockitoRule = MockitoJUnit.rule();
    @Rule
    public final ArchConfigurationRule archConfigurationRule = new ArchConfigurationRule();
    // Mocked caching layer so no real classpath scanning takes place.
    @Mock
    private SharedCache cache;
    @Mock
    private ClassCache classCache;
    @Mock
    private RunNotifier runNotifier;
    @Captor
    private ArgumentCaptor<Description> descriptionCaptor;
    @Captor
    private ArgumentCaptor<Failure> failureCaptor;
    @InjectMocks
    private ArchUnitRunner runner = ArchUnitRunnerTestUtils.newRunnerFor(SomeArchTest.class);
    private JavaClasses cachedClasses = importClassesWithContext(Object.class);
    @Before
    public void setUp() {
        // Any class/analysis request resolves to the canned imported classes.
        when(cache.get()).thenReturn(classCache);
        when(classCache.getClassesToAnalyzeFor(any(Class.class), any(ClassAnalysisRequest.class))).thenReturn(cachedClasses);
    }
    @Test
    public void should_find_children() {
        assertThat(runner.getChildren()).as("Rules defined in Test Class").hasSize(3);
    }
    @Test
    public void should_start_rule() {
        ArchTestExecution satisfiedRule = getRule(SATISFIED_FIELD_NAME);
        runner.runChild(satisfiedRule, runNotifier);
        verify(runNotifier).fireTestStarted(descriptionCaptor.capture());
        assertThat(descriptionCaptor.getValue().toString()).contains(SATISFIED_FIELD_NAME);
    }
    @Test
    public void should_accept_satisfied_rule() {
        ArchTestExecution satisfiedRule = getRule(SATISFIED_FIELD_NAME);
        runner.runChild(satisfiedRule, runNotifier);
        verifyTestFinishedSuccessfully(SATISFIED_FIELD_NAME);
    }
    @Test
    public void should_allow_instance_fields_of_all_visibility() {
        ArchUnitRunner runner = newRunnerFor(ArchTestWithPrivateInstanceField.class, cache);
        runner.runChild(ArchUnitRunnerTestUtils.getRule(PRIVATE_RULE_FIELD_NAME, runner), runNotifier);
        verifyTestFinishedSuccessfully(PRIVATE_RULE_FIELD_NAME);
    }
    @Test
    public void should_allow_instance_field_in_abstract_base_class() {
        ArchUnitRunner runner = newRunnerFor(ArchTestWithAbstractBaseClass.class, cache);
        runner.runChild(ArchUnitRunnerTestUtils.getRule(AbstractBaseClass.INSTANCE_FIELD_NAME, runner), runNotifier);
        verifyTestFinishedSuccessfully(AbstractBaseClass.INSTANCE_FIELD_NAME);
    }
    @Test
    public void should_fail_on_wrong_field_type() {
        ArchUnitRunner runner = newRunnerFor(WrongArchTestWrongFieldType.class, cache);
        thrown.expectMessage("Rule field " +
                WrongArchTestWrongFieldType.class.getSimpleName() + "." + NO_RULE_AT_ALL_FIELD_NAME +
                " to check must be of type " + ArchRule.class.getSimpleName());
        runner.runChild(ArchUnitRunnerTestUtils.getRule(NO_RULE_AT_ALL_FIELD_NAME, runner), runNotifier);
    }
    @Test
    public void should_fail_unsatisfied_rule() {
        ArchTestExecution satisfiedRule = getRule(FAILING_FIELD_NAME);
        runner.runChild(satisfiedRule, runNotifier);
        verify(runNotifier).fireTestFailure(failureCaptor.capture());
        Failure failure = failureCaptor.getValue();
        assertThat(failure.getDescription().toString()).contains(FAILING_FIELD_NAME);
        assertThat(failure.getException()).isInstanceOf(AssertionError.class);
    }
    @Test
    public void should_skip_ignored_rule() {
        ArchTestExecution satisfiedRule = getRule(IGNORED_FIELD_NAME);
        runner.runChild(satisfiedRule, runNotifier);
        verify(runNotifier).fireTestIgnored(descriptionCaptor.capture());
        assertThat(descriptionCaptor.getValue().toString()).contains(IGNORED_FIELD_NAME);
    }
    @Test
    public void should_skip_ignored_test() {
        ArchUnitRunner runner = newRunnerFor(IgnoredArchTest.class, cache);
        runner.runChild(ArchUnitRunnerTestUtils.getRule(RULE_ONE_IN_IGNORED_TEST, runner), runNotifier);
        runner.runChild(ArchUnitRunnerTestUtils.getRule(RULE_TWO_IN_IGNORED_TEST, runner), runNotifier);
        verify(runNotifier, times(2)).fireTestIgnored(descriptionCaptor.capture());
        assertThat(descriptionCaptor.getAllValues()).extractingResultOf("getMethodName")
                .contains(RULE_ONE_IN_IGNORED_TEST, RULE_TWO_IN_IGNORED_TEST);
    }
    @Test
    public void should_pass_annotations_of_rule_field() {
        ArchUnitRunner runner = newRunnerFor(ArchTestWithFieldWithAdditionalAnnotation.class, cache);
        runner.runChild(ArchUnitRunnerTestUtils.getRule(ArchTestWithFieldWithAdditionalAnnotation.TEST_FIELD_NAME, runner), runNotifier);
        verify(runNotifier).fireTestFinished(descriptionCaptor.capture());
        Description description = descriptionCaptor.getValue();
        assertThat(description.getAnnotation(SomeAnnotation.class)).as("expected annotation").isNotNull();
    }
    @Test
    public void underscores_in_field_name_are_replaced_with_blanks_if_property_is_set_to_true() {
        ArchUnitRunner runner = newRunnerFor(ArchTestWithFieldWithUnderscoresInName.class, cache);
        ArchConfiguration.get().setProperty(DisplayNameResolver.JUNIT_DISPLAYNAME_REPLACE_UNDERSCORES_BY_SPACES_PROPERTY_NAME, "true");
        runner.runChild(ArchUnitRunnerTestUtils.getRule(ArchTestWithFieldWithUnderscoresInName.TEST_FIELD_NAME, runner), runNotifier);
        verify(runNotifier).fireTestFinished(descriptionCaptor.capture());
        Description description = descriptionCaptor.getValue();
        assertThat(description.getDisplayName()).as("expected display name").startsWith("some test Field(");
    }
    @Test
    public void original_field_name_is_used_as_displayname_if_property_is_set_to_false() {
        ArchUnitRunner runner = newRunnerFor(ArchTestWithFieldWithUnderscoresInName.class, cache);
        ArchConfiguration.get().setProperty(DisplayNameResolver.JUNIT_DISPLAYNAME_REPLACE_UNDERSCORES_BY_SPACES_PROPERTY_NAME, "false");
        runner.runChild(ArchUnitRunnerTestUtils.getRule(ArchTestWithFieldWithUnderscoresInName.TEST_FIELD_NAME, runner), runNotifier);
        verify(runNotifier).fireTestFinished(descriptionCaptor.capture());
        Description description = descriptionCaptor.getValue();
        assertThat(description.getDisplayName()).as("expected display name").startsWith("some_test_Field(");
    }
    private ArchTestExecution getRule(String name) {
        return ArchUnitRunnerTestUtils.getRule(name, runner);
    }
    private void verifyTestFinishedSuccessfully(String expectedDescriptionMethodName) {
        verifyTestFinishedSuccessfully(runNotifier, descriptionCaptor, expectedDescriptionMethodName);
    }
    // Shared with sibling runner tests: asserts no failure was fired and the
    // finished event carries the expected method name.
    static void verifyTestFinishedSuccessfully(RunNotifier runNotifier, ArgumentCaptor<Description> descriptionCaptor,
            String expectedDescriptionMethodName) {
        verify(runNotifier, never()).fireTestFailure(any(Failure.class));
        verify(runNotifier).fireTestFinished(descriptionCaptor.capture());
        Description description = descriptionCaptor.getValue();
        assertThat(description.getMethodName()).isEqualTo(expectedDescriptionMethodName);
    }
    // ---- Fixture classes consumed by the tests above. ----
    @AnalyzeClasses(packages = "some.pkg")
    public static class SomeArchTest {
        static final String SATISFIED_FIELD_NAME = "someSatisfiedRule";
        static final String FAILING_FIELD_NAME = "someFailingRule";
        static final String IGNORED_FIELD_NAME = "someIgnoredRule";
        @ArchTest
        public static final ArchRule someSatisfiedRule = classes().should(BE_SATISFIED);
        @ArchTest
        public static final ArchRule someFailingRule = classes().should(NEVER_BE_SATISFIED);
        @ArchIgnore
        @ArchTest
        public static final ArchRule someIgnoredRule = classes().should(NEVER_BE_SATISFIED);
    }
    @SuppressWarnings("WeakerAccess")
    @AnalyzeClasses(packages = "some.pkg")
    public static class ArchTestWithPrivateInstanceField {
        static final String PRIVATE_RULE_FIELD_NAME = "privateField";
        @ArchTest
        private ArchRule privateField = classes().should(BE_SATISFIED);
    }
    @SuppressWarnings("WeakerAccess")
    @AnalyzeClasses(packages = "some.pkg")
    public static class ArchTestWithAbstractBaseClass extends AbstractBaseClass {
    }
    abstract static class AbstractBaseClass {
        static final String INSTANCE_FIELD_NAME = "abstractBaseClassInstanceField";
        @ArchTest
        ArchRule abstractBaseClassInstanceField = classes().should(BE_SATISFIED);
    }
    @AnalyzeClasses(packages = "some.pkg")
    public static class WrongArchTestWrongFieldType {
        static final String NO_RULE_AT_ALL_FIELD_NAME = "noRuleAtAll";
        @ArchTest
        public static Object noRuleAtAll = new Object();
    }
    @ArchIgnore
    @AnalyzeClasses(packages = "some.pkg")
    public static class IgnoredArchTest {
        static final String RULE_ONE_IN_IGNORED_TEST = "someRuleOne";
        static final String RULE_TWO_IN_IGNORED_TEST = "someRuleTwo";
        @ArchTest
        public static final ArchRule someRuleOne = classes().should(NEVER_BE_SATISFIED);
        @ArchTest
        public static final ArchRule someRuleTwo = classes().should(NEVER_BE_SATISFIED);
    }
    @AnalyzeClasses(packages = "some.pkg")
    public static class ArchTestWithFieldWithAdditionalAnnotation {
        static final String TEST_FIELD_NAME = "annotatedTestField";
        @SomeAnnotation
        @ArchTest
        public static final ArchRule annotatedTestField = classes().should(NEVER_BE_SATISFIED);
    }
    @AnalyzeClasses(packages = "some.pkg")
    public static class ArchTestWithFieldWithUnderscoresInName {
        static final String TEST_FIELD_NAME = "some_test_Field";
        @ArchTest
        public static final ArchRule some_test_Field = classes().should(NEVER_BE_SATISFIED);
    }
    @Retention(RUNTIME)
    @interface SomeAnnotation {
    }
}
| |
package com.sleekbyte.tailor.functional;
import static org.junit.Assert.assertArrayEquals;
import com.github.mustachejava.DefaultMustacheFactory;
import com.github.mustachejava.Mustache;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.sleekbyte.tailor.Tailor;
import com.sleekbyte.tailor.common.ConfigProperties;
import com.sleekbyte.tailor.common.Messages;
import com.sleekbyte.tailor.common.Rules;
import com.sleekbyte.tailor.common.Severity;
import com.sleekbyte.tailor.format.CCFormatter;
import com.sleekbyte.tailor.format.Format;
import com.sleekbyte.tailor.format.Formatter;
import com.sleekbyte.tailor.format.HTMLFormatter;
import com.sleekbyte.tailor.output.Printer;
import com.sleekbyte.tailor.output.ViolationMessage;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Tests for {@link Tailor} output formats.
*/
@RunWith(MockitoJUnitRunner.class)
public final class FormatTest {
private static final String TEST_INPUT_DIR = "src/test/swift/com/sleekbyte/tailor/functional";
private static final String NEWLINE_REGEX = "\\r?\\n";
private static final String NEWLINE_PATTERN = "\n";
private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().setPrettyPrinting().create();
protected ByteArrayOutputStream outContent;
protected File inputFile;
protected List<String> expectedMessages;
@Rule
public TemporaryFolder folder = new TemporaryFolder();
@Before
public void setUp() throws IOException {
inputFile = new File(TEST_INPUT_DIR + "/UpperCamelCaseTest.swift");
expectedMessages = new ArrayList<>();
outContent = new ByteArrayOutputStream();
System.setOut(new PrintStream(outContent, false, Charset.defaultCharset().name()));
}
@After
public void tearDown() {
System.setOut(null);
}
@Test
public void testXcodeFormat() throws IOException {
Format format = Format.XCODE;
final String[] command = new String[] {
"--format", format.getName(),
"--no-color",
"--only=upper-camel-case",
inputFile.getPath()
};
expectedMessages.addAll(getExpectedMsgs().stream().map(msg -> Printer.genOutputStringForTest(
msg.getRule(),
inputFile.getName(),
msg.getLineNumber(),
msg.getColumnNumber(),
msg.getSeverity(),
msg.getMessage())).collect(Collectors.toList()));
Tailor.main(command);
List<String> actualOutput = new ArrayList<>();
String[] msgs = outContent.toString(Charset.defaultCharset().name()).split(NEWLINE_REGEX);
// Skip first four lines for file header, last two lines for summary
msgs = Arrays.copyOfRange(msgs, 4, msgs.length - 2);
for (String msg : msgs) {
String truncatedMsg = msg.substring(msg.indexOf(inputFile.getName()));
actualOutput.add(truncatedMsg);
}
assertArrayEquals(outContent.toString(Charset.defaultCharset().name()), this.expectedMessages.toArray(),
actualOutput.toArray());
}
@Test
public void testJSONFormat() throws IOException {
Format format = Format.JSON;
final String[] command = new String[] {
"--format", format.getName(),
"--no-color",
"--only=upper-camel-case",
inputFile.getPath()
};
Map<String, Object> expectedOutput = getJSONMessages();
Tailor.main(command);
List<String> expected = new ArrayList<>();
List<String> actual = new ArrayList<>();
expectedMessages.addAll(
Arrays.asList((GSON.toJson(expectedOutput) + System.lineSeparator()).split(NEWLINE_REGEX)));
for (String msg : expectedMessages) {
String strippedMsg = msg.replaceAll(inputFile.getCanonicalPath(), "");
expected.add(strippedMsg);
}
String[] msgs = outContent.toString(Charset.defaultCharset().name()).split(NEWLINE_REGEX);
for (String msg : msgs) {
String strippedMsg = msg.replaceAll(inputFile.getCanonicalPath(), "");
actual.add(strippedMsg);
}
assertArrayEquals(outContent.toString(Charset.defaultCharset().name()), expected.toArray(), actual.toArray());
}
@Test
public void testXcodeConfigOption() throws IOException {
File configurationFile = xcodeFormatConfigFile(".tailor.yml");
final String[] command = new String[] {
"--config", configurationFile.getAbsolutePath(),
"--no-color",
"--only=upper-camel-case",
inputFile.getPath()
};
expectedMessages.addAll(getExpectedMsgs().stream().map(msg -> Printer.genOutputStringForTest(
msg.getRule(),
inputFile.getName(),
msg.getLineNumber(),
msg.getColumnNumber(),
msg.getSeverity(),
msg.getMessage())).collect(Collectors.toList()));
Tailor.main(command);
List<String> actualOutput = new ArrayList<>();
String[] msgs = outContent.toString(Charset.defaultCharset().name()).split(NEWLINE_REGEX);
// Skip first four lines for file header, last two lines for summary
msgs = Arrays.copyOfRange(msgs, 4, msgs.length - 2);
for (String msg : msgs) {
String truncatedMsg = msg.substring(msg.indexOf(inputFile.getName()));
actualOutput.add(truncatedMsg);
}
assertArrayEquals(outContent.toString(Charset.defaultCharset().name()), this.expectedMessages.toArray(),
actualOutput.toArray());
}
public void testCCFormat() throws IOException {
Format format = Format.CC;
final String[] command = new String[] {
"--format", format.getName(),
"--no-color",
"--only=upper-camel-case",
inputFile.getPath()
};
Tailor.main(command);
List<String> expected = new ArrayList<>();
List<String> actual = new ArrayList<>();
StringBuilder expectedOutput = new StringBuilder();
for (Map<String, Object> msg : getCCMessages()) {
expectedOutput.append(GSON.toJson(msg)).append(CCFormatter.NULL_CHAR).append(System.lineSeparator());
}
expectedMessages.addAll(Arrays.asList(expectedOutput.toString().split(NEWLINE_REGEX)));
for (String msg : expectedMessages) {
String strippedMsg = msg.replaceAll(inputFile.getCanonicalPath(), "");
expected.add(strippedMsg);
}
String[] msgs = outContent.toString(Charset.defaultCharset().name()).split(NEWLINE_REGEX);
for (String msg : msgs) {
String strippedMsg = msg.replaceAll(inputFile.getCanonicalPath(), "");
actual.add(strippedMsg);
}
assertArrayEquals(outContent.toString(Charset.defaultCharset().name()), expected.toArray(), actual.toArray());
}
@Test
public void testHTMLFormat() throws IOException {
Format format = Format.HTML;
final String[] command = new String[] {
"--format", format.getName(),
"--no-color",
"--only=upper-camel-case",
inputFile.getPath()
};
Map<String, Object> expectedOutput = getHTMLMessages();
Tailor.main(command);
List<String> expected = new ArrayList<>();
List<String> actual = new ArrayList<>();
Mustache mustache = new DefaultMustacheFactory().compile(
new InputStreamReader(HTMLFormatter.class.getResourceAsStream("index.html"), Charset.defaultCharset()),
"index.html"
);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
mustache.execute(new OutputStreamWriter(baos, Charset.defaultCharset()), expectedOutput).flush();
expectedMessages.addAll(Arrays.asList(baos.toString(Charset.defaultCharset().name()).split(NEWLINE_REGEX)));
for (String msg : expectedMessages) {
String strippedMsg = msg.replaceAll(inputFile.getCanonicalPath(), "");
expected.add(strippedMsg);
}
String[] msgs = outContent.toString(Charset.defaultCharset().name()).split(NEWLINE_REGEX);
for (String msg : msgs) {
String strippedMsg = msg.replaceAll(inputFile.getCanonicalPath(), "");
actual.add(strippedMsg);
}
assertArrayEquals(outContent.toString(Charset.defaultCharset().name()), expected.toArray(), actual.toArray());
}
protected List<ViolationMessage> getExpectedMsgs() {
List<ViolationMessage> messages = new ArrayList<>();
messages.add(createViolationMessage(3, 7, Severity.WARNING, Messages.CLASS + Messages.NAMES));
messages.add(createViolationMessage(7, 7, Severity.WARNING, Messages.CLASS + Messages.NAMES));
messages.add(createViolationMessage(42, 6, Severity.WARNING, Messages.ENUM + Messages.NAMES));
messages.add(createViolationMessage(46, 6, Severity.WARNING, Messages.ENUM + Messages.NAMES));
messages.add(createViolationMessage(50, 6, Severity.WARNING, Messages.ENUM + Messages.NAMES));
messages.add(createViolationMessage(72, 8, Severity.WARNING, Messages.STRUCT + Messages.NAMES));
messages.add(createViolationMessage(76, 8, Severity.WARNING, Messages.STRUCT + Messages.NAMES));
messages.add(createViolationMessage(90, 10, Severity.WARNING, Messages.PROTOCOL + Messages.NAMES));
messages.add(createViolationMessage(94, 10, Severity.WARNING, Messages.PROTOCOL + Messages.NAMES));
messages.add(createViolationMessage(98, 10, Severity.WARNING, Messages.PROTOCOL + Messages.NAMES));
messages.add(createViolationMessage(119, 18, Severity.WARNING, Messages.GENERIC_PARAMETERS + Messages.NAMES));
messages.add(createViolationMessage(119, 23, Severity.WARNING, Messages.GENERIC_PARAMETERS + Messages.NAMES));
messages.add(createViolationMessage(128, 20, Severity.WARNING, Messages.GENERIC_PARAMETERS + Messages.NAMES));
messages.add(createViolationMessage(137, 14, Severity.WARNING, Messages.GENERIC_PARAMETERS + Messages.NAMES));
return messages;
}
private ViolationMessage createViolationMessage(int line, int column, Severity severity, String msg) {
return new ViolationMessage(Rules.UPPER_CAMEL_CASE, line, column, severity, msg + Messages.UPPER_CAMEL_CASE);
}
private Map<String, Object> getJSONSummary(long analyzed, long skipped, long errors, long warnings) {
Map<String, Object> summary = new HashMap<>();
summary.put(Messages.ANALYZED_KEY, analyzed);
summary.put(Messages.SKIPPED_KEY, skipped);
summary.put(Messages.VIOLATIONS_KEY, errors + warnings);
summary.put(Messages.ERRORS_KEY, errors);
summary.put(Messages.WARNINGS_KEY, warnings);
return summary;
}
private Map<String, Object> getJSONMessages() {
List<Map<String, Object>> violations = new ArrayList<>();
for (ViolationMessage msg : getExpectedMsgs()) {
Map<String, Object> violation = new HashMap<>();
Map<String, Object> location = new HashMap<>();
location.put(Messages.LINE_KEY, msg.getLineNumber());
location.put(Messages.COLUMN_KEY, msg.getColumnNumber());
violation.put(Messages.LOCATION_KEY, location);
violation.put(Messages.SEVERITY_KEY, msg.getSeverity().toString());
violation.put(Messages.RULE_KEY, Rules.UPPER_CAMEL_CASE.getName());
violation.put(Messages.MESSAGE_KEY, msg.getMessage());
violations.add(violation);
}
Map<String, Object> file = new HashMap<>();
file.put(Messages.PATH_KEY, "");
file.put(Messages.PARSED_KEY, true);
file.put(Messages.VIOLATIONS_KEY, violations);
List<Object> files = new ArrayList<>();
files.add(file);
Map<String, Object> expectedOutput = new LinkedHashMap<>();
expectedOutput.put(Messages.FILES_KEY, files);
expectedOutput.put(Messages.SUMMARY_KEY, getJSONSummary(1, 0, 0, violations.size()));
return expectedOutput;
}
private File xcodeFormatConfigFile(String fileName) throws IOException {
File configFile = folder.newFile(fileName);
Writer streamWriter = new OutputStreamWriter(new FileOutputStream(configFile), Charset.forName("UTF-8"));
PrintWriter printWriter = new PrintWriter(streamWriter);
printWriter.println("format: xcode");
streamWriter.close();
printWriter.close();
return configFile;
}
private List<Map<String, Object>> getCCMessages() {
List<Map<String, Object>> violations = new ArrayList<>();
for (ViolationMessage msg : getExpectedMsgs()) {
Map<String, Object> violation = new HashMap<>();
Map<String, Object> location = new HashMap<>();
Map<String, Object> positions = new HashMap<>();
Map<String, Object> lines = new HashMap<>();
Map<String, Object> begin = new HashMap<>();
Map<String, Object> end = new HashMap<>();
if (msg.getColumnNumber() != 0) {
begin.put(Messages.LINE_KEY, msg.getLineNumber());
begin.put(Messages.COLUMN_KEY, msg.getColumnNumber());
end.put(Messages.LINE_KEY, msg.getLineNumber());
end.put(Messages.COLUMN_KEY, msg.getColumnNumber());
positions.put(Messages.BEGIN_KEY, begin);
positions.put(Messages.END_KEY, end);
location.put(Messages.POSITIONS_KEY, positions);
} else {
lines.put(Messages.BEGIN_KEY, msg.getLineNumber());
lines.put(Messages.END_KEY, msg.getLineNumber());
location.put(Messages.LINES_KEY, lines);
}
violation.put(Messages.TYPE_KEY, Messages.ISSUE_VALUE);
violation.put(Messages.CHECK_NAME_KEY, msg.getRule().getName());
violation.put(Messages.DESCRIPTION_KEY, msg.getMessage());
Map<String, Object> content = new HashMap<>();
content.put(Messages.BODY_KEY, msg.getRule().getExamples());
violation.put(Messages.CONTENT_KEY, content);
List<String> categories = new ArrayList<>();
categories.add(msg.getRule().getCategory());
violation.put(Messages.CATEGORIES_KEY, categories);
location.put(Messages.PATH_KEY, inputFile.getPath());
violation.put(Messages.LOCATION_KEY, location);
violation.put(Messages.REMEDIATION_POINTS_KEY, msg.getRule().getRemediationPoints());
violations.add(violation);
}
return violations;
}
private Map<String, Object> getHTMLMessages() throws IOException {
List<Map<String, Object>> violations = new ArrayList<>();
for (ViolationMessage msg : getExpectedMsgs()) {
Map<String, Object> violation = new HashMap<>();
Map<String, Object> location = new HashMap<>();
location.put(Messages.LINE_KEY, msg.getLineNumber());
location.put(Messages.COLUMN_KEY, msg.getColumnNumber());
violation.put(Messages.LOCATION_KEY, location);
switch (msg.getSeverity()) {
case ERROR:
violation.put(Messages.ERROR, true);
break;
case WARNING:
violation.put(Messages.WARNING, true);
break;
default:
break;
}
violation.put(Messages.RULE_KEY, Rules.UPPER_CAMEL_CASE.getName());
violation.put(Messages.MESSAGE_KEY, msg.getMessage());
violations.add(violation);
}
Map<String, Object> file = new HashMap<>();
file.put(Messages.PATH_KEY, "");
file.put(Messages.PARSED_KEY, true);
file.put(Messages.VIOLATIONS_KEY, violations);
file.put(Messages.NUM_VIOLATIONS_KEY,
Formatter.pluralize(violations.size(), Messages.SINGLE_VIOLATION_KEY, Messages.MULTI_VIOLATIONS_KEY));
List<Object> files = new ArrayList<>();
files.add(file);
Map<String, Object> expectedOutput = new LinkedHashMap<>();
expectedOutput.put(Messages.FILES_KEY, files);
expectedOutput.put(Messages.SUMMARY_KEY,
Formatter.formatSummary(1, 0, 0, violations.size()).replace(NEWLINE_PATTERN, ""));
expectedOutput.put(Messages.VERSION_LONG_OPT, new ConfigProperties().getVersion());
return expectedOutput;
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.apple.xcode.XCScheme;
import com.facebook.buck.apple.xcode.xcodeproj.PBXAggregateTarget;
import com.facebook.buck.apple.xcode.xcodeproj.PBXTarget;
import com.facebook.buck.apple.xcode.xcodeproj.ProductType;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.ActionGraph;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetGraphToActionGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.coercer.Either;
import com.facebook.buck.rules.coercer.SourceWithFlags;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.shell.GenruleDescription;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.timing.SettableFakeClock;
import com.facebook.buck.util.NullFileHashCache;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import org.hamcrest.FeatureMatcher;
import org.hamcrest.Matcher;
import org.hamcrest.core.AllOf;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class WorkspaceAndProjectGeneratorTest {
private ProjectFilesystem projectFilesystem;
private TargetGraph targetGraph;
private TargetNode<XcodeWorkspaceConfigDescription.Arg> workspaceNode;
private TargetNode<XcodeWorkspaceConfigDescription.Arg> workspaceWithExtraSchemeNode;
@Rule
public ExpectedException thrown = ExpectedException.none();
@Before
public void setUp() throws IOException {
projectFilesystem = new FakeProjectFilesystem(new SettableFakeClock(0, 0));
// Add support files needed by project generation to fake filesystem.
projectFilesystem.writeContentsToPath(
"",
Paths.get(ProjectGenerator.PATH_TO_ASSET_CATALOG_BUILD_PHASE_SCRIPT));
projectFilesystem.writeContentsToPath(
"",
Paths.get(ProjectGenerator.PATH_TO_ASSET_CATALOG_COMPILER));
setUpWorkspaceAndProjects();
}
private void setUpWorkspaceAndProjects() {
// Create the following dep tree:
// FooBin -has-test-> FooBinTest
// |
// V
// FooLib -has-test-> FooLibTest
// | |
// V V
// BarLib BazLib -has-test-> BazLibTest
// ^
// |
// QuxBin
//
//
// FooBin and BazLib use "tests" to specify their tests while FooLibTest uses source_under_test
// to specify that it is a test of FooLib.
//
// Calling generate on FooBin should pull in everything except BazLibTest and QuxBin
BuildTarget bazTestTarget = BuildTarget.builder("//baz", "xctest").build();
BuildTarget fooBinTestTarget = BuildTarget.builder("//foo", "bin-xctest").build();
BuildTarget fooTestTarget = BuildTarget.builder("//foo", "lib-xctest").build();
BuildTarget barLibTarget = BuildTarget.builder("//bar", "lib").build();
TargetNode<?> barLibNode = AppleLibraryBuilder
.createBuilder(barLibTarget)
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget fooLibTarget = BuildTarget.builder("//foo", "lib").build();
TargetNode<?> fooLibNode = AppleLibraryBuilder
.createBuilder(fooLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
.setTests(Optional.of(ImmutableSortedSet.of(fooTestTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget fooBinBinaryTarget = BuildTarget.builder("//foo", "binbinary").build();
TargetNode<?> fooBinBinaryNode = AppleBinaryBuilder
.createBuilder(fooBinBinaryTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooLibTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget fooBinTarget = BuildTarget.builder("//foo", "bin").build();
TargetNode<?> fooBinNode = AppleBundleBuilder
.createBuilder(fooBinTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.APP))
.setBinary(fooBinBinaryTarget)
.setTests(Optional.of(ImmutableSortedSet.of(fooBinTestTarget)))
.build();
BuildTarget bazLibTarget = BuildTarget.builder("//baz", "lib").build();
TargetNode<?> bazLibNode = AppleLibraryBuilder
.createBuilder(bazLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooLibTarget)))
.setTests(Optional.of(ImmutableSortedSet.of(bazTestTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
TargetNode<?> bazTestNode = AppleTestBuilder
.createBuilder(bazTestTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
TargetNode<?> fooTestNode = AppleTestBuilder
.createBuilder(fooTestTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setDeps(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
TargetNode<?> fooBinTestNode = AppleTestBuilder
.createBuilder(fooBinTestTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooBinTarget)))
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget quxBinTarget = BuildTarget.builder("//qux", "bin").build();
TargetNode<?> quxBinNode = AppleBinaryBuilder
.createBuilder(quxBinTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget workspaceTarget = BuildTarget.builder("//foo", "workspace").build();
workspaceNode = XcodeWorkspaceConfigBuilder
.createBuilder(workspaceTarget)
.setWorkspaceName(Optional.of("workspace"))
.setSrcTarget(Optional.of(fooBinTarget))
.build();
targetGraph = TargetGraphFactory.newInstance(
barLibNode,
fooLibNode,
fooBinBinaryNode,
fooBinNode,
bazLibNode,
bazTestNode,
fooTestNode,
fooBinTestNode,
quxBinNode,
workspaceNode);
}
@Test
public void workspaceAndProjectsShouldDiscoverDependenciesAndTests() throws IOException {
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspaceNode.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.of(ProjectGenerator.Option.INCLUDE_TESTS),
false /* combinedProject */,
false /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
ProjectGenerator fooProjectGenerator =
projectGenerators.get(Paths.get("foo"));
ProjectGenerator barProjectGenerator =
projectGenerators.get(Paths.get("bar"));
ProjectGenerator bazProjectGenerator =
projectGenerators.get(Paths.get("baz"));
ProjectGenerator quxProjectGenerator =
projectGenerators.get(Paths.get("qux"));
assertNull(
"The Qux project should not be generated at all",
quxProjectGenerator);
assertNotNull(
"The Foo project should have been generated",
fooProjectGenerator);
assertNotNull(
"The Bar project should have been generated",
barProjectGenerator);
assertNotNull(
"The Baz project should have been generated",
bazProjectGenerator);
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
fooProjectGenerator.getGeneratedProject(),
"//foo:bin");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
fooProjectGenerator.getGeneratedProject(),
"//foo:lib");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
fooProjectGenerator.getGeneratedProject(),
"//foo:bin-xctest");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
fooProjectGenerator.getGeneratedProject(),
"//foo:lib-xctest");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
barProjectGenerator.getGeneratedProject(),
"//bar:lib");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
bazProjectGenerator.getGeneratedProject(),
"//baz:lib");
}
@Test
public void combinedProjectShouldDiscoverDependenciesAndTests() throws IOException {
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspaceNode.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.of(ProjectGenerator.Option.INCLUDE_TESTS),
true /* combinedProject */,
false /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
assertTrue(
"Combined project generation should not populate the project generators map",
projectGenerators.isEmpty());
Optional<ProjectGenerator> projectGeneratorOptional = generator.getCombinedProjectGenerator();
assertTrue(
"Combined project generator should be present",
projectGeneratorOptional.isPresent());
ProjectGenerator projectGenerator = projectGeneratorOptional.get();
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerator.getGeneratedProject(),
"//foo:bin");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerator.getGeneratedProject(),
"//foo:lib");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerator.getGeneratedProject(),
"//foo:bin-xctest");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerator.getGeneratedProject(),
"//foo:lib-xctest");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerator.getGeneratedProject(),
"//bar:lib");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerator.getGeneratedProject(),
"//baz:lib");
}
@Test
public void workspaceAndProjectsWithoutTests() throws IOException {
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspaceNode.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.<ProjectGenerator.Option>of(),
false /* combinedProject */,
false /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
ProjectGenerator fooProjectGenerator =
projectGenerators.get(Paths.get("foo"));
ProjectGenerator barProjectGenerator =
projectGenerators.get(Paths.get("bar"));
ProjectGenerator bazProjectGenerator =
projectGenerators.get(Paths.get("baz"));
ProjectGenerator quxProjectGenerator =
projectGenerators.get(Paths.get("qux"));
assertNull(
"The Qux project should not be generated at all",
quxProjectGenerator);
assertNull(
"The Baz project should not be generated at all",
bazProjectGenerator);
assertNotNull(
"The Foo project should have been generated",
fooProjectGenerator);
assertNotNull(
"The Bar project should have been generated",
barProjectGenerator);
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
fooProjectGenerator.getGeneratedProject(),
"//foo:bin");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
fooProjectGenerator.getGeneratedProject(),
"//foo:lib");
ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
barProjectGenerator.getGeneratedProject(),
"//bar:lib");
}
@Test
public void requiredBuildTargets() throws IOException {
BuildTarget genruleTarget = BuildTarget.builder("//foo", "gen").build();
TargetNode<GenruleDescription.Arg> genrule = GenruleBuilder
.newGenruleBuilder(genruleTarget)
.setOut("source.m")
.build();
BuildTarget libraryTarget = BuildTarget.builder("//foo", "lib").build();
TargetNode<AppleNativeTargetDescriptionArg> library = AppleLibraryBuilder
.createBuilder(libraryTarget)
.setSrcs(
Optional.of(
ImmutableList.of(
SourceWithFlags.of(
new BuildTargetSourcePath(projectFilesystem, genruleTarget)))))
.build();
TargetNode<XcodeWorkspaceConfigDescription.Arg> workspaceNode = XcodeWorkspaceConfigBuilder
.createBuilder(BuildTarget.builder("//foo", "workspace").build())
.setSrcTarget(Optional.of(libraryTarget))
.build();
TargetGraph targetGraph = TargetGraphFactory.newInstance(genrule, library, workspaceNode);
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspaceNode.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.<ProjectGenerator.Option>of(),
false /* combinedProject */,
false /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
assertEquals(
generator.getRequiredBuildTargets(),
ImmutableSet.of(genruleTarget));
}
@Test
public void requiredBuildTargetsForCombinedProject() throws IOException {
BuildTarget genruleTarget = BuildTarget.builder("//foo", "gen").build();
TargetNode<GenruleDescription.Arg> genrule = GenruleBuilder
.newGenruleBuilder(genruleTarget)
.setOut("source.m")
.build();
BuildTarget libraryTarget = BuildTarget.builder("//foo", "lib").build();
TargetNode<AppleNativeTargetDescriptionArg> library = AppleLibraryBuilder
.createBuilder(libraryTarget)
.setSrcs(
Optional.of(
ImmutableList.of(
SourceWithFlags.of(
new BuildTargetSourcePath(projectFilesystem, genruleTarget)))))
.build();
TargetNode<XcodeWorkspaceConfigDescription.Arg> workspaceNode = XcodeWorkspaceConfigBuilder
.createBuilder(BuildTarget.builder("//foo", "workspace").build())
.setSrcTarget(Optional.of(libraryTarget))
.build();
TargetGraph targetGraph = TargetGraphFactory.newInstance(genrule, library, workspaceNode);
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspaceNode.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.<ProjectGenerator.Option>of(),
true /* combinedProject */,
false /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
assertEquals(
generator.getRequiredBuildTargets(),
ImmutableSet.of(genruleTarget));
}
@Test
public void buildWithBuck() throws IOException {
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspaceNode.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.of(ProjectGenerator.Option.INCLUDE_TESTS),
false /* combinedProject */,
true /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
ProjectGenerator fooProjectGenerator = projectGenerators.get(Paths.get("foo"));
assertThat(fooProjectGenerator, is(notNullValue()));
PBXTarget buildWithBuckTarget = null;
for (PBXTarget target : fooProjectGenerator.getGeneratedProject().getTargets()) {
if (target.getProductName() != null && target.getProductName().endsWith("-Buck")) {
buildWithBuckTarget = target;
break;
}
}
assertThat(buildWithBuckTarget, is(notNullValue()));
assertThat(buildWithBuckTarget, is(instanceOf(PBXAggregateTarget.class)));
String gid = buildWithBuckTarget.getGlobalID();
Optional<XCScheme> scheme = Iterables
.getOnlyElement(generator.getSchemeGenerators().values())
.getOutputScheme();
assertThat(scheme.isPresent(), is(true));
XCScheme.BuildableReference buildWithBuckBuildableReference = null;
for (XCScheme.BuildActionEntry buildActionEntry :
scheme.get().getBuildAction().get().getBuildActionEntries()) {
XCScheme.BuildableReference buildableReference = buildActionEntry.getBuildableReference();
if (buildableReference.getBlueprintIdentifier().equals(gid)) {
buildWithBuckBuildableReference = buildableReference;
}
}
assertThat(buildWithBuckBuildableReference, is(notNullValue()));
assertThat(buildWithBuckBuildableReference.getBuildableName(), equalTo("//foo:bin-Buck"));
}
@Test
public void combinedTestBundle() throws IOException {
TargetNode<AppleTestDescription.Arg> combinableTest1 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "combinableTest1").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleTestDescription.Arg> combinableTest2 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//bar", "combinableTest2").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleTestDescription.Arg> testMarkedUncombinable = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "testMarkedUncombinable").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(false))
.build();
TargetNode<AppleTestDescription.Arg> anotherTest = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "anotherTest").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.OCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleNativeTargetDescriptionArg> library = AppleLibraryBuilder
.createBuilder(BuildTarget.builder("//foo", "lib").build())
.setTests(
Optional.of(
ImmutableSortedSet.of(
combinableTest1.getBuildTarget(),
combinableTest2.getBuildTarget(),
testMarkedUncombinable.getBuildTarget(),
anotherTest.getBuildTarget())))
.build();
TargetNode<XcodeWorkspaceConfigDescription.Arg> workspace = XcodeWorkspaceConfigBuilder
.createBuilder(BuildTarget.builder("//foo", "workspace").build())
.setSrcTarget(Optional.of(library.getBuildTarget()))
.setWorkspaceName(Optional.of("workspace"))
.build();
TargetGraph targetGraph =
TargetGraphFactory.newInstance(
library,
combinableTest1,
combinableTest2,
testMarkedUncombinable,
anotherTest,
workspace);
WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
projectFilesystem,
targetGraph,
workspace.getConstructorArg(),
workspaceNode.getBuildTarget(),
ImmutableSet.of(ProjectGenerator.Option.INCLUDE_TESTS),
false /* combinedProject */,
false /* buildWithBuck */,
ImmutableList.<String>of(),
"BUCK",
getOutputPathOfNodeFunction(targetGraph));
generator.setGroupableTests(AppleBuildRules.filterGroupableTests(targetGraph.getNodes()));
Map<Path, ProjectGenerator> projectGenerators = Maps.newHashMap();
generator.generateWorkspaceAndDependentProjects(projectGenerators);
// Tests should become libraries
PBXTarget combinableTestTarget1 = ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerators.get(Paths.get("foo")).getGeneratedProject(),
"//foo:combinableTest1");
assertEquals(
"Test in the bundle should be built as a static library.",
ProductType.STATIC_LIBRARY,
combinableTestTarget1.getProductType());
PBXTarget combinableTestTarget2 = ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerators.get(Paths.get("bar")).getGeneratedProject(),
"//bar:combinableTest2");
assertEquals(
"Other test in the bundle should be built as a static library.",
ProductType.STATIC_LIBRARY,
combinableTestTarget2.getProductType());
// Test not bundled with any others should retain behavior.
PBXTarget notCombinedTest = ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerators.get(Paths.get("foo")).getGeneratedProject(),
"//foo:anotherTest");
assertEquals(
"Test that is not combined with other tests should also generate a test bundle.",
ProductType.STATIC_LIBRARY,
notCombinedTest.getProductType());
// Test not bundled with any others should retain behavior.
PBXTarget uncombinableTest = ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
projectGenerators.get(Paths.get("foo")).getGeneratedProject(),
"//foo:testMarkedUncombinable");
assertEquals(
"Test marked uncombinable should not be combined",
ProductType.UNIT_TEST,
uncombinableTest.getProductType());
// Combined test project should be generated with a combined test bundle.
PBXTarget combinedTestBundle = ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
generator.getCombinedTestsProjectGenerator().get().getGeneratedProject(),
"_BuckCombinedTest-xctest-0");
assertEquals(
"Combined test project target should be test bundle.",
ProductType.UNIT_TEST,
combinedTestBundle.getProductType());
// Main scheme should contain generated test targets.
XCScheme scheme = generator.getSchemeGenerators().get("workspace").getOutputScheme().get();
XCScheme.TestAction testAction = scheme.getTestAction().get();
assertThat(
"Combined test target should be a testable",
testAction.getTestables(),
hasItem(testableWithName("_BuckCombinedTest-xctest-0")));
assertThat(
"Uncombined but groupable test should be a testable",
testAction.getTestables(),
hasItem(testableWithName("_BuckCombinedTest-octest-1")));
assertThat(
"Bundled test library is not a testable",
testAction.getTestables(),
not(hasItem(testableWithName("combinableTest1"))));
XCScheme.BuildAction buildAction = scheme.getBuildAction().get();
assertThat(
"Bundled test library should be built for tests",
buildAction.getBuildActionEntries(),
hasItem(
withNameAndBuildingFor(
"combinableTest1",
equalTo(XCScheme.BuildActionEntry.BuildFor.TEST_ONLY))));
assertThat(
"Combined test library should be built for tests",
buildAction.getBuildActionEntries(),
hasItem(
withNameAndBuildingFor(
"_BuckCombinedTest-xctest-0",
equalTo(XCScheme.BuildActionEntry.BuildFor.TEST_ONLY))));
}
@Test
public void groupTests() {
TargetNode<AppleTestDescription.Arg> combinableTest1 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "test1").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleTestDescription.Arg> combinableTest2 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//bar", "test2").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
ImmutableMultimap.Builder<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMapBuilder = ImmutableMultimap.builder();
ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescription.Arg>>
ungroupedTestsMapBuilder = ImmutableSetMultimap.builder();
WorkspaceAndProjectGenerator.groupSchemeTests(
ImmutableSet.of(
combinableTest1,
combinableTest2),
ImmutableSetMultimap.of(
"workspace", combinableTest1,
"workspace", combinableTest2),
groupedTestsMapBuilder,
ungroupedTestsMapBuilder);
ImmutableMultimap<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMap = groupedTestsMapBuilder.build();
assertThat(
ungroupedTestsMapBuilder.build().entries(),
empty());
assertEquals(
ImmutableSortedSet.of(
combinableTest1,
combinableTest2),
ImmutableSortedSet.copyOf(
groupedTestsMap.values()));
ImmutableList<Map.Entry<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>>
groupedTests = ImmutableList.copyOf(groupedTestsMap.entries());
assertEquals(2, groupedTests.size());
assertEquals(groupedTests.get(0).getKey(), groupedTests.get(1).getKey());
}
@Test
public void doNotGroupTestsWithDifferentExtensions() {
TargetNode<AppleTestDescription.Arg> combinableTest1 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "test1").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleTestDescription.Arg> combinableTest2 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//bar", "test2").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.OCTEST))
.setCanGroup(Optional.of(true))
.build();
ImmutableMultimap.Builder<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMapBuilder = ImmutableMultimap.builder();
ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescription.Arg>>
ungroupedTestsMapBuilder = ImmutableSetMultimap.builder();
WorkspaceAndProjectGenerator.groupSchemeTests(
ImmutableSet.of(
combinableTest1,
combinableTest2),
ImmutableSetMultimap.of(
"workspace", combinableTest1,
"workspace", combinableTest2),
groupedTestsMapBuilder,
ungroupedTestsMapBuilder);
ImmutableMultimap<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMap = groupedTestsMapBuilder.build();
assertThat(
ungroupedTestsMapBuilder.build().entries(),
empty());
assertEquals(
ImmutableSortedSet.of(
combinableTest1,
combinableTest2),
ImmutableSortedSet.copyOf(
groupedTestsMap.values()));
ImmutableList<Map.Entry<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>>
groupedTests = ImmutableList.copyOf(groupedTestsMap.entries());
assertEquals(2, groupedTests.size());
assertNotEquals(groupedTests.get(0).getKey(), groupedTests.get(1).getKey());
}
@Test
public void doNotGroupTestsWithDifferentConfigs() {
ImmutableSortedMap<String, ImmutableMap<String, String>> configs = ImmutableSortedMap.of(
"Debug",
ImmutableMap.of("KEY", "VALUE"));
TargetNode<AppleTestDescription.Arg> combinableTest1 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "test1").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleTestDescription.Arg> combinableTest2 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//bar", "test2").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setConfigs(Optional.of(configs))
.setCanGroup(Optional.of(true))
.build();
ImmutableMultimap.Builder<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMapBuilder = ImmutableMultimap.builder();
ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescription.Arg>>
ungroupedTestsMapBuilder = ImmutableSetMultimap.builder();
WorkspaceAndProjectGenerator.groupSchemeTests(
ImmutableSet.of(
combinableTest1,
combinableTest2),
ImmutableSetMultimap.of(
"workspace", combinableTest1,
"workspace", combinableTest2),
groupedTestsMapBuilder,
ungroupedTestsMapBuilder);
ImmutableMultimap<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMap = groupedTestsMapBuilder.build();
assertThat(
ungroupedTestsMapBuilder.build().entries(),
empty());
assertEquals(
ImmutableSortedSet.of(
combinableTest1,
combinableTest2),
ImmutableSortedSet.copyOf(
groupedTestsMap.values()));
ImmutableList<Map.Entry<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>>
groupedTests = ImmutableList.copyOf(groupedTestsMap.entries());
assertEquals(2, groupedTests.size());
assertNotEquals(groupedTests.get(0).getKey(), groupedTests.get(1).getKey());
}
@Test
public void doNotGroupTestsWithDifferentLinkerFlags() {
TargetNode<AppleTestDescription.Arg> combinableTest1 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//foo", "test1").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setCanGroup(Optional.of(true))
.build();
TargetNode<AppleTestDescription.Arg> combinableTest2 = AppleTestBuilder
.createBuilder(BuildTarget.builder("//bar", "test2").build())
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setLinkerFlags(Optional.of(ImmutableList.of("-flag")))
.setExportedLinkerFlags(Optional.of(ImmutableList.of("-exported-flag")))
.setCanGroup(Optional.of(true))
.build();
ImmutableMultimap.Builder<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMapBuilder = ImmutableMultimap.builder();
ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescription.Arg>>
ungroupedTestsMapBuilder = ImmutableSetMultimap.builder();
WorkspaceAndProjectGenerator.groupSchemeTests(
ImmutableSet.of(
combinableTest1,
combinableTest2),
ImmutableSetMultimap.of(
"workspace", combinableTest1,
"workspace", combinableTest2),
groupedTestsMapBuilder,
ungroupedTestsMapBuilder);
ImmutableMultimap<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>
groupedTestsMap = groupedTestsMapBuilder.build();
assertThat(
ungroupedTestsMapBuilder.build().entries(),
empty());
assertEquals(
ImmutableSortedSet.of(
combinableTest1,
combinableTest2),
ImmutableSortedSet.copyOf(
groupedTestsMap.values()));
ImmutableList<Map.Entry<AppleTestBundleParamsKey, TargetNode<AppleTestDescription.Arg>>>
groupedTests = ImmutableList.copyOf(groupedTestsMap.entries());
assertEquals(2, groupedTests.size());
assertNotEquals(groupedTests.get(0).getKey(), groupedTests.get(1).getKey());
}
private void setUpWorkspaceWithSchemeAndProjects() {
// Create the following dep tree:
// FooBin -has-test-> FooBinTest
// |
// V
// FooLib -has-test-> FooLibTest
// | |
// V V
// BarLib BazLib -has-test-> BazLibTest
// ^
// |
// QuxBin
//
//
// FooBin and BazLib use "tests" to specify their tests while FooLibTest uses source_under_test
// to specify that it is a test of FooLib.
//
// Calling generate on FooBin should pull in everything except BazLibTest and QuxBin
BuildTarget bazTestTarget = BuildTarget.builder("//baz", "BazTest").build();
BuildTarget fooBinTestTarget = BuildTarget.builder("//foo", "FooBinTest").build();
BuildTarget fooTestTarget = BuildTarget.builder("//foo", "FooLibTest").build();
BuildTarget barLibTarget = BuildTarget.builder("//bar", "BarLib").build();
TargetNode<?> barLibNode = AppleLibraryBuilder
.createBuilder(barLibTarget)
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget fooLibTarget = BuildTarget.builder("//foo", "FooLib").build();
TargetNode<?> fooLibNode = AppleLibraryBuilder
.createBuilder(fooLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
.setTests(Optional.of(ImmutableSortedSet.of(fooTestTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget fooBinBinaryTarget = BuildTarget.builder("//foo", "FooBinBinary").build();
TargetNode<?> fooBinBinaryNode = AppleBinaryBuilder
.createBuilder(fooBinBinaryTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooLibTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget fooBinTarget = BuildTarget.builder("//foo", "FooBin").build();
TargetNode<?> fooBinNode = AppleBundleBuilder
.createBuilder(fooBinTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.APP))
.setBinary(fooBinBinaryTarget)
.setTests(Optional.of(ImmutableSortedSet.of(fooBinTestTarget)))
.build();
BuildTarget bazLibTarget = BuildTarget.builder("//baz", "BazLib").build();
TargetNode<?> bazLibNode = AppleLibraryBuilder
.createBuilder(bazLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooLibTarget)))
.setTests(Optional.of(ImmutableSortedSet.of(bazTestTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
TargetNode<?> bazTestNode = AppleTestBuilder
.createBuilder(bazTestTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
TargetNode<?> fooTestNode = AppleTestBuilder
.createBuilder(fooTestTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setDeps(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
TargetNode<?> fooBinTestNode = AppleTestBuilder
.createBuilder(fooBinTestTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooBinTarget)))
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget quxBinTarget = BuildTarget.builder("//qux", "QuxBin").build();
TargetNode<?> quxBinNode = AppleBinaryBuilder
.createBuilder(quxBinTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
.setUseBuckHeaderMaps(Optional.of(Boolean.TRUE))
.build();
BuildTarget workspaceTarget = BuildTarget.builder("//foo", "workspace").build();
workspaceNode = XcodeWorkspaceConfigBuilder
.createBuilder(workspaceTarget)
.setWorkspaceName(Optional.of("workspace"))
.setSrcTarget(Optional.of(fooBinTarget))
.build();
BuildTarget workspaceWithExtraSchemeTarget = BuildTarget.builder("//qux", "workspace").build();
workspaceWithExtraSchemeNode = XcodeWorkspaceConfigBuilder
.createBuilder(workspaceWithExtraSchemeTarget)
.setWorkspaceName(Optional.of("workspace"))
.setSrcTarget(Optional.of(quxBinTarget))
.setExtraSchemes(Optional.of(ImmutableSortedMap.of("FooScheme", workspaceTarget)))
.build();
targetGraph = TargetGraphFactory.newInstance(
barLibNode,
fooLibNode,
fooBinBinaryNode,
fooBinNode,
bazLibNode,
bazTestNode,
fooTestNode,
fooBinTestNode,
quxBinNode,
workspaceNode,
workspaceWithExtraSchemeNode);
}
  @Test
  public void targetsForWorkspaceWithExtraSchemes() throws IOException {
    setUpWorkspaceWithSchemeAndProjects();
    // Drive generation from the //qux workspace config, which declares
    // "FooScheme" as an extra scheme sourced from the //foo workspace target.
    WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
        projectFilesystem,
        targetGraph,
        workspaceWithExtraSchemeNode.getConstructorArg(),
        workspaceWithExtraSchemeNode.getBuildTarget(),
        ImmutableSet.of(ProjectGenerator.Option.INCLUDE_TESTS),
        false /* combinedProject */,
        false /* buildWithBuck */,
        ImmutableList.<String>of(),
        "BUCK",
        getOutputPathOfNodeFunction(targetGraph));
    Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
    generator.generateWorkspaceAndDependentProjects(projectGenerators);
    // A project should be generated for every cell directory that contributed
    // targets to either the main scheme or the extra scheme.
    ProjectGenerator fooProjectGenerator =
        projectGenerators.get(Paths.get("foo"));
    ProjectGenerator barProjectGenerator =
        projectGenerators.get(Paths.get("bar"));
    ProjectGenerator bazProjectGenerator =
        projectGenerators.get(Paths.get("baz"));
    ProjectGenerator quxProjectGenerator =
        projectGenerators.get(Paths.get("qux"));
    assertNotNull(
        "The Qux project should have been generated",
        quxProjectGenerator);
    assertNotNull(
        "The Foo project should have been generated",
        fooProjectGenerator);
    assertNotNull(
        "The Bar project should have been generated",
        barProjectGenerator);
    assertNotNull(
        "The Baz project should have been generated",
        bazProjectGenerator);
    // Each generated project contains the Xcode targets for the build targets
    // declared in its cell.
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        fooProjectGenerator.getGeneratedProject(),
        "//foo:FooBin");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        fooProjectGenerator.getGeneratedProject(),
        "//foo:FooLib");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        fooProjectGenerator.getGeneratedProject(),
        "//foo:FooBinTest");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        fooProjectGenerator.getGeneratedProject(),
        "//foo:FooLibTest");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        barProjectGenerator.getGeneratedProject(),
        "//bar:BarLib");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        bazProjectGenerator.getGeneratedProject(),
        "//baz:BazLib");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        quxProjectGenerator.getGeneratedProject(),
        "//qux:QuxBin");
    // The main "workspace" scheme builds only QuxBin's transitive deps, in
    // dependency order (BarLib before QuxBin).
    XCScheme mainScheme = generator.getSchemeGenerators().get("workspace").getOutputScheme().get();
    XCScheme.BuildAction mainSchemeBuildAction = mainScheme.getBuildAction().get();
    // I wish we could use Hamcrest contains() here, but we hit
    // https://code.google.com/p/hamcrest/issues/detail?id=190 if we do that.
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries(),
        hasSize(2));
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries().get(0),
        withNameAndBuildingFor(
            "BarLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries().get(1),
        withNameAndBuildingFor(
            "QuxBin",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    // The extra "FooScheme" mirrors the //foo workspace: build targets first
    // (DEFAULT), then test-only targets, each in dependency order.
    XCScheme fooScheme = generator.getSchemeGenerators().get("FooScheme").getOutputScheme().get();
    XCScheme.BuildAction fooSchemeBuildAction = fooScheme.getBuildAction().get();
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries(),
        hasSize(6));
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries().get(0),
        withNameAndBuildingFor(
            "BarLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries().get(1),
        withNameAndBuildingFor(
            "FooLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries().get(2),
        withNameAndBuildingFor(
            "FooBin",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries().get(3),
        withNameAndBuildingFor(
            "BazLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.TEST_ONLY)));
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries().get(4),
        withNameAndBuildingFor(
            "FooLibTest",
            equalTo(XCScheme.BuildActionEntry.BuildFor.TEST_ONLY)));
    assertThat(
        fooSchemeBuildAction.getBuildActionEntries().get(5),
        withNameAndBuildingFor(
            "FooBinTest",
            equalTo(XCScheme.BuildActionEntry.BuildFor.TEST_ONLY)));
  }
  @Test
  public void targetsForWorkspaceWithExtraTargets() throws IOException {
    // FooLib is the workspace's src target; BazLib (which depends on BarLib)
    // is pulled in only through setExtraTargets.
    BuildTarget fooLibTarget = BuildTarget.builder("//foo", "FooLib").build();
    TargetNode<AppleNativeTargetDescriptionArg> fooLib = AppleLibraryBuilder
        .createBuilder(fooLibTarget)
        .build();
    BuildTarget barLibTarget = BuildTarget.builder("//bar", "BarLib").build();
    TargetNode<AppleNativeTargetDescriptionArg> barLib = AppleLibraryBuilder
        .createBuilder(barLibTarget)
        .build();
    BuildTarget bazLibTarget = BuildTarget.builder("//baz", "BazLib").build();
    TargetNode<AppleNativeTargetDescriptionArg> bazLib = AppleLibraryBuilder
        .createBuilder(bazLibTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
        .build();
    TargetNode<XcodeWorkspaceConfigDescription.Arg> workspaceNode = XcodeWorkspaceConfigBuilder
        .createBuilder(BuildTarget.builder("//foo", "workspace").build())
        .setWorkspaceName(Optional.of("workspace"))
        .setSrcTarget(Optional.of(fooLibTarget))
        .setExtraTargets(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
        .build();
    TargetGraph targetGraph = TargetGraphFactory.newInstance(
        fooLib, barLib, bazLib, workspaceNode);
    WorkspaceAndProjectGenerator generator = new WorkspaceAndProjectGenerator(
        projectFilesystem,
        targetGraph,
        workspaceNode.getConstructorArg(),
        workspaceNode.getBuildTarget(),
        ImmutableSet.of(ProjectGenerator.Option.INCLUDE_TESTS),
        false /* combinedProject */,
        false /* buildWithBuck */,
        ImmutableList.<String>of(),
        "BUCK",
        getOutputPathOfNodeFunction(targetGraph));
    Map<Path, ProjectGenerator> projectGenerators = new HashMap<>();
    generator.generateWorkspaceAndDependentProjects(projectGenerators);
    // Extra targets (and their transitive deps) should get projects too, not
    // just the src target's cell.
    ProjectGenerator fooProjectGenerator =
        projectGenerators.get(Paths.get("foo"));
    ProjectGenerator barProjectGenerator =
        projectGenerators.get(Paths.get("bar"));
    ProjectGenerator bazProjectGenerator =
        projectGenerators.get(Paths.get("baz"));
    assertNotNull(
        "The Foo project should have been generated",
        fooProjectGenerator);
    assertNotNull(
        "The Bar project should have been generated",
        barProjectGenerator);
    assertNotNull(
        "The Baz project should have been generated",
        bazProjectGenerator);
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        fooProjectGenerator.getGeneratedProject(),
        "//foo:FooLib");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        barProjectGenerator.getGeneratedProject(),
        "//bar:BarLib");
    ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget(
        bazProjectGenerator.getGeneratedProject(),
        "//baz:BazLib");
    // Scheme build order: src target first, then each extra target preceded by
    // its dependencies (BarLib before BazLib).
    XCScheme mainScheme = generator.getSchemeGenerators().get("workspace").getOutputScheme().get();
    XCScheme.BuildAction mainSchemeBuildAction = mainScheme.getBuildAction().get();
    // I wish we could use Hamcrest contains() here, but we hit
    // https://code.google.com/p/hamcrest/issues/detail?id=190 if we do that.
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries(),
        hasSize(3));
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries().get(0),
        withNameAndBuildingFor(
            "FooLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries().get(1),
        withNameAndBuildingFor(
            "BarLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
    assertThat(
        mainSchemeBuildAction.getBuildActionEntries().get(2),
        withNameAndBuildingFor(
            "BazLib",
            equalTo(XCScheme.BuildActionEntry.BuildFor.DEFAULT)));
  }
private Matcher<XCScheme.BuildActionEntry> buildActionEntryWithName(String name) {
return new FeatureMatcher<XCScheme.BuildActionEntry, String>(
equalTo(name), "BuildActionEntry named", "name") {
@Override
protected String featureValueOf(XCScheme.BuildActionEntry buildActionEntry) {
return buildActionEntry.getBuildableReference().blueprintName;
}
};
}
private Matcher<XCScheme.TestableReference> testableWithName(String name) {
return new FeatureMatcher<XCScheme.TestableReference, String>(
equalTo(name), "TestableReference named", "name") {
@Override
protected String featureValueOf(XCScheme.TestableReference testableReference) {
return testableReference.getBuildableReference().blueprintName;
}
};
}
private Matcher<XCScheme.BuildActionEntry> withNameAndBuildingFor(
String name,
Matcher<? super EnumSet<XCScheme.BuildActionEntry.BuildFor>> buildFor) {
return AllOf.allOf(
buildActionEntryWithName(name),
new FeatureMatcher<
XCScheme.BuildActionEntry,
EnumSet<XCScheme.BuildActionEntry.BuildFor>>(buildFor, "Building for", "BuildFor") {
@Override
protected EnumSet<XCScheme.BuildActionEntry.BuildFor> featureValueOf(
XCScheme.BuildActionEntry entry) {
return entry.getBuildFor();
}
});
}
private Function<TargetNode<?>, Path> getOutputPathOfNodeFunction(final TargetGraph targetGraph) {
return new Function<TargetNode<?>, Path>() {
@Nullable
@Override
public Path apply(TargetNode<?> input) {
TargetGraphToActionGraph targetGraphToActionGraph = new TargetGraphToActionGraph(
BuckEventBusFactory.newInstance(),
new BuildTargetNodeToBuildRuleTransformer(),
new NullFileHashCache());
TargetGraph subgraph = targetGraph.getSubgraph(
ImmutableSet.of(
input));
ActionGraph actionGraph = Preconditions.checkNotNull(
targetGraphToActionGraph.apply(subgraph));
BuildRule rule = Preconditions.checkNotNull(
actionGraph.findBuildRuleByTarget(input.getBuildTarget()));
return rule.getPathToOutput();
}
};
}
}
| |
package com.viesis.viescraft.client.gui.airship.visual.balloon;
import java.awt.Color;
import java.io.IOException;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL11;
import com.viesis.viescraft.api.GuiVC;
import com.viesis.viescraft.api.References;
import com.viesis.viescraft.api.util.Keybinds;
import com.viesis.viescraft.client.gui.GuiButtonGeneralVC;
import com.viesis.viescraft.common.entity.airshipcolors.EntityAirshipBaseVC;
import com.viesis.viescraft.common.entity.airshipcolors.containers.all.ContainerAirshipAppearance;
import com.viesis.viescraft.init.InitItemsVC;
import com.viesis.viescraft.network.NetworkHandler;
import com.viesis.viescraft.network.server.airship.visual.MessageGuiVisualMenuBalloon;
import com.viesis.viescraft.network.server.airship.visual.balloon.MessageHelperGuiVisualMenuBalloonTier1Pg1;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
/**
 * Airship appearance screen (balloon, tier 1, page 1): lets the player pick
 * one of twelve balloon visuals, reset to the default, or navigate back to
 * the balloon visual menu. Selections are synced to the server via
 * {@link MessageHelperGuiVisualMenuBalloonTier1Pg1}.
 */
public class GuiVisualMenuBalloonTier1Pg1 extends GuiContainer {

    // Kept for parity with the other appearance screens; not read in this
    // class itself.
    private IInventory playerInv;
    /** The airship whose appearance is being edited; rendered as a live preview. */
    private EntityAirshipBaseVC airship;
    /**
     * Selected balloon appearance id (0 = reset/default, 1..12 = the visual
     * buttons below). Static because the server-sync message handler reads it.
     */
    public static int metaInfo;

    public GuiVisualMenuBalloonTier1Pg1(IInventory playerInv, EntityAirshipBaseVC airshipIn)
    {
        super(new ContainerAirshipAppearance(playerInv, airshipIn));
        this.playerInv = playerInv;
        this.airship = airshipIn;
        this.xSize = 176;
        this.ySize = 202;
    }

    /**
     * Adds the buttons (and other controls) to the screen in question.
     */
    @Override
    public void initGui()
    {
        super.initGui();
        buttonList.clear();
        Keyboard.enableRepeatEvents(true);

        // Vertical offset of the first selection button; each row is 14px tall.
        int startPlace = 25;

        GuiVC.button502 = new GuiButtonGeneralVC(502, this.guiLeft + 110, this.guiTop + 143, 48, 14, References.localNameVC("vc.button.reset"));
        GuiVC.button503 = new GuiButtonGeneralVC(503, this.guiLeft + 126, this.guiTop - 13, 48, 14, References.localNameVC("vc.button.next"));
        GuiVC.button505 = new GuiButtonGeneralVC(505, this.guiLeft + 125, this.guiTop + 177, 40, 14, References.localNameVC("vc.button.back"));
        GuiVC.buttonB01 = new GuiButtonGeneralVC( 1, this.guiLeft + 15, this.guiTop + startPlace + (14 * 0), 76, 14, References.localNameVC("vc.enum.visualballoon.1"));
        GuiVC.buttonB02 = new GuiButtonGeneralVC( 2, this.guiLeft + 15, this.guiTop + startPlace + (14 * 1), 76, 14, References.localNameVC("vc.enum.visualballoon.2"));
        GuiVC.buttonB03 = new GuiButtonGeneralVC( 3, this.guiLeft + 15, this.guiTop + startPlace + (14 * 2), 76, 14, References.localNameVC("vc.enum.visualballoon.3"));
        GuiVC.buttonB04 = new GuiButtonGeneralVC( 4, this.guiLeft + 15, this.guiTop + startPlace + (14 * 3), 76, 14, References.localNameVC("vc.enum.visualballoon.4"));
        GuiVC.buttonB05 = new GuiButtonGeneralVC( 5, this.guiLeft + 15, this.guiTop + startPlace + (14 * 4), 76, 14, References.localNameVC("vc.enum.visualballoon.5"));
        GuiVC.buttonB06 = new GuiButtonGeneralVC( 6, this.guiLeft + 15, this.guiTop + startPlace + (14 * 5), 76, 14, References.localNameVC("vc.enum.visualballoon.6"));
        GuiVC.buttonB07 = new GuiButtonGeneralVC( 7, this.guiLeft + 15, this.guiTop + startPlace + (14 * 6), 76, 14, References.localNameVC("vc.enum.visualballoon.7"));
        GuiVC.buttonB08 = new GuiButtonGeneralVC( 8, this.guiLeft + 15, this.guiTop + startPlace + (14 * 7), 76, 14, References.localNameVC("vc.enum.visualballoon.8"));
        GuiVC.buttonB09 = new GuiButtonGeneralVC( 9, this.guiLeft + 15, this.guiTop + startPlace + (14 * 8), 76, 14, References.localNameVC("vc.enum.visualballoon.9"));
        GuiVC.buttonB10 = new GuiButtonGeneralVC(10, this.guiLeft + 15, this.guiTop + startPlace + (14 * 9), 76, 14, References.localNameVC("vc.enum.visualballoon.10"));
        GuiVC.buttonB11 = new GuiButtonGeneralVC(11, this.guiLeft + 15, this.guiTop + startPlace + (14 * 10), 76, 14, References.localNameVC("vc.enum.visualballoon.11"));
        GuiVC.buttonB12 = new GuiButtonGeneralVC(12, this.guiLeft + 15, this.guiTop + startPlace + (14 * 11), 76, 14, References.localNameVC("vc.enum.visualballoon.12"));
        this.buttonList.add(GuiVC.button502);
        //this.buttonList.add(GuiVC.button503);
        this.buttonList.add(GuiVC.button505);
        this.buttonList.add(GuiVC.buttonB01);
        this.buttonList.add(GuiVC.buttonB02);
        this.buttonList.add(GuiVC.buttonB03);
        this.buttonList.add(GuiVC.buttonB04);
        this.buttonList.add(GuiVC.buttonB05);
        this.buttonList.add(GuiVC.buttonB06);
        // Buttons 7-12 are constructed above but intentionally not shown on
        // this page (only six visuals are selectable here).
        //this.buttonList.add(GuiVC.buttonB07);
        //this.buttonList.add(GuiVC.buttonB08);
        //this.buttonList.add(GuiVC.buttonB09);
        //this.buttonList.add(GuiVC.buttonB10);
        //this.buttonList.add(GuiVC.buttonB11);
        //this.buttonList.add(GuiVC.buttonB12);
    }

    /**
     * Called by the controls from the buttonList when activated. (Mouse pressed for buttons)
     */
    @Override
    protected void actionPerformed(GuiButton parButton)
    {
        if (parButton.id == 502)
        {
            // Reset: clear the selection, then sync it to the server.
            metaInfo = 0;
            NetworkHandler.sendToServer(new MessageHelperGuiVisualMenuBalloonTier1Pg1());
        }
        if (parButton.id == 505)
        {
            // Back to the balloon visual menu.
            NetworkHandler.sendToServer(new MessageGuiVisualMenuBalloon());
        }
        if (parButton.id <= 450)
        {
            // Selection buttons use ids 1..12; navigation buttons (5xx) are
            // excluded by the id ceiling.
            metaInfo = parButton.id;
            NetworkHandler.sendToServer(new MessageHelperGuiVisualMenuBalloonTier1Pg1());
        }
        // Rebuild the screen so button state reflects the new selection.
        this.buttonList.clear();
        this.initGui();
        this.updateScreen();
    }

    @Override
    protected void drawGuiContainerBackgroundLayer(float partialTicks, int mouseX, int mouseY)
    {
        GlStateManager.color(1.0f, 1.0f, 1.0f, 1.0f);
        this.mc.getTextureManager().bindTexture(new ResourceLocation(References.MOD_ID + ":" + "textures/gui/container_airship_menu_visual_frame2.png"));
        this.drawTexturedModalRect(this.guiLeft, this.guiTop, 0, 0, this.xSize, this.ySize);
        // Title banner above the frame: black border, light-gray fill, black inset.
        this.drawRect(this.guiLeft + 49, this.guiTop - 17, this.guiLeft + 127, this.guiTop, Color.BLACK.getRGB());
        this.drawRect(this.guiLeft + 50, this.guiTop - 16, this.guiLeft + 126, this.guiTop, Color.LIGHT_GRAY.getRGB());
        this.drawRect(this.guiLeft + 52, this.guiTop - 14, this.guiLeft + 124, this.guiTop, Color.BLACK.getRGB());
        int i = this.guiLeft;
        int j = this.guiTop;
        // Live preview of the airship in the right-hand pane.
        drawEntityOnScreen(i + 134, j + 134, 15, mouseX, mouseY, this.airship);
    }

    @Override
    protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY)
    {
        GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
        this.fontRenderer.drawString(References.localNameVC("vc.enum.tier.1"), 120, 29, 16777215);
        this.fontRenderer.drawString(References.localNameVC("vc.main.appearance"), 58, -10, 65521);
        // Icon indicating which upgrade item this page customizes.
        this.drawItemStack(new ItemStack(InitItemsVC.UPGRADE_BALLOON, 1, 1), 46, 8, "");
    }

    /**
     * Closes the screen on Escape, the mod's inventory keybind, or the vanilla
     * inventory keybind.
     */
    @Override
    protected void keyTyped(char typedChar, int keyCode) throws IOException
    {
        if (keyCode == 1
                || keyCode == Keybinds.vcInventory.getKeyCode()
                || this.mc.gameSettings.keyBindInventory.isActiveAndMatches(keyCode))
        {
            this.mc.player.closeScreen();
        }
    }

    /** Closes the screen if the player dies or dismounts the airship. */
    @Override
    public void updateScreen()
    {
        super.updateScreen();
        if (!this.mc.player.isEntityAlive() || this.mc.player.isDead
                || !this.mc.player.isRiding())
        {
            this.mc.player.closeScreen();
        }
    }

    /**
     * Draws an ItemStack at GUI coordinates (x, y). The altText parameter is
     * retained for signature parity with vanilla's drawItemStack but is not
     * rendered by this screen.
     */
    private void drawItemStack(ItemStack stack, int x, int y, String altText)
    {
        // NOTE(review): the +32 z-translate is not undone here (mirrors
        // vanilla's drawItemStack); confirm callers manage matrix state if
        // this is reused outside the foreground layer.
        GlStateManager.translate(0.0F, 0.0F, 32.0F);
        this.zLevel = 200.0F;
        this.itemRender.zLevel = 200.0F;
        this.itemRender.renderItemAndEffectIntoGUI(stack, x, y);
        this.zLevel = 0.0F;
        this.itemRender.zLevel = 0.0F;
    }

    /**
     * Draws an entity on the screen looking toward the cursor.
     */
    public static void drawEntityOnScreen(int posX, int posY, int scale, float mouseX, float mouseY, Entity entityIn)
    {
        GlStateManager.pushMatrix();
        {
            GL11.glEnable(GL11.GL_CULL_FACE);
            GL11.glCullFace(GL11.GL_FRONT);
            GlStateManager.translate(posX, posY, 100.0F);
            GlStateManager.scale((float)(scale), (float)scale, (float)scale);
            /////Flips the model right side up.
            GlStateManager.rotate(200.0F, 0.0F, 0.0F, 1.0F);
            GlStateManager.rotate(45.0F, 0.0F, 1.0F, 0.0F);
            GlStateManager.rotate(30.0F, 1.0F, 0.0F, 0.0F);
            RenderHelper.disableStandardItemLighting();
            RenderManager rendermanager = Minecraft.getMinecraft().getRenderManager();
            rendermanager.renderMultipass(entityIn, 0);
            rendermanager.setPlayerViewY(180.0F);
            rendermanager.setRenderShadow(false);
            rendermanager.renderEntity(entityIn, 0.0D, 0.0D, 0.0D, 0.0F, 1.0F, false);
            rendermanager.setRenderShadow(true);
            GL11.glCullFace(GL11.GL_BACK);
            GL11.glDisable(GL11.GL_CULL_FACE);
        }
        GlStateManager.popMatrix();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.compile;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.http.annotation.Immutable;
import org.apache.phoenix.compile.OrderPreservingTracker.Ordering;
import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.exception.SQLExceptionInfo;
import org.apache.phoenix.execute.TupleProjector;
import org.apache.phoenix.expression.CoerceExpression;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.parse.AliasedNode;
import org.apache.phoenix.parse.DistinctCountParseNode;
import org.apache.phoenix.parse.HintNode.Hint;
import org.apache.phoenix.parse.ParseNode;
import org.apache.phoenix.parse.SelectStatement;
import org.apache.phoenix.schema.AmbiguousColumnException;
import org.apache.phoenix.schema.ColumnNotFoundException;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PVarbinary;
import org.apache.phoenix.util.IndexUtil;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
/**
*
* Validates GROUP BY clause and builds a {@link GroupBy} instance to encapsulate the
* group by expressions.
*
*
* @since 0.1
*/
public class GroupByCompiler {
@Immutable
public static class GroupBy {
private final List<Expression> expressions;
private final List<Expression> keyExpressions;
private final boolean isOrderPreserving;
private final int orderPreservingColumnCount;
private final boolean isUngroupedAggregate;
public static final GroupByCompiler.GroupBy EMPTY_GROUP_BY = new GroupBy(new GroupByBuilder()) {
@Override
public GroupBy compile(StatementContext context, TupleProjector tupleProjector) throws SQLException {
return this;
}
@Override
public void explain(List<String> planSteps, Integer limit) {
}
@Override
public String getScanAttribName() {
return null;
}
};
public static final GroupByCompiler.GroupBy UNGROUPED_GROUP_BY = new GroupBy(new GroupByBuilder().setIsOrderPreserving(true).setIsUngroupedAggregate(true)) {
@Override
public GroupBy compile(StatementContext context, TupleProjector tupleProjector) throws SQLException {
return this;
}
@Override
public void explain(List<String> planSteps, Integer limit) {
planSteps.add(" SERVER AGGREGATE INTO SINGLE ROW");
}
@Override
public String getScanAttribName() {
return BaseScannerRegionObserver.UNGROUPED_AGG;
}
};
private GroupBy(GroupByBuilder builder) {
this.expressions = ImmutableList.copyOf(builder.expressions);
this.keyExpressions = builder.expressions == builder.keyExpressions ?
this.expressions : builder.keyExpressions == null ? null :
ImmutableList.copyOf(builder.keyExpressions);
this.isOrderPreserving = builder.isOrderPreserving;
this.orderPreservingColumnCount = builder.orderPreservingColumnCount;
this.isUngroupedAggregate = builder.isUngroupedAggregate;
}
public List<Expression> getExpressions() {
return expressions;
}
public List<Expression> getKeyExpressions() {
return keyExpressions;
}
public String getScanAttribName() {
if (isUngroupedAggregate) {
return BaseScannerRegionObserver.UNGROUPED_AGG;
} else if (isOrderPreserving) {
return BaseScannerRegionObserver.KEY_ORDERED_GROUP_BY_EXPRESSIONS;
} else {
return BaseScannerRegionObserver.UNORDERED_GROUP_BY_EXPRESSIONS;
}
}
public boolean isEmpty() {
return expressions.isEmpty();
}
public boolean isOrderPreserving() {
return isOrderPreserving;
}
public boolean isUngroupedAggregate() {
return isUngroupedAggregate;
}
public int getOrderPreservingColumnCount() {
return orderPreservingColumnCount;
}
public GroupBy compile(StatementContext context, TupleProjector tupleProjector) throws SQLException {
boolean isOrderPreserving = this.isOrderPreserving;
int orderPreservingColumnCount = 0;
if (isOrderPreserving) {
OrderPreservingTracker tracker = new OrderPreservingTracker(context, GroupBy.EMPTY_GROUP_BY, Ordering.UNORDERED, expressions.size(), tupleProjector);
for (int i = 0; i < expressions.size(); i++) {
Expression expression = expressions.get(i);
tracker.track(expression);
}
// This is true if the GROUP BY is composed of only PK columns. We further check here that
// there are no "gaps" in the PK columns positions used (i.e. we start with the first PK
// column and use each subsequent one in PK order).
isOrderPreserving = tracker.isOrderPreserving();
orderPreservingColumnCount = tracker.getOrderPreservingColumnCount();
if(isOrderPreserving) {
//reorder the groupby expressions following pk columns
List<Expression> newExpressions = tracker.getExpressionsFromOrderPreservingTrackInfos();
assert newExpressions.size() == expressions.size();
return new GroupBy.GroupByBuilder(this)
.setIsOrderPreserving(isOrderPreserving)
.setOrderPreservingColumnCount(orderPreservingColumnCount)
.setExpressions(newExpressions)
.setKeyExpressions(newExpressions)
.build();
}
}
if (isUngroupedAggregate) {
return new GroupBy.GroupByBuilder(this)
.setIsOrderPreserving(isOrderPreserving)
.setOrderPreservingColumnCount(orderPreservingColumnCount)
.build();
}
List<Expression> expressions = Lists.newArrayListWithExpectedSize(this.expressions.size());
List<Expression> keyExpressions = expressions;
List<Pair<Integer,Expression>> groupBys = Lists.newArrayListWithExpectedSize(this.expressions.size());
for (int i = 0; i < this.expressions.size(); i++) {
Expression expression = this.expressions.get(i);
groupBys.add(new Pair<Integer,Expression>(i,expression));
}
/*
* If we're not ordered along the PK axis, our coprocessor needs to collect all distinct groups within
* a region, sort them, and hold on to them until the scan completes.
* Put fixed length nullables at the end, so that we can represent null by the absence of the trailing
* value in the group by key. If there is more than one, we'll need to convert the ones not at the end
* into a Decimal so that we can use an empty byte array as our representation for null (which correctly
* maintains the sort order). We convert the Decimal back to the appropriate type (Integer or Long) when
* it's retrieved from the result set.
*
* More specifically, order into the following buckets:
* 1) non nullable fixed width
* 2) variable width
* 3) nullable fixed width
* Within each bucket, order based on the column position in the schema. Putting the fixed width values
* in the beginning optimizes access to subsequent values.
*/
Collections.sort(groupBys, new Comparator<Pair<Integer,Expression>>() {
@Override
public int compare(Pair<Integer,Expression> gb1, Pair<Integer,Expression> gb2) {
Expression e1 = gb1.getSecond();
Expression e2 = gb2.getSecond();
PDataType t1 = e1.getDataType();
PDataType t2 = e2.getDataType();
boolean isFixed1 = t1.isFixedWidth();
boolean isFixed2 = t2.isFixedWidth();
boolean isFixedNullable1 = e1.isNullable() &&isFixed1;
boolean isFixedNullable2 = e2.isNullable() && isFixed2;
boolean oae1 = onlyAtEndType(e1);
boolean oae2 = onlyAtEndType(e2);
if (oae1 == oae2) {
if (isFixedNullable1 == isFixedNullable2) {
if (isFixed1 == isFixed2) {
// Not strictly necessary, but forces the order to match the schema
// column order (with PK columns before value columns).
//return o1.getColumnPosition() - o2.getColumnPosition();
return gb1.getFirst() - gb2.getFirst();
} else if (isFixed1) {
return -1;
} else {
return 1;
}
} else if (isFixedNullable1) {
return 1;
} else {
return -1;
}
} else if (oae1) {
return 1;
} else {
return -1;
}
}
});
boolean foundOnlyAtEndType = false;
for (Pair<Integer,Expression> groupBy : groupBys) {
Expression e = groupBy.getSecond();
if (onlyAtEndType(e)) {
if (foundOnlyAtEndType) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.UNSUPPORTED_GROUP_BY_EXPRESSIONS)
.setMessage(e.toString()).build().buildException();
}
foundOnlyAtEndType = true;
}
expressions.add(e);
}
for (int i = expressions.size()-2; i >= 0; i--) {
Expression expression = expressions.get(i);
PDataType keyType = getGroupByDataType(expression);
if (keyType == expression.getDataType()) {
continue;
}
// Copy expressions only when keyExpressions will be different than expressions
if (keyExpressions == expressions) {
keyExpressions = new ArrayList<Expression>(expressions);
}
// Wrap expression in an expression that coerces the expression to the required type..
// This is done so that we have a way of expressing null as an empty key when more
// than one fixed and nullable types are used in a group by clause
keyExpressions.set(i, CoerceExpression.create(expression, keyType));
}
GroupBy groupBy = new GroupBy.GroupByBuilder().setIsOrderPreserving(isOrderPreserving).setExpressions(expressions).setKeyExpressions(keyExpressions).build();
return groupBy;
}
public static class GroupByBuilder {
private boolean isOrderPreserving;
private int orderPreservingColumnCount;
private List<Expression> expressions = Collections.emptyList();
private List<Expression> keyExpressions = Collections.emptyList();
private boolean isUngroupedAggregate;
public GroupByBuilder() {
}
public GroupByBuilder(GroupBy groupBy) {
this.isOrderPreserving = groupBy.isOrderPreserving;
this.orderPreservingColumnCount = groupBy.orderPreservingColumnCount;
this.expressions = groupBy.expressions;
this.keyExpressions = groupBy.keyExpressions;
this.isUngroupedAggregate = groupBy.isUngroupedAggregate;
}
public GroupByBuilder setExpressions(List<Expression> expressions) {
this.expressions = expressions;
return this;
}
public GroupByBuilder setKeyExpressions(List<Expression> keyExpressions) {
this.keyExpressions = keyExpressions;
return this;
}
public GroupByBuilder setIsOrderPreserving(boolean isOrderPreserving) {
this.isOrderPreserving = isOrderPreserving;
return this;
}
public GroupByBuilder setIsUngroupedAggregate(boolean isUngroupedAggregate) {
this.isUngroupedAggregate = isUngroupedAggregate;
return this;
}
public GroupByBuilder setOrderPreservingColumnCount(int orderPreservingColumnCount) {
this.orderPreservingColumnCount = orderPreservingColumnCount;
return this;
}
public GroupBy build() {
return new GroupBy(this);
}
}
public void explain(List<String> planSteps, Integer limit) {
if (isUngroupedAggregate) {
planSteps.add(" SERVER AGGREGATE INTO SINGLE ROW");
} else if (isOrderPreserving) {
planSteps.add(" SERVER AGGREGATE INTO ORDERED DISTINCT ROWS BY " + getExpressions() + (limit == null ? "" : " LIMIT " + limit + " GROUP" + (limit.intValue() == 1 ? "" : "S")));
} else {
planSteps.add(" SERVER AGGREGATE INTO DISTINCT ROWS BY " + getExpressions() + (limit == null ? "" : " LIMIT " + limit + " GROUP" + (limit.intValue() == 1 ? "" : "S")));
}
}
}
/**
* Get list of columns in the GROUP BY clause.
* @param context query context kept between compilation of different query clauses
* @param statement SQL statement being compiled
* @return the {@link GroupBy} instance encapsulating the group by clause
* @throws ColumnNotFoundException if column name could not be resolved
* @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
*/
public static GroupBy compile(StatementContext context, SelectStatement statement, boolean isOrderPreserving) throws SQLException {
List<ParseNode> groupByNodes = statement.getGroupBy();
/**
* Distinct can use an aggregate plan if there's no group by.
* Otherwise, we need to insert a step after the Merge that dedups.
* Order by only allowed on columns in the select distinct
*/
boolean isUngroupedAggregate = false;
if (groupByNodes.isEmpty()) {
if (statement.isAggregate()) {
// do not optimize if
// 1. we were asked not to optimize
// 2. there's any HAVING clause
// TODO: PHOENIX-2989 suggests some ways to optimize the latter case
if (statement.getHint().hasHint(Hint.RANGE_SCAN) ||
statement.getHaving() != null) {
return GroupBy.UNGROUPED_GROUP_BY;
}
groupByNodes = Lists.newArrayListWithExpectedSize(statement.getSelect().size());
for (AliasedNode aliasedNode : statement.getSelect()) {
if (aliasedNode.getNode() instanceof DistinctCountParseNode) {
// only add children of DistinctCount nodes
groupByNodes.addAll(aliasedNode.getNode().getChildren());
} else {
// if we found anything else, do not attempt any further optimization
return GroupBy.UNGROUPED_GROUP_BY;
}
}
isUngroupedAggregate = true;
} else if (statement.isDistinct()) {
groupByNodes = Lists.newArrayListWithExpectedSize(statement.getSelect().size());
for (AliasedNode aliasedNode : statement.getSelect()) {
// for distinct at all select expression as group by conditions
groupByNodes.add(aliasedNode.getNode());
}
} else {
return GroupBy.EMPTY_GROUP_BY;
}
}
// Accumulate expressions in GROUP BY
ExpressionCompiler compiler =
new ExpressionCompiler(context, GroupBy.EMPTY_GROUP_BY);
List<Expression> expressions = Lists.newArrayListWithExpectedSize(groupByNodes.size());
for (int i = 0; i < groupByNodes.size(); i++) {
ParseNode node = groupByNodes.get(i);
Expression expression = node.accept(compiler);
if (!expression.isStateless()) {
if (compiler.isAggregate()) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.AGGREGATE_IN_GROUP_BY)
.setMessage(expression.toString()).build().buildException();
}
expressions.add(expression);
}
compiler.reset();
}
if (expressions.isEmpty()) {
return GroupBy.EMPTY_GROUP_BY;
}
GroupBy groupBy = new GroupBy.GroupByBuilder()
.setIsOrderPreserving(isOrderPreserving)
.setExpressions(expressions).setKeyExpressions(expressions)
.setIsUngroupedAggregate(isUngroupedAggregate).build();
return groupBy;
}
private static boolean onlyAtEndType(Expression expression) {
// Due to the encoding schema of these types, they may only be
// used once in a group by and are located at the end of the
// group by row key.
PDataType type = getGroupByDataType(expression);
return type.isArrayType() || type == PVarbinary.INSTANCE;
}
private static PDataType getGroupByDataType(Expression expression) {
return IndexUtil.getIndexColumnDataType(expression.isNullable(), expression.getDataType());
}
    // Private constructor: this class is a static utility holder and is never
    // instantiated.
    private GroupByCompiler() {
    }
}
| |
// Generated from /Users/robby/Repositories/Sireum/v3/pilar/jvm/src/main/resources/org/sireum/pilar/parser/Antlr4Pilar.g4 by ANTLR 4.9
package org.sireum.pilar.parser;
// @formatter:off
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class Antlr4PilarParser extends Parser {
static { RuntimeMetaData.checkVersion("4.9", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, T__1=2, T__2=3, T__3=4, T__4=5, T__5=6, T__6=7, T__7=8, T__8=9,
T__9=10, T__10=11, T__11=12, T__12=13, T__13=14, T__14=15, T__15=16, T__16=17,
T__17=18, T__18=19, T__19=20, T__20=21, T__21=22, T__22=23, T__23=24,
T__24=25, T__25=26, LIT=27, ID=28, WS=29, ERROR_CHAR=30;
public static final int
RULE_modelFile = 0, RULE_model = 1, RULE_modelElement = 2, RULE_globalVarDecl = 3,
RULE_procDecl = 4, RULE_param = 5, RULE_procBody = 6, RULE_localVarDecl = 7,
RULE_location = 8, RULE_transformation = 9, RULE_action = 10, RULE_jump = 11,
RULE_switchCase = 12, RULE_exp = 13, RULE_primArgs = 14, RULE_expSuffix = 15,
RULE_prim = 16, RULE_arg = 17, RULE_lit = 18, RULE_annotation = 19;
private static String[] makeRuleNames() {
return new String[] {
"modelFile", "model", "modelElement", "globalVarDecl", "procDecl", "param",
"procBody", "localVarDecl", "location", "transformation", "action", "jump",
"switchCase", "exp", "primArgs", "expSuffix", "prim", "arg", "lit", "annotation"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'global'", "'var'", "';'", "'def'", "'('", "','", "')'", "'{'",
"'}'", "'#'", "'call'", "':='", "'goto'", "'assert'", "'assume'", "'ext'",
"'if'", "'then'", "'else'", "'return'", "'switch'", "'default'", "':'",
"'jext'", "'case'", "'@'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, "LIT", "ID", "WS", "ERROR_CHAR"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "Antlr4Pilar.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
public Antlr4PilarParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
public static class ModelFileContext extends ParserRuleContext {
public ModelContext model() {
return getRuleContext(ModelContext.class,0);
}
public TerminalNode EOF() { return getToken(Antlr4PilarParser.EOF, 0); }
public ModelFileContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_modelFile; }
}
public final ModelFileContext modelFile() throws RecognitionException {
ModelFileContext _localctx = new ModelFileContext(_ctx, getState());
enterRule(_localctx, 0, RULE_modelFile);
try {
enterOuterAlt(_localctx, 1);
{
setState(40);
model();
setState(41);
match(EOF);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ModelContext extends ParserRuleContext {
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public List<ModelElementContext> modelElement() {
return getRuleContexts(ModelElementContext.class);
}
public ModelElementContext modelElement(int i) {
return getRuleContext(ModelElementContext.class,i);
}
public ModelContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_model; }
}
public final ModelContext model() throws RecognitionException {
ModelContext _localctx = new ModelContext(_ctx, getState());
enterRule(_localctx, 2, RULE_model);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(46);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(43);
annotation();
}
}
setState(48);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(52);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__0 || _la==T__3) {
{
{
setState(49);
modelElement();
}
}
setState(54);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ModelElementContext extends ParserRuleContext {
public ModelElementContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_modelElement; }
public ModelElementContext() { }
public void copyFrom(ModelElementContext ctx) {
super.copyFrom(ctx);
}
}
public static class GlobalVarContext extends ModelElementContext {
public GlobalVarDeclContext globalVarDecl() {
return getRuleContext(GlobalVarDeclContext.class,0);
}
public GlobalVarContext(ModelElementContext ctx) { copyFrom(ctx); }
}
public static class ProcedureContext extends ModelElementContext {
public ProcDeclContext procDecl() {
return getRuleContext(ProcDeclContext.class,0);
}
public ProcedureContext(ModelElementContext ctx) { copyFrom(ctx); }
}
public final ModelElementContext modelElement() throws RecognitionException {
ModelElementContext _localctx = new ModelElementContext(_ctx, getState());
enterRule(_localctx, 4, RULE_modelElement);
try {
setState(59);
_errHandler.sync(this);
switch (_input.LA(1)) {
case T__0:
_localctx = new GlobalVarContext(_localctx);
enterOuterAlt(_localctx, 1);
{
setState(55);
match(T__0);
setState(56);
match(T__1);
setState(57);
globalVarDecl();
}
break;
case T__3:
_localctx = new ProcedureContext(_localctx);
enterOuterAlt(_localctx, 2);
{
setState(58);
procDecl();
}
break;
default:
throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class GlobalVarDeclContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public GlobalVarDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_globalVarDecl; }
}
public final GlobalVarDeclContext globalVarDecl() throws RecognitionException {
GlobalVarDeclContext _localctx = new GlobalVarDeclContext(_ctx, getState());
enterRule(_localctx, 6, RULE_globalVarDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(61);
match(ID);
setState(65);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(62);
annotation();
}
}
setState(67);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(68);
match(T__2);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ProcDeclContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public ProcBodyContext procBody() {
return getRuleContext(ProcBodyContext.class,0);
}
public List<ParamContext> param() {
return getRuleContexts(ParamContext.class);
}
public ParamContext param(int i) {
return getRuleContext(ParamContext.class,i);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public ProcDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_procDecl; }
}
public final ProcDeclContext procDecl() throws RecognitionException {
ProcDeclContext _localctx = new ProcDeclContext(_ctx, getState());
enterRule(_localctx, 8, RULE_procDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(70);
match(T__3);
setState(71);
match(ID);
setState(72);
match(T__4);
setState(81);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==ID) {
{
setState(73);
param();
setState(78);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__5) {
{
{
setState(74);
match(T__5);
setState(75);
param();
}
}
setState(80);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
setState(83);
match(T__6);
setState(87);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(84);
annotation();
}
}
setState(89);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(92);
_errHandler.sync(this);
switch (_input.LA(1)) {
case T__7:
{
setState(90);
procBody();
}
break;
case T__2:
{
setState(91);
match(T__2);
}
break;
default:
throw new NoViableAltException(this);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ParamContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public ParamContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_param; }
}
public final ParamContext param() throws RecognitionException {
ParamContext _localctx = new ParamContext(_ctx, getState());
enterRule(_localctx, 10, RULE_param);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(94);
match(ID);
setState(98);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(95);
annotation();
}
}
setState(100);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ProcBodyContext extends ParserRuleContext {
public List<LocationContext> location() {
return getRuleContexts(LocationContext.class);
}
public LocationContext location(int i) {
return getRuleContext(LocationContext.class,i);
}
public List<LocalVarDeclContext> localVarDecl() {
return getRuleContexts(LocalVarDeclContext.class);
}
public LocalVarDeclContext localVarDecl(int i) {
return getRuleContext(LocalVarDeclContext.class,i);
}
public ProcBodyContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_procBody; }
}
public final ProcBodyContext procBody() throws RecognitionException {
ProcBodyContext _localctx = new ProcBodyContext(_ctx, getState());
enterRule(_localctx, 12, RULE_procBody);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(101);
match(T__7);
setState(108);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==T__1) {
{
setState(102);
match(T__1);
setState(104);
_errHandler.sync(this);
_la = _input.LA(1);
do {
{
{
setState(103);
localVarDecl();
}
}
setState(106);
_errHandler.sync(this);
_la = _input.LA(1);
} while ( _la==ID );
}
}
setState(111);
_errHandler.sync(this);
_la = _input.LA(1);
do {
{
{
setState(110);
location();
}
}
setState(113);
_errHandler.sync(this);
_la = _input.LA(1);
} while ( _la==T__9 );
setState(115);
match(T__8);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class LocalVarDeclContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public LocalVarDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_localVarDecl; }
}
public final LocalVarDeclContext localVarDecl() throws RecognitionException {
LocalVarDeclContext _localctx = new LocalVarDeclContext(_ctx, getState());
enterRule(_localctx, 14, RULE_localVarDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(117);
match(ID);
setState(121);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(118);
annotation();
}
}
setState(123);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(124);
match(T__2);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class LocationContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public TransformationContext transformation() {
return getRuleContext(TransformationContext.class,0);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public LocationContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_location; }
}
public final LocationContext location() throws RecognitionException {
LocationContext _localctx = new LocationContext(_ctx, getState());
enterRule(_localctx, 16, RULE_location);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(126);
match(T__9);
setState(127);
match(ID);
setState(131);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(128);
annotation();
}
}
setState(133);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(134);
transformation();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class TransformationContext extends ParserRuleContext {
public TransformationContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_transformation; }
public TransformationContext() { }
public void copyFrom(TransformationContext ctx) {
super.copyFrom(ctx);
}
}
public static class CallContext extends TransformationContext {
public Token p;
public Token l;
public ArgContext arg() {
return getRuleContext(ArgContext.class,0);
}
public List<TerminalNode> ID() { return getTokens(Antlr4PilarParser.ID); }
public TerminalNode ID(int i) {
return getToken(Antlr4PilarParser.ID, i);
}
public ExpContext exp() {
return getRuleContext(ExpContext.class,0);
}
public CallContext(TransformationContext ctx) { copyFrom(ctx); }
}
public static class BlockContext extends TransformationContext {
public JumpContext jump() {
return getRuleContext(JumpContext.class,0);
}
public List<ActionContext> action() {
return getRuleContexts(ActionContext.class);
}
public ActionContext action(int i) {
return getRuleContext(ActionContext.class,i);
}
public BlockContext(TransformationContext ctx) { copyFrom(ctx); }
}
public final TransformationContext transformation() throws RecognitionException {
TransformationContext _localctx = new TransformationContext(_ctx, getState());
enterRule(_localctx, 18, RULE_transformation);
int _la;
try {
setState(155);
_errHandler.sync(this);
switch (_input.LA(1)) {
case T__10:
_localctx = new CallContext(_localctx);
enterOuterAlt(_localctx, 1);
{
setState(136);
match(T__10);
setState(140);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
case 1:
{
setState(137);
exp();
setState(138);
match(T__11);
}
break;
}
setState(142);
((CallContext)_localctx).p = match(ID);
setState(143);
arg();
setState(144);
match(T__12);
setState(145);
((CallContext)_localctx).l = match(ID);
setState(146);
match(T__2);
}
break;
case T__4:
case T__12:
case T__13:
case T__14:
case T__15:
case T__16:
case T__19:
case T__20:
case T__23:
case ID:
_localctx = new BlockContext(_localctx);
enterOuterAlt(_localctx, 2);
{
setState(151);
_errHandler.sync(this);
_la = _input.LA(1);
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__4) | (1L << T__13) | (1L << T__14) | (1L << T__15) | (1L << ID))) != 0)) {
{
{
setState(148);
action();
}
}
setState(153);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(154);
jump();
}
break;
default:
throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ActionContext extends ParserRuleContext {
public ActionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_action; }
public ActionContext() { }
public void copyFrom(ActionContext ctx) {
super.copyFrom(ctx);
}
}
public static class ExtActionContext extends ActionContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public ArgContext arg() {
return getRuleContext(ArgContext.class,0);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public ExtActionContext(ActionContext ctx) { copyFrom(ctx); }
}
public static class AssignActionContext extends ActionContext {
public ExpContext l;
public ExpContext r;
public List<ExpContext> exp() {
return getRuleContexts(ExpContext.class);
}
public ExpContext exp(int i) {
return getRuleContext(ExpContext.class,i);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public AssignActionContext(ActionContext ctx) { copyFrom(ctx); }
}
public static class AssumeActionContext extends ActionContext {
public ExpContext exp() {
return getRuleContext(ExpContext.class,0);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public AssumeActionContext(ActionContext ctx) { copyFrom(ctx); }
}
public static class AssertActionContext extends ActionContext {
public ExpContext exp() {
return getRuleContext(ExpContext.class,0);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public AssertActionContext(ActionContext ctx) { copyFrom(ctx); }
}
public final ActionContext action() throws RecognitionException {
ActionContext _localctx = new ActionContext(_ctx, getState());
enterRule(_localctx, 20, RULE_action);
int _la;
try {
setState(199);
_errHandler.sync(this);
switch (_input.LA(1)) {
case T__4:
case ID:
_localctx = new AssignActionContext(_localctx);
enterOuterAlt(_localctx, 1);
{
setState(157);
((AssignActionContext)_localctx).l = exp();
setState(158);
match(T__11);
setState(159);
((AssignActionContext)_localctx).r = exp();
setState(163);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(160);
annotation();
}
}
setState(165);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(166);
match(T__2);
}
break;
case T__13:
_localctx = new AssertActionContext(_localctx);
enterOuterAlt(_localctx, 2);
{
setState(168);
match(T__13);
setState(169);
exp();
setState(173);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(170);
annotation();
}
}
setState(175);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(176);
match(T__2);
}
break;
case T__14:
_localctx = new AssumeActionContext(_localctx);
enterOuterAlt(_localctx, 3);
{
setState(178);
match(T__14);
setState(179);
exp();
setState(183);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(180);
annotation();
}
}
setState(185);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(186);
match(T__2);
}
break;
case T__15:
_localctx = new ExtActionContext(_localctx);
enterOuterAlt(_localctx, 4);
{
setState(188);
match(T__15);
setState(189);
match(ID);
setState(190);
arg();
setState(194);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(191);
annotation();
}
}
setState(196);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(197);
match(T__2);
}
break;
default:
throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class JumpContext extends ParserRuleContext {
public JumpContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_jump; }
public JumpContext() { }
public void copyFrom(JumpContext ctx) {
super.copyFrom(ctx);
}
}
public static class ExtJumpContext extends JumpContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public ArgContext arg() {
return getRuleContext(ArgContext.class,0);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public ExtJumpContext(JumpContext ctx) { copyFrom(ctx); }
}
public static class SwitchJumpContext extends JumpContext {
public Token b;
public ExpContext exp() {
return getRuleContext(ExpContext.class,0);
}
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public List<SwitchCaseContext> switchCase() {
return getRuleContexts(SwitchCaseContext.class);
}
public SwitchCaseContext switchCase(int i) {
return getRuleContext(SwitchCaseContext.class,i);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public SwitchJumpContext(JumpContext ctx) { copyFrom(ctx); }
}
public static class GotoJumpContext extends JumpContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public GotoJumpContext(JumpContext ctx) { copyFrom(ctx); }
}
public static class IfJumpContext extends JumpContext {
public Token t;
public Token f;
public ExpContext exp() {
return getRuleContext(ExpContext.class,0);
}
public List<TerminalNode> ID() { return getTokens(Antlr4PilarParser.ID); }
public TerminalNode ID(int i) {
return getToken(Antlr4PilarParser.ID, i);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public IfJumpContext(JumpContext ctx) { copyFrom(ctx); }
}
public static class ReturnJumpContext extends JumpContext {
public ExpContext exp() {
return getRuleContext(ExpContext.class,0);
}
public List<AnnotationContext> annotation() {
return getRuleContexts(AnnotationContext.class);
}
public AnnotationContext annotation(int i) {
return getRuleContext(AnnotationContext.class,i);
}
public ReturnJumpContext(JumpContext ctx) { copyFrom(ctx); }
}
public final JumpContext jump() throws RecognitionException {
JumpContext _localctx = new JumpContext(_ctx, getState());
enterRule(_localctx, 22, RULE_jump);
int _la;
try {
setState(265);
_errHandler.sync(this);
switch (_input.LA(1)) {
case T__12:
_localctx = new GotoJumpContext(_localctx);
enterOuterAlt(_localctx, 1);
{
setState(201);
match(T__12);
setState(202);
match(ID);
setState(206);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(203);
annotation();
}
}
setState(208);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(209);
match(T__2);
}
break;
case T__16:
_localctx = new IfJumpContext(_localctx);
enterOuterAlt(_localctx, 2);
{
setState(210);
match(T__16);
setState(211);
exp();
setState(212);
match(T__17);
setState(213);
((IfJumpContext)_localctx).t = match(ID);
setState(214);
match(T__18);
setState(215);
((IfJumpContext)_localctx).f = match(ID);
setState(219);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(216);
annotation();
}
}
setState(221);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(222);
match(T__2);
}
break;
case T__19:
_localctx = new ReturnJumpContext(_localctx);
enterOuterAlt(_localctx, 3);
{
setState(224);
match(T__19);
setState(226);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==T__4 || _la==ID) {
{
setState(225);
exp();
}
}
setState(231);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(228);
annotation();
}
}
setState(233);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(234);
match(T__2);
}
break;
case T__20:
_localctx = new SwitchJumpContext(_localctx);
enterOuterAlt(_localctx, 4);
{
setState(235);
match(T__20);
setState(236);
exp();
setState(240);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__24) {
{
{
setState(237);
switchCase();
}
}
setState(242);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(243);
((SwitchJumpContext)_localctx).b = match(T__21);
setState(244);
match(T__22);
setState(245);
match(ID);
setState(249);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(246);
annotation();
}
}
setState(251);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(252);
match(T__2);
}
break;
case T__23:
_localctx = new ExtJumpContext(_localctx);
enterOuterAlt(_localctx, 5);
{
setState(254);
match(T__23);
setState(255);
match(ID);
setState(256);
arg();
setState(260);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__25) {
{
{
setState(257);
annotation();
}
}
setState(262);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(263);
match(T__2);
}
break;
default:
throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class SwitchCaseContext extends ParserRuleContext {
public LitContext lit() {
return getRuleContext(LitContext.class,0);
}
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public SwitchCaseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_switchCase; }
}
public final SwitchCaseContext switchCase() throws RecognitionException {
SwitchCaseContext _localctx = new SwitchCaseContext(_ctx, getState());
enterRule(_localctx, 24, RULE_switchCase);
try {
enterOuterAlt(_localctx, 1);
{
setState(267);
match(T__24);
setState(268);
lit();
setState(269);
match(T__22);
setState(270);
match(ID);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ExpContext extends ParserRuleContext {
public PrimArgsContext primArgs() {
return getRuleContext(PrimArgsContext.class,0);
}
public List<ExpSuffixContext> expSuffix() {
return getRuleContexts(ExpSuffixContext.class);
}
public ExpSuffixContext expSuffix(int i) {
return getRuleContext(ExpSuffixContext.class,i);
}
public ExpContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_exp; }
}
public final ExpContext exp() throws RecognitionException {
ExpContext _localctx = new ExpContext(_ctx, getState());
enterRule(_localctx, 26, RULE_exp);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(272);
primArgs();
setState(276);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==ID) {
{
{
setState(273);
expSuffix();
}
}
setState(278);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class PrimArgsContext extends ParserRuleContext {
public PrimContext prim() {
return getRuleContext(PrimContext.class,0);
}
public List<ArgContext> arg() {
return getRuleContexts(ArgContext.class);
}
public ArgContext arg(int i) {
return getRuleContext(ArgContext.class,i);
}
public PrimArgsContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_primArgs; }
}
public final PrimArgsContext primArgs() throws RecognitionException {
PrimArgsContext _localctx = new PrimArgsContext(_ctx, getState());
enterRule(_localctx, 28, RULE_primArgs);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(279);
prim();
setState(283);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__4) {
{
{
setState(280);
arg();
}
}
setState(285);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ExpSuffixContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
public PrimArgsContext primArgs() {
return getRuleContext(PrimArgsContext.class,0);
}
public ExpSuffixContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expSuffix; }
}
/**
 * Parses the {@code expSuffix} rule: an ID token followed by a
 * {@code primArgs} ({@code expSuffix : ID primArgs ;}).
 *
 * <p>Generated by ANTLR — do not edit by hand.</p>
 *
 * @return the populated parse-tree context for this rule
 * @throws RecognitionException if the input cannot be matched
 */
public final ExpSuffixContext expSuffix() throws RecognitionException {
	ExpSuffixContext _localctx = new ExpSuffixContext(_ctx, getState());
	enterRule(_localctx, 30, RULE_expSuffix);
	try {
		enterOuterAlt(_localctx, 1);
		{
		setState(286);
		match(ID);
		setState(287);
		primArgs();
		}
	}
	catch (RecognitionException re) {
		_localctx.exception = re;
		_errHandler.reportError(this, re);
		_errHandler.recover(this, re);
	}
	finally {
		exitRule();
	}
	return _localctx;
}
/**
 * Base parse-tree context for the {@code prim} rule (generated by ANTLR).
 * Concrete parses are represented by the labeled-alternative subclasses
 * below ({@link LitExpContext}, {@link IdExpContext}, {@link TupleExpContext}).
 */
public static class PrimContext extends ParserRuleContext {
	public PrimContext(ParserRuleContext parent, int invokingState) {
		super(parent, invokingState);
	}
	@Override public int getRuleIndex() { return RULE_prim; }
	public PrimContext() { }
	// Re-parents an existing context when the parser discovers which labeled
	// alternative actually matched.
	public void copyFrom(PrimContext ctx) {
		super.copyFrom(ctx);
	}
}
/** {@code prim} alternative: a bare ID token. */
public static class IdExpContext extends PrimContext {
	public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
	public IdExpContext(PrimContext ctx) { copyFrom(ctx); }
}
/** {@code prim} alternative: a literal ({@code lit}). */
public static class LitExpContext extends PrimContext {
	public LitContext lit() {
		return getRuleContext(LitContext.class,0);
	}
	public LitExpContext(PrimContext ctx) { copyFrom(ctx); }
}
/** {@code prim} alternative: a delimited tuple of expressions with trailing annotations. */
public static class TupleExpContext extends PrimContext {
	/** All tuple element expressions, in order. */
	public List<ExpContext> exp() {
		return getRuleContexts(ExpContext.class);
	}
	public ExpContext exp(int i) {
		return getRuleContext(ExpContext.class,i);
	}
	/** All trailing {@code annotation} children, in order. */
	public List<AnnotationContext> annotation() {
		return getRuleContexts(AnnotationContext.class);
	}
	public AnnotationContext annotation(int i) {
		return getRuleContext(AnnotationContext.class,i);
	}
	public TupleExpContext(PrimContext ctx) { copyFrom(ctx); }
}
/**
 * Parses the {@code prim} rule with three labeled alternatives:
 * a literal ({@code LitExp}), a bare ID ({@code IdExp}), or a delimited,
 * separator-joined tuple of expressions followed by zero or more
 * annotations ({@code TupleExp}). The alternative is chosen by adaptive
 * (ALL(*)) prediction, decision 34.
 *
 * <p>Generated by ANTLR — do not edit by hand.</p>
 *
 * @return the populated context; its concrete class identifies the alternative
 * @throws RecognitionException if the input cannot be matched
 */
public final PrimContext prim() throws RecognitionException {
	PrimContext _localctx = new PrimContext(_ctx, getState());
	enterRule(_localctx, 32, RULE_prim);
	int _la;
	try {
		int _alt;
		setState(307);
		_errHandler.sync(this);
		switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) {
		case 1:
			// Alternative 1: prim -> lit
			_localctx = new LitExpContext(_localctx);
			enterOuterAlt(_localctx, 1);
			{
			setState(289);
			lit();
			}
			break;
		case 2:
			// Alternative 2: prim -> ID
			_localctx = new IdExpContext(_localctx);
			enterOuterAlt(_localctx, 2);
			{
			setState(290);
			match(ID);
			}
			break;
		case 3:
			// Alternative 3: prim -> T__4 exp (T__5 exp)* T__6 annotation*
			// (T__4/T__5/T__6 are anonymous grammar literals — presumably the
			// tuple opener, separator, and closer; confirm against the grammar.)
			_localctx = new TupleExpContext(_localctx);
			enterOuterAlt(_localctx, 3);
			{
			setState(291);
			match(T__4);
			setState(292);
			exp();
			setState(297);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==T__5) {
				{
				{
				setState(293);
				match(T__5);
				setState(294);
				exp();
				}
				}
				setState(299);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			setState(300);
			match(T__6);
			setState(304);
			_errHandler.sync(this);
			// Trailing annotations need adaptive prediction (decision 33) to
			// decide whether the next annotation belongs to this prim.
			_alt = getInterpreter().adaptivePredict(_input,33,_ctx);
			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
				if ( _alt==1 ) {
					{
					{
					setState(301);
					annotation();
					}
					}
				}
				setState(306);
				_errHandler.sync(this);
				_alt = getInterpreter().adaptivePredict(_input,33,_ctx);
			}
			}
			break;
		}
	}
	catch (RecognitionException re) {
		_localctx.exception = re;
		_errHandler.reportError(this, re);
		_errHandler.recover(this, re);
	}
	finally {
		exitRule();
	}
	return _localctx;
}
/** Parse-tree context for the {@code arg} rule (generated by ANTLR). */
public static class ArgContext extends ParserRuleContext {
	/** All expressions inside this argument list, in order. */
	public List<ExpContext> exp() {
		return getRuleContexts(ExpContext.class);
	}
	public ExpContext exp(int i) {
		return getRuleContext(ExpContext.class,i);
	}
	public ArgContext(ParserRuleContext parent, int invokingState) {
		super(parent, invokingState);
	}
	@Override public int getRuleIndex() { return RULE_arg; }
}
/**
 * Parses the {@code arg} rule: a delimited, possibly empty,
 * separator-joined expression list
 * ({@code arg : T__4 ( exp (T__5 exp)* )? T__6 ;} — the anonymous literals
 * are presumably '(' ',' ')'; confirm against the grammar file).
 *
 * <p>Generated by ANTLR — do not edit by hand.</p>
 *
 * @return the populated parse-tree context for this rule
 * @throws RecognitionException if the input cannot be matched
 */
public final ArgContext arg() throws RecognitionException {
	ArgContext _localctx = new ArgContext(_ctx, getState());
	enterRule(_localctx, 34, RULE_arg);
	int _la;
	try {
		enterOuterAlt(_localctx, 1);
		{
		setState(309);
		match(T__4);
		setState(318);
		_errHandler.sync(this);
		_la = _input.LA(1);
		// Optional body: present only when the lookahead can start an exp.
		if (_la==T__4 || _la==ID) {
			{
			setState(310);
			exp();
			setState(315);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==T__5) {
				{
				{
				setState(311);
				match(T__5);
				setState(312);
				exp();
				}
				}
				setState(317);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		setState(320);
		match(T__6);
		}
	}
	catch (RecognitionException re) {
		_localctx.exception = re;
		_errHandler.reportError(this, re);
		_errHandler.recover(this, re);
	}
	finally {
		exitRule();
	}
	return _localctx;
}
/** Parse-tree context for the {@code lit} rule (generated by ANTLR). */
public static class LitContext extends ParserRuleContext {
	public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
	public TerminalNode LIT() { return getToken(Antlr4PilarParser.LIT, 0); }
	public LitContext(ParserRuleContext parent, int invokingState) {
		super(parent, invokingState);
	}
	@Override public int getRuleIndex() { return RULE_lit; }
}
/**
 * Parses the {@code lit} rule: an ID token followed by a LIT token
 * ({@code lit : ID LIT ;}).
 *
 * <p>Generated by ANTLR — do not edit by hand.</p>
 *
 * @return the populated parse-tree context for this rule
 * @throws RecognitionException if the input cannot be matched
 */
public final LitContext lit() throws RecognitionException {
	LitContext _localctx = new LitContext(_ctx, getState());
	enterRule(_localctx, 36, RULE_lit);
	try {
		enterOuterAlt(_localctx, 1);
		{
		setState(322);
		match(ID);
		setState(323);
		match(LIT);
		}
	}
	catch (RecognitionException re) {
		_localctx.exception = re;
		_errHandler.reportError(this, re);
		_errHandler.recover(this, re);
	}
	finally {
		exitRule();
	}
	return _localctx;
}
/** Parse-tree context for the {@code annotation} rule (generated by ANTLR). */
public static class AnnotationContext extends ParserRuleContext {
	public TerminalNode ID() { return getToken(Antlr4PilarParser.ID, 0); }
	/** The optional LIT payload; null when the annotation carries no value. */
	public TerminalNode LIT() { return getToken(Antlr4PilarParser.LIT, 0); }
	public AnnotationContext(ParserRuleContext parent, int invokingState) {
		super(parent, invokingState);
	}
	@Override public int getRuleIndex() { return RULE_annotation; }
}
/**
 * Parses the {@code annotation} rule: an anonymous marker literal (T__25,
 * presumably the annotation sigil, e.g. '@' — confirm against the grammar),
 * then an ID, then an optional LIT value
 * ({@code annotation : T__25 ID LIT? ;}).
 *
 * <p>Generated by ANTLR — do not edit by hand.</p>
 *
 * @return the populated parse-tree context for this rule
 * @throws RecognitionException if the input cannot be matched
 */
public final AnnotationContext annotation() throws RecognitionException {
	AnnotationContext _localctx = new AnnotationContext(_ctx, getState());
	enterRule(_localctx, 38, RULE_annotation);
	int _la;
	try {
		enterOuterAlt(_localctx, 1);
		{
		setState(325);
		match(T__25);
		setState(326);
		match(ID);
		setState(328);
		_errHandler.sync(this);
		_la = _input.LA(1);
		// The LIT payload is optional.
		if (_la==LIT) {
			{
			setState(327);
			match(LIT);
			}
		}
		}
	}
	catch (RecognitionException re) {
		_localctx.exception = re;
		_errHandler.reportError(this, re);
		_errHandler.recover(this, re);
	}
	finally {
		exitRule();
	}
	return _localctx;
}
/**
 * The serialized ATN produced by the ANTLR tool for this grammar. The
 * {@code setState(...)} numbers in the rule methods above are offsets into
 * this table — never edit it (or them) by hand; regenerate from the grammar.
 */
public static final String _serializedATN =
	"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3 \u014d\4\2\t\2\4"+
	"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+
	"\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
	"\4\23\t\23\4\24\t\24\4\25\t\25\3\2\3\2\3\2\3\3\7\3/\n\3\f\3\16\3\62\13"+
	"\3\3\3\7\3\65\n\3\f\3\16\38\13\3\3\4\3\4\3\4\3\4\5\4>\n\4\3\5\3\5\7\5"+
	"B\n\5\f\5\16\5E\13\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6O\n\6\f\6\16\6"+
	"R\13\6\5\6T\n\6\3\6\3\6\7\6X\n\6\f\6\16\6[\13\6\3\6\3\6\5\6_\n\6\3\7\3"+
	"\7\7\7c\n\7\f\7\16\7f\13\7\3\b\3\b\3\b\6\bk\n\b\r\b\16\bl\5\bo\n\b\3\b"+
	"\6\br\n\b\r\b\16\bs\3\b\3\b\3\t\3\t\7\tz\n\t\f\t\16\t}\13\t\3\t\3\t\3"+
	"\n\3\n\3\n\7\n\u0084\n\n\f\n\16\n\u0087\13\n\3\n\3\n\3\13\3\13\3\13\3"+
	"\13\5\13\u008f\n\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u0098\n\13"+
	"\f\13\16\13\u009b\13\13\3\13\5\13\u009e\n\13\3\f\3\f\3\f\3\f\7\f\u00a4"+
	"\n\f\f\f\16\f\u00a7\13\f\3\f\3\f\3\f\3\f\3\f\7\f\u00ae\n\f\f\f\16\f\u00b1"+
	"\13\f\3\f\3\f\3\f\3\f\3\f\7\f\u00b8\n\f\f\f\16\f\u00bb\13\f\3\f\3\f\3"+
	"\f\3\f\3\f\3\f\7\f\u00c3\n\f\f\f\16\f\u00c6\13\f\3\f\3\f\5\f\u00ca\n\f"+
	"\3\r\3\r\3\r\7\r\u00cf\n\r\f\r\16\r\u00d2\13\r\3\r\3\r\3\r\3\r\3\r\3\r"+
	"\3\r\3\r\7\r\u00dc\n\r\f\r\16\r\u00df\13\r\3\r\3\r\3\r\3\r\5\r\u00e5\n"+
	"\r\3\r\7\r\u00e8\n\r\f\r\16\r\u00eb\13\r\3\r\3\r\3\r\3\r\7\r\u00f1\n\r"+
	"\f\r\16\r\u00f4\13\r\3\r\3\r\3\r\3\r\7\r\u00fa\n\r\f\r\16\r\u00fd\13\r"+
	"\3\r\3\r\3\r\3\r\3\r\3\r\7\r\u0105\n\r\f\r\16\r\u0108\13\r\3\r\3\r\5\r"+
	"\u010c\n\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\7\17\u0115\n\17\f\17\16"+
	"\17\u0118\13\17\3\20\3\20\7\20\u011c\n\20\f\20\16\20\u011f\13\20\3\21"+
	"\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\7\22\u012a\n\22\f\22\16\22\u012d"+
	"\13\22\3\22\3\22\7\22\u0131\n\22\f\22\16\22\u0134\13\22\5\22\u0136\n\22"+
	"\3\23\3\23\3\23\3\23\7\23\u013c\n\23\f\23\16\23\u013f\13\23\5\23\u0141"+
	"\n\23\3\23\3\23\3\24\3\24\3\24\3\25\3\25\3\25\5\25\u014b\n\25\3\25\2\2"+
	"\26\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(\2\2\2\u0164\2*\3\2\2"+
	"\2\4\60\3\2\2\2\6=\3\2\2\2\b?\3\2\2\2\nH\3\2\2\2\f`\3\2\2\2\16g\3\2\2"+
	"\2\20w\3\2\2\2\22\u0080\3\2\2\2\24\u009d\3\2\2\2\26\u00c9\3\2\2\2\30\u010b"+
	"\3\2\2\2\32\u010d\3\2\2\2\34\u0112\3\2\2\2\36\u0119\3\2\2\2 \u0120\3\2"+
	"\2\2\"\u0135\3\2\2\2$\u0137\3\2\2\2&\u0144\3\2\2\2(\u0147\3\2\2\2*+\5"+
	"\4\3\2+,\7\2\2\3,\3\3\2\2\2-/\5(\25\2.-\3\2\2\2/\62\3\2\2\2\60.\3\2\2"+
	"\2\60\61\3\2\2\2\61\66\3\2\2\2\62\60\3\2\2\2\63\65\5\6\4\2\64\63\3\2\2"+
	"\2\658\3\2\2\2\66\64\3\2\2\2\66\67\3\2\2\2\67\5\3\2\2\28\66\3\2\2\29:"+
	"\7\3\2\2:;\7\4\2\2;>\5\b\5\2<>\5\n\6\2=9\3\2\2\2=<\3\2\2\2>\7\3\2\2\2"+
	"?C\7\36\2\2@B\5(\25\2A@\3\2\2\2BE\3\2\2\2CA\3\2\2\2CD\3\2\2\2DF\3\2\2"+
	"\2EC\3\2\2\2FG\7\5\2\2G\t\3\2\2\2HI\7\6\2\2IJ\7\36\2\2JS\7\7\2\2KP\5\f"+
	"\7\2LM\7\b\2\2MO\5\f\7\2NL\3\2\2\2OR\3\2\2\2PN\3\2\2\2PQ\3\2\2\2QT\3\2"+
	"\2\2RP\3\2\2\2SK\3\2\2\2ST\3\2\2\2TU\3\2\2\2UY\7\t\2\2VX\5(\25\2WV\3\2"+
	"\2\2X[\3\2\2\2YW\3\2\2\2YZ\3\2\2\2Z^\3\2\2\2[Y\3\2\2\2\\_\5\16\b\2]_\7"+
	"\5\2\2^\\\3\2\2\2^]\3\2\2\2_\13\3\2\2\2`d\7\36\2\2ac\5(\25\2ba\3\2\2\2"+
	"cf\3\2\2\2db\3\2\2\2de\3\2\2\2e\r\3\2\2\2fd\3\2\2\2gn\7\n\2\2hj\7\4\2"+
	"\2ik\5\20\t\2ji\3\2\2\2kl\3\2\2\2lj\3\2\2\2lm\3\2\2\2mo\3\2\2\2nh\3\2"+
	"\2\2no\3\2\2\2oq\3\2\2\2pr\5\22\n\2qp\3\2\2\2rs\3\2\2\2sq\3\2\2\2st\3"+
	"\2\2\2tu\3\2\2\2uv\7\13\2\2v\17\3\2\2\2w{\7\36\2\2xz\5(\25\2yx\3\2\2\2"+
	"z}\3\2\2\2{y\3\2\2\2{|\3\2\2\2|~\3\2\2\2}{\3\2\2\2~\177\7\5\2\2\177\21"+
	"\3\2\2\2\u0080\u0081\7\f\2\2\u0081\u0085\7\36\2\2\u0082\u0084\5(\25\2"+
	"\u0083\u0082\3\2\2\2\u0084\u0087\3\2\2\2\u0085\u0083\3\2\2\2\u0085\u0086"+
	"\3\2\2\2\u0086\u0088\3\2\2\2\u0087\u0085\3\2\2\2\u0088\u0089\5\24\13\2"+
	"\u0089\23\3\2\2\2\u008a\u008e\7\r\2\2\u008b\u008c\5\34\17\2\u008c\u008d"+
	"\7\16\2\2\u008d\u008f\3\2\2\2\u008e\u008b\3\2\2\2\u008e\u008f\3\2\2\2"+
	"\u008f\u0090\3\2\2\2\u0090\u0091\7\36\2\2\u0091\u0092\5$\23\2\u0092\u0093"+
	"\7\17\2\2\u0093\u0094\7\36\2\2\u0094\u0095\7\5\2\2\u0095\u009e\3\2\2\2"+
	"\u0096\u0098\5\26\f\2\u0097\u0096\3\2\2\2\u0098\u009b\3\2\2\2\u0099\u0097"+
	"\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u009c\3\2\2\2\u009b\u0099\3\2\2\2\u009c"+
	"\u009e\5\30\r\2\u009d\u008a\3\2\2\2\u009d\u0099\3\2\2\2\u009e\25\3\2\2"+
	"\2\u009f\u00a0\5\34\17\2\u00a0\u00a1\7\16\2\2\u00a1\u00a5\5\34\17\2\u00a2"+
	"\u00a4\5(\25\2\u00a3\u00a2\3\2\2\2\u00a4\u00a7\3\2\2\2\u00a5\u00a3\3\2"+
	"\2\2\u00a5\u00a6\3\2\2\2\u00a6\u00a8\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a8"+
	"\u00a9\7\5\2\2\u00a9\u00ca\3\2\2\2\u00aa\u00ab\7\20\2\2\u00ab\u00af\5"+
	"\34\17\2\u00ac\u00ae\5(\25\2\u00ad\u00ac\3\2\2\2\u00ae\u00b1\3\2\2\2\u00af"+
	"\u00ad\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b2\3\2\2\2\u00b1\u00af\3\2"+
	"\2\2\u00b2\u00b3\7\5\2\2\u00b3\u00ca\3\2\2\2\u00b4\u00b5\7\21\2\2\u00b5"+
	"\u00b9\5\34\17\2\u00b6\u00b8\5(\25\2\u00b7\u00b6\3\2\2\2\u00b8\u00bb\3"+
	"\2\2\2\u00b9\u00b7\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00bc\3\2\2\2\u00bb"+
	"\u00b9\3\2\2\2\u00bc\u00bd\7\5\2\2\u00bd\u00ca\3\2\2\2\u00be\u00bf\7\22"+
	"\2\2\u00bf\u00c0\7\36\2\2\u00c0\u00c4\5$\23\2\u00c1\u00c3\5(\25\2\u00c2"+
	"\u00c1\3\2\2\2\u00c3\u00c6\3\2\2\2\u00c4\u00c2\3\2\2\2\u00c4\u00c5\3\2"+
	"\2\2\u00c5\u00c7\3\2\2\2\u00c6\u00c4\3\2\2\2\u00c7\u00c8\7\5\2\2\u00c8"+
	"\u00ca\3\2\2\2\u00c9\u009f\3\2\2\2\u00c9\u00aa\3\2\2\2\u00c9\u00b4\3\2"+
	"\2\2\u00c9\u00be\3\2\2\2\u00ca\27\3\2\2\2\u00cb\u00cc\7\17\2\2\u00cc\u00d0"+
	"\7\36\2\2\u00cd\u00cf\5(\25\2\u00ce\u00cd\3\2\2\2\u00cf\u00d2\3\2\2\2"+
	"\u00d0\u00ce\3\2\2\2\u00d0\u00d1\3\2\2\2\u00d1\u00d3\3\2\2\2\u00d2\u00d0"+
	"\3\2\2\2\u00d3\u010c\7\5\2\2\u00d4\u00d5\7\23\2\2\u00d5\u00d6\5\34\17"+
	"\2\u00d6\u00d7\7\24\2\2\u00d7\u00d8\7\36\2\2\u00d8\u00d9\7\25\2\2\u00d9"+
	"\u00dd\7\36\2\2\u00da\u00dc\5(\25\2\u00db\u00da\3\2\2\2\u00dc\u00df\3"+
	"\2\2\2\u00dd\u00db\3\2\2\2\u00dd\u00de\3\2\2\2\u00de\u00e0\3\2\2\2\u00df"+
	"\u00dd\3\2\2\2\u00e0\u00e1\7\5\2\2\u00e1\u010c\3\2\2\2\u00e2\u00e4\7\26"+
	"\2\2\u00e3\u00e5\5\34\17\2\u00e4\u00e3\3\2\2\2\u00e4\u00e5\3\2\2\2\u00e5"+
	"\u00e9\3\2\2\2\u00e6\u00e8\5(\25\2\u00e7\u00e6\3\2\2\2\u00e8\u00eb\3\2"+
	"\2\2\u00e9\u00e7\3\2\2\2\u00e9\u00ea\3\2\2\2\u00ea\u00ec\3\2\2\2\u00eb"+
	"\u00e9\3\2\2\2\u00ec\u010c\7\5\2\2\u00ed\u00ee\7\27\2\2\u00ee\u00f2\5"+
	"\34\17\2\u00ef\u00f1\5\32\16\2\u00f0\u00ef\3\2\2\2\u00f1\u00f4\3\2\2\2"+
	"\u00f2\u00f0\3\2\2\2\u00f2\u00f3\3\2\2\2\u00f3\u00f5\3\2\2\2\u00f4\u00f2"+
	"\3\2\2\2\u00f5\u00f6\7\30\2\2\u00f6\u00f7\7\31\2\2\u00f7\u00fb\7\36\2"+
	"\2\u00f8\u00fa\5(\25\2\u00f9\u00f8\3\2\2\2\u00fa\u00fd\3\2\2\2\u00fb\u00f9"+
	"\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00fe\3\2\2\2\u00fd\u00fb\3\2\2\2\u00fe"+
	"\u00ff\7\5\2\2\u00ff\u010c\3\2\2\2\u0100\u0101\7\32\2\2\u0101\u0102\7"+
	"\36\2\2\u0102\u0106\5$\23\2\u0103\u0105\5(\25\2\u0104\u0103\3\2\2\2\u0105"+
	"\u0108\3\2\2\2\u0106\u0104\3\2\2\2\u0106\u0107\3\2\2\2\u0107\u0109\3\2"+
	"\2\2\u0108\u0106\3\2\2\2\u0109\u010a\7\5\2\2\u010a\u010c\3\2\2\2\u010b"+
	"\u00cb\3\2\2\2\u010b\u00d4\3\2\2\2\u010b\u00e2\3\2\2\2\u010b\u00ed\3\2"+
	"\2\2\u010b\u0100\3\2\2\2\u010c\31\3\2\2\2\u010d\u010e\7\33\2\2\u010e\u010f"+
	"\5&\24\2\u010f\u0110\7\31\2\2\u0110\u0111\7\36\2\2\u0111\33\3\2\2\2\u0112"+
	"\u0116\5\36\20\2\u0113\u0115\5 \21\2\u0114\u0113\3\2\2\2\u0115\u0118\3"+
	"\2\2\2\u0116\u0114\3\2\2\2\u0116\u0117\3\2\2\2\u0117\35\3\2\2\2\u0118"+
	"\u0116\3\2\2\2\u0119\u011d\5\"\22\2\u011a\u011c\5$\23\2\u011b\u011a\3"+
	"\2\2\2\u011c\u011f\3\2\2\2\u011d\u011b\3\2\2\2\u011d\u011e\3\2\2\2\u011e"+
	"\37\3\2\2\2\u011f\u011d\3\2\2\2\u0120\u0121\7\36\2\2\u0121\u0122\5\36"+
	"\20\2\u0122!\3\2\2\2\u0123\u0136\5&\24\2\u0124\u0136\7\36\2\2\u0125\u0126"+
	"\7\7\2\2\u0126\u012b\5\34\17\2\u0127\u0128\7\b\2\2\u0128\u012a\5\34\17"+
	"\2\u0129\u0127\3\2\2\2\u012a\u012d\3\2\2\2\u012b\u0129\3\2\2\2\u012b\u012c"+
	"\3\2\2\2\u012c\u012e\3\2\2\2\u012d\u012b\3\2\2\2\u012e\u0132\7\t\2\2\u012f"+
	"\u0131\5(\25\2\u0130\u012f\3\2\2\2\u0131\u0134\3\2\2\2\u0132\u0130\3\2"+
	"\2\2\u0132\u0133\3\2\2\2\u0133\u0136\3\2\2\2\u0134\u0132\3\2\2\2\u0135"+
	"\u0123\3\2\2\2\u0135\u0124\3\2\2\2\u0135\u0125\3\2\2\2\u0136#\3\2\2\2"+
	"\u0137\u0140\7\7\2\2\u0138\u013d\5\34\17\2\u0139\u013a\7\b\2\2\u013a\u013c"+
	"\5\34\17\2\u013b\u0139\3\2\2\2\u013c\u013f\3\2\2\2\u013d\u013b\3\2\2\2"+
	"\u013d\u013e\3\2\2\2\u013e\u0141\3\2\2\2\u013f\u013d\3\2\2\2\u0140\u0138"+
	"\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0142\3\2\2\2\u0142\u0143\7\t\2\2\u0143"+
	"%\3\2\2\2\u0144\u0145\7\36\2\2\u0145\u0146\7\35\2\2\u0146\'\3\2\2\2\u0147"+
	"\u0148\7\34\2\2\u0148\u014a\7\36\2\2\u0149\u014b\7\35\2\2\u014a\u0149"+
	"\3\2\2\2\u014a\u014b\3\2\2\2\u014b)\3\2\2\2(\60\66=CPSY^dlns{\u0085\u008e"+
	"\u0099\u009d\u00a5\u00af\u00b9\u00c4\u00c9\u00d0\u00dd\u00e4\u00e9\u00f2"+
	"\u00fb\u0106\u010b\u0116\u011d\u012b\u0132\u0135\u013d\u0140\u014a";
/** The ATN shared by all instances of this parser, deserialized once at class load. */
public static final ATN _ATN =
	new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
	// One DFA cache slot per prediction decision, filled lazily by the
	// adaptive prediction machinery at parse time.
	_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
	for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
		_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
	}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.operators;
import org.apache.flink.api.common.distributions.DataDistribution;
import org.apache.flink.api.common.functions.RichMapPartitionFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.DataSetUtils;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.test.operators.util.CollectionDataSets;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.util.Collector;
import org.apache.flink.util.TestLogger;
import org.junit.ClassRule;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.fail;
/**
 * Integration tests for custom {@link DataDistribution} implementations used
 * with {@code DataSetUtils.partitionByRange}. Each test partitions a data set
 * against a hand-written distribution and then, inside a
 * {@code mapPartition}, verifies that every record landed in the partition
 * whose boundary interval contains its key(s).
 */
@SuppressWarnings("serial")
public class CustomDistributionITCase extends TestLogger {
	// Shared mini-cluster: 1 task manager x 8 slots, enough for the
	// parallelism used by the distributions below (4 and 5).
	@ClassRule
	public static final MiniClusterWithClientResource MINI_CLUSTER_RESOURCE = new MiniClusterWithClientResource(
		new MiniClusterResourceConfiguration.Builder()
			.setNumberTaskManagers(1)
			.setNumberSlotsPerTaskManager(8)
			.build());
	// ------------------------------------------------------------------------
	/**
	 * Tests that records are partitioned correctly on one field according to
	 * the customized data distribution ({@link TestDataDist1}, 4 partitions).
	 */
	@Test
	public void testPartitionWithDistribution1() throws Exception {
		final TestDataDist1 dist = new TestDataDist1();
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(dist.getParallelism());
		DataSet<Tuple3<Integer, Long, String>> input = CollectionDataSets.get3TupleDataSet(env);
		DataSet<Boolean> result = DataSetUtils
			.partitionByRange(input, dist, 0)
			.mapPartition(new RichMapPartitionFunction<Tuple3<Integer, Long, String>, Boolean>() {
				@Override
				public void mapPartition(Iterable<Tuple3<Integer, Long, String>> values, Collector<Boolean> out) throws Exception {
					// The subtask index identifies which boundary interval
					// this partition is responsible for.
					int pIdx = getRuntimeContext().getIndexOfThisSubtask();
					for (Tuple3<Integer, Long, String> s : values) {
						boolean correctlyPartitioned = true;
						if (pIdx == 0) {
							// First partition: keys must be <= the first upper boundary.
							Integer[] upper = dist.boundaries[0];
							if (s.f0.compareTo(upper[0]) > 0) {
								correctlyPartitioned = false;
							}
						}
						else if (pIdx > 0 && pIdx < dist.getParallelism() - 1) {
							// Middle partitions: lower (exclusive) < key <= upper (inclusive).
							Integer[] lower = dist.boundaries[pIdx - 1];
							Integer[] upper = dist.boundaries[pIdx];
							if (s.f0.compareTo(upper[0]) > 0 || (s.f0.compareTo(lower[0]) <= 0)) {
								correctlyPartitioned = false;
							}
						}
						else {
							// Last partition: keys must be > the last lower boundary.
							Integer[] lower = dist.boundaries[pIdx - 1];
							if ((s.f0.compareTo(lower[0]) <= 0)) {
								correctlyPartitioned = false;
							}
						}
						if (!correctlyPartitioned) {
							fail("Record was not correctly partitioned: " + s.toString());
						}
					}
				}
			}
			);
		result.output(new DiscardingOutputFormat<Boolean>());
		env.execute();
	}
	/**
	 * Tests that records are partitioned correctly on two fields according to
	 * the customized data distribution ({@link TestDataDist2}, 5 partitions,
	 * compared lexicographically on (f0, f1)).
	 */
	@Test
	public void testRangeWithDistribution2() throws Exception {
		final TestDataDist2 dist = new TestDataDist2();
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(dist.getParallelism());
		DataSet<Tuple3<Integer, Integer, String>> input = env.fromElements(
			new Tuple3<>(1, 5, "Hi"),
			new Tuple3<>(1, 6, "Hi"),
			new Tuple3<>(1, 7, "Hi"),
			new Tuple3<>(1, 11, "Hello"),
			new Tuple3<>(2, 3, "World"),
			new Tuple3<>(2, 4, "World"),
			new Tuple3<>(2, 5, "World"),
			new Tuple3<>(2, 13, "Hello World"),
			new Tuple3<>(3, 8, "Say"),
			new Tuple3<>(4, 0, "Why"),
			new Tuple3<>(4, 2, "Java"),
			new Tuple3<>(4, 11, "Say Hello"),
			new Tuple3<>(5, 1, "Hi Java!"),
			new Tuple3<>(5, 2, "Hi Java?"),
			new Tuple3<>(5, 3, "Hi Java again")
		);
		DataSet<Boolean> result = DataSetUtils
			.partitionByRange(input, dist, 0, 1)
			.mapPartition(new RichMapPartitionFunction<Tuple3<Integer, Integer, String>, Boolean>() {
				@Override
				public void mapPartition(Iterable<Tuple3<Integer, Integer, String>> values, Collector<Boolean> out) throws Exception {
					int pIdx = getRuntimeContext().getIndexOfThisSubtask();
					boolean correctlyPartitioned = true;
					for (Tuple3<Integer, Integer, String> s : values) {
						if (pIdx == 0) {
							// First partition: (f0, f1) lexicographically <= first boundary.
							Integer[] upper = dist.boundaries[0];
							if (s.f0.compareTo(upper[0]) > 0 ||
								(s.f0.compareTo(upper[0]) == 0 && s.f1.compareTo(upper[1]) > 0)) {
								correctlyPartitioned = false;
							}
						}
						else if (pIdx > 0 && pIdx < dist.getParallelism() - 1) {
							// Middle partitions: lower (exclusive) < (f0, f1) <= upper (inclusive).
							Integer[] lower = dist.boundaries[pIdx - 1];
							Integer[] upper = dist.boundaries[pIdx];
							if (s.f0.compareTo(upper[0]) > 0 ||
								(s.f0.compareTo(upper[0]) == 0 && s.f1.compareTo(upper[1]) > 0) ||
								(s.f0.compareTo(lower[0]) < 0) ||
								(s.f0.compareTo(lower[0]) == 0 && s.f1.compareTo(lower[1]) <= 0)) {
								correctlyPartitioned = false;
							}
						}
						else {
							// Last partition: (f0, f1) strictly greater than the last lower boundary.
							Integer[] lower = dist.boundaries[pIdx - 1];
							if ((s.f0.compareTo(lower[0]) < 0) ||
								(s.f0.compareTo(lower[0]) == 0 && s.f1.compareTo(lower[1]) <= 0)) {
								correctlyPartitioned = false;
							}
						}
						if (!correctlyPartitioned) {
							fail("Record was not correctly partitioned: " + s.toString());
						}
					}
				}
			}
			);
		result.output(new DiscardingOutputFormat<Boolean>());
		env.execute();
	}
	/**
	 * Tests partitioning with fewer partition keys (1) than distribution
	 * fields (2): only the first distribution field should be used.
	 */
	@Test
	public void testPartitionKeyLessDistribution() throws Exception {
		final TestDataDist2 dist = new TestDataDist2();
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(dist.getParallelism());
		DataSet<Tuple3<Integer, Long, String>> input = CollectionDataSets.get3TupleDataSet(env);
		DataSet<Boolean> result = DataSetUtils
			.partitionByRange(input, dist, 0)
			.mapPartition(new RichMapPartitionFunction<Tuple3<Integer, Long, String>, Boolean>() {
				@Override
				public void mapPartition(Iterable<Tuple3<Integer, Long, String>> values, Collector<Boolean> out) throws Exception {
					int pIdx = getRuntimeContext().getIndexOfThisSubtask();
					for (Tuple3<Integer, Long, String> s : values) {
						boolean correctlyPartitioned = true;
						if (pIdx == 0) {
							// First partition: keys must be <= the first upper boundary.
							Integer[] upper = dist.boundaries[0];
							if (s.f0.compareTo(upper[0]) > 0) {
								correctlyPartitioned = false;
							}
						}
						else if (pIdx > 0 && pIdx < dist.getParallelism() - 1) {
							// Middle partitions: lower (exclusive) < key <= upper (inclusive).
							Integer[] lower = dist.boundaries[pIdx - 1];
							Integer[] upper = dist.boundaries[pIdx];
							if (s.f0.compareTo(upper[0]) > 0 || (s.f0.compareTo(lower[0]) <= 0)) {
								correctlyPartitioned = false;
							}
						}
						else {
							// Last partition: keys must be > the last lower boundary.
							Integer[] lower = dist.boundaries[pIdx - 1];
							if ((s.f0.compareTo(lower[0]) <= 0)) {
								correctlyPartitioned = false;
							}
						}
						if (!correctlyPartitioned) {
							fail("Record was not correctly partitioned: " + s.toString());
						}
					}
				}
			}
			);
		result.output(new DiscardingOutputFormat<Boolean>());
		env.execute();
	}
	/**
	 * Tests that supplying more partition keys (3) than distribution fields
	 * (2) is rejected with an {@link IllegalArgumentException}.
	 */
	@Test(expected = IllegalArgumentException.class)
	public void testPartitionMoreThanDistribution() throws Exception {
		final TestDataDist2 dist = new TestDataDist2();
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Tuple3<Integer, Long, String>> input = CollectionDataSets.get3TupleDataSet(env);
		DataSetUtils.partitionByRange(input, dist, 0, 1, 2);
	}
	/**
	 * The class is used to do the tests of range partition with one key.
	 * Defines 4 partitions via 4 single-field upper boundaries.
	 */
	public static class TestDataDist1 implements DataDistribution {
		// Upper (inclusive) boundary of each bucket; bucket i covers keys in
		// (boundaries[i-1], boundaries[i]].
		public Integer[][] boundaries = new Integer[][]{
			new Integer[]{4},
			new Integer[]{9},
			new Integer[]{13},
			new Integer[]{18}
		};
		public TestDataDist1() {}
		/** Returns the number of partitions this distribution defines. */
		public int getParallelism() {
			return boundaries.length;
		}
		@Override
		public Object[] getBucketBoundary(int bucketNum, int totalNumBuckets) {
			return boundaries[bucketNum];
		}
		@Override
		public int getNumberOfFields() {
			return 1;
		}
		@Override
		public TypeInformation[] getKeyTypes() {
			return new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO};
		}
		@Override
		public void write(DataOutputView out) throws IOException {}
		@Override
		public void read(DataInputView in) throws IOException {}
	}
	/**
	 * The class is used to do the tests of range partition with two keys.
	 * Defines 5 partitions via 5 two-field (lexicographic) upper boundaries.
	 */
	public static class TestDataDist2 implements DataDistribution {
		// Upper (inclusive) boundary of each bucket, compared field by field.
		public Integer[][] boundaries = new Integer[][]{
			new Integer[]{1, 6},
			new Integer[]{2, 4},
			new Integer[]{3, 9},
			new Integer[]{4, 1},
			new Integer[]{5, 2}
		};
		public TestDataDist2() {}
		/** Returns the number of partitions this distribution defines. */
		public int getParallelism() {
			return boundaries.length;
		}
		@Override
		public Object[] getBucketBoundary(int bucketNum, int totalNumBuckets) {
			return boundaries[bucketNum];
		}
		@Override
		public int getNumberOfFields() {
			return 2;
		}
		@Override
		public TypeInformation[] getKeyTypes() {
			return new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO};
		}
		@Override
		public void write(DataOutputView out) throws IOException {}
		@Override
		public void read(DataInputView in) throws IOException {}
	}
}
| |
/*
* Zorbage: an algebraic data hierarchy for use in numeric processing.
*
* Copyright (c) 2016-2021 Barry DeZonia All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* Neither the name of the <copyright holder> nor the names of its contributors may
* be used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
package nom.bdezonia.zorbage.dataview;
import nom.bdezonia.zorbage.algebra.Dimensioned;
import nom.bdezonia.zorbage.data.DimensionedDataSource;
import nom.bdezonia.zorbage.datasource.IndexedDataSource;
/**
*
* @author Barry DeZonia
*
* @param <U>
*/
public class ThirtySixDView<U> implements Dimensioned {
// Extent of each of the 36 dimensions, in dimension order (d0 fastest-varying
// by convention elsewhere in this package — confirm against DViewUtils).
private final long d0;
private final long d1;
private final long d2;
private final long d3;
private final long d4;
private final long d5;
private final long d6;
private final long d7;
private final long d8;
private final long d9;
private final long d10;
private final long d11;
private final long d12;
private final long d13;
private final long d14;
private final long d15;
private final long d16;
private final long d17;
private final long d18;
private final long d19;
private final long d20;
private final long d21;
private final long d22;
private final long d23;
private final long d24;
private final long d25;
private final long d26;
private final long d27;
private final long d28;
private final long d29;
private final long d30;
private final long d31;
private final long d32;
private final long d33;
private final long d34;
private final long d35;
// The flat 1-d data source this 36-d view indexes into.
private final IndexedDataSource<U> list;
/**
 * Construct a view from an {@link IndexedDataSource} and some dimensions.
 * The dimensions are validated against the list size via
 * {@code DViewUtils.checkDims} before any state is stored.
 *
 * @param d0 0th dimension in the view.
 * @param d1 1st dimension in the view.
 * @param d2 2nd dimension in the view.
 * @param d3 3rd dimension in the view.
 * @param d4 4th dimension in the view.
 * @param d5 5th dimension in the view.
 * @param d6 6th dimension in the view.
 * @param d7 7th dimension in the view.
 * @param d8 8th dimension in the view.
 * @param d9 9th dimension in the view.
 * @param d10 10th dimension in the view.
 * @param d11 11th dimension in the view.
 * @param d12 12th dimension in the view.
 * @param d13 13th dimension in the view.
 * @param d14 14th dimension in the view.
 * @param d15 15th dimension in the view.
 * @param d16 16th dimension in the view.
 * @param d17 17th dimension in the view.
 * @param d18 18th dimension in the view.
 * @param d19 19th dimension in the view.
 * @param d20 20th dimension in the view.
 * @param d21 21st dimension in the view.
 * @param d22 22nd dimension in the view.
 * @param d23 23rd dimension in the view.
 * @param d24 24th dimension in the view.
 * @param d25 25th dimension in the view.
 * @param d26 26th dimension in the view.
 * @param d27 27th dimension in the view.
 * @param d28 28th dimension in the view.
 * @param d29 29th dimension in the view.
 * @param d30 30th dimension in the view.
 * @param d31 31st dimension in the view.
 * @param d32 32nd dimension in the view.
 * @param d33 33rd dimension in the view.
 * @param d34 34th dimension in the view.
 * @param d35 35th dimension in the view.
 * @param data The 1-d list the view is being built around.
 */
public ThirtySixDView(long d0, long d1, long d2, long d3, long d4, long d5, long d6, long d7, long d8, long d9, long d10, long d11, long d12, long d13, long d14, long d15, long d16, long d17, long d18, long d19, long d20, long d21, long d22, long d23, long d24, long d25, long d26, long d27, long d28, long d29, long d30, long d31, long d32, long d33, long d34, long d35, IndexedDataSource<U> data) {
	// Fails fast if the product of the dimensions does not match data.size().
	DViewUtils.checkDims(data.size(),d0,d1,d2,d3,d4,d5,d6,d7,d8,d9,d10,d11,d12,d13,d14,d15,d16,d17,d18,d19,d20,d21,d22,d23,d24,d25,d26,d27,d28,d29,d30,d31,d32,d33,d34,d35);
	this.d0 = d0;
	this.d1 = d1;
	this.d2 = d2;
	this.d3 = d3;
	this.d4 = d4;
	this.d5 = d5;
	this.d6 = d6;
	this.d7 = d7;
	this.d8 = d8;
	this.d9 = d9;
	this.d10 = d10;
	this.d11 = d11;
	this.d12 = d12;
	this.d13 = d13;
	this.d14 = d14;
	this.d15 = d15;
	this.d16 = d16;
	this.d17 = d17;
	this.d18 = d18;
	this.d19 = d19;
	this.d20 = d20;
	this.d21 = d21;
	this.d22 = d22;
	this.d23 = d23;
	this.d24 = d24;
	this.d25 = d25;
	this.d26 = d26;
	this.d27 = d27;
	this.d28 = d28;
	this.d29 = d29;
	this.d30 = d30;
	this.d31 = d31;
	this.d32 = d32;
	this.d33 = d33;
	this.d34 = d34;
	this.d35 = d35;
	this.list = data;
}
/**
 * Construct a view from a {@link DimensionedDataSource}, adopting its
 * dimensions and wrapping its raw backing list.
 *
 * @param ds The n-d data set that the view is being built around.
 * @throws IllegalArgumentException if {@code ds} is not exactly 36-dimensional
 */
public ThirtySixDView(DimensionedDataSource<U> ds) {
	if (ds.numDimensions() != 36)
		throw new IllegalArgumentException("36-d view passed a data source that is "+ds.numDimensions()+"-d");
	this.d0 = ds.dimension(0);
	this.d1 = ds.dimension(1);
	this.d2 = ds.dimension(2);
	this.d3 = ds.dimension(3);
	this.d4 = ds.dimension(4);
	this.d5 = ds.dimension(5);
	this.d6 = ds.dimension(6);
	this.d7 = ds.dimension(7);
	this.d8 = ds.dimension(8);
	this.d9 = ds.dimension(9);
	this.d10 = ds.dimension(10);
	this.d11 = ds.dimension(11);
	this.d12 = ds.dimension(12);
	this.d13 = ds.dimension(13);
	this.d14 = ds.dimension(14);
	this.d15 = ds.dimension(15);
	this.d16 = ds.dimension(16);
	this.d17 = ds.dimension(17);
	this.d18 = ds.dimension(18);
	this.d19 = ds.dimension(19);
	this.d20 = ds.dimension(20);
	this.d21 = ds.dimension(21);
	this.d22 = ds.dimension(22);
	this.d23 = ds.dimension(23);
	this.d24 = ds.dimension(24);
	this.d25 = ds.dimension(25);
	this.d26 = ds.dimension(26);
	this.d27 = ds.dimension(27);
	this.d28 = ds.dimension(28);
	this.d29 = ds.dimension(29);
	this.d30 = ds.dimension(30);
	this.d31 = ds.dimension(31);
	this.d32 = ds.dimension(32);
	this.d33 = ds.dimension(33);
	this.d34 = ds.dimension(34);
	this.d35 = ds.dimension(35);
	this.list = ds.rawData();
}
	/**
	 * Returns the 0th dimension of the view.
	 */
	public long d0() { return d0; }
	/**
	 * Returns the 1st dimension of the view.
	 */
	public long d1() { return d1; }
	/**
	 * Returns the 2nd dimension of the view.
	 */
	public long d2() { return d2; }
	/**
	 * Returns the 3rd dimension of the view.
	 */
	public long d3() { return d3; }
	/**
	 * Returns the 4th dimension of the view.
	 */
	public long d4() { return d4; }
	/**
	 * Returns the 5th dimension of the view.
	 */
	public long d5() { return d5; }
	/**
	 * Returns the 6th dimension of the view.
	 */
	public long d6() { return d6; }
	/**
	 * Returns the 7th dimension of the view.
	 */
	public long d7() { return d7; }
	/**
	 * Returns the 8th dimension of the view.
	 */
	public long d8() { return d8; }
	/**
	 * Returns the 9th dimension of the view.
	 */
	public long d9() { return d9; }
	/**
	 * Returns the 10th dimension of the view.
	 */
	public long d10() { return d10; }
	/**
	 * Returns the 11th dimension of the view.
	 */
	public long d11() { return d11; }
	/**
	 * Returns the 12th dimension of the view.
	 */
	public long d12() { return d12; }
	/**
	 * Returns the 13th dimension of the view.
	 */
	public long d13() { return d13; }
	/**
	 * Returns the 14th dimension of the view.
	 */
	public long d14() { return d14; }
	/**
	 * Returns the 15th dimension of the view.
	 */
	public long d15() { return d15; }
	/**
	 * Returns the 16th dimension of the view.
	 */
	public long d16() { return d16; }
	/**
	 * Returns the 17th dimension of the view.
	 */
	public long d17() { return d17; }
	/**
	 * Returns the 18th dimension of the view.
	 */
	public long d18() { return d18; }
	/**
	 * Returns the 19th dimension of the view.
	 */
	public long d19() { return d19; }
	/**
	 * Returns the 20th dimension of the view.
	 */
	public long d20() { return d20; }
	/**
	 * Returns the 21st dimension of the view.
	 */
	public long d21() { return d21; }
	/**
	 * Returns the 22nd dimension of the view.
	 */
	public long d22() { return d22; }
	/**
	 * Returns the 23rd dimension of the view.
	 */
	public long d23() { return d23; }
	/**
	 * Returns the 24th dimension of the view.
	 */
	public long d24() { return d24; }
	/**
	 * Returns the 25th dimension of the view.
	 */
	public long d25() { return d25; }
	/**
	 * Returns the 26th dimension of the view.
	 */
	public long d26() { return d26; }
	/**
	 * Returns the 27th dimension of the view.
	 */
	public long d27() { return d27; }
	/**
	 * Returns the 28th dimension of the view.
	 */
	public long d28() { return d28; }
	/**
	 * Returns the 29th dimension of the view.
	 */
	public long d29() { return d29; }
	/**
	 * Returns the 30th dimension of the view.
	 */
	public long d30() { return d30; }
	/**
	 * Returns the 31st dimension of the view.
	 */
	public long d31() { return d31; }
	/**
	 * Returns the 32nd dimension of the view.
	 */
	public long d32() { return d32; }
	/**
	 * Returns the 33rd dimension of the view.
	 */
	public long d33() { return d33; }
	/**
	 * Returns the 34th dimension of the view.
	 */
	public long d34() { return d34; }
	/**
	 * Returns the 35th dimension of the view.
	 */
	public long d35() { return d35; }
	/**
	 * A view.get() call will pull the value at the view input coordinates from the data set into val.
	 * No index out of bounds checking is done.
	 *
	 * @param i0 0th view input coord
	 * @param i1 1st view input coord
	 * @param i2 2nd view input coord
	 * @param i3 3rd view input coord
	 * @param i4 4th view input coord
	 * @param i5 5th view input coord
	 * @param i6 6th view input coord
	 * @param i7 7th view input coord
	 * @param i8 8th view input coord
	 * @param i9 9th view input coord
	 * @param i10 10th view input coord
	 * @param i11 11th view input coord
	 * @param i12 12th view input coord
	 * @param i13 13th view input coord
	 * @param i14 14th view input coord
	 * @param i15 15th view input coord
	 * @param i16 16th view input coord
	 * @param i17 17th view input coord
	 * @param i18 18th view input coord
	 * @param i19 19th view input coord
	 * @param i20 20th view input coord
	 * @param i21 21st view input coord
	 * @param i22 22nd view input coord
	 * @param i23 23rd view input coord
	 * @param i24 24th view input coord
	 * @param i25 25th view input coord
	 * @param i26 26th view input coord
	 * @param i27 27th view input coord
	 * @param i28 28th view input coord
	 * @param i29 29th view input coord
	 * @param i30 30th view input coord
	 * @param i31 31st view input coord
	 * @param i32 32nd view input coord
	 * @param i33 33rd view input coord
	 * @param i34 34th view input coord
	 * @param i35 35th view input coord
	 * @param val The output where the result is placed
	 */
	public void get(long i0, long i1, long i2, long i3, long i4, long i5, long i6, long i7, long i8, long i9, long i10, long i11, long i12, long i13, long i14, long i15, long i16, long i17, long i18, long i19, long i20, long i21, long i22, long i23, long i24, long i25, long i26, long i27, long i28, long i29, long i30, long i31, long i32, long i33, long i34, long i35, U val) {
		// Horner-style flattening of the 36-d coordinate into a 1-d index:
		// dimension 0 is the fastest-varying (innermost) axis, dimension 35 the slowest.
		long index = i35;
		index = index*d34 + i34;
		index = index*d33 + i33;
		index = index*d32 + i32;
		index = index*d31 + i31;
		index = index*d30 + i30;
		index = index*d29 + i29;
		index = index*d28 + i28;
		index = index*d27 + i27;
		index = index*d26 + i26;
		index = index*d25 + i25;
		index = index*d24 + i24;
		index = index*d23 + i23;
		index = index*d22 + i22;
		index = index*d21 + i21;
		index = index*d20 + i20;
		index = index*d19 + i19;
		index = index*d18 + i18;
		index = index*d17 + i17;
		index = index*d16 + i16;
		index = index*d15 + i15;
		index = index*d14 + i14;
		index = index*d13 + i13;
		index = index*d12 + i12;
		index = index*d11 + i11;
		index = index*d10 + i10;
		index = index*d9 + i9;
		index = index*d8 + i8;
		index = index*d7 + i7;
		index = index*d6 + i6;
		index = index*d5 + i5;
		index = index*d4 + i4;
		index = index*d3 + i3;
		index = index*d2 + i2;
		index = index*d1 + i1;
		index = index*d0 + i0;
		list.get(index, val);
	}
	/**
	 * A view.set() call will push the value at the view input coordinates into the data set.
	 * No index out of bounds checking is done.
	 *
	 * @param i0 0th view input coord
	 * @param i1 1st view input coord
	 * @param i2 2nd view input coord
	 * @param i3 3rd view input coord
	 * @param i4 4th view input coord
	 * @param i5 5th view input coord
	 * @param i6 6th view input coord
	 * @param i7 7th view input coord
	 * @param i8 8th view input coord
	 * @param i9 9th view input coord
	 * @param i10 10th view input coord
	 * @param i11 11th view input coord
	 * @param i12 12th view input coord
	 * @param i13 13th view input coord
	 * @param i14 14th view input coord
	 * @param i15 15th view input coord
	 * @param i16 16th view input coord
	 * @param i17 17th view input coord
	 * @param i18 18th view input coord
	 * @param i19 19th view input coord
	 * @param i20 20th view input coord
	 * @param i21 21st view input coord
	 * @param i22 22nd view input coord
	 * @param i23 23rd view input coord
	 * @param i24 24th view input coord
	 * @param i25 25th view input coord
	 * @param i26 26th view input coord
	 * @param i27 27th view input coord
	 * @param i28 28th view input coord
	 * @param i29 29th view input coord
	 * @param i30 30th view input coord
	 * @param i31 31st view input coord
	 * @param i32 32nd view input coord
	 * @param i33 33rd view input coord
	 * @param i34 34th view input coord
	 * @param i35 35th view input coord
	 * @param val The input that is stored in the underlying data set
	 */
	public void set(long i0, long i1, long i2, long i3, long i4, long i5, long i6, long i7, long i8, long i9, long i10, long i11, long i12, long i13, long i14, long i15, long i16, long i17, long i18, long i19, long i20, long i21, long i22, long i23, long i24, long i25, long i26, long i27, long i28, long i29, long i30, long i31, long i32, long i33, long i34, long i35, U val) {
		// Same Horner-style flattening as get(): dimension 0 is the
		// fastest-varying (innermost) axis, dimension 35 the slowest.
		long index = i35;
		index = index*d34 + i34;
		index = index*d33 + i33;
		index = index*d32 + i32;
		index = index*d31 + i31;
		index = index*d30 + i30;
		index = index*d29 + i29;
		index = index*d28 + i28;
		index = index*d27 + i27;
		index = index*d26 + i26;
		index = index*d25 + i25;
		index = index*d24 + i24;
		index = index*d23 + i23;
		index = index*d22 + i22;
		index = index*d21 + i21;
		index = index*d20 + i20;
		index = index*d19 + i19;
		index = index*d18 + i18;
		index = index*d17 + i17;
		index = index*d16 + i16;
		index = index*d15 + i15;
		index = index*d14 + i14;
		index = index*d13 + i13;
		index = index*d12 + i12;
		index = index*d11 + i11;
		index = index*d10 + i10;
		index = index*d9 + i9;
		index = index*d8 + i8;
		index = index*d7 + i7;
		index = index*d6 + i6;
		index = index*d5 + i5;
		index = index*d4 + i4;
		index = index*d3 + i3;
		index = index*d2 + i2;
		index = index*d1 + i1;
		index = index*d0 + i0;
		list.set(index, val);
	}
/**
* A view.safeGet() call will do a get() call provided the passed index coordinate values
* fit within the view's dimensions. If not an exception is thrown instead.
*/
public void safeGet(long i0, long i1, long i2, long i3, long i4, long i5, long i6, long i7, long i8, long i9, long i10, long i11, long i12, long i13, long i14, long i15, long i16, long i17, long i18, long i19, long i20, long i21, long i22, long i23, long i24, long i25, long i26, long i27, long i28, long i29, long i30, long i31, long i32, long i33, long i34, long i35, U val) {
if (outOfBounds(i0, i1, i2, i3, i4, i5, i6, i7, i8, i9, i10, i11, i12, i13, i14, i15, i16, i17, i18, i19, i20, i21, i22, i23, i24, i25, i26, i27, i28, i29, i30, i31, i32, i33, i34, i35)) {
throw new IllegalArgumentException("view index out of bounds");
}
else
get(i0, i1, i2, i3, i4, i5, i6, i7, i8, i9, i10, i11, i12, i13, i14, i15, i16, i17, i18, i19, i20, i21, i22, i23, i24, i25, i26, i27, i28, i29, i30, i31, i32, i33, i34, i35, val);
}
/**
* A view.safeSet() call will do a set() call provided the passed index coordinate values
* fit within the view's dimensions. If not an exception is thrown instead.
*/
public void safeSet(long i0, long i1, long i2, long i3, long i4, long i5, long i6, long i7, long i8, long i9, long i10, long i11, long i12, long i13, long i14, long i15, long i16, long i17, long i18, long i19, long i20, long i21, long i22, long i23, long i24, long i25, long i26, long i27, long i28, long i29, long i30, long i31, long i32, long i33, long i34, long i35, U val) {
if (outOfBounds(i0, i1, i2, i3, i4, i5, i6, i7, i8, i9, i10, i11, i12, i13, i14, i15, i16, i17, i18, i19, i20, i21, i22, i23, i24, i25, i26, i27, i28, i29, i30, i31, i32, i33, i34, i35)) {
throw new IllegalArgumentException("view index out of bounds");
}
else
set(i0, i1, i2, i3, i4, i5, i6, i7, i8, i9, i10, i11, i12, i13, i14, i15, i16, i17, i18, i19, i20, i21, i22, i23, i24, i25, i26, i27, i28, i29, i30, i31, i32, i33, i34, i35, val);
}
private boolean outOfBounds(long i0, long i1, long i2, long i3, long i4, long i5, long i6, long i7, long i8, long i9, long i10, long i11, long i12, long i13, long i14, long i15, long i16, long i17, long i18, long i19, long i20, long i21, long i22, long i23, long i24, long i25, long i26, long i27, long i28, long i29, long i30, long i31, long i32, long i33, long i34, long i35) {
if (i0 < 0 || i0 >= d0) return true;
if (i1 < 0 || i1 >= d1) return true;
if (i2 < 0 || i2 >= d2) return true;
if (i3 < 0 || i3 >= d3) return true;
if (i4 < 0 || i4 >= d4) return true;
if (i5 < 0 || i5 >= d5) return true;
if (i6 < 0 || i6 >= d6) return true;
if (i7 < 0 || i7 >= d7) return true;
if (i8 < 0 || i8 >= d8) return true;
if (i9 < 0 || i9 >= d9) return true;
if (i10 < 0 || i10 >= d10) return true;
if (i11 < 0 || i11 >= d11) return true;
if (i12 < 0 || i12 >= d12) return true;
if (i13 < 0 || i13 >= d13) return true;
if (i14 < 0 || i14 >= d14) return true;
if (i15 < 0 || i15 >= d15) return true;
if (i16 < 0 || i16 >= d16) return true;
if (i17 < 0 || i17 >= d17) return true;
if (i18 < 0 || i18 >= d18) return true;
if (i19 < 0 || i19 >= d19) return true;
if (i20 < 0 || i20 >= d20) return true;
if (i21 < 0 || i21 >= d21) return true;
if (i22 < 0 || i22 >= d22) return true;
if (i23 < 0 || i23 >= d23) return true;
if (i24 < 0 || i24 >= d24) return true;
if (i25 < 0 || i25 >= d25) return true;
if (i26 < 0 || i26 >= d26) return true;
if (i27 < 0 || i27 >= d27) return true;
if (i28 < 0 || i28 >= d28) return true;
if (i29 < 0 || i29 >= d29) return true;
if (i30 < 0 || i30 >= d30) return true;
if (i31 < 0 || i31 >= d31) return true;
if (i32 < 0 || i32 >= d32) return true;
if (i33 < 0 || i33 >= d33) return true;
if (i34 < 0 || i34 >= d34) return true;
if (i35 < 0 || i35 >= d35) return true;
return false;
}
	/**
	 * Return the number of dimensions in the view.
	 * Always 36 for this view type.
	 */
	@Override
	public int numDimensions() {
		return 36;
	}
/**
* Retrieve each view dimension by index. Throws an exception if
* the dimension index number is outside the view dimensions.
*/
@Override
public long dimension(int d) {
if (d == 0) return d0;
if (d == 1) return d1;
if (d == 2) return d2;
if (d == 3) return d3;
if (d == 4) return d4;
if (d == 5) return d5;
if (d == 6) return d6;
if (d == 7) return d7;
if (d == 8) return d8;
if (d == 9) return d9;
if (d == 10) return d10;
if (d == 11) return d11;
if (d == 12) return d12;
if (d == 13) return d13;
if (d == 14) return d14;
if (d == 15) return d15;
if (d == 16) return d16;
if (d == 17) return d17;
if (d == 18) return d18;
if (d == 19) return d19;
if (d == 20) return d20;
if (d == 21) return d21;
if (d == 22) return d22;
if (d == 23) return d23;
if (d == 24) return d24;
if (d == 25) return d25;
if (d == 26) return d26;
if (d == 27) return d27;
if (d == 28) return d28;
if (d == 29) return d29;
if (d == 30) return d30;
if (d == 31) return d31;
if (d == 32) return d32;
if (d == 33) return d33;
if (d == 34) return d34;
if (d == 35) return d35;
throw new IllegalArgumentException("dimension out of bounds");
}
}
| |
package com.github.yuthura.bianca;
import java.sql.*;
import com.github.yuthura.bianca.helpers.*;
public class Helpers {
// ----------------------------------------------------------------------
// QUERIES
// ----------------------------------------------------------------------
public static Select select(Selectable... selection) {
return new Select(selection);
}
public static Select select(Table table) {
return new Select().from(table);
}
public static Insert insert(Table table, Column<?>... columns) {
return new Insert(table, columns);
}
public static Update update(Table table) {
return new Update(table);
}
public static Delete delete(Table table, Condition... conditions) {
return new Delete(table).where(conditions);
}
// ----------------------------------------------------------------------
// BASIC CONDITIONS
// ----------------------------------------------------------------------
public static Condition and(Object... partials) {
return new CompositeCondition("AND", partials);
}
public static Condition or(Object... partials) {
return new CompositeCondition("OR", partials);
}
public static Condition equalTo(Object left, Object right) {
return new BinaryCondition("=", left, right) {
@Override
public void buildStatement(StringBuilder sb, Partial left, Partial right) {
boolean lNull = left == null || Partial.NULL.equals(left);
boolean rNull = right == null || Partial.NULL.equals(right);
if(!lNull && !rNull) {
left.buildStatement(sb);
sb.append(" = ");
right.buildStatement(sb);
} else if(lNull && rNull) {
sb.append("NULL IS NULL");
} else if(!lNull) {
left.buildStatement(sb);
sb.append(" IS NULL");
} else if(!rNull) {
right.buildStatement(sb);
sb.append(" IS NULL");
} else {
throw new IllegalStateException();
}
}
};
}
public static Condition eq(Object left, Object right) {
return equalTo(left, right);
}
public static Condition notEqualTo(Object left, Object right) {
return new BinaryCondition("!=", left, right) {
@Override
public void buildStatement(StringBuilder sb, Partial left, Partial right) {
boolean lNull = Partial.NULL.equals(left);
boolean rNull = Partial.NULL.equals(right);
if(!lNull && !rNull) {
left.buildStatement(sb);
sb.append(" != ");
right.buildStatement(sb);
} else if(lNull && rNull) {
sb.append("NULL IS NOT NULL");
} else if(!lNull) {
left.buildStatement(sb);
sb.append(" IS NOT NULL");
} else if(!rNull) {
right.buildStatement(sb);
sb.append(" IS NOT NULL");
} else {
throw new IllegalStateException();
}
}
};
}
public static Condition neq(Object left, Object right) {
return notEqualTo(left, right);
}
public static Condition greaterThan(Object left, Object right) {
return new BinaryCondition(">", left, right);
}
public static Condition gt(Object left, Object right) {
return greaterThan(left, right);
}
public static Condition greaterThanOrEqualTo(Object left, Object right) {
return new BinaryCondition(">=", left, right);
}
public static Condition gte(Object left, Object right) {
return greaterThanOrEqualTo(left, right);
}
public static Condition lessThan(Object left, Object right) {
return new BinaryCondition("<", left, right);
}
public static Condition lt(Object left, Object right) {
return lessThan(left, right);
}
public static Condition lessThanOrEqualTo(Object left, Object right) {
return new BinaryCondition("<=", left, right);
}
public static Condition lte(Object left, Object right) {
return lessThanOrEqualTo(left, right);
}
public static Condition in(Object left, Object... right) {
return new InSupport("IN", left, right);
}
public static Condition notIn(Object left, Object... right) {
return new InSupport("NOT IN", left, right);
}
public static Condition nin(Object left, Object... right) {
return notIn(left, right);
}
public static Condition between(Object left, Object right) {
return new BetweenSupport("BETWEEN", left, right);
}
public static Condition notBetween(Object left, Object right) {
return new BetweenSupport("NOT BETWEEN", left, right);
}
public static Condition like(Object left, String pattern) {
return new BinaryCondition("LIKE", left, pattern);
}
public static Condition notLike(Object left, String pattern) {
return new BinaryCondition("NOT LIKE", left, pattern);
}
public static Condition contains(Object left, String pattern) {
return like(left, "%" + pattern + "%");
}
public static Condition startsWith(Object left, String pattern) {
return like(left, pattern + "%");
}
public static Condition sw(Object left, String pattern) {
return startsWith(left, pattern);
}
public static Condition endsWith(Object left, String pattern) {
return like(left, "%" + pattern);
}
public static Condition ew(Object left, String pattern) {
return endsWith(left, pattern);
}
public static Condition regexp(Object left, String pattern) {
return new BinaryCondition("REGEXP", left, pattern);
}
public static Condition notRegexp(Object left, String pattern) {
return new BinaryCondition("NOT REGEXP", left, pattern);
}
// ----------------------------------------------------------------------
// BASIC FUNCTIONS
// ----------------------------------------------------------------------
public static Function f(String function, Object... arguments) {
return new ArbitraryFunction(function, arguments);
}
public static Function ifElse(Object condition, Object ifTrue, Object ifFalse) {
return new ArbitraryFunction("IF", condition, ifTrue, ifFalse);
}
public static Function ifNull(Object when, Object otherwise) {
return new ArbitraryFunction("IFNULL", when, otherwise);
}
public static Function nullIf(Object a, Object b) {
return new ArbitraryFunction("NULLIF", a, b);
}
public static Function greatest(Object... arguments) {
return new ArbitraryFunction("GREATEST", arguments);
}
public static Function least(Object... arguments) {
return new ArbitraryFunction("LEAST", arguments);
}
public static Function coalesce(Object... arguments) {
return new ArbitraryFunction("COALESCE", arguments);
}
public static DistinctableUnaryFunction groupConcat(Object expression, boolean distinct) {
return new DistinctableUnaryFunction("GROUP_CONCAT", expression).distinct(distinct);
}
public static DistinctableUnaryFunction groupConcat(Object expression) {
return groupConcat(expression, false);
}
public static DistinctableUnaryFunction groupConcatDistinct(Object expression) {
return groupConcat(expression, true);
}
// ----------------------------------------------------------------------
// MATHEMTICAL OPERATIONS
// ----------------------------------------------------------------------
public static Operation add(Object left, Object right) {
return new BinaryOperation("+", left, right);
}
public static Operation sub(Object left, Object right) {
return new BinaryOperation("-", left, right);
}
public static Operation mul(Object left, Object right) {
return new BinaryOperation("*", left, right);
}
public static Operation div(Object left, Object right) {
return new BinaryOperation("/", left, right);
}
public static Operation mod(Object left, Object right) {
return new BinaryOperation("%", left, right);
}
// ----------------------------------------------------------------------
// MATHEMATICAL FUNCTIONS
// ----------------------------------------------------------------------
public static Function abs(Object object) {
return new UnaryFunction("abs", object);
}
public static Function acos(Object object) {
return new UnaryFunction("ACOS", object);
}
public static Function asin(Object object) {
return new UnaryFunction("ASIN", object);
}
public static Function atan2(Object first, Object second) {
return new ArbitraryFunction("ATAN2", first, second);
}
public static Function atan(Object object) {
return new UnaryFunction("ATAN", object);
}
public static Function ceiling(Object object) {
return new UnaryFunction("CEILING", object);
}
public static Function cos(Object object) {
return new UnaryFunction("COS", object);
}
public static Function cot(Object object) {
return new UnaryFunction("COT", object);
}
public static Function degrees(Object object) {
return new UnaryFunction("DEGREES", object);
}
public static Function exp(Object object) {
return new UnaryFunction("EXP", object);
}
public static Function floor(Object object) {
return new UnaryFunction("FLOOR", object);
}
public static Function ln(Object object) {
return new UnaryFunction("LN", object);
}
public static Function log10(Object object) {
return new UnaryFunction("LOG10", object);
}
public static Function log2(Object object) {
return new UnaryFunction("LOG2", object);
}
public static Function log(Object object) {
return new UnaryFunction("LOG", object);
}
public static Function pi() {
return new EmptyFunction("PI");
}
public static Function pow(Object first, Object second) {
return new ArbitraryFunction("POW", first, second);
}
public static Function radians(Object object) {
return new UnaryFunction("RADIANS", object);
}
public static Function rand() {
return new EmptyFunction("RAND");
}
public static Function round(Object object) {
return new UnaryFunction("ROUND", object);
}
public static Function round(Object number, Object decimals) {
return new ArbitraryFunction("ROUND", number, decimals);
}
public static Function sign(Object object) {
return new UnaryFunction("SIGN", object);
}
public static Function sin(Object object) {
return new UnaryFunction("SIN", object);
}
public static Function sqrt(Object object) {
return new UnaryFunction("SQRT", object);
}
public static Function tan(Object object) {
return new UnaryFunction("TAN", object);
}
public static Function truncate(Object number, Object decimals) {
return new ArbitraryFunction("TRUNCATE", number, decimals);
}
// ----------------------------------------------------------------------
// AGGREGATE FUNCTIONS
// ----------------------------------------------------------------------
public static DistinctableUnaryFunction avg(Object object) {
return new DistinctableUnaryFunction("AVG", object);
}
public static DistinctableUnaryFunction min(Object object) {
return new DistinctableUnaryFunction("MIN", object);
}
public static DistinctableUnaryFunction max(Object object) {
return new DistinctableUnaryFunction("MAX", object);
}
public static Function sum(Object object) {
return new UnaryFunction("SUM", object);
}
public static DistinctableUnaryFunction count(Object object, boolean distinct) {
return new DistinctableUnaryFunction("COUNT", object).distinct(distinct);
}
public static DistinctableUnaryFunction count(Object object) {
return count(object, false);
}
public static DistinctableUnaryFunction countDistinct(Object object) {
return count(object, true);
}
// ----------------------------------------------------------------------
// STRING FUNCTIONS
// ----------------------------------------------------------------------
public static Function charLength(Object string) {
return new UnaryFunction("CHAR_LENGTH", string);
}
public static Function concat(Object... strings) {
return new ArbitraryFunction("CONCAT", strings);
}
// TODO: This could be more performant if we simply include the separator argument into the strings argument,
// but it makes it less expressive.
public static Function concatWS(Object separator, Object... strings) {
Object[] arguments = new Object[strings.length + 1];
arguments[0] = separator;
System.arraycopy(strings, 0, arguments, 1, strings.length);
return new ArbitraryFunction("CONCAT_WS", arguments);
}
public static Function length(Object string) {
return new UnaryFunction("LENGTH", string);
}
public static Function lower(Object string) {
return new UnaryFunction("LOWER", string);
}
public static Function lpad(Object string, Object length, Object padding) {
return new ArbitraryFunction("LPAD", string, length, padding);
}
public static Function ltrim(Object string) {
return new UnaryFunction("LTRIM", string);
}
public static Function replace(Object string, Object from, String to) {
return new ArbitraryFunction("REPLACE", string, from, to);
}
public static Function reverse(Object string) {
return new UnaryFunction("REVERSE", string);
}
public static Function rpad(Object string, Object length, Object padding) {
return new ArbitraryFunction("RPAD", string, length, padding);
}
public static Function rtrim(Object string) {
return new UnaryFunction("RTRIM", string);
}
// TODO: Should we refactor this into SUBSTRING(string FROM position)?
public static Function substring(Object string, Object position) {
return new ArbitraryFunction("SUBSTRING", string, position);
}
// TODO: Should we refactor this into SUBSTRING(string FROM position FOR length)?
public static Function substring(Object string, Object position, Object length) {
return new ArbitraryFunction("SUBSTRING", string, position, length);
}
// TODO: Add the TRIM(remove FROM string) variant (needs new implementation).
// TODO: Add the BOTH, LEADING and TRAILING options (needs new implementation).
public static Function trim(Object string) {
return new UnaryFunction("TRIM", string);
}
public static Function upper(Object string) {
return new UnaryFunction("UPPER", string);
}
// ----------------------------------------------------------------------
// DATE AND TIME FUNCTIONS
// ----------------------------------------------------------------------
// 0 Sunday 0-53 with a Sunday in this year
public static final Integer WEEK_MODE_0 = Integer.valueOf(0);
// 1 Monday 0-53 with 4 or more days this year
public static final Integer WEEK_MODE_1 = Integer.valueOf(1);
// 2 Sunday 1-53 with a Sunday in this year
public static final Integer WEEK_MODE_2 = Integer.valueOf(2);
// 3 Monday 1-53 with 4 or more days this year
public static final Integer WEEK_MODE_3 = Integer.valueOf(3);
// 4 Sunday 0-53 with 4 or more days this year
public static final Integer WEEK_MODE_4 = Integer.valueOf(4);
// 5 Monday 0-53 with a Monday in this year
public static final Integer WEEK_MODE_5 = Integer.valueOf(5);
// 6 Sunday 1-53 with 4 or more days this year
public static final Integer WEEK_MODE_6 = Integer.valueOf(6);
// 7 Monday 1-53 with a Monday in this year
public static final Integer WEEK_MODE_7 = Integer.valueOf(7);
public static enum Interval {
MICROSECOND("MICROSECOND"),
SECOND("SECOND"),
MINUTE("MINUTE"),
HOUR("HOUR"),
DAY("DAY"),
WEEK("WEEK"),
MONTH("MONTH"),
QUARTER("QUARTER"),
YEAR("YEAR"),
SECOND_MICROSECOND("SECOND_MICROSECOND"),
MINUTE_MICROSECOND("MINUTE_MICROSECOND"),
MINUTE_SECOND("MINUTE_SECOND"),
HOUR_MICROSECOND("HOUR_MICROSECOND"),
HOUR_SECOND("HOUR_SECOND"),
HOUR_MINUTE("HOUR_MINUTE"),
DAY_MICROSECOND("DAY_MICROSECOND"),
DAY_SECOND("DAY_SECOND"),
DAY_MINUTE("DAY_MINUTE"),
DAY_HOUR("DAY_HOUR"),
YEAR_MONTH("YEAR_MONTH");
private final String sql;
Interval(String sql) {
this.sql = sql;
}
}
public static Partial interval(Object value, Interval unit) {
final Partial v = Partial.wrap(value);
return new Partial() {
@Override
public void buildStatement(StringBuilder sb) {
sb.append("INTERVAL ");
v.buildStatement(sb);
sb.append(" ");
sb.append(unit.sql);
}
@Override
public int prepareStatement(PreparedStatement statement, int index) throws SQLException {
return v.prepareStatement(statement, index);
}
};
}
public static Function addDate(Object argument, Object interval) {
return new ArbitraryFunction("ADDDATE", argument, interval);
}
public static Function addTime(Object first, Object second) {
return new ArbitraryFunction("ADDTIME", first, second);
}
public static Function curDate() {
return new EmptyFunction("CURDATE");
}
public static Function curTime() {
return new EmptyFunction("CURTIME");
}
public static Function dateAdd(Object argument, Object interval) {
return new ArbitraryFunction("DATE_ADD", argument, interval);
}
public static Function dateFormat(Object date, Object format) {
return new ArbitraryFunction("DATE_FORMAT", date, format);
}
public static Function dateSub(Object argument, Object interval) {
return new ArbitraryFunction("DATE_SUB", argument, interval);
}
public static Function date(Object argument) {
return new UnaryFunction("DATE", argument);
}
public static Function dateDiff(Object first, Object second) {
return new ArbitraryFunction("DATEDIFF", first, second);
}
public static Function dayOfMonth(Object argument) {
return new UnaryFunction("DAYOFMONTH", argument);
}
public static Function dayOfWeek(Object argument) {
return new UnaryFunction("DAYOFWEEK", argument);
}
public static Function dayOfYear(Object argument) {
return new UnaryFunction("DAYOFYEAR", argument);
}
public static Function hour(Object argument) {
return new UnaryFunction("HOUR", argument);
}
public static Function lastDay(Object argument) {
return new UnaryFunction("LAST_DAY", argument);
}
public static Function makeDate(Object year, Object dayOfYear) {
return new ArbitraryFunction("MAKEDATE", year, dayOfYear);
}
public static Function makeTime(Object hour, Object minute, Object second) {
return new ArbitraryFunction("MAKETIME", hour, minute, second);
}
public static Function minute(Object argument) {
return new UnaryFunction("MINUTE", argument);
}
public static Function month(Object argument) {
return new UnaryFunction("MONTH", argument);
}
public static Function now() {
return new EmptyFunction("NOW");
}
public static Function periodAdd(Object period, Object months) {
return new ArbitraryFunction("PERIOD_ADD", period, months);
}
public static Function period_diff(Object first, Object second) {
return new ArbitraryFunction("PERIOD_DIFF", first, second);
}
public static Function secToTime(Object argument) {
return new UnaryFunction("SEC_TO_TIME", argument);
}
public static Function second(Object argument) {
return new UnaryFunction("SECOND", argument);
}
public static Function strToDate(Object argument, Object format) {
return new ArbitraryFunction("STR_TO_DATE", argument, format);
}
public static Function subDate(Object argument, Object interval) {
return new ArbitraryFunction("SUBDATE", argument, interval);
}
public static Function subTime(Object first, Object second) {
return new ArbitraryFunction("SUBTIME", first, second);
}
public static Function timeFormat(Object time, Object format) {
return new ArbitraryFunction("TIME_FORMAT", time, format);
}
public static Function timeToSec(Object argument) {
return new UnaryFunction("TIME_TO_SEC", argument);
}
public static Function time(Object argument) {
return new UnaryFunction("TIME", argument);
}
public static Function timeDiff(Object first, Object second) {
return new ArbitraryFunction("TIMEDIFF", first, second);
}
public static Function toDays(Object argument) {
return new UnaryFunction("TO_DAYS", argument);
}
public static Function utcDate() {
return new EmptyFunction("UTC_DATE");
}
public static Function utcTime() {
return new EmptyFunction("UTC_TIME");
}
public static Function utcTimestamp() {
return new EmptyFunction("UTC_TIMESTAMP");
}
public static Function week(Object date) {
return new UnaryFunction("WEEK", date);
}
public static Function week(Object date, Object mode) {
return new ArbitraryFunction("WEEK", date, mode);
}
public static Function weekDay(Object argument) {
return new UnaryFunction("WEEKDAY", argument);
}
public static Function weekOfYear(Object argument) {
return new UnaryFunction("WEEKOFYEAR", argument);
}
public static Function year(Object argument) {
return new UnaryFunction("YEAR", argument);
}
public static Function yearWeek(Object argument) {
return new UnaryFunction("YEARWEEK", argument);
}
/**
 * Builds the SQL {@code YEARWEEK(argument, mode)} function call.
 *
 * @param argument date value to compute the year-week for
 * @param mode week-numbering mode
 * @return a {@code Function} rendering {@code YEARWEEK(argument, mode)}
 */
public static Function yearWeek(Object argument, Object mode) {
return new ArbitraryFunction("YEARWEEK", argument, mode);
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.prometheus.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents the properties of an alert manager definition.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/amp-2020-08-01/AlertManagerDefinitionDescription"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AlertManagerDefinitionDescription implements Serializable, Cloneable, StructuredPojo {

    /** Creation time of the alert manager definition. */
    private java.util.Date createdAt;

    /** Raw alert manager definition payload. */
    private java.nio.ByteBuffer data;

    /** Last modification time of the alert manager definition. */
    private java.util.Date modifiedAt;

    /** Current status of the alert manager definition. */
    private AlertManagerDefinitionStatus status;

    /**
     * Sets the time when the alert manager definition was created.
     *
     * @param createdAt creation time
     */
    public void setCreatedAt(java.util.Date createdAt) {
        this.createdAt = createdAt;
    }

    /**
     * Returns the time when the alert manager definition was created.
     *
     * @return creation time
     */
    public java.util.Date getCreatedAt() {
        return this.createdAt;
    }

    /**
     * Fluent variant of {@link #setCreatedAt(java.util.Date)}.
     *
     * @param createdAt creation time
     * @return this object, for call chaining
     */
    public AlertManagerDefinitionDescription withCreatedAt(java.util.Date createdAt) {
        setCreatedAt(createdAt);
        return this;
    }

    /**
     * Sets the alert manager definition payload.
     *
     * <p>The AWS SDK for Java performs a Base64 encoding on this field before sending this request
     * to the AWS service. Users of the SDK should not perform Base64 encoding on this field.</p>
     *
     * <p>Warning: ByteBuffers returned by the SDK are mutable; changes to content or position are
     * visible to every holder of the buffer. Prefer {@code ByteBuffer.duplicate()} or
     * {@code ByteBuffer.asReadOnlyBuffer()} before reading.</p>
     *
     * @param data the alert manager definition
     */
    public void setData(java.nio.ByteBuffer data) {
        this.data = data;
    }

    /**
     * Returns the alert manager definition payload.
     *
     * <p>{@code ByteBuffer}s are stateful: calling {@code get} methods moves the {@code position}.
     * Use {@link java.nio.ByteBuffer#asReadOnlyBuffer()} for an independent read-only view so other
     * readers are not affected.</p>
     *
     * @return the alert manager definition
     */
    public java.nio.ByteBuffer getData() {
        return this.data;
    }

    /**
     * Fluent variant of {@link #setData(java.nio.ByteBuffer)}; see that method for the Base64 and
     * mutability caveats.
     *
     * @param data the alert manager definition
     * @return this object, for call chaining
     */
    public AlertManagerDefinitionDescription withData(java.nio.ByteBuffer data) {
        setData(data);
        return this;
    }

    /**
     * Sets the time when the alert manager definition was modified.
     *
     * @param modifiedAt modification time
     */
    public void setModifiedAt(java.util.Date modifiedAt) {
        this.modifiedAt = modifiedAt;
    }

    /**
     * Returns the time when the alert manager definition was modified.
     *
     * @return modification time
     */
    public java.util.Date getModifiedAt() {
        return this.modifiedAt;
    }

    /**
     * Fluent variant of {@link #setModifiedAt(java.util.Date)}.
     *
     * @param modifiedAt modification time
     * @return this object, for call chaining
     */
    public AlertManagerDefinitionDescription withModifiedAt(java.util.Date modifiedAt) {
        setModifiedAt(modifiedAt);
        return this;
    }

    /**
     * Sets the status of the alert manager definition.
     *
     * @param status definition status
     */
    public void setStatus(AlertManagerDefinitionStatus status) {
        this.status = status;
    }

    /**
     * Returns the status of the alert manager definition.
     *
     * @return definition status
     */
    public AlertManagerDefinitionStatus getStatus() {
        return this.status;
    }

    /**
     * Fluent variant of {@link #setStatus(AlertManagerDefinitionStatus)}.
     *
     * @param status definition status
     * @return this object, for call chaining
     */
    public AlertManagerDefinitionDescription withStatus(AlertManagerDefinitionStatus status) {
        setStatus(status);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getCreatedAt() != null) {
            buf.append("CreatedAt: ").append(getCreatedAt()).append(",");
        }
        if (getData() != null) {
            buf.append("Data: ").append(getData()).append(",");
        }
        if (getModifiedAt() != null) {
            buf.append("ModifiedAt: ").append(getModifiedAt()).append(",");
        }
        if (getStatus() != null) {
            buf.append("Status: ").append(getStatus());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof AlertManagerDefinitionDescription)) {
            // also covers obj == null
            return false;
        }
        AlertManagerDefinitionDescription that = (AlertManagerDefinitionDescription) obj;
        // Objects.equals is equivalent to the generated null-xor + equals pattern.
        return java.util.Objects.equals(getCreatedAt(), that.getCreatedAt())
            && java.util.Objects.equals(getData(), that.getData())
            && java.util.Objects.equals(getModifiedAt(), that.getModifiedAt())
            && java.util.Objects.equals(getStatus(), that.getStatus());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same 31-based scheme as the generated per-field loop,
        // so values are bit-identical to the original implementation.
        return java.util.Objects.hash(getCreatedAt(), getData(), getModifiedAt(), getStatus());
    }

    @Override
    public AlertManagerDefinitionDescription clone() {
        try {
            return (AlertManagerDefinitionDescription) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    /** Marshalls this structured POJO using the generated marshaller. */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.prometheus.model.transform.AlertManagerDefinitionDescriptionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
$Id$
Copyright (C) 2003-2010 Virginia Tech.
All rights reserved.
SEE LICENSE FOR MORE INFORMATION
Author: Middleware Services
Email: middleware@vt.edu
Version: $Revision$
Updated: $Date$
*/
package edu.vt.middleware.dictionary;
import java.io.FileReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import edu.vt.middleware.dictionary.sort.ArraysSort;
/**
* <code>TernaryTreeDictionary</code> provides fast searching for dictionary
* words using a ternary tree. The entire dictionary is stored in memory, so
* heap size may need to be adjusted to accommodate large dictionaries. It is
* highly recommended that sorted word lists be inserted using their median.
* This helps to produce a balanced ternary tree which improves search time.
* This class inherits the lower case property of the supplied word list.
*
* @author Middleware Services
* @version $Revision$ $Date$
*/
public class TernaryTreeDictionary implements Dictionary
{

  /** Ternary tree used for searching. */
  protected TernaryTree tree;


  /**
   * Creates a new balanced tree dictionary from the given {@link WordList}.
   * This constructor creates a balanced tree by inserting from the median of
   * the word list, which may require additional work depending on the {@link
   * WordList} implementation.
   *
   * <p><strong>NOTE</strong> While using an unsorted word list produces correct
   * results, it may dramatically reduce search efficiency. Using a sorted word
   * list is recommended.</p>
   *
   * @param wordList List of words used to back the dictionary. This list is
   * used exclusively to initialize the internal {@link TernaryTree} used by the
   * dictionary, and may be safely discarded after dictionary creation.
   */
  public TernaryTreeDictionary(final WordList wordList)
  {
    this(wordList, true);
  }


  /**
   * Creates a new dictionary instance from the given {@link WordList}.
   *
   * @param wordList List of words used to back the dictionary. This list is
   * used exclusively to initialize the internal {@link TernaryTree} used by the
   * dictionary, and may be safely discarded after dictionary creation.
   *
   * <p><strong>NOTE</strong> While using an unsorted word list produces correct
   * results, it may dramatically reduce search efficiency. Using a sorted word
   * list is recommended.</p>
   * @param useMedian Set to true to force creation of a balanced tree by
   * inserting into the tree from the median of the {@link WordList} outward.
   * Depending on the word list implementation, this may require additional work
   * to access the median element on each insert.
   */
  public TernaryTreeDictionary(final WordList wordList, final boolean useMedian)
  {
    // Respect case sensitivity of the word list in the ternary tree:
    // if the list's comparator treats "A" and "a" as equal, build a
    // case-insensitive tree.
    if (wordList.getComparator().compare("A", "a") == 0) {
      this.tree = new TernaryTree(false);
    } else {
      this.tree = new TernaryTree(true);
    }

    final Iterator<String> iterator;
    if (useMedian) {
      iterator = wordList.medianIterator();
    } else {
      iterator = wordList.iterator();
    }
    while (iterator.hasNext()) {
      this.tree.insert(iterator.next());
    }
  }


  /**
   * Creates a dictionary that uses the given ternary tree for dictionary
   * searches.
   *
   * @param tt Ternary tree used to back dictionary.
   */
  public TernaryTreeDictionary(final TernaryTree tt)
  {
    this.tree = tt;
  }


  /** {@inheritDoc} */
  public boolean search(final String word)
  {
    return this.tree.search(word);
  }


  /**
   * This will return an array of strings which partially match the supplied
   * word. This search is case sensitive by default. See {@link
   * TernaryTree#partialSearch}.
   *
   * @param word <code>String</code> to search for
   *
   * @return <code>String[]</code> - of matching words
   */
  public String[] partialSearch(final String word)
  {
    return this.tree.partialSearch(word);
  }


  /**
   * This will return an array of strings which are near to the supplied word by
   * the supplied distance. This search is case sensitive by default. See {@link
   * TernaryTree#nearSearch}.
   *
   * @param word <code>String</code> to search for
   * @param distance <code>int</code> for valid match
   *
   * @return <code>String[]</code> - of matching words
   */
  public String[] nearSearch(final String word, final int distance)
  {
    return this.tree.nearSearch(word, distance);
  }


  /**
   * Returns the underlying ternary tree used by this dictionary.
   *
   * @return <code>TernaryTree</code>
   */
  public TernaryTree getTernaryTree()
  {
    return this.tree;
  }


  /**
   * This provides command line access to a <code>TernaryTreeDictionary</code>.
   *
   * @param args <code>String[]</code>
   *
   * @throws Exception if an error occurs
   */
  public static void main(final String[] args)
    throws Exception
  {
    final List<FileReader> files = new ArrayList<FileReader>();
    boolean usageError = false;
    try {
      // ArrayIndexOutOfBoundsException is (ab)used as a "print usage" signal,
      // both thrown explicitly and raised by args[++i] on missing parameters.
      if (args.length == 0) {
        throw new ArrayIndexOutOfBoundsException();
      }

      // dictionary operations
      boolean useMedian = false;
      boolean caseSensitive = true;
      boolean search = false;
      boolean partialSearch = false;
      boolean nearSearch = false;
      boolean print = false;

      // operation parameters
      String word = null;
      int distance = 0;

      for (int i = 0; i < args.length; i++) {
        if ("-m".equals(args[i])) {
          useMedian = true;
        } else if ("-ci".equals(args[i])) {
          caseSensitive = false;
        } else if ("-s".equals(args[i])) {
          search = true;
          word = args[++i];
        } else if ("-ps".equals(args[i])) {
          partialSearch = true;
          word = args[++i];
        } else if ("-ns".equals(args[i])) {
          nearSearch = true;
          word = args[++i];
          distance = Integer.parseInt(args[++i]);
        } else if ("-p".equals(args[i])) {
          print = true;
        } else if ("-h".equals(args[i])) {
          throw new ArrayIndexOutOfBoundsException();
        } else {
          files.add(new FileReader(args[i]));
        }
      }

      // insert data
      final ArrayWordList awl = WordLists.createFromReader(
        files.toArray(new FileReader[files.size()]),
        caseSensitive,
        new ArraysSort());
      final TernaryTreeDictionary dict = new TernaryTreeDictionary(
        awl,
        useMedian);

      // perform operation
      if (search) {
        if (dict.search(word)) {
          System.out.println(
            String.format("%s was found in this dictionary", word));
        } else {
          System.out.println(
            String.format("%s was not found in this dictionary", word));
        }
      } else if (partialSearch) {
        final String[] matches = dict.partialSearch(word);
        System.out.println(
          String.format(
            "Found %s matches for %s in this dictionary : %s",
            matches.length,
            word,
            Arrays.asList(matches)));
      } else if (nearSearch) {
        final String[] matches = dict.nearSearch(word, distance);
        System.out.println(
          String.format(
            "Found %s matches for %s in this dictionary at a distance of %s " +
            ": %s",
            matches.length,
            word,
            distance,
            Arrays.asList(matches)));
      } else if (print) {
        dict.getTernaryTree().print(new PrintWriter(System.out, true));
      } else {
        throw new ArrayIndexOutOfBoundsException();
      }
    } catch (ArrayIndexOutOfBoundsException e) {
      printUsage();
      // Defer System.exit until after cleanup: calling it inside the catch
      // would skip the finally block below.
      usageError = true;
    } finally {
      // FIX: the original leaked every FileReader it opened. Close them all,
      // best-effort, once dictionary construction (which consumes them) is done.
      for (FileReader reader : files) {
        try {
          reader.close();
        } catch (java.io.IOException ignored) {
          // nothing useful can be done for a failed close during cleanup
        }
      }
    }
    if (usageError) {
      System.exit(1);
    }
  }


  /** Prints command line usage for this class to STDOUT. */
  private static void printUsage()
  {
    System.out.println(
      "Usage: java " + TernaryTreeDictionary.class.getName() + " \\");
    System.out.println(
      "       <dictionary1> <dictionary2> ... " +
      "<options> <operation> \\");
    System.out.println("");
    System.out.println("where <options> includes:");
    System.out.println("       -m (Insert dictionary using its median) \\");
    System.out.println("       -ci (Make search case-insensitive) \\");
    System.out.println("");
    System.out.println("where <operation> includes:");
    System.out.println("       -s <word> (Search for a word) \\");
    System.out.println("       -ps <word> (Partial search for a word) \\");
    System.out.println("           (where word like '.a.a.a') \\");
    System.out.println(
      "       -ns <word> <distance> " +
      "(Near search for a word) \\");
    System.out.println(
      "       -p (Print the entire dictionary " + "in tree form) \\");
    System.out.println("       -h (Print this message) \\");
  }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.ios;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import cli.MonoTouch.Foundation.NSUrl;
import cli.MonoTouch.UIKit.UIApplication;
import cli.System.IO.Stream;
import cli.System.IO.StreamReader;
import cli.System.Net.HttpWebRequest;
import cli.System.Net.HttpWebResponse;
import cli.System.Net.WebHeaderCollection;
import cli.System.Net.WebRequest;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Net;
import com.badlogic.gdx.net.HttpStatus;
import com.badlogic.gdx.net.ServerSocket;
import com.badlogic.gdx.net.ServerSocketHints;
import com.badlogic.gdx.net.Socket;
import com.badlogic.gdx.net.SocketHints;
import com.badlogic.gdx.utils.StreamUtils;
public class IOSNet implements Net {

	/** Adapts a .NET {@code cli.System.IO.Stream} to a {@code java.io.InputStream}. */
	public static class InputStreamNetStreamImpl extends InputStream {
		private final Stream stream;

		public InputStreamNetStreamImpl (Stream stream) {
			this.stream = stream;
		}

		@Override
		public int read () throws IOException {
			return stream.ReadByte();
		}

		@Override
		public int read (byte[] b) throws IOException {
			return read(b, 0, b.length);
		}

		@Override
		public int read (byte[] b, int off, int len) throws IOException {
			return stream.Read(b, off, len);
		}
	}

	/** Adapts a .NET {@code cli.System.IO.Stream} to a {@code java.io.OutputStream}. */
	public static class OutputStreamNetStreamImpl extends OutputStream {
		private Stream stream;

		public OutputStreamNetStreamImpl (Stream stream) {
			this.stream = stream;
		}

		@Override
		public void write (int b) throws IOException {
			// should be the first 8bits of the 32bits int.
			stream.WriteByte((byte)b);
		}

		@Override
		public void write (byte[] b) throws IOException {
			write(b, 0, b.length);
		}

		@Override
		public void write (byte[] b, int off, int len) throws IOException {
			stream.Write(b, off, len);
		}
	}

	/** {@link HttpResponse} backed by a .NET {@code HttpWebResponse}. */
	static class IosHttpResponse implements HttpResponse {
		private HttpWebResponse webResponse;

		public IosHttpResponse (HttpWebResponse webResponse) {
			this.webResponse = webResponse;
		}

		@Override
		public HttpStatus getStatus () {
			return new HttpStatus(webResponse.get_StatusCode().Value);
		}

		@Override
		public String getResultAsString () {
			StreamReader reader = new StreamReader(webResponse.GetResponseStream());
			return reader.ReadToEnd();
		}

		@Override
		public InputStream getResultAsStream () {
			return new InputStreamNetStreamImpl(webResponse.GetResponseStream());
		}

		@Override
		public byte[] getResult () {
			Stream responseStream = webResponse.GetResponseStream();
			int length = (int)webResponse.get_ContentLength();
			if (length >= 0) {
				byte[] result = new byte[length];
				int offset = 0;
				// FIX: Stream.Read may return fewer bytes than requested; the original
				// issued a single Read and could silently truncate the body. Loop until
				// the full declared content length is read or the stream ends early.
				while (offset < length) {
					int read = responseStream.Read(result, offset, length - offset);
					if (read <= 0) break; // premature end of stream
					offset += read;
				}
				return result;
			}
			// FIX: ContentLength can be -1 (unknown); the original would throw
			// NegativeArraySizeException. Fall back to reading until end of stream.
			java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
			byte[] chunk = new byte[4096];
			int read;
			while ((read = responseStream.Read(chunk, 0, chunk.length)) > 0) {
				buffer.write(chunk, 0, read);
			}
			return buffer.toByteArray();
		}

		@Override
		public String getHeader (String name) {
			return webResponse.get_Headers().Get(name);
		}

		@Override
		public Map<String, List<String>> getHeaders () {
			// Flatten the .NET WebHeaderCollection into a name -> values map,
			// merging repeated header names into one value list.
			WebHeaderCollection responseHeaders = webResponse.get_Headers();
			Map<String, List<String>> headers = new HashMap<String, List<String>>();
			for (int i = 0, j = responseHeaders.get_Count(); i < j; i++) {
				String headerName = responseHeaders.GetKey(i);
				List<String> headerValues = headers.get(headerName);
				if (headerValues == null) {
					headerValues = new ArrayList<String>();
					headers.put(headerName, headerValues);
				}
				String[] responseHeaderValues = responseHeaders.GetValues(i);
				for (int k = 0; k < responseHeaderValues.length; k++) {
					headerValues.add(responseHeaderValues[k]);
				}
			}
			return headers;
		}
	}

	final UIApplication uiApp;
	final ExecutorService executorService;

	public IOSNet (IOSApplication app) {
		uiApp = app.uiApp;
		executorService = Executors.newCachedThreadPool();
	}

	/**
	 * Executes the HTTP request asynchronously on the internal thread pool and reports the
	 * result (or failure) to the supplied listener from that worker thread.
	 */
	@Override
	public void sendHttpRequest (final HttpRequest httpRequest, final HttpResponseListener httpResultListener) {
		// The Future returned by submit() was never used; don't keep it.
		executorService.submit(new Runnable() {
			@Override
			public void run () {
				try {
					String url = httpRequest.getUrl();
					String method = httpRequest.getMethod();

					// For GET requests the content is sent as the query string.
					if (method.equalsIgnoreCase(HttpMethods.GET)) {
						String value = httpRequest.getContent();
						if (value != null && !"".equals(value)) url += "?" + value;
					}

					HttpWebRequest httpWebRequest = (HttpWebRequest)WebRequest.Create(url);

					int timeOut = httpRequest.getTimeOut();
					if (timeOut > 0)
						httpWebRequest.set_Timeout(timeOut);
					else
						httpWebRequest.set_Timeout(-1); // the value of the Infinite constant (see
					// http://msdn.microsoft.com/en-us/library/system.threading.timeout.infinite.aspx)

					httpWebRequest.set_Method(method);

					Map<String, String> headers = httpRequest.getHeaders();
					WebHeaderCollection webHeaderCollection = new WebHeaderCollection();
					for (String key : headers.keySet())
						webHeaderCollection.Add(key, headers.get(key));
					httpWebRequest.set_Headers(webHeaderCollection);

					if (method.equalsIgnoreCase(HttpMethods.POST) || method.equalsIgnoreCase(HttpMethods.PUT)) {
						InputStream contentAsStream = httpRequest.getContentStream();
						String contentAsString = httpRequest.getContent();

						if (contentAsStream != null) {
							// NOTE(review): available() is only an estimate of the remaining
							// bytes, so the declared Content-Length may be wrong for streams
							// that don't report their full size up front — TODO confirm.
							httpWebRequest.set_ContentLength(contentAsStream.available());
							Stream stream = httpWebRequest.GetRequestStream();
							StreamUtils.copyStream(contentAsStream, new OutputStreamNetStreamImpl(stream));
							stream.Close();
						} else if (contentAsString != null) {
							byte[] data = contentAsString.getBytes();
							httpWebRequest.set_ContentLength(data.length);
							Stream stream = httpWebRequest.GetRequestStream();
							stream.Write(data, 0, data.length);
							stream.Close();
						}
					}

					final HttpWebResponse httpWebResponse = (HttpWebResponse)httpWebRequest.GetResponse();
					httpResultListener.handleHttpResponse(new IosHttpResponse(httpWebResponse));
				} catch (final Exception e) {
					httpResultListener.failed(e);
				}
			}
		});
	}

	@Override
	public ServerSocket newServerSocket (Protocol protocol, int port, ServerSocketHints hints) {
		return new IOSServerSocket(protocol, port, hints);
	}

	@Override
	public Socket newClientSocket (Protocol protocol, String host, int port, SocketHints hints) {
		return new IOSSocket(protocol, host, port, hints);
	}

	/** Opens the URI with the system handler (e.g. Safari for http URLs). */
	@Override
	public void openURI (String URI) {
		uiApp.OpenUrl(new NSUrl(URI));
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.