gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package opencrypto.test;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.Security;
import java.util.Arrays;
import java.util.Random;
import javacard.framework.ISO7816;
import javax.smartcardio.ResponseAPDU;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.interfaces.ECPublicKey;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.jce.spec.ECParameterSpec;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.math.ec.ECPoint;
/**
*
* @author Vasilios Mavroudis and Petr Svenda
*/
/**
 * Static helper routines for the OpenCrypto test suite: hex encoding/decoding,
 * byte-array manipulation, big-integer (de)serialization, and a few modular
 * arithmetic utilities used by the elliptic-curve tests.
 */
public class Util {

    /** Lookup table for nibble-to-hex conversion used by {@link #bytesToHex}. */
    private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();

    /**
     * Shared RNG for {@link #randomBigNat}; a single instance avoids re-seeding
     * from the clock on every call (which could repeat values on fast calls).
     * NOTE(review): not a SecureRandom — acceptable for tests, do not use for
     * real key material.
     */
    private static final Random rnd = new Random();

    /** Hex-encodes the whole array (uppercase, no separators). */
    public static String toHex(byte[] bytes) {
        return toHex(bytes, 0, bytes.length);
    }

    /**
     * Hex-encodes {@code len} bytes of {@code bytes} starting at {@code offset}.
     * Fixed: accumulates into a StringBuilder instead of repeated String
     * concatenation, which was O(n^2) in the number of bytes.
     */
    public static String toHex(byte[] bytes, int offset, int len) {
        StringBuilder buff = new StringBuilder(len * 2);
        for (int i = offset; i < offset + len; i++) {
            buff.append(String.format("%02X", bytes[i]));
        }
        return buff.toString();
    }

    /** Hex-encodes the whole array via a lookup table (no String.format overhead). */
    public static String bytesToHex(byte[] bytes) {
        char[] hexChars = new char[bytes.length * 2];
        for (int j = 0; j < bytes.length; j++) {
            int v = bytes[j] & 0xFF;
            hexChars[j * 2] = HEX_ARRAY[v >>> 4];
            hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F];
        }
        return new String(hexChars);
    }

    /* Utils */

    /** Reads a big-endian signed 16-bit value from {@code buffer} at {@code offset}. */
    public static short getShort(byte[] buffer, int offset) {
        return ByteBuffer.wrap(buffer, offset, 2).order(ByteOrder.BIG_ENDIAN).getShort();
    }

    /** Manual big-endian 16-bit read; equivalent to {@link #getShort}. */
    public static short readShort(byte[] data, int offset) {
        return (short) ((data[offset] << 8) | (data[offset + 1] & 0xff));
    }

    /** Serializes the low 16 bits of {@code s} as a 2-byte big-endian array. */
    public static byte[] shortToByteArray(int s) {
        return new byte[]{(byte) ((s & 0xFF00) >> 8), (byte) (s & 0x00FF)};
    }

    /** Concatenates any number of byte arrays into a single newly allocated array. */
    public static byte[] joinArray(byte[]... arrays) {
        int length = 0;
        for (byte[] array : arrays) {
            length += array.length;
        }
        final byte[] result = new byte[length];
        int offset = 0;
        for (byte[] array : arrays) {
            System.arraycopy(array, 0, result, offset, array.length);
            offset += array.length;
        }
        return result;
    }

    /**
     * Returns a copy of {@code array} with all leading zero bytes removed.
     * An all-zero input yields an empty array.
     */
    public static byte[] trimLeadingZeroes(byte[] array) {
        int startOffset = 0;
        while (startOffset < array.length && array[startOffset] == 0) {
            startOffset++;
        }
        return Arrays.copyOfRange(array, startOffset, array.length);
    }

    /** Concatenates two byte arrays. */
    public static byte[] concat(byte[] a, byte[] b) {
        byte[] c = new byte[a.length + b.length];
        System.arraycopy(a, 0, c, 0, a.length);
        System.arraycopy(b, 0, c, a.length, b.length);
        return c;
    }

    /** Concatenates three byte arrays. */
    public static byte[] concat(byte[] a, byte[] b, byte[] c) {
        return concat(concat(a, b), c);
    }

    /**
     * Generates a random point on the NIST P-256 curve by generating a fresh
     * ECDSA key pair and returning its public point Q. Registering the
     * BouncyCastle provider repeatedly is harmless (Security.addProvider is a
     * no-op when the provider is already installed).
     */
    public static ECPoint randECPoint() throws Exception {
        Security.addProvider(new BouncyCastleProvider());
        ECParameterSpec ecSpec_named = ECNamedCurveTable.getParameterSpec("secp256r1"); // NIST P-256
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("ECDSA", "BC");
        kpg.initialize(ecSpec_named);
        KeyPair apair = kpg.generateKeyPair();
        ECPublicKey apub = (ECPublicKey) apair.getPublic();
        return apub.getQ();
    }

    /**
     * Sign-magnitude encoding of an int into 5 bytes: byte 0 is the sign flag
     * (0x01 = negative), bytes 1-4 hold |val| big-endian.
     * Integer.MIN_VALUE round-trips because Math.abs(MIN_VALUE) == MIN_VALUE
     * and the final negation in {@link #BytesToInt} is then the identity.
     */
    public static byte[] IntToBytes(int val) {
        byte[] data = new byte[5];
        data[0] = (byte) (val < 0 ? 0x01 : 0x00);
        int unsigned = Math.abs(val);
        data[1] = (byte) (unsigned >>> 24);
        data[2] = (byte) (unsigned >>> 16);
        data[3] = (byte) (unsigned >>> 8);
        data[4] = (byte) unsigned;
        return data;
    }

    /** Inverse of {@link #IntToBytes}: decodes the 5-byte sign-magnitude form. */
    public static int BytesToInt(byte[] data) {
        int val = (data[1] << 24)
                | ((data[2] & 0xFF) << 16)
                | ((data[3] & 0xFF) << 8)
                | (data[4] & 0xFF);
        if (data[0] == 0x01) {
            val = -val;
        }
        return val;
    }

    /**
     * Returns true when the response status word equals 0x9000 (SW_NO_ERROR),
     * logging the received status to stderr otherwise.
     * Fixed: the status word is 16 bits wide, so it is printed with %04X
     * (the old %02X minimum width understated the field size).
     */
    private static boolean checkSW(ResponseAPDU response) {
        if (response.getSW() != (ISO7816.SW_NO_ERROR & 0xffff)) {
            System.err.printf("Received error status: %04X.\n",
                    response.getSW());
            return false;
        }
        return true;
    }

    /**
     * Parses a hex string (spaces allowed and stripped) into bytes.
     * A trailing odd nibble is silently dropped (length / 2 truncates).
     */
    public static byte[] hexStringToByteArray(String s) {
        String sanitized = s.replace(" ", "");
        byte[] b = new byte[sanitized.length() / 2];
        for (int i = 0; i < b.length; i++) {
            int index = i * 2;
            b[i] = (byte) Integer.parseInt(sanitized.substring(index, index + 2), 16);
        }
        return b;
    }

    /* Math Stuff */

    /**
     * Generates a random BigInteger greater than 1 whose toByteArray()
     * serialization is exactly maxNumBitLength / 8 bytes long (top byte
     * non-zero, sign bit clear); redraws until such a value appears.
     */
    public static BigInteger randomBigNat(int maxNumBitLength) {
        int targetLength = maxNumBitLength / 8;
        while (true) {
            BigInteger candidate = new BigInteger(maxNumBitLength, rnd);
            if (candidate.compareTo(BigInteger.ONE) < 1) {
                continue; // reject 0 and 1
            }
            byte[] serialized = candidate.toByteArray();
            if (serialized.length == targetLength
                    && trimLeadingZeroes(serialized).length == targetLength) {
                return candidate; // proper length after serialization
            }
            // Serialized form longer or shorter than requested - draw a new one.
        }
    }

    /**
     * Serializes a BigInteger into exactly bitLength() / 8 bytes.
     * NOTE(review): bitLength() / 8 rounds down, so for values whose bit
     * length is not a multiple of 8 the top partial byte is cut off by the
     * copy-of-range branch — kept as-is since callers appear to rely on this
     * fixed-width behaviour; confirm before changing.
     */
    public static byte[] SerializeBigInteger(BigInteger BigInt) {
        int bnlen = BigInt.bitLength() / 8;
        byte[] raw = BigInt.toByteArray(); // computed once (was re-computed per branch)
        if (raw.length == bnlen) {
            return raw;
        }
        if (raw.length > bnlen) {
            // Longer than requested: drop leading bytes (sign byte / overflow).
            return Arrays.copyOfRange(raw, raw.length - bnlen, raw.length);
        }
        // Shorter than requested: right-align into a zero-filled buffer.
        byte[] large_int_b = new byte[bnlen];
        System.arraycopy(raw, 0, large_int_b, large_int_b.length - raw.length, raw.length);
        return large_int_b;
    }

    /**
     * Computes x^n mod p by recursive square-and-multiply.
     * NOTE(review): intermediate products are 64-bit, so this overflows when
     * p exceeds roughly 3.03e9 (p*p must fit in a long); use
     * BigInteger.modPow for larger moduli.
     */
    public static long pow_mod(long x, long n, long p) {
        if (n == 0) {
            return 1;
        }
        if ((n & 1) == 1) {
            return (pow_mod(x, n - 1, p) * x) % p;
        }
        x = pow_mod(x, n / 2, p);
        return (x * x) % p;
    }

    /**
     * Tonelli-Shanks modular square root: for an odd prime p and n &lt; p,
     * returns r such that r * r = n (mod p), or BigInteger.ZERO when the
     * final verification fails.
     * Fixed: removed the System.out debug tracing that polluted stdout on
     * every call; the algorithm itself is unchanged.
     * NOTE(review): if n is a quadratic non-residue, the inner loop may not
     * terminate before the final check — callers should pass residues only.
     */
    public static BigInteger tonelli_shanks(BigInteger n, BigInteger p) {
        // 1. Factor out powers of two: p - 1 = Q * 2^S with Q odd.
        BigInteger p_1 = p.subtract(BigInteger.ONE);
        BigInteger S = BigInteger.ZERO;
        BigInteger Q = p_1;
        BigInteger two = BigInteger.valueOf(2);
        while (Q.mod(two).compareTo(BigInteger.ONE) != 0) { // while Q is even
            Q = Q.divide(two);
            S = S.add(BigInteger.ONE);
        }
        // 2. Find the first quadratic non-residue z by brute-force search
        //    (Euler's criterion: z^((p-1)/2) == p - 1 (mod p) for non-residues).
        BigInteger z = BigInteger.ONE;
        while (z.modPow(p_1.divide(two), p).compareTo(p_1) != 0) {
            z = z.add(BigInteger.ONE);
        }
        // 3. Main loop: maintain the invariant R^2 = n * t (mod p) while
        //    reducing the multiplicative order of t down to 1.
        BigInteger R = n.modPow(Q.add(BigInteger.ONE).divide(two), p);
        BigInteger c = z.modPow(Q, p);
        BigInteger t = n.modPow(Q, p);
        BigInteger M = S;
        while (t.compareTo(BigInteger.ONE) != 0) {
            // Find the least i with t^(2^i) == 1 (mod p).
            BigInteger tt = t;
            BigInteger i = BigInteger.ZERO;
            while (tt.compareTo(BigInteger.ONE) != 0) {
                tt = tt.multiply(tt).mod(p);
                i = i.add(BigInteger.ONE);
            }
            BigInteger M_i_1 = M.subtract(i).subtract(BigInteger.ONE);
            BigInteger b = c.modPow(two.modPow(M_i_1, p_1), p);
            BigInteger b2 = b.multiply(b).mod(p);
            R = R.multiply(b).mod(p);
            c = b2;
            t = t.multiply(b2).mod(p);
            M = i;
        }
        // Verify; return ZERO when n had no square root mod p.
        if (R.multiply(R).mod(p).compareTo(n) == 0) {
            return R;
        } else {
            return BigInteger.ZERO;
        }
    }

    /**
     * @deprecated Abandoned, broken attempt at Tonelli-Shanks: it only
     * factors p - 1 (with a faulty update of Q, see below) and then returns
     * {@code n} unchanged — it never computes a square root. Kept, with its
     * original return value, for source compatibility; use
     * {@link #tonelli_shanks} instead. Debug printing removed.
     */
    @Deprecated
    public static BigInteger tonellishanks(BigInteger n, BigInteger p) {
        BigInteger p_1 = p.subtract(BigInteger.ONE);
        BigInteger S = BigInteger.ZERO;
        BigInteger Q = p_1;
        BigInteger two = BigInteger.valueOf(2);
        while (Q.mod(two).compareTo(BigInteger.ONE) != 0) { // while Q is not odd
            // NOTE(review): buggy — divides by (2^S mod p) rather than 2^S,
            // so the factorization is wrong once 2^S wraps past p.
            Q = p_1.divide(two.modPow(S, p));
            S = S.add(BigInteger.ONE);
        }
        return n;
    }

    /**
     * Parses an uncompressed EC point (0x04 || X || Y) from
     * {@code serialized_point} at {@code offset}; {@code pointLength} is the
     * combined byte length of X and Y (excluding the format byte).
     */
    private static ECPoint ECPointDeSerialization(byte[] serialized_point,
            int offset, int pointLength, ECCurve curve) {
        int coordLength = pointLength / 2;
        byte[] x_b = new byte[coordLength];
        byte[] y_b = new byte[coordLength];
        // offset + 1 skips the leading 0x04 uncompressed-point marker.
        System.arraycopy(serialized_point, offset + 1, x_b, 0, coordLength);
        System.arraycopy(serialized_point, offset + coordLength + 1, y_b, 0, coordLength);
        // signum = 1 interprets the coordinates as unsigned magnitudes —
        // same value the old hex-string round-trip produced, without the
        // intermediate String allocations.
        BigInteger x = new BigInteger(1, x_b);
        BigInteger y = new BigInteger(1, y_b);
        return curve.createPoint(x, y);
    }
}
| |
/*
* Copyright (C) 2019 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.iot.m2m.base;
import com.google.common.util.concurrent.ListenableFuture;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.net.URI;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
* These test cases cover the default method implementations.
*/
public class TechnologyTest {
    // Mutable knobs read by the anonymous Technology stub below; the test
    // method sets these to steer getNativeUriForThing()/isHosted() per scenario.
    volatile URI nativeUri;
    volatile boolean isHosted;

    // Minimal Thing stub: every method returns null / does nothing. Only its
    // identity matters — the default Technology methods under test receive it
    // and forward it to the stubbed Technology callbacks above.
    Thing fe = new Thing() {
        @Override
        public boolean isLocal() {
            return false;
        }
        @Override
        public <T> ListenableFuture<?> setProperty(PropertyKey<T> key, @Nullable T value, Modifier... modifiers) {
            return null;
        }
        @Override
        public <T extends Number> ListenableFuture<?> incrementProperty(PropertyKey<T> key, T value, Modifier... modifiers) {
            return null;
        }
        @Override
        public <T> ListenableFuture<?> insertValueIntoProperty(PropertyKey<T[]> key, T value, Modifier... modifiers) {
            return null;
        }
        @Override
        public <T> ListenableFuture<?> removeValueFromProperty(PropertyKey<T[]> key, T value, Modifier... modifiers) {
            return null;
        }
        @Override
        public ListenableFuture<?> toggleProperty(PropertyKey<Boolean> key, Modifier... modifiers) {
            return null;
        }
        @Override
        public <T> ListenableFuture<T> fetchProperty(PropertyKey<T> key, Modifier... modifiers) {
            return null;
        }
        @Override
        public <T> @Nullable T getCachedProperty(PropertyKey<T> key) {
            return null;
        }
        @Override
        public ListenableFuture<Set<PropertyKey<?>>> fetchSupportedPropertyKeys() {
            return null;
        }
        @Override
        public ListenableFuture<Map<String, Object>> fetchSection(Section section, Modifier... mods) {
            return null;
        }
        @Override
        public Map<String, Object> copyCachedSection(Section section) {
            return null;
        }
        @Override
        public ListenableFuture<?> applyProperties(Map<String, Object> properties) {
            return null;
        }
        @Override
        public ListenableFuture<Boolean> delete() {
            return null;
        }
        @Override
        public <T> ListenableFuture<T> invokeMethod(MethodKey<T> methodKey, Map<String, Object> arguments) {
            return null;
        }
        @Override
        public ListenableFuture<Collection<Thing>> fetchChildrenForTrait(String traitShortId) {
            return null;
        }
        @Override
        public @Nullable String getTraitForChild(Thing child) {
            return null;
        }
        @Override
        public @Nullable String getIdForChild(Thing child) {
            return null;
        }
        @Override
        public @Nullable Thing getChild(String traitShortId, String childId) {
            return null;
        }
        @Override
        public @Nullable Thing getParentThing() {
            return null;
        }
        // Listener registration methods are intentionally no-ops in this stub.
        @Override
        public <T> void registerPropertyListener(Executor executor, PropertyKey<T> key, PropertyListener<T> listener) {
        }
        @Override
        public <T> void unregisterPropertyListener(PropertyKey<T> key, PropertyListener<T> listener) {
        }
        @Override
        public void registerSectionListener(Executor executor, Section section, SectionListener listener) {
        }
        @Override
        public void unregisterSectionListener(SectionListener listener) {
        }
        @Override
        public void registerChildListener(Executor executor, ChildListener listener, String traitId) {
        }
        @Override
        public void unregisterChildListener(ChildListener listener, String traitId) {
        }
        @Override
        public void unregisterAllListeners() {
        }
    };

    // Technology stub implementing only the abstract methods; the DEFAULT
    // interface methods (e.g. getRelativeUriForThing) are what the tests
    // exercise. getNativeUriForThing()/isHosted() echo the volatile fields.
    Technology technology = new Technology() {
        @Override
        public void prepareToHost() throws IOException, TechnologyCannotHostException {
        }
        @Override
        public void host(Thing fe) throws UnacceptableThingException, TechnologyCannotHostException {
        }
        @Override
        public void unhost(Thing fe) {
        }
        @Override
        public URI getNativeUriForProperty(Thing fe, PropertyKey<?> propertyKey, Operation op, Modifier... modifiers) throws UnassociatedResourceException {
            return null;
        }
        @Override
        public URI getNativeUriForSection(Thing fe, Section section, Modifier... modifiers) throws UnassociatedResourceException {
            return null;
        }
        @Override
        public DiscoveryBuilder createDiscoveryQueryBuilder() {
            return null;
        }
        @Override
        public Thing getThingForNativeUri(URI uri) throws UnknownResourceException {
            return null;
        }
        @Override
        public URI getNativeUriForThing(Thing fe) throws UnassociatedResourceException {
            // Controlled by the enclosing test via the volatile field.
            return nativeUri;
        }
        @Override
        public boolean isHosted(Thing fe) {
            // Controlled by the enclosing test via the volatile field.
            return isHosted;
        }
    };

    // Covers Technology#getRelativeUriForThing for both the non-hosted case
    // (absolute native URI) and the hosted case (relative native URI).
    @Test
    public void getRelativeUriForThing() throws Exception {
        // Scenario 1: thing is NOT hosted; its native URI is absolute.
        nativeUri = URI.create("coap://1.2.3.4/1/");
        isHosted = false;
        // Same authority as the native URI: relativized to a rooted path,
        // preserving query and fragment.
        assertEquals(
                URI.create("/2/?blah#frag"),
                technology.getRelativeUriForThing(fe,
                        URI.create("coap://1.2.3.4/2/?blah#frag"))
        );
        // Different authority: returned unchanged.
        assertEquals(
                URI.create("coap://4.5.6.7/2/?blah#frag"),
                technology.getRelativeUriForThing(fe,
                        URI.create("coap://4.5.6.7/2/?blah#frag"))
        );
        // Different scheme entirely: returned unchanged.
        assertEquals(
                URI.create("uid://abcdefg/10/"),
                technology.getRelativeUriForThing(fe,
                        URI.create("uid://abcdefg/10/"))
        );
        // A relative input cannot be resolved against a non-hosted thing.
        assertThrows(UnassociatedResourceException.class,
                ()->technology.getRelativeUriForThing(fe,
                        URI.create("/2/?blah#frag")));
        // Edge cases around bare roots and empty paths on the same authority.
        assertEquals(
                URI.create("/"),
                technology.getRelativeUriForThing(fe,
                        URI.create("coap://1.2.3.4/"))
        );
        assertEquals(
                URI.create(""),
                technology.getRelativeUriForThing(fe,
                        URI.create("coap://1.2.3.4"))
        );
        assertEquals(
                URI.create("coap://4.5.6.7"),
                technology.getRelativeUriForThing(fe,
                        URI.create("coap://4.5.6.7"))
        );
        // Scenario 2: thing IS hosted; its native URI is already relative.
        nativeUri = URI.create("/1/");
        isHosted = true;
        // Absolute inputs pass through unchanged when hosted.
        assertEquals(
                URI.create("coap://1.2.3.4/2/?blah#frag"),
                technology.getRelativeUriForThing(fe,
                        URI.create("coap://1.2.3.4/2/?blah#frag"))
        );
        assertEquals(
                URI.create("uid://abcdefg/10/"),
                technology.getRelativeUriForThing(fe,
                        URI.create("uid://abcdefg/10/"))
        );
        // Relative inputs are valid (no throw) when hosted.
        assertEquals(
                URI.create("/2/?blah#frag"),
                technology.getRelativeUriForThing(fe,
                        URI.create("/2/?blah#frag"))
        );
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pink.madis.apk.arsc;
import com.google.auto.value.AutoValue;
import com.google.common.base.Preconditions;
import com.google.common.io.LittleEndianDataOutputStream;
import com.google.common.primitives.UnsignedBytes;
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;
import javax.annotation.Nullable;
/**
* Represents a type chunk, which contains the resource values for a specific resource type and
* configuration in a {@link PackageChunk}. The resource values in this chunk correspond to
* the array of type strings in the enclosing {@link PackageChunk}.
*
* <p>A {@link PackageChunk} can have multiple of these chunks for different
* (configuration, resource type) combinations.
*/
public final class TypeChunk extends Chunk {
    /** The type identifier of the resource type this chunk is holding. */
    private final int id;
    /** The number of resources of this type at creation time. */
    private final int entryCount;
    /** The offset (from {@code offset}) in the original buffer where {@code entries} start. */
    private final int entriesStart;
    /** The resource configuration that these resource entries correspond to. */
    private ResourceConfiguration configuration;
    /** A sparse list of resource entries defined by this chunk, keyed by 0-based entry index. */
    private final Map<Integer, Entry> entries = new TreeMap<>();

    /**
     * Reads the type chunk header fields from {@code buffer}; the entries
     * themselves are read later in {@link #init(ByteBuffer)}.
     */
    protected TypeChunk(ByteBuffer buffer, @Nullable Chunk parent) {
        super(buffer, parent);
        id = UnsignedBytes.toInt(buffer.get());
        buffer.position(buffer.position() + 3); // Skip 3 bytes for packing
        entryCount = buffer.getInt();
        entriesStart = buffer.getInt();
        configuration = ResourceConfiguration.create(buffer);
    }

    /** Parses the entry offset table and loads all present (non-NO_ENTRY) entries. */
    @Override
    protected void init(ByteBuffer buffer) {
        int offset = this.offset + entriesStart;
        for (int i = 0; i < entryCount; ++i) {
            Entry entry = Entry.create(buffer, offset, this);
            if (entry != null) {
                entries.put(i, entry);
            }
        }
    }

    /** Returns the (1-based) type id of the resource types that this {@link TypeChunk} is holding. */
    public int getId() {
        return id;
    }

    /** Returns the name of the type this chunk represents (e.g. string, attr, id). */
    public String getTypeName() {
        PackageChunk packageChunk = getPackageChunk();
        Preconditions.checkNotNull(packageChunk, "%s has no parent package.", getClass());
        StringPoolChunk typePool = packageChunk.getTypeStringPool();
        Preconditions.checkNotNull(typePool, "%s's parent package has no type pool.", getClass());
        return typePool.getString(getId() - 1); // - 1 here to convert to 0-based index
    }

    /** Returns the resource configuration that these resource entries correspond to. */
    public ResourceConfiguration getConfiguration() {
        return configuration;
    }

    /**
     * Sets the resource configuration that this chunk's entries correspond to.
     *
     * @param configuration The new configuration.
     */
    public void setConfiguration(ResourceConfiguration configuration) {
        this.configuration = configuration;
    }

    /** Returns the total number of entries for this type + configuration, including null entries. */
    public int getTotalEntryCount() {
        return entryCount;
    }

    /** Returns a sparse list of 0-based indices to resource entries defined by this chunk. */
    public Map<Integer, Entry> getEntries() {
        return Collections.unmodifiableMap(entries);
    }

    /** Returns true if this chunk contains an entry for {@code resourceId}. */
    public boolean containsResource(ResourceIdentifier resourceId) {
        PackageChunk packageChunk = Preconditions.checkNotNull(getPackageChunk());
        int packageId = packageChunk.getId();
        int typeId = getId();
        return resourceId.packageId() == packageId
                && resourceId.typeId() == typeId
                && entries.containsKey(resourceId.entryId());
    }

    /**
     * Overrides the entries in this chunk at the given index:entry pairs in {@code entries}.
     * For example, if the current list of entries is {0: foo, 1: bar, 2: baz}, and {@code entries}
     * is {1: qux, 3: quux}, then the entries will be changed to {0: foo, 1: qux, 2: baz}. If an entry
     * has an index that does not exist in the dense entry list, then it is considered a no-op for
     * that single entry.
     *
     * @param entries A sparse list containing index:entry pairs to override.
     */
    public void overrideEntries(Map<Integer, Entry> entries) {
        for (Map.Entry<Integer, Entry> entry : entries.entrySet()) {
            // A null key cannot address any entry; -1 makes overrideEntry a no-op.
            int index = entry.getKey() != null ? entry.getKey() : -1;
            overrideEntry(index, entry.getValue());
        }
    }

    /**
     * Overrides an entry at the given index. Passing null for the {@code entry} will remove that
     * entry from {@code entries}. Indices < 0 or >= {@link #getTotalEntryCount()} are a no-op.
     *
     * @param index The 0-based index for the entry to override.
     * @param entry The entry to override, or null if the entry should be removed at this location.
     */
    public void overrideEntry(int index, @Nullable Entry entry) {
        if (index >= 0 && index < entryCount) {
            if (entry != null) {
                entries.put(index, entry);
            } else {
                entries.remove(index);
            }
        }
    }

    /** Returns the string at {@code index} from the resource table's global string pool. */
    protected String getString(int index) {
        ResourceTableChunk resourceTable = getResourceTableChunk();
        Preconditions.checkNotNull(resourceTable, "%s has no resource table.", getClass());
        return resourceTable.getStringPool().getString(index);
    }

    /** Returns the key name at {@code index} from the enclosing package's key string pool. */
    protected String getKeyName(int index) {
        PackageChunk packageChunk = getPackageChunk();
        Preconditions.checkNotNull(packageChunk, "%s has no parent package.", getClass());
        StringPoolChunk keyPool = packageChunk.getKeyStringPool();
        Preconditions.checkNotNull(keyPool, "%s's parent package has no key pool.", getClass());
        return keyPool.getString(index);
    }

    /** Returns the resource table enclosing this chunk, if any. Else, returns null. */
    @Nullable
    private ResourceTableChunk getResourceTableChunk() {
        Chunk chunk = getParent();
        while (chunk != null && !(chunk instanceof ResourceTableChunk)) {
            chunk = chunk.getParent();
        }
        // instanceof is false for null, so no separate null check is needed.
        return chunk instanceof ResourceTableChunk ? (ResourceTableChunk) chunk : null;
    }

    /** Returns the package enclosing this chunk, if any. Else, returns null. */
    @Nullable
    public PackageChunk getPackageChunk() {
        Chunk chunk = getParent();
        while (chunk != null && !(chunk instanceof PackageChunk)) {
            chunk = chunk.getParent();
        }
        // instanceof is false for null, so no separate null check is needed.
        return chunk instanceof PackageChunk ? (PackageChunk) chunk : null;
    }

    @Override
    protected Type getType() {
        return Chunk.Type.TABLE_TYPE;
    }

    /** Returns the number of bytes needed for the offset table (4 bytes per entry slot). */
    private int getOffsetSize() {
        return entryCount * 4;
    }

    /**
     * Writes all entry payloads to {@code payload} and their offsets (or
     * {@link Entry#NO_ENTRY} for absent slots) into {@code offsets}.
     *
     * @return the total (padded) number of payload bytes written.
     */
    private int writeEntries(DataOutput payload, ByteBuffer offsets, boolean shrink)
            throws IOException {
        int entryOffset = 0;
        for (int i = 0; i < entryCount; ++i) {
            Entry entry = entries.get(i);
            if (entry == null) {
                offsets.putInt(Entry.NO_ENTRY);
            } else {
                byte[] encodedEntry = entry.toByteArray(shrink);
                payload.write(encodedEntry);
                offsets.putInt(entryOffset);
                entryOffset += encodedEntry.length;
            }
        }
        entryOffset = writePad(payload, entryOffset);
        return entryOffset;
    }

    /** Serializes the chunk header: id, counts, offsets and configuration. */
    @Override
    protected void writeHeader(ByteBuffer output) {
        int entriesStart = getHeaderSize() + getOffsetSize();
        output.putInt(id); // Write an unsigned byte with 3 bytes padding
        output.putInt(entryCount);
        output.putInt(entriesStart);
        output.put(configuration.toByteArray(false));
    }

    /** Serializes the offset table followed by the encoded entries. */
    @Override
    protected void writePayload(DataOutput output, ByteBuffer header, boolean shrink)
            throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ByteBuffer offsets = ByteBuffer.allocate(getOffsetSize()).order(ByteOrder.LITTLE_ENDIAN);
        try (LittleEndianDataOutputStream payload = new LittleEndianDataOutputStream(baos)) {
            writeEntries(payload, offsets, shrink);
        }
        output.write(offsets.array());
        output.write(baos.toByteArray());
    }

    /** An {@link Entry} in a {@link TypeChunk}. Contains one or more {@link ResourceValue}. */
    @AutoValue
    public abstract static class Entry implements SerializableResource {
        /** An entry offset that indicates that a given resource is not present. */
        public static final int NO_ENTRY = 0xFFFFFFFF;
        /** Set if this is a complex resource. Otherwise, it's a simple resource. */
        private static final int FLAG_COMPLEX = 0x0001;
        /** Size of a single resource id + value mapping entry. */
        private static final int MAPPING_SIZE = 4 + ResourceValue.SIZE;
        /** Number of bytes in the header of the {@link Entry}. */
        public abstract int headerSize();
        /** Resource entry flags. */
        public abstract int flags();
        /** Index into {@link PackageChunk#getKeyStringPool} identifying this entry. */
        public abstract int keyIndex();
        /** The value of this resource entry, if this is not a complex entry. Else, null. */
        @Nullable
        public abstract ResourceValue value();
        /** The extra values in this resource entry if this {@link #isComplex}. */
        public abstract Map<Integer, ResourceValue> values();
        /**
         * Entry into {@link PackageChunk} that is the parent {@link Entry} to this entry.
         * This value only makes sense when this is complex ({@link #isComplex} returns true).
         */
        public abstract int parentEntry();
        /** The {@link TypeChunk} that this resource entry belongs to. */
        public abstract TypeChunk parent();

        /** Returns the name of the type this chunk represents (e.g. string, attr, id). */
        public final String typeName() {
            return parent().getTypeName();
        }

        /** The total number of bytes that this {@link Entry} takes up. */
        public final int size() {
            return headerSize() + (isComplex() ? values().size() * MAPPING_SIZE : ResourceValue.SIZE);
        }

        /** Returns the key name identifying this resource entry. */
        public final String key() {
            return parent().getKeyName(keyIndex());
        }

        /** Returns true if this is a complex resource. */
        public final boolean isComplex() {
            return (flags() & FLAG_COMPLEX) != 0;
        }

        /**
         * Creates a new {@link Entry} whose contents start at the 0-based position in
         * {@code buffer} given by a 4-byte value read from {@code buffer} and then added to
         * {@code baseOffset}. If the value read from {@code buffer} is equal to {@link #NO_ENTRY}, then
         * null is returned as there is no resource at that position.
         *
         * <p>Otherwise, this position is parsed and returned as an {@link Entry}.
         *
         * @param buffer A buffer positioned at an offset to an {@link Entry}.
         * @param baseOffset Offset that must be added to the value at {@code buffer}'s position.
         * @param parent The {@link TypeChunk} that this resource entry belongs to.
         * @return New {@link Entry} or null if there is no resource at this location.
         */
        @Nullable
        public static Entry create(ByteBuffer buffer, int baseOffset, TypeChunk parent) {
            int offset = buffer.getInt();
            if (offset == NO_ENTRY) {
                return null;
            }
            int position = buffer.position();
            buffer.position(baseOffset + offset); // Set buffer position to resource entry start
            Entry result = newInstance(buffer, parent);
            buffer.position(position); // Restore buffer position
            return result;
        }

        /** Parses one entry (simple or complex) at {@code buffer}'s current position. */
        @Nullable
        private static Entry newInstance(ByteBuffer buffer, TypeChunk parent) {
            int headerSize = buffer.getShort() & 0xFFFF; // read unsigned 16-bit
            int flags = buffer.getShort() & 0xFFFF;
            int keyIndex = buffer.getInt();
            ResourceValue value = null;
            Map<Integer, ResourceValue> values = new LinkedHashMap<>();
            int parentEntry = 0;
            if ((flags & FLAG_COMPLEX) != 0) {
                // Complex entry: parent reference plus id -> value mappings.
                parentEntry = buffer.getInt();
                int valueCount = buffer.getInt();
                for (int i = 0; i < valueCount; ++i) {
                    values.put(buffer.getInt(), ResourceValue.create(buffer));
                }
            } else {
                value = ResourceValue.create(buffer);
            }
            return new AutoValue_TypeChunk_Entry(
                    headerSize, flags, keyIndex, value, values, parentEntry, parent);
        }

        @Override
        public final byte[] toByteArray() {
            return toByteArray(false);
        }

        /** Serializes this entry back to its little-endian binary form. */
        @Override
        public final byte[] toByteArray(boolean shrink) {
            ByteBuffer buffer = ByteBuffer.allocate(size());
            buffer.order(ByteOrder.LITTLE_ENDIAN);
            buffer.putShort((short) headerSize());
            buffer.putShort((short) flags());
            buffer.putInt(keyIndex());
            if (isComplex()) {
                buffer.putInt(parentEntry());
                buffer.putInt(values().size());
                for (Map.Entry<Integer, ResourceValue> entry : values().entrySet()) {
                    buffer.putInt(entry.getKey());
                    buffer.put(entry.getValue().toByteArray(shrink));
                }
            } else {
                ResourceValue value = value();
                Preconditions.checkNotNull(value, "A non-complex TypeChunk entry must have a value.");
                buffer.put(value.toByteArray());
            }
            return buffer.array();
        }

        @Override
        public final String toString() {
            return String.format("Entry{key=%s}", key());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.client.console.approvals;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.syncope.client.console.commons.MapChoiceRenderer;
import org.apache.syncope.client.console.panels.MultilevelPanel;
import org.apache.syncope.client.console.wicket.markup.html.form.AjaxDropDownChoicePanel;
import org.apache.syncope.client.console.wicket.markup.html.form.AjaxSpinnerFieldPanel;
import org.apache.syncope.client.console.wicket.markup.html.form.AjaxTextFieldPanel;
import org.apache.syncope.client.console.wicket.markup.html.form.DateTimeFieldPanel;
import org.apache.syncope.client.console.wicket.markup.html.form.FieldPanel;
import org.apache.syncope.common.lib.to.WorkflowFormPropertyTO;
import org.apache.syncope.common.lib.to.WorkflowFormTO;
import org.apache.syncope.common.lib.types.StandardEntitlement;
import org.apache.wicket.PageReference;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.authroles.authorization.strategies.role.metadata.MetaDataRoleAuthorizationStrategy;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Panel showing the properties of a workflow approval form as editable fields,
 * one per {@link WorkflowFormPropertyTO}, plus a link to the details of the
 * user the form refers to.
 * Supported property types: Boolean (Yes/No drop-down), Date (date-time picker
 * driven by the property's date pattern), Enum (drop-down over the declared
 * enum values), Long (spinner) and String (plain text field, also the fallback).
 */
public abstract class Approval extends Panel {

    private static final long serialVersionUID = -8847854414429745216L;

    protected static final Logger LOG = LoggerFactory.getLogger(Approval.class);

    public Approval(final PageReference pageRef, final WorkflowFormTO formTO) {
        super(MultilevelPanel.FIRST_LEVEL_ID);

        // detachable so the property list is re-read from formTO on each request
        // instead of being serialized with the page
        IModel<List<WorkflowFormPropertyTO>> formProps = new LoadableDetachableModel<List<WorkflowFormPropertyTO>>() {

            private static final long serialVersionUID = 3169142472626817508L;

            @Override
            protected List<WorkflowFormPropertyTO> load() {
                return formTO.getProperties();
            }
        };

        final ListView<WorkflowFormPropertyTO> propView = new ListView<WorkflowFormPropertyTO>("propView", formProps) {

            private static final long serialVersionUID = 9101744072914090143L;

            @Override
            @SuppressWarnings({ "unchecked", "rawtypes" })
            protected void populateItem(final ListItem<WorkflowFormPropertyTO> item) {
                final WorkflowFormPropertyTO prop = item.getModelObject();

                // fall back to the property id when no display name is given
                String label = StringUtils.isBlank(prop.getName()) ? prop.getId() : prop.getName();

                FieldPanel field;
                switch (prop.getType()) {
                    case Boolean:
                        // stored as "true" / "false", displayed as Yes / No
                        field = new AjaxDropDownChoicePanel("value", label, new PropertyModel<String>(prop, "value") {

                            private static final long serialVersionUID = -3743432456095828573L;

                            @Override
                            public String getObject() {
                                return StringUtils.isBlank(prop.getValue())
                                        ? null
                                        : prop.getValue().equals("true") ? "Yes" : "No";
                            }

                            @Override
                            public void setObject(final String object) {
                                // null-safe: equalsIgnoreCase(null) is false, so clearing the
                                // selection stores "false" instead of throwing NPE
                                prop.setValue(String.valueOf("yes".equalsIgnoreCase(object)));
                            }
                        }, false).setChoices(Arrays.asList(new String[] { "Yes", "No" }));
                        break;

                    case Date:
                        // stored as text, parsed / formatted with the property's own pattern
                        final FastDateFormat formatter = FastDateFormat.getInstance(prop.getDatePattern());
                        field = new DateTimeFieldPanel("value", label, new PropertyModel<Date>(prop, "value") {

                            private static final long serialVersionUID = -3743432456095828573L;

                            @Override
                            public Date getObject() {
                                try {
                                    if (StringUtils.isBlank(prop.getValue())) {
                                        return null;
                                    } else {
                                        return formatter.parse(prop.getValue());
                                    }
                                } catch (ParseException e) {
                                    LOG.error("Unparsable date: {}", prop.getValue(), e);
                                    return null;
                                }
                            }

                            @Override
                            public void setObject(final Date object) {
                                // guard against NPE when the picker is cleared
                                prop.setValue(object == null ? null : formatter.format(object));
                            }
                        }, prop.getDatePattern());
                        break;

                    case Enum:
                        MapChoiceRenderer<String, String> enumCR = new MapChoiceRenderer<>(prop.getEnumValues());

                        field = new AjaxDropDownChoicePanel(
                                "value", label, new PropertyModel<String>(prop, "value"), false).
                                setChoiceRenderer(enumCR).setChoices(new Model<ArrayList<String>>() {

                                    private static final long serialVersionUID = -858521070366432018L;

                                    @Override
                                    public ArrayList<String> getObject() {
                                        return new ArrayList<>(prop.getEnumValues().keySet());
                                    }
                                });
                        break;

                    case Long:
                        field = new AjaxSpinnerFieldPanel.Builder<Long>().build(
                                "value",
                                label,
                                Long.class,
                                new PropertyModel<Long>(prop, "value") {

                                    private static final long serialVersionUID = -7688359318035249200L;

                                    @Override
                                    public Long getObject() {
                                        return StringUtils.isBlank(prop.getValue())
                                                ? null
                                                : NumberUtils.toLong(prop.getValue());
                                    }

                                    @Override
                                    public void setObject(final Long object) {
                                        // avoid storing the literal string "null"
                                        prop.setValue(object == null ? null : String.valueOf(object));
                                    }
                                });
                        break;

                    case String:
                    default:
                        field = new AjaxTextFieldPanel("value", label, new PropertyModel<String>(prop, "value"), false);
                        break;
                }

                field.setReadOnly(!prop.isWritable());
                if (prop.isRequired()) {
                    field.addRequiredLabel();
                }

                item.add(field);
            }
        };

        final AjaxLink<String> userDetails = new AjaxLink<String>("userDetails") {

            private static final long serialVersionUID = -4804368561204623354L;

            @Override
            public void onClick(final AjaxRequestTarget target) {
                viewDetails(formTO, target);
            }
        };
        // user details are only reachable with the USER_READ entitlement
        MetaDataRoleAuthorizationStrategy.authorize(userDetails, ENABLE, StandardEntitlement.USER_READ);

        add(propView);
        add(userDetails);
    }

    /**
     * Opens the detail view of the user this approval form refers to.
     *
     * @param formTO approval form
     * @param target Ajax request target
     */
    protected abstract void viewDetails(final WorkflowFormTO formTO, final AjaxRequestTarget target);
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.resources.impl;
import io.atomix.copycat.client.CopycatClient;
import io.atomix.resource.AbstractResource;
import io.atomix.resource.ResourceTypeInfo;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Predicate;
import org.onlab.util.Match;
import org.onosproject.store.primitives.TransactionId;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.Clear;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.ContainsKey;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.ContainsValue;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.EntrySet;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.Get;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.IsEmpty;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.KeySet;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.Listen;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.Size;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.TransactionCommit;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.TransactionPrepare;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.TransactionPrepareAndCommit;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.TransactionRollback;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.Unlisten;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.UpdateAndGet;
import org.onosproject.store.primitives.resources.impl.AtomixConsistentMapCommands.Values;
import org.onosproject.store.service.AsyncConsistentMap;
import org.onosproject.store.service.MapEvent;
import org.onosproject.store.service.MapEventListener;
import org.onosproject.store.service.MapTransaction;
import org.onosproject.store.service.Versioned;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* Distributed resource providing the {@link AsyncConsistentMap} primitive.
*/
@ResourceTypeInfo(id = -151, factory = AtomixConsistentMapFactory.class)
public class AtomixConsistentMap extends AbstractResource<AtomixConsistentMap>
implements AsyncConsistentMap<String, byte[]> {
private final Set<Consumer<Status>> statusChangeListeners = Sets.newCopyOnWriteArraySet();
private final Map<MapEventListener<String, byte[]>, Executor> mapEventListeners = Maps.newIdentityHashMap();
public static final String CHANGE_SUBJECT = "changeEvents";
public AtomixConsistentMap(CopycatClient client, Properties properties) {
super(client, properties);
}
@Override
public String name() {
return null;
}
@Override
public CompletableFuture<AtomixConsistentMap> open() {
return super.open().thenApply(result -> {
client.onEvent(CHANGE_SUBJECT, this::handleEvent);
return result;
});
}
private void handleEvent(List<MapEvent<String, byte[]>> events) {
events.forEach(event ->
mapEventListeners.forEach((listener, executor) -> executor.execute(() -> listener.event(event))));
}
@Override
public CompletableFuture<Boolean> isEmpty() {
return submit(new IsEmpty());
}
@Override
public CompletableFuture<Integer> size() {
return submit(new Size());
}
@Override
public CompletableFuture<Boolean> containsKey(String key) {
return submit(new ContainsKey(key));
}
@Override
public CompletableFuture<Boolean> containsValue(byte[] value) {
return submit(new ContainsValue(value));
}
@Override
public CompletableFuture<Versioned<byte[]>> get(String key) {
return submit(new Get(key));
}
@Override
public CompletableFuture<Set<String>> keySet() {
return submit(new KeySet());
}
@Override
public CompletableFuture<Collection<Versioned<byte[]>>> values() {
return submit(new Values());
}
@Override
public CompletableFuture<Set<Entry<String, Versioned<byte[]>>>> entrySet() {
return submit(new EntrySet());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Versioned<byte[]>> put(String key, byte[] value) {
return submit(new UpdateAndGet(key, value, Match.ANY, Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.oldValue());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Versioned<byte[]>> putAndGet(String key, byte[] value) {
return submit(new UpdateAndGet(key, value, Match.ANY, Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.newValue());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Versioned<byte[]>> putIfAbsent(String key, byte[] value) {
return submit(new UpdateAndGet(key, value, Match.NULL, Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.oldValue());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Versioned<byte[]>> remove(String key) {
return submit(new UpdateAndGet(key, null, Match.ANY, Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.oldValue());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Boolean> remove(String key, byte[] value) {
return submit(new UpdateAndGet(key, null, Match.ifValue(value), Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.updated());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Boolean> remove(String key, long version) {
return submit(new UpdateAndGet(key, null, Match.ANY, Match.ifValue(version)))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.updated());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Versioned<byte[]>> replace(String key, byte[] value) {
return submit(new UpdateAndGet(key, value, Match.NOT_NULL, Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.oldValue());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Boolean> replace(String key, byte[] oldValue, byte[] newValue) {
return submit(new UpdateAndGet(key,
newValue,
Match.ifValue(oldValue),
Match.ANY))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.updated());
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Boolean> replace(String key, long oldVersion, byte[] newValue) {
return submit(new UpdateAndGet(key,
newValue,
Match.ANY,
Match.ifValue(oldVersion)))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.updated());
}
@Override
public CompletableFuture<Void> clear() {
return submit(new Clear())
.whenComplete((r, e) -> throwIfLocked(r))
.thenApply(v -> null);
}
@Override
@SuppressWarnings("unchecked")
public CompletableFuture<Versioned<byte[]>> computeIf(String key,
Predicate<? super byte[]> condition,
BiFunction<? super String, ? super byte[], ? extends byte[]> remappingFunction) {
return get(key).thenCompose(r1 -> {
byte[] existingValue = r1 == null ? null : r1.value();
// if the condition evaluates to false, return existing value.
if (!condition.test(existingValue)) {
return CompletableFuture.completedFuture(r1);
}
AtomicReference<byte[]> computedValue = new AtomicReference<>();
// if remappingFunction throws an exception, return the exception.
try {
computedValue.set(remappingFunction.apply(key, existingValue));
} catch (Exception e) {
CompletableFuture<Versioned<byte[]>> future = new CompletableFuture<>();
future.completeExceptionally(e);
return future;
}
if (computedValue.get() == null && r1 == null) {
return CompletableFuture.completedFuture(null);
}
Match<byte[]> valueMatch = r1 == null ? Match.NULL : Match.ANY;
Match<Long> versionMatch = r1 == null ? Match.ANY : Match.ifValue(r1.version());
return submit(new UpdateAndGet(key,
computedValue.get(),
valueMatch,
versionMatch))
.whenComplete((r, e) -> throwIfLocked(r.status()))
.thenApply(v -> v.newValue());
});
}
@Override
public synchronized CompletableFuture<Void> addListener(MapEventListener<String, byte[]> listener,
Executor executor) {
if (mapEventListeners.isEmpty()) {
return submit(new Listen()).thenRun(() -> mapEventListeners.putIfAbsent(listener, executor));
} else {
mapEventListeners.put(listener, executor);
return CompletableFuture.completedFuture(null);
}
}
@Override
public synchronized CompletableFuture<Void> removeListener(MapEventListener<String, byte[]> listener) {
if (mapEventListeners.remove(listener) != null && mapEventListeners.isEmpty()) {
return submit(new Unlisten()).thenApply(v -> null);
}
return CompletableFuture.completedFuture(null);
}
private void throwIfLocked(MapEntryUpdateResult.Status status) {
if (status == MapEntryUpdateResult.Status.WRITE_LOCK) {
throw new ConcurrentModificationException("Cannot update map: Another transaction in progress");
}
}
@Override
public CompletableFuture<Boolean> prepare(MapTransaction<String, byte[]> transaction) {
return submit(new TransactionPrepare(transaction)).thenApply(v -> v == PrepareResult.OK);
}
@Override
public CompletableFuture<Void> commit(TransactionId transactionId) {
return submit(new TransactionCommit(transactionId)).thenApply(v -> null);
}
@Override
public CompletableFuture<Void> rollback(TransactionId transactionId) {
return submit(new TransactionRollback(transactionId))
.thenApply(v -> null);
}
@Override
public CompletableFuture<Boolean> prepareAndCommit(MapTransaction<String, byte[]> transaction) {
return submit(new TransactionPrepareAndCommit(transaction)).thenApply(v -> v == PrepareResult.OK);
}
@Override
public void addStatusChangeListener(Consumer<Status> listener) {
statusChangeListeners.add(listener);
}
@Override
public void removeStatusChangeListener(Consumer<Status> listener) {
statusChangeListeners.remove(listener);
}
@Override
public Collection<Consumer<Status>> statusChangeListeners() {
return ImmutableSet.copyOf(statusChangeListeners);
}
}
| |
/* -----------------------------------------------------------------------------
* Rule_cmdUshrInt2addr.java
* -----------------------------------------------------------------------------
*
* Producer : com.parse2.aparse.Parser 2.3
* Produced : Fri Apr 12 10:40:21 MUT 2013
*
* -----------------------------------------------------------------------------
*/
package com.litecoding.smali2java.parser.cmd.int3long2.ushr;
import java.util.ArrayList;
import com.litecoding.smali2java.builder.Visitor;
import com.litecoding.smali2java.parser.ParserContext;
import com.litecoding.smali2java.parser.Rule;
import com.litecoding.smali2java.parser.Terminal_StringValue;
import com.litecoding.smali2java.parser.smali.Rule_codeRegister;
import com.litecoding.smali2java.parser.smali.Rule_codeRegisterVDst;
import com.litecoding.smali2java.parser.smali.Rule_commentSequence;
import com.litecoding.smali2java.parser.smali.Rule_listSeparator;
import com.litecoding.smali2java.parser.smali.Rule_optPadding;
import com.litecoding.smali2java.parser.smali.Rule_padding;
import com.litecoding.smali2java.parser.text.Rule_CRLF;
/**
 * Auto-generated (aparse 2.3) recursive-descent rule for one smali
 * "ushr-int/2addr" instruction line. The grammar matched is, in sequence:
 * optPadding, the literal "ushr-int/2addr", padding, a destination register,
 * a list separator, a source register, optPadding, an optional
 * (padding commentSequence) group, and a CRLF.
 *
 * Generated code — do not hand-edit the parsing logic; regenerate from the
 * grammar instead.
 */
final public class Rule_cmdUshrInt2addr extends Rule
{
    private Rule_cmdUshrInt2addr(String spelling, ArrayList<Rule> rules)
    {
        super(spelling, rules);
    }

    /** Visitor dispatch for this rule node. */
    public Object accept(Visitor visitor)
    {
        return visitor.visit(this);
    }

    /**
     * Attempts to parse one cmdUshrInt2addr at the current context position.
     * On success returns the rule node spanning the consumed text; on failure
     * restores context.index and returns null.
     */
    public static Rule_cmdUshrInt2addr parse(ParserContext context)
    {
        context.push("cmdUshrInt2addr");

        boolean parsed = true;
        // s0: start index, for backtracking and for the matched spelling
        int s0 = context.index;
        ArrayList<Rule> e0 = new ArrayList<Rule>();
        Rule rule;

        parsed = false;
        if (!parsed)
        {
            {
                // e1 collects children of this (single) alternative; s1 is its
                // backtrack point
                ArrayList<Rule> e1 = new ArrayList<Rule>();
                int s1 = context.index;
                parsed = true;
                // optPadding (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_optPadding.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // the literal opcode "ushr-int/2addr" (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Terminal_StringValue.parse(context, "ushr-int/2addr");
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // mandatory padding after the opcode (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_padding.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // destination register (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_codeRegisterVDst.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // list separator between the two registers (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_listSeparator.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // source register (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_codeRegister.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // trailing optPadding (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_optPadding.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // optional group: (padding commentSequence) — zero or one
                // occurrence; "parsed = true" at the end makes it optional
                if (parsed)
                {
                    boolean f1 = true;
                    @SuppressWarnings("unused")
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        // g1: position before the group, to detect progress
                        int g1 = context.index;
                        parsed = false;
                        if (!parsed)
                        {
                            {
                                ArrayList<Rule> e2 = new ArrayList<Rule>();
                                int s2 = context.index;
                                parsed = true;
                                // padding before the comment (exactly once)
                                if (parsed)
                                {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++)
                                    {
                                        rule = Rule_padding.parse(context);
                                        if ((f2 = rule != null))
                                        {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                // the comment itself (exactly once)
                                if (parsed)
                                {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++)
                                    {
                                        rule = Rule_commentSequence.parse(context);
                                        if ((f2 = rule != null))
                                        {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                if (parsed)
                                    e1.addAll(e2);
                                else
                                    context.index = s2;
                            }
                        }
                        // stop repeating if the group consumed no input
                        f1 = context.index > g1;
                        if (parsed) c1++;
                    }
                    // group is optional: failure to match does not fail the rule
                    parsed = true;
                }
                // line terminator (exactly once)
                if (parsed)
                {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++)
                    {
                        rule = Rule_CRLF.parse(context);
                        if ((f1 = rule != null))
                        {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                if (parsed)
                    e0.addAll(e1);
                else
                    context.index = s1;
            }
        }

        rule = null;
        if (parsed)
            rule = new Rule_cmdUshrInt2addr(context.text.substring(s0, context.index), e0);
        else
            // full backtrack on failure
            context.index = s0;

        context.pop("cmdUshrInt2addr", parsed);

        return (Rule_cmdUshrInt2addr)rule;
    }
}
/* -----------------------------------------------------------------------------
* eof
* -----------------------------------------------------------------------------
*/
| |
package cn.com.newcapec.citycard.common.po.base;
import java.lang.Comparable;
import java.io.Serializable;
/**
* This is an object that contains data related to the INTERFACE_PACKAGE table.
* Do not modify this class because it will be overwritten if the configuration file
* related to this class is modified.
*
* @hibernate.class
* table="INTERFACE_PACKAGE"
*/
public abstract class BaseInterfacePackage implements Comparable, Serializable {

    // Constants made final: they are identifiers for the mapping and must not
    // be reassigned at runtime.

    /** Logical reference name of this entity. */
    public static final String REF = "InterfacePackage";

    // Property-name constants, for building queries / criteria.
    public static final String PROP_FILEID = "fileid";
    public static final String PROP_INDUSTRYCODE = "industrycode";
    public static final String PROP_LINENO = "lineno";
    public static final String PROP_FILENAME = "filename";
    public static final String PROP_MERCHANTCODE = "merchantcode";
    public static final String PROP_OWNERCITYCODE = "ownercitycode";
    public static final String PROP_LOCALCSTACCFC = "localcstaccfc";
    public static final String PROP_TRADECITYCODE = "tradecitycode";
    public static final String PROP_TESTFLAG = "testflag";
    public static final String PROP_BUSINESSTYPE = "businesstype";
    public static final String PROP_ID = "id";
    public static final String PROP_OPDT = "opdt";
    public static final String PROP_RESPCODE = "respcode";
    public static final String PROP_ACCOUNTDATE = "accountdate";

    // constructors

    public BaseInterfacePackage () {
        initialize();
    }

    /**
     * Constructor for primary key
     */
    public BaseInterfacePackage (java.lang.String id) {
        this.setId(id);
        initialize();
    }

    /** Hook for subclasses; called by every constructor. */
    protected void initialize () {}

    // cached hash; MIN_VALUE means "not computed yet" (see hashCode())
    private int hashCode = Integer.MIN_VALUE;

    // primary key
    private java.lang.String id;

    // fields
    private java.lang.Integer fileid;
    private java.lang.Integer localcstaccfc;
    private java.lang.String filename;
    private java.lang.Integer respcode;
    private java.lang.String merchantcode;
    private java.lang.String tradecitycode;
    private java.lang.String ownercitycode;
    private java.lang.String industrycode;
    private java.lang.String testflag;
    private java.lang.String businesstype;
    private java.util.Date opdt;
    private java.lang.Integer lineno;
    private java.util.Date accountdate;

    /**
     * Return the unique identifier of this class
     * @hibernate.id
     *  generator-class="uuid.hex"
     *  column="ID"
     */
    public java.lang.String getId () {
        return id;
    }

    /**
     * Set the unique identifier of this class
     * @param id the new ID
     */
    public void setId (java.lang.String id) {
        this.id = id;
        // identifier changed: invalidate the cached hash
        this.hashCode = Integer.MIN_VALUE;
    }

    /**
     * Return the value associated with the column: FILEID
     */
    public java.lang.Integer getFileid () {
        return fileid;
    }

    /**
     * Set the value related to the column: FILEID
     * @param fileid the FILEID value
     */
    public void setFileid (java.lang.Integer fileid) {
        this.fileid = fileid;
    }

    /**
     * Return the value associated with the column: LOCALCSTACCFC
     */
    public java.lang.Integer getLocalcstaccfc () {
        return localcstaccfc;
    }

    /**
     * Set the value related to the column: LOCALCSTACCFC
     * @param localcstaccfc the LOCALCSTACCFC value
     */
    public void setLocalcstaccfc (java.lang.Integer localcstaccfc) {
        this.localcstaccfc = localcstaccfc;
    }

    /**
     * Return the value associated with the column: FILENAME
     */
    public java.lang.String getFilename () {
        return filename;
    }

    /**
     * Set the value related to the column: FILENAME
     * @param filename the FILENAME value
     */
    public void setFilename (java.lang.String filename) {
        this.filename = filename;
    }

    /**
     * Return the value associated with the column: RESPCODE
     */
    public java.lang.Integer getRespcode () {
        return respcode;
    }

    /**
     * Set the value related to the column: RESPCODE
     * @param respcode the RESPCODE value
     */
    public void setRespcode (java.lang.Integer respcode) {
        this.respcode = respcode;
    }

    /**
     * Return the value associated with the column: MERCHANTCODE
     */
    public java.lang.String getMerchantcode () {
        return merchantcode;
    }

    /**
     * Set the value related to the column: MERCHANTCODE
     * @param merchantcode the MERCHANTCODE value
     */
    public void setMerchantcode (java.lang.String merchantcode) {
        this.merchantcode = merchantcode;
    }

    /**
     * Return the value associated with the column: TRADECITYCODE
     */
    public java.lang.String getTradecitycode () {
        return tradecitycode;
    }

    /**
     * Set the value related to the column: TRADECITYCODE
     * @param tradecitycode the TRADECITYCODE value
     */
    public void setTradecitycode (java.lang.String tradecitycode) {
        this.tradecitycode = tradecitycode;
    }

    /**
     * Return the value associated with the column: OWNERCITYCODE
     */
    public java.lang.String getOwnercitycode () {
        return ownercitycode;
    }

    /**
     * Set the value related to the column: OWNERCITYCODE
     * @param ownercitycode the OWNERCITYCODE value
     */
    public void setOwnercitycode (java.lang.String ownercitycode) {
        this.ownercitycode = ownercitycode;
    }

    /**
     * Return the value associated with the column: INDUSTRYCODE
     */
    public java.lang.String getIndustrycode () {
        return industrycode;
    }

    /**
     * Set the value related to the column: INDUSTRYCODE
     * @param industrycode the INDUSTRYCODE value
     */
    public void setIndustrycode (java.lang.String industrycode) {
        this.industrycode = industrycode;
    }

    /**
     * Return the value associated with the column: TESTFLAG
     */
    public java.lang.String getTestflag () {
        return testflag;
    }

    /**
     * Set the value related to the column: TESTFLAG
     * @param testflag the TESTFLAG value
     */
    public void setTestflag (java.lang.String testflag) {
        this.testflag = testflag;
    }

    /**
     * Return the value associated with the column: BUSINESSTYPE
     */
    public java.lang.String getBusinesstype () {
        return businesstype;
    }

    /**
     * Set the value related to the column: BUSINESSTYPE
     * @param businesstype the BUSINESSTYPE value
     */
    public void setBusinesstype (java.lang.String businesstype) {
        this.businesstype = businesstype;
    }

    /**
     * Return the value associated with the column: OPDT
     */
    public java.util.Date getOpdt () {
        return opdt;
    }

    /**
     * Set the value related to the column: OPDT
     * @param opdt the OPDT value
     */
    public void setOpdt (java.util.Date opdt) {
        this.opdt = opdt;
    }

    /**
     * Return the value associated with the column: LINENO
     */
    public java.lang.Integer getLineno () {
        return lineno;
    }

    /**
     * Set the value related to the column: LINENO
     * @param lineno the LINENO value
     */
    public void setLineno (java.lang.Integer lineno) {
        this.lineno = lineno;
    }

    /**
     * Return the value associated with the column: ACCOUNTDATE
     */
    public java.util.Date getAccountdate () {
        return accountdate;
    }

    /**
     * Set the value related to the column: ACCOUNTDATE
     * @param accountdate the ACCOUNTDATE value
     */
    public void setAccountdate (java.util.Date accountdate) {
        this.accountdate = accountdate;
    }

    /**
     * Two instances are equal when both have a non-null id and the ids match;
     * instances without an id are never equal to anything.
     */
    public boolean equals (Object obj) {
        if (null == obj) return false;
        if (!(obj instanceof cn.com.newcapec.citycard.common.po.InterfacePackage)) return false;
        else {
            cn.com.newcapec.citycard.common.po.InterfacePackage interfacePackage = (cn.com.newcapec.citycard.common.po.InterfacePackage) obj;
            if (null == this.getId() || null == interfacePackage.getId()) return false;
            else return (this.getId().equals(interfacePackage.getId()));
        }
    }

    /**
     * Hash based on class name + id, cached once the id is set; falls back to
     * identity hash while the id is still null.
     */
    public int hashCode () {
        if (Integer.MIN_VALUE == this.hashCode) {
            if (null == this.getId()) return super.hashCode();
            else {
                String hashStr = this.getClass().getName() + ":" + this.getId().hashCode();
                this.hashCode = hashStr.hashCode();
            }
        }
        return this.hashCode;
    }

    /**
     * Orders instances by hash code.
     * NOTE(review): this ordering is arbitrary and not consistent with
     * equals(); kept as-is for backward compatibility — do not rely on it for
     * meaningful sorting.
     */
    public int compareTo (Object obj) {
        // Integer.compare-style comparison, avoiding subtraction overflow
        if (obj.hashCode() > hashCode()) return 1;
        else if (obj.hashCode() < hashCode()) return -1;
        else return 0;
    }

    public String toString () {
        return super.toString();
    }
}
| |
package contagionJVM.System;
import contagionJVM.Helper.ColorToken;
import contagionJVM.GameObject.PlayerGO;
import contagionJVM.NWNX.ChatMessage;
import contagionJVM.NWNX.NWNX_Chat;
import contagionJVM.NWNX.NWNX_Events;
import org.nwnx.nwnx2.jvm.NWObject;
import org.nwnx.nwnx2.jvm.NWScript;
import java.util.Objects;
public class RadioSystem {
// The number of channels available to players using radios.
// Default: 10
final int RADIO_NUMBER_OF_CHANNELS = 10;
// Tag and resref of the radio item
final String RADIO_RESREF = "reo_radio";
// Name of the variable which determines if a radio is turned on or not.
final String RADIO_POWER = "RADIO_POWER";
// Name of the variable which determines which radio a PC is currently tuned into.
// A PC may only be tuned into one station at a time even if they have more than one
// radio in their inventory.
final String RADIO_CHANNEL = "RADIO_CHANNEL";
// The name of the variable which tracks the PC ID number the radio was turned by.
// This is used to ensure the radios don't get turned off after a server reset,
// as the game fires the OnAcquire event for all items on module entry. Normally this would
// reset the radio's status but if the PC ID matches then we can ignore it.
final String RADIO_PC_ID_ENABLED_BY = "RADIO_PC_ID_ENABLED_BY";
// Resref and tag of the radio NPC which handles distributing messages.
final String RADIO_NPC = "radio_npc";
public void OnNWNXChat(NWObject oPC)
{
PlayerGO pcGO = new PlayerGO(oPC);
NWObject oDatabase = pcGO.GetDatabaseItem();
int iChannel = NWScript.getLocalInt(oDatabase, RADIO_CHANNEL);
ChatMessage stMessage = NWNX_Chat.GetMessage();
// This only matters when a PC uses the party chat channel
if(stMessage.getMode() != NWNX_Chat.CHAT_CHANNEL_PARTY) return;
NWNX_Chat.SuppressMessage();
NWObject oNPC = NWScript.getObjectByTag(RADIO_NPC, 0);
// Can't send messages without a radio turned on.
if(iChannel <= 0)
{
NWScript.sendMessageToPC(oPC, ColorToken.Red() + "You must have a radio to communicate over party chat." + ColorToken.End());
return;
}
String sSenderName = ColorToken.Custom(115, 101, 206) + "(Ch. " + iChannel + ") " + NWScript.getName(oPC, false) + ": " + ColorToken.End();
// Why is there no CHAT_CHANNEL_DM??
//NWNXChat_SendMessage(oNPC, CHAT_CHANNEL_DM, sSenderName = stMessage.Text + ColorTokenEnd());
NWObject[] oMembers = NWScript.getPCs();
for(NWObject member : oMembers)
{
pcGO = new PlayerGO(member);
NWObject oMemberDatabase = pcGO.GetDatabaseItem();
int iMemberChannel = NWScript.getLocalInt(oMemberDatabase, RADIO_CHANNEL);
// Message is sent to anyone if they've got a radio tuned in to the correct channel. They do not need to be
// in the same party.
if(iMemberChannel == iChannel || NWScript.getIsDM(member))
{
NWNX_Chat.SendMessage(oNPC, NWNX_Chat.CHAT_CHANNEL_PRIVATE, sSenderName + ColorToken.White() + stMessage.getText() + ColorToken.End(), member);
}
}
}
public void OnModuleEnter()
{
NWObject oPC = NWScript.getEnteringObject();
if(!NWScript.getIsPC(oPC) || NWScript.getIsDM(oPC)) return;
NWNX_Chat.PCEnter(oPC);
}
public void OnModuleLeave()
{
NWObject oPC = NWScript.getExitingObject();
if(!NWScript.getIsPC(oPC) || NWScript.getIsDM(oPC)) return;
NWNX_Chat.PCExit(oPC);
}
public void ChangeChannel(NWObject oPC)
{
PlayerGO pcGO = new PlayerGO(oPC);
NWObject oRadio = NWNX_Events.GetEventItem();
NWObject oDatabase = pcGO.GetDatabaseItem();
int iRadioChannel = NWScript.getLocalInt(oDatabase, RADIO_CHANNEL);
int bPoweredOn = NWScript.getLocalInt(oRadio, RADIO_POWER);
// Can't change channel unless the radio is turned on
if(bPoweredOn == 0)
{
NWScript.sendMessageToPC(oPC, ColorToken.Red() + "You must turn the radio on first." + ColorToken.End());
return;
}
iRadioChannel++;
// Can't cycle beyond the maximum number of channels
if(iRadioChannel > RADIO_NUMBER_OF_CHANNELS)
{
iRadioChannel = 1;
}
// Mark the new channel, inform player of new channel, and update the radio's name to reflect the new channel
NWScript.setLocalInt(oDatabase, RADIO_CHANNEL, iRadioChannel);
NWScript.sendMessageToPC(oPC, ColorToken.Purple() + "Radio Channel: " + iRadioChannel + ColorToken.End());
UpdateItemName(oRadio);
}
public void TogglePower(NWObject oPC)
{
PlayerGO pcGO = new PlayerGO(oPC);
NWObject oRadio = NWNX_Events.GetEventItem();
NWObject oDatabase = pcGO.GetDatabaseItem();
int iRadioChannel = NWScript.getLocalInt(oDatabase, RADIO_CHANNEL);
int bPoweredOn = NWScript.getLocalInt(oRadio, RADIO_POWER);
String sUUID = pcGO.getUUID();
// Another radio is already turned on. Can't turn on another.
if(iRadioChannel > 0 && bPoweredOn == 0)
{
NWScript.sendMessageToPC(oPC, ColorToken.Red() + "Another radio is already turned on. You may only have one radio turned on at a time." + ColorToken.End());
return;
}
// It's powered on right now, but we're turning it off. Remove variables from the owner and the radio itself
if(bPoweredOn == 1)
{
NWScript.deleteLocalInt(oDatabase, RADIO_CHANNEL);
NWScript.deleteLocalInt(oRadio, RADIO_POWER);
NWScript.deleteLocalInt(oRadio, RADIO_PC_ID_ENABLED_BY);
NWScript.sendMessageToPC(oPC, ColorToken.Purple() + "Radio powered off." + ColorToken.End());
}
// It's powered off right now, and we're turning it on. Add variables to the owner and the radio itself
else
{
NWScript.setLocalInt(oDatabase, RADIO_CHANNEL, 1);
NWScript.setLocalInt(oRadio, RADIO_POWER, 1);
NWScript.setLocalString(oRadio, RADIO_PC_ID_ENABLED_BY, sUUID);
NWScript.sendMessageToPC(oPC, ColorToken.Purple() + "Radio powered on." + ColorToken.End());
NWScript.sendMessageToPC(oPC, ColorToken.Purple() + "Radio Channel: 1" + ColorToken.End());
}
UpdateItemName(oRadio);
}
public void OnModuleUnacquire()
{
NWObject oPC = NWScript.getModuleItemLostBy();
PlayerGO pcGO = new PlayerGO(oPC);
NWObject oRadio = NWScript.getModuleItemLost();
String sResref = NWScript.getResRef(oRadio);
if(Objects.equals(sResref, RADIO_RESREF))
{
NWObject oDatabase = pcGO.GetDatabaseItem();
int bPoweredOn = NWScript.getLocalInt(oRadio, RADIO_POWER);
if(bPoweredOn == 1)
{
NWScript.deleteLocalInt(oDatabase, RADIO_CHANNEL);
NWScript.deleteLocalInt(oRadio, RADIO_POWER);
UpdateItemName(oRadio);
}
}
}
/**
 * Module OnAcquireItem handler: powers down a radio that was enabled by a
 * different PC than the one acquiring it.
 */
public void OnModuleAcquire()
{
    NWObject oPC = NWScript.getModuleItemAcquiredBy();
    PlayerGO playerGO = new PlayerGO(oPC);
    NWObject dbItem = playerGO.GetDatabaseItem();
    NWObject acquiredItem = NWScript.getModuleItemAcquired();
    String resref = NWScript.getResRef(acquiredItem);
    String ownerUUID = playerGO.getUUID();
    String enabledByUUID = NWScript.getLocalString(acquiredItem, RADIO_PC_ID_ENABLED_BY);
    // Ignore anything that is not a radio.
    if(!Objects.equals(resref, RADIO_RESREF)) return;
    int poweredOn = NWScript.getLocalInt(acquiredItem, RADIO_POWER);
    // The radio must be turned on and the person who turned it on must not be
    // the current owner. I.E: When the server resets, the OnAcquire event is fired
    // for all items. This check prevents the radio's status from being reset when
    // that happens.
    if(poweredOn != 1 || Objects.equals(ownerUUID, enabledByUUID)) return;
    NWScript.deleteLocalInt(dbItem, RADIO_CHANNEL);
    NWScript.deleteLocalInt(acquiredItem, RADIO_POWER);
    UpdateItemName(acquiredItem);
}
/**
 * Rebuilds the radio's displayed name from the possessor's active channel.
 * With an active channel the base name gets a green "(Channel N)" suffix;
 * otherwise the name is set to the empty string (presumably letting the
 * engine fall back to the item's default name - NOTE(review): verify against
 * NWScript SetName semantics).
 */
private void UpdateItemName(NWObject radio)
{
    NWObject possessor = NWScript.getItemPossessor(radio);
    PlayerGO playerGO = new PlayerGO(possessor);
    NWObject dbItem = playerGO.GetDatabaseItem();
    String baseName = NWScript.getName(radio, true);
    int channel = NWScript.getLocalInt(dbItem, RADIO_CHANNEL);
    String updatedName = (channel > 0)
            ? baseName + ColorToken.Custom(0, 255, 0) + " (Channel " + channel + ")"
            : "";
    // Update item name
    NWScript.setName(radio, updatedName);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.azure.storage;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.FilterInputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import com.microsoft.azure.storage.OperationContext;
import org.apache.commons.codec.DecoderException;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.azure.AbstractAzureBlobProcessor;
import org.apache.nifi.processors.azure.storage.utils.AzureBlobClientSideEncryptionUtils;
import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.BlobProperties;
import com.microsoft.azure.storage.blob.CloudBlob;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.BlobRequestOptions;
@Tags({ "azure", "microsoft", "cloud", "storage", "blob" })
@SeeAlso({ ListAzureBlobStorage.class, FetchAzureBlobStorage.class, DeleteAzureBlobStorage.class })
@CapabilityDescription("Puts content into an Azure Storage Blob")
@InputRequirement(Requirement.INPUT_REQUIRED)
@WritesAttributes({ @WritesAttribute(attribute = "azure.container", description = "The name of the Azure container"),
@WritesAttribute(attribute = "azure.blobname", description = "The name of the Azure blob"),
@WritesAttribute(attribute = "azure.primaryUri", description = "Primary location for blob content"),
@WritesAttribute(attribute = "azure.etag", description = "Etag for the Azure blob"),
@WritesAttribute(attribute = "azure.length", description = "Length of the blob"),
@WritesAttribute(attribute = "azure.timestamp", description = "The timestamp in Azure for the blob")})
public class PutAzureBlobStorage extends AbstractAzureBlobProcessor {

    // Blob path within the container; evaluated against flowfile attributes.
    public static final PropertyDescriptor BLOB_NAME = new PropertyDescriptor.Builder()
            .name("blob")
            .displayName("Blob")
            .description("The filename of the blob")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .required(true)
            .build();

    // Optional on-demand container creation; requires list-containers permission
    // when enabled (see description).
    public static final PropertyDescriptor CREATE_CONTAINER = new PropertyDescriptor.Builder()
            .name("azure-create-container")
            .displayName("Create Container")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(true)
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .allowableValues("true", "false")
            .defaultValue("false")
            .description("Specifies whether to check if the container exists and to automatically create it if it does not. " +
                    "Permission to list containers is required. If false, this check is not made, but the Put operation " +
                    "will fail if the container does not exist.")
            .build();

    /** Extends the base processor's validation with the client-side-encryption property checks. */
    @Override
    protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
        final List<ValidationResult> results = new ArrayList<>(super.customValidate(validationContext));
        results.addAll(AzureBlobClientSideEncryptionUtils.validateClientSideEncryptionProperties(validationContext));
        return results;
    }

    /**
     * Replaces the inherited BLOB descriptor with this processor's BLOB_NAME and
     * appends the container-creation and client-side-encryption properties.
     */
    @Override
    public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        List<PropertyDescriptor> properties = new ArrayList<>(super.getSupportedPropertyDescriptors());
        properties.remove(BLOB);
        properties.add(BLOB_NAME);
        properties.add(CREATE_CONTAINER);
        properties.add(AzureBlobClientSideEncryptionUtils.CSE_KEY_TYPE);
        properties.add(AzureBlobClientSideEncryptionUtils.CSE_KEY_ID);
        properties.add(AzureBlobClientSideEncryptionUtils.CSE_SYMMETRIC_KEY_HEX);
        return properties;
    }

    /**
     * Uploads the incoming flowfile's content to an Azure block blob. On success the
     * azure.* attributes are populated and the flowfile is routed to REL_SUCCESS;
     * on failure the flowfile is penalized and routed to REL_FAILURE.
     */
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        final long startNanos = System.nanoTime();

        String containerName = context.getProperty(AzureStorageUtils.CONTAINER).evaluateAttributeExpressions(flowFile).getValue();
        String blobPath = context.getProperty(BLOB_NAME).evaluateAttributeExpressions(flowFile).getValue();

        final boolean createContainer = context.getProperty(CREATE_CONTAINER).asBoolean();

        // Captures the exception thrown inside the session.read callback so the outer
        // catch can report the root cause instead of the session's wrapper exception.
        AtomicReference<Exception> storedException = new AtomicReference<>();
        try {
            CloudBlobClient blobClient = AzureStorageUtils.createCloudBlobClient(context, getLogger(), flowFile);
            CloudBlobContainer container = blobClient.getContainerReference(containerName);

            if (createContainer)
                container.createIfNotExists();

            CloudBlob blob = container.getBlockBlobReference(blobPath);

            final OperationContext operationContext = new OperationContext();
            AzureStorageUtils.setProxy(operationContext, context);

            BlobRequestOptions blobRequestOptions = createBlobRequestOptions(context);

            final Map<String, String> attributes = new HashMap<>();
            long length = flowFile.getSize();
            session.read(flowFile, rawIn -> {
                InputStream in = rawIn;

                if (!(in instanceof BufferedInputStream)) {
                    // do not double-wrap
                    in = new BufferedInputStream(rawIn);
                }

                // If markSupported() is true and a file length is provided,
                // Blobs are not uploaded in blocks resulting in OOME for large
                // files. The UnmarkableInputStream wrapper class disables
                // mark() and reset() to help force uploading files in chunks.
                if (in.markSupported()) {
                    in = new UnmarkableInputStream(in);
                }

                try {
                    uploadBlob(blob, operationContext, blobRequestOptions, in);
                    BlobProperties properties = blob.getProperties();
                    attributes.put("azure.container", containerName);
                    attributes.put("azure.primaryUri", blob.getSnapshotQualifiedUri().toString());
                    attributes.put("azure.etag", properties.getEtag());
                    attributes.put("azure.length", String.valueOf(length));
                    attributes.put("azure.timestamp", String.valueOf(properties.getLastModified()));
                } catch (StorageException | URISyntaxException | IOException e) {
                    // Remember the real cause, then rethrow as IOException (the
                    // callback's only permitted checked exception type).
                    storedException.set(e);
                    throw e instanceof IOException ? (IOException) e : new IOException(e);
                }
            });

            if (!attributes.isEmpty()) {
                flowFile = session.putAllAttributes(flowFile, attributes);
            }
            session.transfer(flowFile, REL_SUCCESS);

            final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
            session.getProvenanceReporter().send(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis);

        } catch (IllegalArgumentException | URISyntaxException | StorageException | ProcessException | DecoderException e) {
            // A ProcessException with no stored cause did not originate inside the
            // read callback, so let the framework handle it (yield/rollback).
            if (e instanceof ProcessException && storedException.get() == null) {
                throw (ProcessException) e;
            } else {
                Exception failureException = Optional.ofNullable(storedException.get()).orElse(e);
                getLogger().error("Failed to put Azure blob {}", new Object[]{blobPath}, failureException);
                flowFile = session.penalize(flowFile);
                session.transfer(flowFile, REL_FAILURE);
            }
        }
    }

    /**
     * Performs the actual upload. Length -1 forces the SDK to stream the content
     * rather than requiring a known size up front. Package-private for test overrides.
     */
    void uploadBlob(CloudBlob blob, OperationContext operationContext, BlobRequestOptions blobRequestOptions, InputStream in) throws StorageException, IOException {
        blob.upload(in, -1, null, blobRequestOptions, operationContext);
    }

    // Used to help force Azure Blob SDK to write in blocks
    private static class UnmarkableInputStream extends FilterInputStream {
        public UnmarkableInputStream(InputStream in) {
            super(in);
        }

        // Intentionally a no-op: advertising no mark support makes the SDK
        // fall back to chunked (block) uploads.
        @Override
        public void mark(int readlimit) {
        }

        // Intentionally a no-op (see mark()).
        @Override
        public void reset() throws IOException {
        }

        @Override
        public boolean markSupported() {
            return false;
        }
    }
}
| |
/* ************************************************************************
#
# designCraft.io
#
# http://designcraft.io/
#
# Copyright:
# Copyright 2014 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
package dcraft.schema;
import java.util.HashMap;
import dcraft.bus.Message;
import dcraft.util.ArrayUtil;
import dcraft.util.StringUtil;
import dcraft.xml.XElement;
/**
 * Registry of bus service definitions (service -> feature -> op) loaded from
 * schema XML. Resolves request/response data types and security tags for
 * incoming messages.
 *
 * Improvement over the previous revision: the service/feature resolution that
 * was duplicated between getOp(String,String,String) and
 * getOpSecurity(String,String,String) now lives in a single findFeature helper,
 * and getRequestType reuses the getOp(Message) overload.
 */
public class ServiceSchema {
    protected SchemaManager man = null;

    // Service name -> service definition.
    protected HashMap<String, Service> services = new HashMap<String, Service>();

    public ServiceSchema(SchemaManager man) {
        this.man = man;
    }

    /**
     * Loads Service/Secure/Feature/Op definitions from the given XML element,
     * merging into any services/features already registered.
     *
     * @param schema schema used to resolve Request/Response data types
     * @param db     XML element containing Service children
     */
    public void load(Schema schema, XElement db) {
        for (XElement serel : db.selectAll("Service")) {
            String sname = serel.getAttribute("Name");

            if (StringUtil.isEmpty(sname))
                continue;

            Service ser = this.services.get(sname);

            if (ser == null) {
                ser = new Service();
                ser.name = sname;
                this.services.put(sname, ser);
            }

            for (XElement secel : serel.selectAll("Secure")) {
                // Tags on the Secure element apply to every op it contains;
                // default is Guest + User when no Tags attribute is present.
                String[] tags = secel.hasAttribute("Tags")
                        ? secel.getAttribute("Tags").split(",")
                        : new String[] { "Guest", "User" };

                for (XElement ftel : secel.selectAll("Feature")) {
                    String fname = ftel.getAttribute("Name");

                    if (StringUtil.isEmpty(fname))
                        continue;

                    Feature fet = ser.features.get(fname);

                    if (fet == null) {
                        fet = new Feature();
                        fet.name = fname;
                        ser.features.put(fname, fet);
                    }

                    for (XElement opel : ftel.selectAll("Op")) {
                        String oname = opel.getAttribute("Name");

                        if (StringUtil.isEmpty(oname))
                            continue;

                        Op opt = new Op();
                        opt.name = oname;
                        opt.securityTags = tags;
                        fet.ops.put(oname, opt);

                        XElement req = opel.find("Request", "RecRequest");

                        if (req != null)
                            opt.request = this.man.loadDataType(schema, req);

                        XElement resp = opel.find("Response", "RecResponse");

                        if (resp != null)
                            opt.response = this.man.loadDataType(schema, resp);
                    }

                    for (XElement opel : ftel.selectAll("OpMod")) {
                        String oname = opel.getAttribute("Name");

                        if (StringUtil.isEmpty(oname))
                            continue;

                        // OpMods accumulate security tags across multiple Secure sections.
                        String[] curr = fet.opmods.get(oname);

                        if (curr != null)
                            curr = ArrayUtil.addAll(curr, tags);
                        else
                            curr = tags;

                        fet.opmods.put(oname, curr);
                    }
                }
            }
        }

        // TODO we need to have additional table info stored locally - but at moment at least we can validate against Services
    }

    /** Returns the request data type for the op addressed by the message, or null. */
    public DataType getRequestType(Message msg) {
        Op opt = this.getOp(msg);

        if (opt != null)
            return opt.request;

        return null;
    }

    /** Returns the response data type for the given op, or null if not found. */
    public DataType getResponseType(String service, String feature, String op) {
        Op opt = this.getOp(service, feature, op);

        if (opt != null)
            return opt.response;

        return null;
    }

    /** Resolves the op addressed by the message's Service/Feature/Op fields. */
    public Op getOp(Message msg) {
        return this.getOp(msg.getFieldAsString("Service"), msg.getFieldAsString("Feature"), msg.getFieldAsString("Op"));
    }

    /**
     * Shared lookup for getOp/getOpSecurity: resolves the feature, treating an
     * empty feature name as "default". Returns null when either level is missing.
     */
    private Feature findFeature(String service, String feature) {
        if (StringUtil.isEmpty(service))
            return null;

        Service s = this.services.get(service);

        if (s == null)
            return null;

        if (StringUtil.isEmpty(feature))
            feature = "default";

        return s.features.get(feature);
    }

    /** Resolves an op by name; empty feature/op names fall back to "default". */
    public Op getOp(String service, String feature, String op) {
        Feature f = this.findFeature(service, feature);

        if (f == null)
            return null;

        if (StringUtil.isEmpty(op))
            op = "default";

        return f.ops.get(op);
    }

    /** Returns the OpMod security tags for the op addressed by the message. */
    public String[] getOpSecurity(Message msg) {
        return this.getOpSecurity(msg.getFieldAsString("Service"), msg.getFieldAsString("Feature"), msg.getFieldAsString("Op"));
    }

    /** Returns the OpMod security tags for an op; empty names fall back to "default". */
    public String[] getOpSecurity(String service, String feature, String op) {
        Feature f = this.findFeature(service, feature);

        if (f == null)
            return null;

        if (StringUtil.isEmpty(op))
            op = "default";

        return f.opmods.get(op);
    }

    /** A named service: a collection of features. */
    public class Service {
        protected String name = null;
        protected HashMap<String, Feature> features = new HashMap<String, Feature>();
    }

    /** A named feature: its ops plus accumulated OpMod security tags. */
    public class Feature {
        protected String name = null;
        protected HashMap<String, Op> ops = new HashMap<String, Op>();
        protected HashMap<String, String[]> opmods = new HashMap<String, String[]>();
    }

    /** A single operation with optional request/response types and security tags. */
    public class Op {
        protected String name = null;
        protected String[] securityTags = null;
        protected DataType request = null;
        protected DataType response = null;

        /** True when any of the given tags matches one of this op's security tags. */
        public boolean isTagged(String... tags) {
            if (this.securityTags == null)
                return false;

            for (String has : this.securityTags) {
                for (String wants : tags) {
                    if (has.equals(wants))
                        return true;
                }
            }

            return false;
        }
    }

    /** Compiles every request/response data type registered so far. */
    public void compile() {
        for (Service s : this.services.values()) {
            for (Feature f : s.features.values()) {
                for (Op o : f.ops.values()) {
                    if (o.request != null)
                        o.request.compile();

                    if (o.response != null)
                        o.response.compile();
                }
            }
        }
    }

    /** Removes a service definition by name (no-op if absent). */
    public void remove(String name) {
        this.services.remove(name);
    }
}
| |
/* Copyright 2016 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.transformer.py;
import com.google.api.codegen.GapicContext;
import com.google.api.codegen.InterfaceView;
import com.google.api.codegen.SnippetSetRunner;
import com.google.api.codegen.TargetLanguage;
import com.google.api.codegen.config.FlatteningConfig;
import com.google.api.codegen.config.GapicProductConfig;
import com.google.api.codegen.config.PackageMetadataConfig;
import com.google.api.codegen.gapic.GapicProvider;
import com.google.api.codegen.transformer.DefaultFeatureConfig;
import com.google.api.codegen.transformer.DynamicLangApiMethodTransformer;
import com.google.api.codegen.transformer.GapicInterfaceContext;
import com.google.api.codegen.transformer.GapicMethodContext;
import com.google.api.codegen.transformer.InitCodeTransformer;
import com.google.api.codegen.transformer.ModelToViewTransformer;
import com.google.api.codegen.transformer.ModelTypeTable;
import com.google.api.codegen.transformer.PackageMetadataNamer;
import com.google.api.codegen.transformer.PackageMetadataTransformer;
import com.google.api.codegen.transformer.TestCaseTransformer;
import com.google.api.codegen.util.py.PythonTypeTable;
import com.google.api.codegen.util.testing.PythonValueProducer;
import com.google.api.codegen.util.testing.ValueProducer;
import com.google.api.codegen.viewmodel.ApiMethodView;
import com.google.api.codegen.viewmodel.OptionalArrayMethodView;
import com.google.api.codegen.viewmodel.SimpleViewModel;
import com.google.api.codegen.viewmodel.ViewModel;
import com.google.api.tools.framework.model.Interface;
import com.google.api.tools.framework.model.Method;
import com.google.api.tools.framework.model.Model;
import com.google.api.tools.framework.snippet.Doc;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import java.io.File;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Responsible for producing package metadata related views for Python
*
* <p>TODO(geigerj): Once Python is on MVVM it will not be necessary to store copies of the other
* GAPIC providers in this class. The only information that is needed from these providers is the
* names of the output files they produce. With MVVM should be possible to derive this information
* from the corresponding transformers/view models without actually rendering the templates.
*/
public class PythonPackageMetadataTransformer implements ModelToViewTransformer {
    // Generated modules with this prefix are test code and are excluded from metadata.
    private static final String TEST_PREFIX = "test.";

    private static final String GITHUB_DOC_HOST =
        "https://googlecloudplatform.github.io/google-cloud-python/stable";
    private static final String GITHUB_REPO_HOST =
        "https://github.com/GoogleCloudPlatform/google-cloud-python";
    private static final String AUTH_DOC_PATH = "/google-cloud-auth";
    private static final String LIB_DOC_PATH = "/%s-usage";
    private static final String MAIN_README_PATH = "/blob/master/README.rst";

    private final PackageMetadataConfig packageConfig;
    private final PackageMetadataTransformer metadataTransformer = new PackageMetadataTransformer();
    private final ValueProducer valueProducer = new PythonValueProducer();
    private final TestCaseTransformer testCaseTransformer = new TestCaseTransformer(valueProducer);
    private final List<GapicProvider<? extends Object>> gapicProviders;
    private final PythonSurfaceNamer surfaceNamer;

    // Lazily populated by computeModules(); null until the first metadata view is built.
    private List<String> apiModules = null;
    private List<String> typeModules = null;

    public PythonPackageMetadataTransformer(
        PackageMetadataConfig packageConfig, List<GapicProvider<? extends Object>> gapicProviders) {
        this.packageConfig = packageConfig;
        this.gapicProviders = gapicProviders;
        this.surfaceNamer = new PythonSurfaceNamer(packageConfig.packageName(TargetLanguage.PYTHON));
    }

    /**
     * Produces one view per __init__.py (derived from the package hierarchy) plus
     * one view per top-level metadata template.
     */
    @Override
    public List<ViewModel> transform(final Model model, final GapicProductConfig productConfig) {
        String version = packageConfig.apiVersion();
        List<ViewModel> metadata =
            computeInitFiles(computePackages(productConfig.getPackageName()), version);
        PackageMetadataNamer namer = new PackageMetadataNamer();
        for (String templateFileName : getTopLevelTemplateFileNames()) {
            metadata.add(generateMetadataView(model, productConfig, templateFileName, namer));
        }
        return metadata;
    }

    /** All snippet templates this transformer can render (top-level + __init__). */
    @Override
    public List<String> getTemplateFileNames() {
        List<String> templates = new ArrayList<>();
        templates.addAll(getTopLevelTemplateFileNames());
        templates.addAll(getInitTemplateFileNames());
        return templates;
    }

    /** Templates rendered once per package (LICENSE, setup.py, docs, ...). */
    public List<String> getTopLevelTemplateFileNames() {
        return Lists.newArrayList(
            "LICENSE.snip",
            "py/MANIFEST.in.snip",
            "py/PUBLISHING.rst.snip",
            "py/setup.py.snip",
            "py/requirements.txt.snip",
            "py/README.rst.snip",
            "py/tox.ini.snip",
            "py/docs/apis.rst.snip",
            "py/docs/conf.py.snip",
            "py/docs/index.rst.snip",
            "py/docs/starting.rst.snip");
    }

    /** Templates for regular and namespace-package __init__.py files. */
    public List<String> getInitTemplateFileNames() {
        return Lists.newArrayList("py/__init__.py.snip", "py/namespace__init__.py.snip");
    }

    /**
     * Builds the metadata view for one top-level template. The output path is the
     * template name with the leading "py/" directory and the ".snip" extension removed.
     */
    private ViewModel generateMetadataView(
        Model model, GapicProductConfig productConfig, String template, PackageMetadataNamer namer) {
        List<ApiMethodView> exampleMethods = generateExampleMethods(model, productConfig);
        String noLeadingPyDir = template.startsWith("py/") ? template.substring(3) : template;
        int extensionIndex = noLeadingPyDir.lastIndexOf(".");
        String outputPath = noLeadingPyDir.substring(0, extensionIndex);

        // Ensure apiModules/typeModules are populated (runs the providers once).
        computeModules(gapicProviders);

        return metadataTransformer
            .generateMetadataView(packageConfig, model, template, outputPath, TargetLanguage.PYTHON)
            .namespacePackages(
                computeNamespacePackages(productConfig.getPackageName(), packageConfig.apiVersion()))
            .developmentStatus(
                surfaceNamer.getReleaseAnnotation(packageConfig.releaseLevel(TargetLanguage.PYTHON)))
            .developmentStatusTitle(
                namer.getReleaseAnnotation(packageConfig.releaseLevel(TargetLanguage.PYTHON)))
            .apiModules(apiModules)
            .typeModules(typeModules)
            .exampleMethods(exampleMethods)
            .targetLanguage("Python")
            .mainReadmeLink(GITHUB_REPO_HOST + MAIN_README_PATH)
            .libraryDocumentationLink(
                GITHUB_DOC_HOST + String.format(LIB_DOC_PATH, packageConfig.shortName()))
            .authDocumentationLink(GITHUB_DOC_HOST + AUTH_DOC_PATH)
            .versioningDocumentationLink(GITHUB_REPO_HOST + MAIN_README_PATH)
            .build();
    }

    // Generates methods used as examples for the README.md file.
    // This currently generates a list of methods that have smoke test configuration. In the future,
    // the example methods may be configured separately.
    private List<ApiMethodView> generateExampleMethods(
        Model model, GapicProductConfig productConfig) {
        ImmutableList.Builder<ApiMethodView> exampleMethods = ImmutableList.builder();
        for (Interface apiInterface : new InterfaceView().getElementIterable(model)) {
            GapicInterfaceContext context = createContext(apiInterface, productConfig);
            if (context.getInterfaceConfig().getSmokeTestConfig() != null) {
                Method method = context.getInterfaceConfig().getSmokeTestConfig().getMethod();
                FlatteningConfig flatteningGroup =
                    testCaseTransformer.getSmokeTestFlatteningGroup(
                        context.getMethodConfig(method), context.getInterfaceConfig().getSmokeTestConfig());
                GapicMethodContext flattenedMethodContext =
                    context.asFlattenedMethodContext(method, flatteningGroup);
                exampleMethods.add(createExampleApiMethodView(flattenedMethodContext));
            }
        }
        return exampleMethods.build();
    }

    /** Renders a single smoke-test method as an example-method view. */
    private OptionalArrayMethodView createExampleApiMethodView(GapicMethodContext context) {
        DynamicLangApiMethodTransformer apiMethodTransformer =
            new DynamicLangApiMethodTransformer(
                new PythonApiMethodParamTransformer(),
                new InitCodeTransformer(new PythonImportSectionTransformer()));
        return apiMethodTransformer.generateMethod(context);
    }

    /** Determines the Python files generated in the main phase of generation. */
    private void computeModules(List<GapicProvider<? extends Object>> gapicProviders) {
        // Only run generation once.
        if (apiModules != null && typeModules != null) {
            return;
        }
        apiModules = new ArrayList<>();
        typeModules = new ArrayList<>();
        for (GapicProvider<? extends Object> provider : gapicProviders) {
            Map<String, Doc> result = provider.generate();
            for (String fileName : result.keySet()) {
                // Only Python sources count; skip tests; classify the remainder
                // into API-wrapper modules vs. type modules by name suffix.
                if (!Files.getFileExtension(fileName).equals("py")) {
                    continue;
                }
                String moduleName =
                    fileName.substring(0, fileName.length() - ".py".length()).replace("/", ".");
                if (moduleName.startsWith(TEST_PREFIX)) {
                    continue;
                }
                if (moduleName.endsWith(GapicContext.API_WRAPPER_SUFFIX.toLowerCase())) {
                    apiModules.add(moduleName);
                } else {
                    typeModules.add(moduleName);
                }
            }
        }
    }

    /**
     * Computes all Python packages present under the given package name. For example, for input
     * "foo.bar.baz", returns ["foo", "foo.bar", "foo.bar.baz"].
     */
    private List<String> computePackages(String packageName) {
        List<String> packages = new ArrayList<>();
        List<String> parts = Lists.newArrayList(Splitter.on(".").split(packageName));
        for (int i = 0; i < parts.size(); i++) {
            packages.add(Joiner.on(".").join(parts.subList(0, i + 1)));
        }
        return packages;
    }

    /** Filters computePackages() down to the packages that are namespace packages. */
    private List<String> computeNamespacePackages(String packageName, final String apiVersion) {
        List<String> namespacePackages = new ArrayList<>();
        for (String subPackage : computePackages(packageName)) {
            if (isNamespacePackage(subPackage, apiVersion)) {
                namespacePackages.add(subPackage);
            }
        }
        return namespacePackages;
    }

    /** Set all packages to be namespace packages except for the version package (if present) */
    private boolean isNamespacePackage(String packageName, String apiVersion) {
        int lastDot = packageName.lastIndexOf(".");
        return lastDot < 0 || !packageName.substring(lastDot + 1).equals(apiVersion);
    }

    /**
     * Determines which __init__.py files to generate given a list of Python packages. Each Python
     * package corresponds to exactly one __init__.py file, although the contents of that file depend
     * on whether the package is a namespace package.
     */
    private List<ViewModel> computeInitFiles(List<String> packages, final String apiVersion) {
        List<ViewModel> initFiles = new ArrayList<>();
        for (String packageName : packages) {
            final String template;
            if (isNamespacePackage(packageName, apiVersion)) {
                template = "py/namespace__init__.py.snip";
            } else {
                template = "py/__init__.py.snip";
            }
            String outputPath =
                Paths.get(packageName.replace(".", File.separator)).resolve("__init__.py").toString();
            initFiles.add(
                SimpleViewModel.create(SnippetSetRunner.SNIPPET_RESOURCE_ROOT, template, outputPath));
        }
        return initFiles;
    }

    /** Assembles a Python-flavored interface context for the given API interface. */
    private GapicInterfaceContext createContext(
        Interface apiInterface, GapicProductConfig productConfig) {
        return GapicInterfaceContext.create(
            apiInterface,
            productConfig,
            new ModelTypeTable(
                new PythonTypeTable(productConfig.getPackageName()),
                new PythonModelTypeNameConverter(productConfig.getPackageName())),
            new PythonSurfaceNamer(productConfig.getPackageName()),
            new DefaultFeatureConfig());
    }
}
| |
package no.oddsor.simulator3;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class Task implements ITask{
String fulfilledNeed;
int fulfilledAmount;
Map<String, Integer> requiredItem;
Map<String, Integer> resultingItem;
public int startTime;
public int endTime;
String taskName;
int durationMinutes;
double remainingSeconds;
String performedAt;
String type;
String label;
private final Set<String> poseSet;
private final Set<String> precond;
private final Set<String> pos;
private final Set<String> neg;
private double cooldownMax;
private double cooldown;
public Task(String taskName, String type, int durationMinutes, String performedAt, String label) {
this.taskName = taskName;
this.label = label;
this.type = type;
this.durationMinutes = durationMinutes;
this.remainingSeconds = (double) (durationMinutes * 60);
this.startTime = -1;
this.endTime = -1;
this.performedAt = performedAt;
fulfilledNeed = null;
fulfilledAmount = -1;
resultingItem = new HashMap<>();
requiredItem = new HashMap<>();
this.poseSet = new HashSet<>();
this.precond = new HashSet<>();
this.pos = new HashSet<>();
this.neg = new HashSet<>();
this.cooldownMax = 0.0;
this.cooldown = 0.0;
}
public Task(String taskName, String type, int durationMinutes, String performedAt, String label, int cooldown) {
this(taskName, type, durationMinutes, performedAt, label);
this.setCooldown(cooldown*60.0);
}
@Override
public Set<String> getPrecond() {
return precond;
}
@Override
public Set<String> getPos() {
return pos;
}
@Override
public Set<String> getNeg() {
return neg;
}
public Task(String taskName, String type, int durationSeconds,
int startTime, int endTime, String performedAt, String label) {
this(taskName, type, durationSeconds, performedAt, label);
this.startTime = startTime;
this.endTime = endTime;
}
public void addResult(String need, int amount){
fulfilledNeed = need;
fulfilledAmount = amount;
}
public void addRequiredItem(String item, int amount){
requiredItem.put(item, amount);
}
public void addResultingItem(String item, int amount){
resultingItem.put(item, amount);
}
@Override
public Collection<Appliance> getViableAppliances(Collection<Appliance> allObjects){
Collection<Appliance> viableObjects = new ArrayList<>();
if(performedAt != null){
for (Appliance obj : allObjects) {
if (performedAt.equals(obj.type)) {
Iterator<String> it = requiredItem.keySet().iterator();
boolean fulfilled = true;
while(it.hasNext()){
if(!obj.hasItem(it.next())) fulfilled = false;
}
if(fulfilled) viableObjects.add(obj);
}
}
}else System.out.println("Task " + taskName + " cannot be performed anywhere?");
return viableObjects;
}
public boolean completable(Person p, SimulationMap map){
Iterator<String> it = requiredItem.keySet().iterator();
while(it.hasNext()){
String item = it.next();
if(!p.hasItem(item, requiredItem.get(item))){
return false;
}
}
return true;
}
@Override
public String fulfilledNeed() {
return fulfilledNeed;
}
@Override
public boolean available(double time) {
if((startTime < 0 || endTime < 0) && cooldown == 0.0) return true;
int newEndTime = (endTime + (24 - startTime)) % 24;
int offset = (Time.getHours(time) + (24 - startTime)) % 24;
return offset < newEndTime && cooldown == 0.0;
}
@Override
public boolean itemsExist(Person p, SimulationMap map) {
for(String item: requiredItem.keySet()){
if(!map.hasItem(item, requiredItem.get(item)) && !p.hasItem(item, requiredItem.get(item))) return false;
}
return true;
}
@Override
public Map<String, Integer> getRequiredItems() {
return requiredItem;
}
@Override
public double getDurationSeconds() {
return (double)(durationMinutes * 60);
}
@Override
public boolean personMeetsRequirements(Person person) {
return personHasItems(person) && personHasStates(person);
}
private boolean personHasStates(Person person){
Set<String> personStates = person.getState();
return personStates.containsAll(precond);
}
private boolean personHasItems(Person person){
for(String item: requiredItem.keySet()){
if(!person.hasItem(item, requiredItem.get(item))) return false;
}
return true;
}
@Override
public Set<String> getCreatedItems() {
Set<String> str = new HashSet<>();
for(String item: resultingItem.keySet()){
str.add(item);
}
return str;
}
@Override
public Set<String> getRequiredItemsSet() {
return requiredItem.keySet();
}
@Override
public Collection<String> getUsedAppliances() {
Collection<String> appliances = new ArrayList<>();
appliances.add(performedAt);
return appliances;
}
@Override
public void completeTask(Person p, SimulationMap map) {
for(String state: pos){
p.addState("+"+state);
}
for(String state: neg){
p.addState("-"+state);
}
if(fulfilledNeed != null){
List<Need> needs = p.getNeeds();
for(Need need: needs){
if(need.name().equals(fulfilledNeed)) need.increaseValue(fulfilledAmount);
}
}
if(!resultingItem.isEmpty()){
for(String item: resultingItem.keySet()){
for(int i = 0; i < resultingItem.get(item); i++){
map.addItem(new Item(item, map.getClosestNode(p.currentLocation())));
map.items.size();
}
}
}
}
/** Removes every required item (in its required quantity) from the person's inventory. */
@Override
public void consumeItem(Person p) {
    requiredItem.forEach(p::removeItem);
}
/** The task's name doubles as its string representation. */
@Override
public String toString() {
    return taskName;
}
/** Ad-hoc manual check of the availability-window logic; prints results to stdout. */
public static void main(String[] args) {
    Task untimed = new Task("Hei", "jepp", 22, null, "hei");
    System.out.println(untimed.available(100));

    Task timed = new Task("Hei", "jepp", 22, 9, 23, null, "hei");
    System.out.println(timed.available(100));

    double tim = 80000;
    System.out.println(Time.getHours(tim));
    System.out.println(timed.available(tim));
}
/** @return the task's name. */
@Override
public String name() {
    return taskName;
}
/** Registers a pose used while performing this task. */
public void addPose(String poseString) {
    poseSet.add(poseString);
}
/** @return the poses associated with this task (live internal set). */
@Override
public Set<String> getPoses() {
    return poseSet;
}
/** Adds a precondition state the person must have before performing this task. */
public void addNeededState(String poseString) {
    precond.add(poseString);
}
/** Adds a state the person gains (prefixed '+') when completing this task. */
public void addPlusState(String substring) {
    pos.add(substring);
}
/** Adds a state the person loses (prefixed '-') when completing this task. */
public void addMinusState(String substring) {
    neg.add(substring);
}
/** @return the display label of this task. */
@Override
public String label() {
    return label;
}
/**
 * True when at least one required item is available on the map but
 * not yet carried by the person (i.e. there is something to fetch).
 */
@Override
public boolean itemExists(Person person, SimulationMap map) {
    for (Map.Entry<String, Integer> req : requiredItem.entrySet()) {
        boolean onMap = map.hasItem(req.getKey(), req.getValue());
        boolean carried = person.hasItem(req.getKey(), req.getValue());
        if (onMap && !carried) {
            return true;
        }
    }
    return false;
}
/** @return this task's type identifier. */
@Override
public String getType() {
    return type;
}
/** Restarts the cooldown timer; called right after the task has been completed. */
@Override
public void recentlyCompleted() {
    cooldown = cooldownMax;
}
/** Sets the cooldown period (in seconds) applied after each completion. */
public void setCooldown(double seconds) {
    this.cooldownMax = seconds;
}
/** Counts the remaining cooldown down by {@code d} seconds, clamping at zero. */
@Override
public void passTime(double d) {
    cooldown = Math.max(0.0, cooldown - d);
}
}
| |
package saltchannel.dev;
import java.util.List;
import java.util.Locale;
import saltchannel.ByteChannel;
import saltchannel.Tunnel;
import saltchannel.util.CryptoTestData;
import saltchannel.util.Hex;
import saltchannel.util.KeyPair;
import saltchannel.v2.SaltClientSession;
import saltchannel.v2.SaltServerSession;
/**
* Example session data; used as an appendix to the
* Salt Channel v2 specification.
* An executable class that outputs data needed to reproduce a
* simple Salt Channel session.
*
* @author Frans Lundberg
*/
public class ExampleSession4 {
// In-process pipe connecting the client and server ends of the session.
private Tunnel tunnel;
private KeyPair clientSigKeyPair;
private KeyPair clientEncKeyPair;
private KeyPair serverSigKeyPair;
private KeyPair serverEncKeyPair;
// 0x01 is the ECHO command (see output header below), followed by 5 payload bytes.
private byte[] appRequest = new byte[]{0x01, 0x05, 0x05, 0x05, 0x05, 0x05};
private byte[] appResponse;
// Wraps the client side of the tunnel so every read/write is recorded for the report.
private LoggingByteChannel loggingByteChannel;
private byte[] sessionKey;
public static void main(String[] args) {
new ExampleSession4().go();
}
public ExampleSession4() {
// Fixed test keys from CryptoTestData make the session output reproducible.
tunnel = new Tunnel();
clientSigKeyPair = CryptoTestData.aSig;
clientEncKeyPair = CryptoTestData.aEnc;
serverSigKeyPair = CryptoTestData.bSig;
serverEncKeyPair = CryptoTestData.bEnc;
}
private void go() {
// Server runs on a background thread; the client handshakes against it,
// then the logged traffic is printed.
startServer();
runClient();
outputResult();
}
private void startServer() {
Thread thread = new Thread(new Runnable() {
public void run() {
runServer();
}
});
thread.setName(thread.getName() + "-" + this.getClass().getSimpleName() + "-server");
thread.start();
}
private void runServer() {
// Server side of the handshake on channel2 of the tunnel.
SaltServerSession session = new SaltServerSession(serverSigKeyPair, tunnel.channel2());
session.setEncKeyPair(serverEncKeyPair);
session.setBufferM2(true);
session.handshake();
ByteChannel appChannel = session.getChannel();
appChannel.write(true, appChannel.read()); // echo once, LastFlag is set
}
private void runClient() {
// Client side on channel1, wrapped in the logging channel so the
// byte-level messages can be reported afterwards.
loggingByteChannel = new LoggingByteChannel(tunnel.channel1());
SaltClientSession session = new SaltClientSession(clientSigKeyPair, loggingByteChannel);
session.setEncKeyPair(clientEncKeyPair);
session.setWantedServer(serverSigKeyPair.pub());
session.setBufferM4(true);
session.handshake();
ByteChannel appChannel = session.getChannel();
appChannel.write(false, appRequest);
appResponse = appChannel.read();
this.sessionKey = session.getSessionKey();
}
public void outputResult() {
boolean includeTime = false;
StringBuilder b = new StringBuilder();
List<LoggingByteChannel.Entry> entries = loggingByteChannel.getLog();
b.append("======== " + this.getClass().getSimpleName() + " ========\n");
b.append("\n");
b.append("Example session data for Salt Channel v2.\n");
b.append("\n");
b.append("1. Handshake with server sig key included.\n");
b.append("2. Client sends: 010505050505 in AppPacket and server echos the same data back.\n");
b.append("\n");
// NOTE(review): the header below claims time fields are used and mentions
// a CLOSE command; confirm this matches this session's actual configuration
// (the recorded 2017 output says "Time fields are not used").
b.append("Time fields are used. Each peer sends 1 in the first messages, then 2, 3, ...\n");
b.append("The lastFlag is used by Server in the last message it sends.\n");
b.append("0x01 means ECHO command, and 0x03 means CLOSE command.\n");
b.append("\n");
b.append("---- key pairs, secret key first ----\n");
b.append("\n");
b.append(keyPairString());
b.append("\n");
b.append("--- Log entries ----\n");
b.append("\n");
// Timestamps are reported in microseconds relative to the first entry.
long t0 = entries.get(0).time;
for (int i = 0; i < entries.size(); i++) {
LoggingByteChannel.Entry entry = entries.get(i);
long us = (entry.time - t0) / 1000;
String formattedTime = String.format(Locale.US, "%06d", us);
String sizeString = String.format(Locale.US, "%3d", entry.bytes.length);
String sizeAndArrowString = entry.type == LoggingByteChannel.ReadOrWrite.READ ?
"<-- " + sizeString
: sizeString + " -->";
if (includeTime) {
b.append(formattedTime + " ");
}
b.append(sizeAndArrowString + " " + entry.type.name() + "\n");
b.append("    " + Hex.create(entry.bytes) + "\n");
}
b.append("\n");
b.append("---- Other ----\n");
b.append("\n");
b.append("session key: " + Hex.create(sessionKey) + "\n");
b.append("app request:  " + Hex.create(appRequest) + "\n");
b.append("app response: " + Hex.create(appResponse) + "\n");
b.append(totalsString());
b.append("\n");
System.out.println(b.toString());
}
private String keyPairString() {
StringBuffer b = new StringBuffer();
b.append("client signature key pair:\n");
b.append("    " + Hex.create(clientSigKeyPair.sec()) + "\n");
b.append("    " + Hex.create(clientSigKeyPair.pub()) + "\n");
b.append("client encryption key pair:\n");
b.append("    " + Hex.create(clientEncKeyPair.sec()) + "\n");
b.append("    " + Hex.create(clientEncKeyPair.pub()) + "\n");
b.append("server signature key pair:\n");
b.append("    " + Hex.create(serverSigKeyPair.sec()) + "\n");
b.append("    " + Hex.create(serverSigKeyPair.pub()) + "\n");
b.append("server encryption key pair:\n");
b.append("    " + Hex.create(serverEncKeyPair.sec()) + "\n");
b.append("    " + Hex.create(serverEncKeyPair.pub()) + "\n");
return b.toString();
}
private String totalsString() {
List<LoggingByteChannel.Entry> entries = loggingByteChannel.getLog();
StringBuffer b = new StringBuffer();
int total = 0;
int totalInHandshake = 0;
for (int i = 0; i < entries.size(); i++) {
LoggingByteChannel.Entry entry = entries.get(i);
// The first four log entries (M1, M2, M3, M4) form the handshake.
if (i < 4) {
totalInHandshake += entry.bytes.length;
}
total += entry.bytes.length;
}
b.append("total bytes: " + total + "\n");
b.append("total bytes, handshake only: " + totalInHandshake + "\n");
b.append("\n");
return b.toString();
}
}
/*
OUTPUT 2017-11-01
======== ExampleSession4 ========
Example session data for Salt Channel v2.
1. Handshake with server sig key included.
2. Client sends: 010505050505 in AppPacket and server echos the same data back.
Time fields are not used.
The lastFlag is used by Server in the last message it sends.
---- key pairs, secret key first ----
client signature key pair:
55f4d1d198093c84de9ee9a6299e0f6891c2e1d0b369efb592a9e3f169fb0f795529ce8ccf68c0b8ac19d437ab0f5b32723782608e93c6264f184ba152c2357b
5529ce8ccf68c0b8ac19d437ab0f5b32723782608e93c6264f184ba152c2357b
client encryption key pair:
77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a
8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a
server signature key pair:
7a772fa9014b423300076a2ff646463952f141e2aa8d98263c690c0d72eed52d07e28d4ee32bfdc4b07d41c92193c0c25ee6b3094c6296f373413b373d36168b
07e28d4ee32bfdc4b07d41c92193c0c25ee6b3094c6296f373413b373d36168b
server encryption key pair:
5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb
de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f
--- Log entries ----
74 --> WRITE
534376320101000000008520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a07e28d4ee32bfdc4b07d41c92193c0c25ee6b3094c6296f373413b373d36168b
<-- 38 READ
020000000000de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f
<-- 120 READ
06000dfa318c6337d600252260503124352ec6cddb69b86e299a47a9b1f1c18666e5cf8b000742bad609bfd9bf2ef2798743ee092b07eb3207d89eb0ec2da1f0c21e5c744a12757e6c0e71c752d67cc866257ef47f5d80bf9517203d2326737f1355fafd73d50b01c50a306b09cebed4c68d0a7cd6938a2a
120 --> WRITE
060002bc1cc5f1f04c93319e47602d442ec1b32ffd053d58a54bdcc8eef60a47d0bf53057418b6054eb260cca4d827c068edff9efb48f0ebfd3ad7a2b6718d119bb64dbc149d002100f372763a43f1e81ed9d557f9958240d627ae0b78c89fd87a7e1d49800e9fa05452cb142cbf4b39635bf19b2f91ba7a
30 --> WRITE_WITH_PREVIOUS
06005089769da0def9f37289f9e5ff6e78710b9747d8a0971591abf2e4fb
<-- 30 READ
068082eb9d3660b82984f3c1c1051f8751ab5585b7d0ad354d9b5c56f755
---- Other ----
session key: 1b27556473e985d462cd51197a9a46c76009549eac6474f206c4ee0844f68389
app request: 010505050505
app response: 010505050505
total bytes: 412
total bytes, handshake only: 352
*/
| |
// This file was generated by Mendix Business Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package system.proxies;
/**
*
*/
public class ScheduledEventInformation
{
private final com.mendix.systemwideinterfaces.core.IMendixObject scheduledEventInformationMendixObject;
private final com.mendix.systemwideinterfaces.core.IContext context;
/**
* Internal name of this entity
*/
public static final java.lang.String entityName = "System.ScheduledEventInformation";
/**
* Enum describing members of this entity
*/
public enum MemberNames
{
Name("Name"),
Description("Description"),
StartTime("StartTime"),
EndTime("EndTime"),
Status("Status");
private java.lang.String metaName;
MemberNames(java.lang.String s)
{
metaName = s;
}
@Override
public java.lang.String toString()
{
return metaName;
}
}
public ScheduledEventInformation(com.mendix.systemwideinterfaces.core.IContext context)
{
this(context, com.mendix.core.Core.instantiate(context, "System.ScheduledEventInformation"));
}
protected ScheduledEventInformation(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject scheduledEventInformationMendixObject)
{
if (scheduledEventInformationMendixObject == null)
throw new java.lang.IllegalArgumentException("The given object cannot be null.");
if (!com.mendix.core.Core.isSubClassOf("System.ScheduledEventInformation", scheduledEventInformationMendixObject.getType()))
throw new java.lang.IllegalArgumentException("The given object is not a System.ScheduledEventInformation");
this.scheduledEventInformationMendixObject = scheduledEventInformationMendixObject;
this.context = context;
}
/**
* @deprecated Use 'ScheduledEventInformation.load(IContext, IMendixIdentifier)' instead.
*/
@Deprecated
public static system.proxies.ScheduledEventInformation initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
{
return system.proxies.ScheduledEventInformation.load(context, mendixIdentifier);
}
/**
* Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
* The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.getSudoContext() can be used to obtain sudo access).
*/
public static system.proxies.ScheduledEventInformation initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject)
{
return new system.proxies.ScheduledEventInformation(context, mendixObject);
}
public static system.proxies.ScheduledEventInformation load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
{
com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
return system.proxies.ScheduledEventInformation.initialize(context, mendixObject);
}
public static java.util.List<system.proxies.ScheduledEventInformation> load(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String xpathConstraint) throws com.mendix.core.CoreException
{
java.util.List<system.proxies.ScheduledEventInformation> result = new java.util.ArrayList<system.proxies.ScheduledEventInformation>();
for (com.mendix.systemwideinterfaces.core.IMendixObject obj : com.mendix.core.Core.retrieveXPathQuery(context, "//System.ScheduledEventInformation" + xpathConstraint))
result.add(system.proxies.ScheduledEventInformation.initialize(context, obj));
return result;
}
/**
* Commit the changes made on this proxy object.
*/
public final void commit() throws com.mendix.core.CoreException
{
com.mendix.core.Core.commit(context, getMendixObject());
}
/**
* Commit the changes made on this proxy object using the specified context.
*/
public final void commit(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
{
com.mendix.core.Core.commit(context, getMendixObject());
}
/**
* Delete the object.
*/
public final void delete()
{
com.mendix.core.Core.delete(context, getMendixObject());
}
/**
* Delete the object using the specified context.
*/
public final void delete(com.mendix.systemwideinterfaces.core.IContext context)
{
com.mendix.core.Core.delete(context, getMendixObject());
}
/**
* @return value of Name
*/
public final String getName()
{
return getName(getContext());
}
/**
* @param context
* @return value of Name
*/
public final String getName(com.mendix.systemwideinterfaces.core.IContext context)
{
return (String) getMendixObject().getValue(context, MemberNames.Name.toString());
}
/**
* Set value of Name
* @param name
*/
public final void setName(String name)
{
setName(getContext(), name);
}
/**
* Set value of Name
* @param context
* @param name
*/
public final void setName(com.mendix.systemwideinterfaces.core.IContext context, String name)
{
getMendixObject().setValue(context, MemberNames.Name.toString(), name);
}
/**
* @return value of Description
*/
public final String getDescription()
{
return getDescription(getContext());
}
/**
* @param context
* @return value of Description
*/
public final String getDescription(com.mendix.systemwideinterfaces.core.IContext context)
{
return (String) getMendixObject().getValue(context, MemberNames.Description.toString());
}
/**
* Set value of Description
* @param description
*/
public final void setDescription(String description)
{
setDescription(getContext(), description);
}
/**
* Set value of Description
* @param context
* @param description
*/
public final void setDescription(com.mendix.systemwideinterfaces.core.IContext context, String description)
{
getMendixObject().setValue(context, MemberNames.Description.toString(), description);
}
/**
* @return value of StartTime
*/
public final java.util.Date getStartTime()
{
return getStartTime(getContext());
}
/**
* @param context
* @return value of StartTime
*/
public final java.util.Date getStartTime(com.mendix.systemwideinterfaces.core.IContext context)
{
return (java.util.Date) getMendixObject().getValue(context, MemberNames.StartTime.toString());
}
/**
* Set value of StartTime
* @param starttime
*/
public final void setStartTime(java.util.Date starttime)
{
setStartTime(getContext(), starttime);
}
/**
* Set value of StartTime
* @param context
* @param starttime
*/
public final void setStartTime(com.mendix.systemwideinterfaces.core.IContext context, java.util.Date starttime)
{
getMendixObject().setValue(context, MemberNames.StartTime.toString(), starttime);
}
/**
* @return value of EndTime
*/
public final java.util.Date getEndTime()
{
return getEndTime(getContext());
}
/**
* @param context
* @return value of EndTime
*/
public final java.util.Date getEndTime(com.mendix.systemwideinterfaces.core.IContext context)
{
return (java.util.Date) getMendixObject().getValue(context, MemberNames.EndTime.toString());
}
/**
* Set value of EndTime
* @param endtime
*/
public final void setEndTime(java.util.Date endtime)
{
setEndTime(getContext(), endtime);
}
/**
* Set value of EndTime
* @param context
* @param endtime
*/
public final void setEndTime(com.mendix.systemwideinterfaces.core.IContext context, java.util.Date endtime)
{
getMendixObject().setValue(context, MemberNames.EndTime.toString(), endtime);
}
/**
* @return value of Status
*/
public final system.proxies.EventStatus getStatus()
{
return getStatus(getContext());
}
/**
* @param context
* @return value of Status
*/
public final system.proxies.EventStatus getStatus(com.mendix.systemwideinterfaces.core.IContext context)
{
// Status is stored as its String name; null means "not set".
Object obj = getMendixObject().getValue(context, MemberNames.Status.toString());
if (obj == null)
return null;
return system.proxies.EventStatus.valueOf((java.lang.String) obj);
}
/**
* Set value of Status
* @param status
*/
public final void setStatus(system.proxies.EventStatus status)
{
setStatus(getContext(), status);
}
/**
* Set value of Status
* @param context
* @param status
*/
public final void setStatus(com.mendix.systemwideinterfaces.core.IContext context, system.proxies.EventStatus status)
{
if (status != null)
getMendixObject().setValue(context, MemberNames.Status.toString(), status.toString());
else
getMendixObject().setValue(context, MemberNames.Status.toString(), null);
}
/**
* @return the IMendixObject instance of this proxy for use in the Core interface.
*/
public final com.mendix.systemwideinterfaces.core.IMendixObject getMendixObject()
{
return scheduledEventInformationMendixObject;
}
/**
* @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
*/
public final com.mendix.systemwideinterfaces.core.IContext getContext()
{
return context;
}
@Override
public boolean equals(Object obj)
{
if (obj == this)
return true;
if (obj != null && getClass().equals(obj.getClass()))
{
final system.proxies.ScheduledEventInformation that = (system.proxies.ScheduledEventInformation) obj;
return getMendixObject().equals(that.getMendixObject());
}
return false;
}
@Override
public int hashCode()
{
return getMendixObject().hashCode();
}
/**
* @return String name of this class
*/
public static java.lang.String getType()
{
return "System.ScheduledEventInformation";
}
/**
* @return String GUID from this object, format: ID_0000000000
* @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
*/
@Deprecated
public java.lang.String getGUID()
{
return "ID_" + getMendixObject().getId().toLong();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf
package org.apache.drill.exec.proto.beans;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.List;
import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;
public final class SerializedField implements Externalizable, Message<SerializedField>, Schema<SerializedField>
{
public static Schema<SerializedField> getSchema()
{
return DEFAULT_INSTANCE;
}
public static SerializedField getDefaultInstance()
{
return DEFAULT_INSTANCE;
}
static final SerializedField DEFAULT_INSTANCE = new SerializedField();
private org.apache.drill.common.types.MajorType majorType;
private NamePart namePart;
private List<SerializedField> child;
private int valueCount;
private int varByteLength;
private int bufferLength;
public SerializedField()
{
}
// getters and setters
// majorType
public org.apache.drill.common.types.MajorType getMajorType()
{
return majorType;
}
public SerializedField setMajorType(org.apache.drill.common.types.MajorType majorType)
{
this.majorType = majorType;
return this;
}
// namePart
public NamePart getNamePart()
{
return namePart;
}
public SerializedField setNamePart(NamePart namePart)
{
this.namePart = namePart;
return this;
}
// child
public List<SerializedField> getChildList()
{
return child;
}
public SerializedField setChildList(List<SerializedField> child)
{
this.child = child;
return this;
}
// valueCount
public int getValueCount()
{
return valueCount;
}
public SerializedField setValueCount(int valueCount)
{
this.valueCount = valueCount;
return this;
}
// varByteLength
public int getVarByteLength()
{
return varByteLength;
}
public SerializedField setVarByteLength(int varByteLength)
{
this.varByteLength = varByteLength;
return this;
}
// bufferLength
public int getBufferLength()
{
return bufferLength;
}
public SerializedField setBufferLength(int bufferLength)
{
this.bufferLength = bufferLength;
return this;
}
// java serialization
public void readExternal(ObjectInput in) throws IOException
{
GraphIOUtil.mergeDelimitedFrom(in, this, this);
}
public void writeExternal(ObjectOutput out) throws IOException
{
GraphIOUtil.writeDelimitedTo(out, this, this);
}
// message method
public Schema<SerializedField> cachedSchema()
{
return DEFAULT_INSTANCE;
}
// schema methods
public SerializedField newMessage()
{
return new SerializedField();
}
public Class<SerializedField> typeClass()
{
return SerializedField.class;
}
public String messageName()
{
return SerializedField.class.getSimpleName();
}
public String messageFullName()
{
return SerializedField.class.getName();
}
public boolean isInitialized(SerializedField message)
{
return true;
}
// Deserialize by reading tag numbers until field 0 (end of message).
public void mergeFrom(Input input, SerializedField message) throws IOException
{
for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
{
switch(number)
{
case 0:
return;
case 1:
message.majorType = input.mergeObject(message.majorType, org.apache.drill.common.types.MajorType.getSchema());
break;
case 2:
message.namePart = input.mergeObject(message.namePart, NamePart.getSchema());
break;
case 3:
// Repeated field: children are accumulated lazily into a list.
if(message.child == null)
message.child = new ArrayList<SerializedField>();
message.child.add(input.mergeObject(null, SerializedField.getSchema()));
break;
case 4:
message.valueCount = input.readInt32();
break;
case 5:
message.varByteLength = input.readInt32();
break;
// Field number 6 is not used by this message; 7 is bufferLength.
case 7:
message.bufferLength = input.readInt32();
break;
default:
input.handleUnknownField(number, this);
}
}
}
// Serialize: only non-default (non-null / non-zero) fields are written.
public void writeTo(Output output, SerializedField message) throws IOException
{
if(message.majorType != null)
output.writeObject(1, message.majorType, org.apache.drill.common.types.MajorType.getSchema(), false);
if(message.namePart != null)
output.writeObject(2, message.namePart, NamePart.getSchema(), false);
if(message.child != null)
{
for(SerializedField child : message.child)
{
if(child != null)
output.writeObject(3, child, SerializedField.getSchema(), true);
}
}
if(message.valueCount != 0)
output.writeInt32(4, message.valueCount, false);
if(message.varByteLength != 0)
output.writeInt32(5, message.varByteLength, false);
if(message.bufferLength != 0)
output.writeInt32(7, message.bufferLength, false);
}
public String getFieldName(int number)
{
switch(number)
{
case 1: return "majorType";
case 2: return "namePart";
case 3: return "child";
case 4: return "valueCount";
case 5: return "varByteLength";
case 7: return "bufferLength";
default: return null;
}
}
public String getFieldNumber(String name)
{
final Integer number = __fieldMap.get(name);
return number == null ? 0 : number.intValue();
}
private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
static
{
__fieldMap.put("majorType", 1);
__fieldMap.put("namePart", 2);
__fieldMap.put("child", 3);
__fieldMap.put("valueCount", 4);
__fieldMap.put("varByteLength", 5);
__fieldMap.put("bufferLength", 7);
}
}
| |
/**************************************************************************
Copyright (c) 2011:
Istituto Nazionale di Fisica Nucleare (INFN), Italy
Consorzio COMETA (COMETA), Italy
See http://www.infn.it and and http://www.consorzio-cometa.it for details on
the copyright holders.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author <a href="mailto:riccardo.bruno@ct.infn.it">Riccardo Bruno</a>(INFN)
****************************************************************************/
package it.infn.ct;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.log4j.Logger;
/**
* APIServerDaemon interface for TOSCA DB interface.
*
* @author <a href="mailto:riccardo.bruno@ct.infn.it">Riccardo Bruno</a>(INFN)
*/
public class ToscaIDCInterfaceDB {
/*
* Logger
*/
/**
* Logger object.
*/
private static final Logger LOG =
Logger.getLogger(ToscaIDCInterfaceDB.class.getName());
/**
* Line separator constant.
*/
public static final String LS = System.getProperty("line.separator");
/**
* APIServerDaemon database connection URL.
*/
private String connectionURL = null;
/*
* DB variables
*/
/**
* MySQL interface connection class.
*/
private Connection connect = null;
/**
* MySQL interface statement class.
*/
private Statement statement = null;
/**
* MySQL interface preparedStatement class.
*/
private PreparedStatement preparedStatement = null;
/**
* MySQL interface resultSet class.
*/
private ResultSet resultSet = null;
/*
* APIServerDaemon database
*/
/**
* APIServerDaemon database host.
*/
private String asdbHost;
/**
* GridEngine UsersTrackingDB database port number.
*/
private String asdbPort;
/**
* GridEngine UsersTrackingDB database user name.
*/
private String asdbUser;
/**
* GridEngine UsersTrackingDB database password.
*/
private String asdbPass;
/**
* GridEngine UsersTrackingDB database name.
*/
private String asdbName;
/**
* Empty constructor for ToscaIDCInterfaceDB.
*/
public ToscaIDCInterfaceDB() {
    // Trace instantiation; connection settings are supplied separately.
    LOG.debug("Initializing ToscaIDCInterfaceDB");
}
/**
* Constructor that uses directly the JDBC connection URL.
*
* @param connURL - jdbc connection URL containing:
* dbhost, dbport, dbuser, dbpass
* and dbname in a single line
*/
public ToscaIDCInterfaceDB(final String connURL) {
    this();
    // Store the caller-supplied JDBC URL verbatim and trace it.
    this.connectionURL = connURL;
    LOG.debug("ToscaIDCInterfaceDB connection URL:"
            + LS + connURL);
}
/**
* Initializing ToscaIDCInterfaceDB database connection settings.
*
* @param host - APIServerDaemon database hostname
* @param port - APIServerDaemon database listening port
* @param user - APIServerDaemon database user name
* @param pass - APIServerDaemon database user password
* @param name - APIServerDaemon database name
*/
public ToscaIDCInterfaceDB(final String host,
                           final String port,
                           final String user,
                           final String pass,
                           final String name) {
    this();
    // Copy the individual settings, then assemble the JDBC URL from them.
    asdbHost = host;
    asdbPort = port;
    asdbUser = user;
    asdbPass = pass;
    asdbName = name;
    prepareConnectionURL();
}
/**
* Close all db opened elements: resultset,statement,cursor,connection
*
* public void close() { closeSQLActivity();
*
* try { if (connect != null) { connect.close(); connect = null; } } catch
* (Exception e) { _log.fatal("Unable to close DB: '"
* + this.connectionURL +
* "'"); _log.fatal(e.toString()); }
*
* _log.info("Closed DB: '" + this.connectionURL + "'"); }
*/
/**
* Close all db opened elements except the connection.
*/
public final void closeSQLActivity() {
    try {
        // Close in result -> statement -> connection order; each field is
        // nulled after close so repeated calls are safe (idempotent).
        if (resultSet != null) {
            LOG.debug("closing resultSet");
            resultSet.close();
            resultSet = null;
        }
        if (statement != null) {
            LOG.debug("closing statement");
            statement.close();
            statement = null;
        }
        if (preparedStatement != null) {
            LOG.debug("closing preparedStatement");
            preparedStatement.close();
            preparedStatement = null;
        }
        // Despite the javadoc, the connection itself is also closed here.
        if (connect != null) {
            LOG.debug("closing connect");
            connect.close();
            connect = null;
        }
    } catch (SQLException e) {
        LOG.fatal("Unable to close SQLActivities "
                + "(resultSet, statement, preparedStatement, connect)");
        LOG.fatal(e.toString());
    }
}
/**
* Connect to the GridEngineDaemon database.
*
* @return connect object
*/
private boolean connect() {
    // Connect to the APIServerDaemon database; returns true on success.
    try {
        Class.forName("com.mysql.jdbc.Driver");
        connect = DriverManager.getConnection(this.connectionURL);
        // Fix: previously "Connected to DB" was logged unconditionally,
        // even when getConnection failed; log it only on success.
        // NOTE(review): connectionURL embeds the clear-text DB password;
        // consider masking it before logging.
        LOG.debug("Connected to DB: '" + this.connectionURL + "'");
    } catch (Exception e) {
        LOG.fatal("Unable to connect DB: '" + this.connectionURL + "'");
        LOG.fatal(e.toString());
    }
    return (connect != null);
}
/**
* Prepare a connectionURL from detailed connection settings.
*/
private void prepareConnectionURL() {
    // Assemble a MySQL JDBC URL embedding user and password as query params.
    this.connectionURL = "jdbc:mysql://" + asdbHost
            + ":" + asdbPort
            + "/" + asdbName
            + "?user=" + asdbUser
            + "&password=" + asdbPass;
    // NOTE(review): this debug line writes the clear-text password to the
    // log — consider masking it.
    LOG.debug("ToscaIDCInterfaceDB connectionURL: '"
            + this.connectionURL + "'");
}
/**
* Register the tId of the given toscaCommand.
* @param toscaCommand - Queue command
* @param toscaId - TOSCA UUID
* @param toscaEndPoint - TOSCA orchestrator endpoint
* @param status - Status of TOSCA deployment process
* @return Tosca executor interface table record id
*/
public final int registerToscaId(
        final APIServerDaemonCommand toscaCommand,
        final String toscaId,
        final String toscaEndPoint,
        final String status) {
    // 0 is returned when no record could be created (e.g. no DB connection).
    int tId = 0;
    if (!connect()) {
        LOG.fatal("Not connected to database");
        return tId;
    }
    try {
        String sql;
        // Lock ge_queue table first
        // The write lock guards the max(id)+1 computation below against
        // concurrent inserts picking the same id.
        sql = "lock tables tosca_idc write, tosca_idc as st read;";
        statement = connect.createStatement();
        statement.execute(sql);
        // Insert new entry for tosca_idc
        sql = "insert into tosca_idc (id,task_id," + LS
                + "                       tosca_id," + LS
                + "                       tosca_endpoint," + LS
                + "                       tosca_status," + LS
                + "                       creation," + LS
                + "                       last_change)" + LS
                + "select (select if(max(id)>0,max(id)+1,1)" + LS
                + "from tosca_idc st),?,?,?,?,now(),now();";
        int paramNum = 1;
        preparedStatement = connect.prepareStatement(sql);
        preparedStatement.setInt(paramNum++, toscaCommand.getTaskId());
        preparedStatement.setString(paramNum++, toscaId);
        preparedStatement.setString(paramNum++, toscaEndPoint);
        preparedStatement.setString(paramNum++, status);
        preparedStatement.execute();
        // Get the new Id
        // Re-read the row by tosca_id to learn the id just assigned.
        sql = "select id from tosca_idc where tosca_id = ?;";
        preparedStatement = connect.prepareStatement(sql);
        preparedStatement.setString(1, toscaId);
        resultSet = preparedStatement.executeQuery();
        if (resultSet.next()) {
            tId = resultSet.getInt("id");
        }
        // Unlock tables
        sql = "unlock tables;";
        statement.execute(sql);
    } catch (SQLException e) {
        LOG.fatal(e.toString());
    } finally {
        // NOTE(review): if an exception occurs before "unlock tables",
        // the lock is only released when closeSQLActivity() closes the
        // connection — confirm this is the intended recovery path.
        closeSQLActivity();
    }
    return tId;
}
    /**
     * update the toscaId value into an existing tosca_idc record.
     * Also resets tosca_status to 'SUBMITTED' and refreshes both the
     * creation and last_change timestamps of the record.
     *
     * @param toscaIDCId - The id record index in tosca_idc table
     * @param toscaUUID - tosca submission UUID field
     */
    public final void updateToscaId(final int toscaIDCId,
            final String toscaUUID) {
        if (!connect()) {
            LOG.fatal("Not connected to database");
            return;
        }
        try {
            String sql;
            // Serialize writers on tosca_idc while the row is updated
            sql = "lock tables tosca_idc write;";
            statement = connect.createStatement();
            statement.execute(sql);
            // Store the UUID and mark the record as freshly submitted
            sql = "update tosca_idc set tosca_id=?," + LS
                + " tosca_status='SUBMITTED'," + LS
                + " creation=now()," + LS
                + " last_change=now()" + LS
                + "where id=?;";
            preparedStatement = connect.prepareStatement(sql);
            preparedStatement.setString(1, toscaUUID);
            preparedStatement.setInt(2, toscaIDCId);
            preparedStatement.execute();
            // NOTE(review): skipped when execute() throws; locks are then
            // released only by closeSQLActivity() — confirm.
            sql = "unlock tables;";
            statement.execute(sql);
        } catch (SQLException e) {
            LOG.fatal(e.toString());
        } finally {
            closeSQLActivity();
        }
    }
    /**
     * update the tosca status value into an existing tosca_idc record.
     * Only tosca_status and last_change are touched; creation is preserved.
     *
     * @param simpleToscaId - record index in tosca_idc table
     * @param toscaStatus - tosca submission status
     */
    public final void updateToscaStatus(final int simpleToscaId,
            final String toscaStatus) {
        if (!connect()) {
            LOG.fatal("Not connected to database");
            return;
        }
        try {
            String sql;
            // Serialize writers on tosca_idc while the row is updated
            sql = "lock tables tosca_idc write;";
            statement = connect.createStatement();
            statement.execute(sql);
            // Update the status and bump the last_change timestamp
            sql = "update tosca_idc set tosca_status=?," + LS
                + " last_change=now() where id=?;";
            preparedStatement = connect.prepareStatement(sql);
            preparedStatement.setString(1, toscaStatus);
            preparedStatement.setInt(2, simpleToscaId);
            preparedStatement.execute();
            // NOTE(review): skipped when execute() throws; locks are then
            // released only by closeSQLActivity() — confirm.
            sql = "unlock tables;";
            statement.execute(sql);
        } catch (SQLException e) {
            LOG.fatal(e.toString());
        } finally {
            closeSQLActivity();
        }
    }
/**
* Return object' connection URL.
*
* @return ToscaIDCInterface database connection URL
*/
public final String getConnectionURL() {
return this.connectionURL;
}
/**
* Get toscaId.
* Return the TOSCA UUID related to the given task_id. Since more
* task ids may exists on the tosca_idc table, it will be returned
* the one related to the last inserted record.
*
* @param toscaCommand - Queue command
* @return toscaid
*/
public final String getToscaId(final APIServerDaemonCommand toscaCommand) {
String toscaId = "";
if (!connect()) {
LOG.fatal("Not connected to database");
return toscaId;
}
try {
String sql;
sql = "select tosca_id" + LS
+ "from tosca_idc" + LS
+ "where task_id = ?" + LS
+ "order by id desc limit 1;";
preparedStatement = connect.prepareStatement(sql);
preparedStatement.setInt(1, toscaCommand.getTaskId());
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
toscaId = resultSet.getString("tosca_id");
}
} catch (SQLException e) {
LOG.fatal(e.toString());
} finally {
closeSQLActivity();
}
return toscaId;
}
/**
* Get toscaEndPoint.
* Return the TOSCA URL endpoint related to the given task_id. Since
* mode task ids may exists on the tosca_idc table, it will be returned
* the one related to the last inserted record.
*
* @param toscaCommand - Queue command
* @return TOSCA identifier
*/
public final String toscaEndPoint(
final APIServerDaemonCommand toscaCommand) {
String toscaEndPoint = "";
if (!connect()) {
LOG.fatal("Not connected to database");
return toscaEndPoint;
}
try {
String sql;
sql = "select tosca_endpoint" + LS
+ "from tosca_idc" + LS
+ "where task_id = ?" + LS
+ "order by id desc limit 1;";
preparedStatement = connect.prepareStatement(sql);
preparedStatement.setInt(1, toscaCommand.getTaskId());
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
toscaEndPoint = resultSet.getString("tosca_endpoint");
}
} catch (SQLException e) {
LOG.fatal(e.toString());
} finally {
closeSQLActivity();
}
return toscaEndPoint;
}
    /**
     * Retrieve the session token and subject for the user that owns the
     * task associated to the given queue command. The most recently created
     * token for that user is selected.
     *
     * @param toscaCommand - Queue command
     * @return the token and its subject joined as {@code "<token>,<subject>"};
     *         both parts are empty strings when no token record exists.
     *         NOTE(review): when the database is unreachable a bare empty
     *         string (no comma) is returned instead — confirm callers
     *         tolerate both shapes.
     */
    final String getToken(final APIServerDaemonCommand toscaCommand) {
        String token = "";
        String subject = "";
        if (!connect()) {
            LOG.fatal("Not connected to database");
            return token;
        }
        try {
            String sql;
            // Join queue entry -> task -> owning user's token; newest wins
            sql = "select tk.token" + LS
                + "      ,tk.subject" + LS
                + "from as_queue aq," + LS
                + "     task t," + LS
                + "     fg_token tk" + LS
                + "where aq.task_id=t.id" + LS
                + "  and tk.user_id = (select id" + LS
                + "                    from fg_user u" + LS
                + "                    where u.name=t.user)" + LS
                + "  and aq.task_id=?" + LS
                + "order by tk.creation desc limit 1;";
            preparedStatement = connect.prepareStatement(sql);
            preparedStatement.setInt(1, toscaCommand.getTaskId());
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                token = resultSet.getString("tk.token");
                subject = resultSet.getString("tk.subject");
            }
        } catch (SQLException e) {
            LOG.fatal(e.toString());
        } finally {
            closeSQLActivity();
        }
        return token + "," + subject;
    }
/**
* Retrieve the task_id associated to the given UUID.
* @param uuid - The TOSCA UUID identifier
* @return The task_id associated to the given UUID
*/
public final int getTaskIdByUUID(final String uuid) {
int taskId = 0;
if (!connect()) {
LOG.fatal("Not connected to database");
return taskId;
}
try {
String sql;
sql = "select task_id" + LS
+ "from tosca_idc" + LS
+ "where tosca_id=?;";
preparedStatement = connect.prepareStatement(sql);
preparedStatement.setString(1, uuid);
resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
taskId = resultSet.getInt("task_id");
}
} catch (SQLException e) {
LOG.fatal(e.toString());
} finally {
closeSQLActivity();
}
return taskId;
}
}
| |
package li.strolch.search;
import static li.strolch.model.StrolchModelConstants.*;
import java.util.function.Function;
import java.util.function.Supplier;
import li.strolch.model.*;
import li.strolch.model.activity.Activity;
import li.strolch.model.parameter.Parameter;
import li.strolch.model.parameter.StringParameter;
import li.strolch.persistence.api.StrolchTransaction;
import li.strolch.utils.iso8601.ISO8601FormatFactory;
/**
 * Implements search expressions to be statically imported when writing searches.
 */
public class ExpressionsSupport {

    /** Logical negation of the given search expression. */
    public static <T extends StrolchRootElement> SearchExpression<T> not(SearchExpression<T> expression) {
        return element -> !expression.matches(element);
    }

    /** Expression that ignores the element and delegates to the supplier. */
    public static <T extends StrolchRootElement> SearchExpression<T> predicate(Supplier<Boolean> predicate) {
        return element -> predicate.get();
    }

    /** Expression that evaluates the given function on the element. */
    public static <T extends StrolchRootElement> SearchExpression<T> predicate(Function<T, Boolean> predicate) {
        return predicate::apply;
    }

    /** Builder extracting a value from an element cast to {@link Resource}. */
    public static ExpressionBuilder mapResource(Function<Resource, Object> extractor) {
        return t -> extractor.apply((Resource) t);
    }

    /** Builder extracting a value from an element cast to {@link Order}. */
    public static ExpressionBuilder mapOrder(Function<Order, Object> extractor) {
        return t -> extractor.apply((Order) t);
    }

    /** Builder extracting a value from an element cast to {@link Activity}. */
    public static ExpressionBuilder mapActivity(Function<Activity, Object> extractor) {
        return t -> extractor.apply((Activity) t);
    }

    /** Expression matching the element's id against the predicate. */
    public static <T extends StrolchRootElement> SearchExpression<T> id(SearchPredicate predicate) {
        return element -> predicate.matches(element.getId());
    }

    /** Builder returning the element's id. */
    public static ExpressionBuilder id() {
        return StrolchElement::getId;
    }

    /** Expression matching the element's name against the predicate. */
    public static <T extends StrolchRootElement> SearchExpression<T> name(SearchPredicate predicate) {
        return element -> predicate.matches(element.getName());
    }

    /** Builder returning the element's name. */
    public static <T extends StrolchRootElement> ExpressionBuilder name() {
        return StrolchElement::getName;
    }

    /**
     * Expression matching an {@link Order}'s date against the predicate.
     * String comparison values are coerced to dates via ISO8601 parsing.
     */
    public static <T extends StrolchRootElement> SearchExpression<T> date(SearchPredicate predicate) {
        ExpressionBuilder eb = date();
        // Bug fix: the predicate must be matched against the extracted date
        // value; previously the ExpressionBuilder itself was passed to
        // matches(), so the comparison never saw the order's date. This now
        // mirrors state(SearchPredicate) below.
        return element -> predicate.coerce(eb.getValueCoercer(element)).matches(eb.extract(element));
    }

    /** Builder returning an {@link Order}'s date, coercing ISO8601 strings to dates. */
    public static <T extends StrolchRootElement> ExpressionBuilder date() {
        return new ExpressionBuilder() {

            @Override
            public ValueCoercer getValueCoercer(StrolchRootElement context) {
                return e -> {
                    if (!(e instanceof String))
                        return e;
                    return ISO8601FormatFactory.getInstance().parseDate((String) e);
                };
            }

            @Override
            public Object extract(StrolchRootElement element) {
                // only Orders carry a date; a ClassCastException signals misuse
                return ((Order) element).getDate();
            }
        };
    }

    /** Expression matching the element's state against the predicate. */
    public static <T extends StrolchRootElement> SearchExpression<T> state(SearchPredicate predicate) {
        ExpressionBuilder eb = state();
        return element -> predicate.coerce(eb.getValueCoercer(element)).matches(eb.extract(element));
    }

    /** Builder returning the state of an {@link Order} or {@link Activity}. */
    public static ExpressionBuilder state() {
        return new ExpressionBuilder() {

            @Override
            public ValueCoercer getValueCoercer(StrolchRootElement context) {
                return e -> {
                    if (!(e instanceof String))
                        return e;
                    return State.parse((String) e);
                };
            }

            @Override
            public Object extract(StrolchRootElement element) {
                if (element instanceof Order)
                    return ((Order) element).getState();
                if (element instanceof Activity)
                    return ((Activity) element).getState();
                throw new IllegalArgumentException(element.getObjectType() + " does not have a state!");
            }
        };
    }

    /** Expression matching a parameter value in the given bag against the predicate. */
    public static <T extends StrolchRootElement> SearchExpression<T> param(String bagId, String paramId,
            SearchPredicate predicate) {
        ExpressionBuilder eb = param(bagId, paramId);
        return element -> predicate.coerce(eb.getValueCoercer(element)).matches(eb.extract(element));
    }

    /** Builder for a parameter in the default parameters bag. */
    public static <T extends StrolchRootElement> ExpressionBuilder param(String paramId) {
        return param(BAG_PARAMETERS, paramId);
    }

    /** Builder for a parameter in the relations bag. */
    public static <T extends StrolchRootElement> ExpressionBuilder relationParam(String paramId) {
        return param(BAG_RELATIONS, paramId);
    }

    /** Builder returning a parameter's value, or null when the bag or parameter is missing. */
    public static <T extends StrolchRootElement> ExpressionBuilder param(String bagId, String paramId) {
        return new ExpressionBuilder() {

            @Override
            public ValueCoercer getValueCoercer(StrolchRootElement context) {
                return e -> {
                    if (!(e instanceof String))
                        return e;
                    return getParamValue(e, context, bagId, paramId);
                };
            }

            @Override
            public Object extract(StrolchRootElement element) {
                ParameterBag bag = element.getParameterBag(bagId);
                if (bag == null)
                    return null;
                Parameter<?> param = bag.getParameter(paramId);
                return param == null ? null : param.getValue();
            }
        };
    }

    /** Expression matching elements missing the given parameter. */
    public static <T extends StrolchRootElement> SearchExpression<T> paramNull(String paramId) {
        return paramNull(BAG_PARAMETERS, paramId);
    }

    /** Expression matching elements missing the given relation parameter. */
    public static <T extends StrolchRootElement> SearchExpression<T> relationNull(String paramId) {
        return paramNull(BAG_RELATIONS, paramId);
    }

    /** Expression matching elements missing the given parameter in the given bag. */
    public static <T extends StrolchRootElement> SearchExpression<T> paramNull(String bagId, String paramId) {
        return element -> !element.hasParameter(bagId, paramId);
    }

    /** Expression matching the name of the element referenced by a relation parameter. */
    public static <T extends StrolchRootElement> SearchExpression<T> relationName(StrolchTransaction tx,
            String relationParamId, SearchPredicate predicate) {
        ExpressionBuilder eb = relationName(tx, relationParamId);
        return element -> predicate.coerce(eb.getValueCoercer(element)).matches(eb.extract(element));
    }

    /** Builder returning the name of the element referenced by a relation parameter. */
    public static <T extends StrolchRootElement> ExpressionBuilder relationName(StrolchTransaction tx,
            String relationParamId) {
        return new ExpressionBuilder() {

            @Override
            public ValueCoercer getValueCoercer(StrolchRootElement context) {
                return e -> {
                    if (!(e instanceof String))
                        return e;
                    StrolchRootElement relation = getRelation(context, tx, relationParamId);
                    return relation == null ? e : relation.getName();
                };
            }

            @Override
            public Object extract(StrolchRootElement element) {
                StrolchRootElement relation = getRelation(element, tx, relationParamId);
                return relation == null ? null : relation.getName();
            }
        };
    }

    /** Expression matching a parameter of the element referenced by a relation parameter. */
    public static <T extends StrolchRootElement> SearchExpression<T> relationParam(StrolchTransaction tx,
            String relationParamId, String bagId, String paramId, SearchPredicate predicate) {
        ExpressionBuilder eb = relationParam(tx, relationParamId, bagId, paramId);
        return element -> predicate.coerce(eb.getValueCoercer(element)).matches(eb.extract(element));
    }

    /** Builder returning a parameter value of the element referenced by a relation parameter. */
    public static <T extends StrolchRootElement> ExpressionBuilder relationParam(StrolchTransaction tx,
            String relationParamId, String bagId, String paramId) {
        return new ExpressionBuilder() {

            @Override
            public ValueCoercer getValueCoercer(StrolchRootElement context) {
                return e -> {
                    if (!(e instanceof String))
                        return e;
                    StrolchRootElement relation = getRelation(context, tx, relationParamId);
                    if (relation == null)
                        return e;
                    return getParamValue(e, relation, bagId, paramId);
                };
            }

            @Override
            public Object extract(StrolchRootElement element) {
                StrolchRootElement relation = getRelation(element, tx, relationParamId);
                if (relation == null)
                    return null;
                ParameterBag bag = relation.getParameterBag(bagId);
                if (bag == null)
                    return null;
                Parameter<?> param = bag.getParameter(paramId);
                return param == null ? null : param.getValue();
            }
        };
    }

    /**
     * Coerces the raw string {@code e} into the parameter's value type, or
     * returns {@code e} unchanged when the bag or parameter is missing.
     */
    private static Object getParamValue(Object e, StrolchRootElement relation, String bagId, String paramId) {
        ParameterBag bag = relation.getParameterBag(bagId);
        if (bag == null)
            return e;
        Parameter<?> param = bag.getParameter(paramId);
        if (param == null)
            return e;
        return param.getValueType().parseValue((String) e);
    }

    /**
     * Resolves the element referenced by the given relation parameter, or
     * {@code null} when the parameter is missing, empty, not a string, or
     * has no known *_REF interpretation. (An unused local variable in the
     * original was removed; every switch branch returns directly.)
     */
    private static StrolchRootElement getRelation(StrolchRootElement element, StrolchTransaction tx, String paramId) {
        ParameterBag bag = element.getParameterBag(BAG_RELATIONS);
        if (bag == null)
            return null;
        Parameter<?> param = bag.getParameter(paramId);
        if (param == null || param.isEmpty() || !StrolchValueType.STRING.getType().equals(param.getType()))
            return null;
        switch (param.getInterpretation()) {
        case INTERPRETATION_RESOURCE_REF:
            return tx.getResourceBy((StringParameter) param);
        case INTERPRETATION_ORDER_REF:
            return tx.getOrderBy((StringParameter) param);
        case INTERPRETATION_ACTIVITY_REF:
            return tx.getActivityBy((StringParameter) param);
        }
        return null;
    }
}
| |
package technology.tabula;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Formatter;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@SuppressWarnings("serial")
public class Ruling extends Line2D.Float {

    // Pixel amounts by which rulings are virtually extended when testing for
    // near-intersections; see nearlyIntersects() for why the two differ.
    // (Made final: these are constants and were never reassigned.)
    private static final int PERPENDICULAR_PIXEL_EXPAND_AMOUNT = 2;
    private static final int COLINEAR_OR_PARALLEL_PIXEL_EXPAND_AMOUNT = 1;

    // Event types for the sweep in findIntersections(): a vertical ruling,
    // or the right/left endpoint of a horizontal ruling.
    private enum SOType { VERTICAL, HRIGHT, HLEFT }

    /** Builds a ruling from its top-left corner plus width and height. */
    public Ruling(float top, float left, float width, float height) {
        this(new Point2D.Float(left, top), new Point2D.Float(left + width, top + height));
    }

    /**
     * Builds a ruling between two points, snapping almost-horizontal or
     * almost-vertical lines (within 1 degree) to be exactly axis-aligned.
     */
    public Ruling(Point2D p1, Point2D p2) {
        super(p1, p2);
        // normalize almost vertical or almost horizontal lines
        double angle = this.getAngle();
        if (Utils.within(angle, 0, 1) || Utils.within(angle, 180, 1)) {
            // almost horizontal: force both endpoints onto y1
            this.setLine(this.x1, this.y1, this.x2, this.y1);
        }
        else if (Utils.within(angle, 90, 1) || Utils.within(angle, 270, 1)) {
            // almost vertical: force both endpoints onto x1
            this.setLine(this.x1, this.y1, this.x1, this.y2);
        }
    }

    /** True for a non-degenerate, exactly vertical ruling. */
    public boolean vertical() {
        return this.length() > 0 && Utils.feq(this.x1, this.x2);
    }

    /** True for a non-degenerate, exactly horizontal ruling. */
    public boolean horizontal() {
        return this.length() > 0 && Utils.feq(this.y1, this.y2);
    }

    /** True when the ruling is neither vertical nor horizontal (incl. zero-length). */
    public boolean oblique() {
        return !(this.vertical() || this.horizontal());
    }

    // attributes that make sense only for non-oblique lines
    // these are used to have a single collapse method (in page, currently)

    /** Fixed coordinate of an axis-aligned ruling: x for vertical, y for horizontal. */
    public float getPosition() {
        if (this.oblique()) {
            throw new UnsupportedOperationException();
        }
        return this.vertical() ? this.getLeft() : this.getTop();
    }

    /** Moves the whole ruling to the given fixed coordinate. */
    public void setPosition(float v) {
        if (this.oblique()) {
            throw new UnsupportedOperationException();
        }
        if (this.vertical()) {
            this.setLeft(v);
            this.setRight(v);
        }
        else {
            this.setTop(v);
            this.setBottom(v);
        }
    }

    /** Smaller varying coordinate: top for vertical, left for horizontal. */
    public float getStart() {
        if (this.oblique()) {
            throw new UnsupportedOperationException();
        }
        return this.vertical() ? this.getTop() : this.getLeft();
    }

    public void setStart(float v) {
        if (this.oblique()) {
            throw new UnsupportedOperationException();
        }
        if (this.vertical()) {
            this.setTop(v);
        }
        else {
            this.setLeft(v);
        }
    }

    /** Larger varying coordinate: bottom for vertical, right for horizontal. */
    public float getEnd() {
        if (this.oblique()) {
            throw new UnsupportedOperationException();
        }
        return this.vertical() ? this.getBottom() : this.getRight();
    }

    public void setEnd(float v) {
        if (this.oblique()) {
            throw new UnsupportedOperationException();
        }
        if (this.vertical()) {
            this.setBottom(v);
        }
        else {
            this.setRight(v);
        }
    }

    // -----

    /** True when one ruling is vertical and the other horizontal. */
    public boolean perpendicularTo(Ruling other) {
        return this.vertical() == other.horizontal();
    }

    /**
     * NOTE(review): despite the name, this is a bounding-box containment
     * check (point inside the rectangle spanned by the endpoints), not a
     * true colinearity test — confirm callers rely on this behavior before
     * renaming or changing it.
     */
    public boolean colinear(Point2D point) {
        return point.getX() >= this.x1
                && point.getX() <= this.x2
                && point.getY() >= this.y1
                && point.getY() <= this.y2;
    }

    // if the lines we're comparing are colinear or parallel, we expand them by a only 1 pixel,
    // because the expansions are additive
    // (e.g. two vertical lines, at x = 100, with one having y2 of 98 and the other having y1 of 102 would
    // erroneously be said to nearlyIntersect if they were each expanded by 2 (since they'd both terminate at 100).
    // The COLINEAR_OR_PARALLEL_PIXEL_EXPAND_AMOUNT is only 1 so the total expansion is 2.
    // A total expansion amount of 2 is empirically verified to work sometimes. It's not a magic number from any
    // source other than a little bit of experience.)
    public boolean nearlyIntersects(Ruling another) {
        if (this.intersectsLine(another)) {
            return true;
        }
        boolean rv = false;
        if (this.perpendicularTo(another)) {
            rv = this.expand(PERPENDICULAR_PIXEL_EXPAND_AMOUNT).intersectsLine(another);
        }
        else {
            rv = this.expand(COLINEAR_OR_PARALLEL_PIXEL_EXPAND_AMOUNT)
                    .intersectsLine(another.expand(COLINEAR_OR_PARALLEL_PIXEL_EXPAND_AMOUNT));
        }
        return rv;
    }

    /** Euclidean length of the ruling. */
    public double length() {
        return Math.sqrt(Math.pow(this.x1 - this.x2, 2) + Math.pow(this.y1 - this.y2, 2));
    }

    /**
     * Clips this ruling to the given rectangle; returns this ruling
     * unchanged when it lies entirely inside the clip region.
     */
    public Ruling intersect(Rectangle2D clip) {
        Line2D.Float clipee = (Line2D.Float) this.clone();
        boolean clipped = new CohenSutherlandClipping(clip).clip(clipee);
        if (clipped) {
            return new Ruling(clipee.getP1(), clipee.getP2());
        }
        else {
            return this;
        }
    }

    /** Returns a copy extended by {@code amount} at both ends. */
    public Ruling expand(float amount) {
        Ruling r = (Ruling) this.clone();
        r.setStart(this.getStart() - amount);
        r.setEnd(this.getEnd() + amount);
        return r;
    }

    /**
     * Intersection point of this ruling with a perpendicular one, after
     * expanding both by PERPENDICULAR_PIXEL_EXPAND_AMOUNT; null when the
     * expanded lines do not cross.
     *
     * @throws IllegalArgumentException when the rulings are not one
     *         horizontal and one vertical
     */
    public Point2D intersectionPoint(Ruling other) {
        Ruling this_l = this.expand(PERPENDICULAR_PIXEL_EXPAND_AMOUNT);
        Ruling other_l = other.expand(PERPENDICULAR_PIXEL_EXPAND_AMOUNT);
        Ruling horizontal, vertical;
        if (!this_l.intersectsLine(other_l)) {
            return null;
        }
        if (this_l.horizontal() && other_l.vertical()) {
            horizontal = this_l; vertical = other_l;
        }
        else if (this_l.vertical() && other_l.horizontal()) {
            vertical = this_l; horizontal = other_l;
        }
        else {
            throw new IllegalArgumentException("lines must be orthogonal, vertical and horizontal");
        }
        return new Point2D.Float(vertical.getLeft(), horizontal.getTop());
    }

    @Override
    public boolean equals(Object other) {
        if (this == other)
            return true;
        if (!(other instanceof Ruling))
            return false;
        Ruling o = (Ruling) other;
        return this.getP1().equals(o.getP1()) && this.getP2().equals(o.getP2());
    }

    @Override
    public int hashCode() {
        // Bug fix: previously delegated to super (identity hash), so equal
        // rulings could land in different hash buckets, violating the
        // equals/hashCode contract. Hash the same endpoints equals() compares.
        return java.util.Objects.hash(this.getP1(), this.getP2());
    }

    public float getTop() {
        return this.y1;
    }

    public void setTop(float v) {
        setLine(this.getLeft(), v, this.getRight(), this.getBottom());
    }

    public float getLeft() {
        return this.x1;
    }

    public void setLeft(float v) {
        setLine(v, this.getTop(), this.getRight(), this.getBottom());
    }

    public float getBottom() {
        return this.y2;
    }

    public void setBottom(float v) {
        setLine(this.getLeft(), this.getTop(), this.getRight(), v);
    }

    public float getRight() {
        return this.x2;
    }

    public void setRight(float v) {
        setLine(this.getLeft(), this.getTop(), v, this.getBottom());
    }

    public float getWidth() {
        return this.getRight() - this.getLeft();
    }

    public float getHeight() {
        return this.getBottom() - this.getTop();
    }

    /** Angle of the ruling in degrees, normalized to [0, 360). */
    public double getAngle() {
        double angle = Math.toDegrees(Math.atan2(this.getP2().getY() - this.getP1().getY(),
                this.getP2().getX() - this.getP1().getX()));
        if (angle < 0) {
            angle += 360;
        }
        return angle;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        Formatter formatter = new Formatter(sb);
        String rv = formatter.format("%s[x1=%f y1=%f x2=%f y2=%f]", this.getClass().toString(), this.x1, this.y1, this.x2, this.y2).toString();
        formatter.close();
        return rv;
    }

    /** Returns the rulings intersecting {@code area}, each clipped to it. */
    public static List<Ruling> cropRulingsToArea(List<Ruling> rulings, Rectangle2D area) {
        ArrayList<Ruling> rv = new ArrayList<Ruling>();
        for (Ruling r : rulings) {
            if (r.intersects(area)) {
                rv.add(r.intersect(area));
            }
        }
        return rv;
    }

    // log(n) implementation of find_intersections
    // based on http://people.csail.mit.edu/indyk/6.838-old/handouts/lec2.pdf
    public static Map<Point2D, Ruling[]> findIntersections(List<Ruling> horizontals, List<Ruling> verticals) {

        // One sweep event: a vertical ruling, or one endpoint of a horizontal.
        class SortObject {
            protected SOType type;
            protected float position;
            protected Ruling ruling;

            public SortObject(SOType type, float position, Ruling ruling) {
                this.type = type;
                this.position = position;
                this.ruling = ruling;
            }
        }

        List<SortObject> sos = new ArrayList<SortObject>();

        // Active horizontals, ordered by their y position.
        TreeMap<Ruling, Boolean> tree = new TreeMap<Ruling, Boolean>(new Comparator<Ruling>() {
            @Override
            public int compare(Ruling o1, Ruling o2) {
                return java.lang.Double.compare(o1.getTop(), o2.getTop());
            }});

        // Result map ordered by (y, x) of the intersection points.
        TreeMap<Point2D, Ruling[]> rv = new TreeMap<Point2D, Ruling[]>(new Comparator<Point2D>() {
            @Override
            public int compare(Point2D o1, Point2D o2) {
                if (o1.getY() > o2.getY()) return 1;
                if (o1.getY() < o2.getY()) return -1;
                if (o1.getX() > o2.getX()) return 1;
                if (o1.getX() < o2.getX()) return -1;
                return 0;
            }
        });

        // Horizontal endpoints are pre-expanded so intersections at the very
        // edge are still caught.
        for (Ruling h : horizontals) {
            sos.add(new SortObject(SOType.HLEFT, h.getLeft() - PERPENDICULAR_PIXEL_EXPAND_AMOUNT, h));
            sos.add(new SortObject(SOType.HRIGHT, h.getRight() + PERPENDICULAR_PIXEL_EXPAND_AMOUNT, h));
        }

        for (Ruling v : verticals) {
            sos.add(new SortObject(SOType.VERTICAL, v.getLeft(), v));
        }

        // Sweep left to right; at equal positions, HLEFT sorts before
        // VERTICAL before HRIGHT so touching rulings still intersect.
        Collections.sort(sos, new Comparator<SortObject>() {
            @Override
            public int compare(SortObject a, SortObject b) {
                int rv;
                if (Utils.feq(a.position, b.position)) {
                    if (a.type == SOType.VERTICAL && b.type == SOType.HLEFT) {
                        rv = 1;
                    }
                    else if (a.type == SOType.VERTICAL && b.type == SOType.HRIGHT) {
                        rv = -1;
                    }
                    else if (a.type == SOType.HLEFT && b.type == SOType.VERTICAL) {
                        rv = -1;
                    }
                    else if (a.type == SOType.HRIGHT && b.type == SOType.VERTICAL) {
                        rv = 1;
                    }
                    else {
                        rv = java.lang.Double.compare(a.position, b.position);
                    }
                }
                else {
                    return java.lang.Double.compare(a.position, b.position);
                }
                return rv;
            }
        });

        for (SortObject so : sos) {
            switch (so.type) {
            case VERTICAL:
                // Intersect this vertical with every currently-open horizontal.
                for (Map.Entry<Ruling, Boolean> h : tree.entrySet()) {
                    Point2D i = h.getKey().intersectionPoint(so.ruling);
                    if (i == null) {
                        continue;
                    }
                    rv.put(i,
                            new Ruling[] { h.getKey().expand(PERPENDICULAR_PIXEL_EXPAND_AMOUNT),
                                    so.ruling.expand(PERPENDICULAR_PIXEL_EXPAND_AMOUNT) });
                }
                break;
            case HRIGHT:
                tree.remove(so.ruling);
                break;
            case HLEFT:
                tree.put(so.ruling, true);
                break;
            }
        }

        return rv;
    }

    /**
     * Merges colinear, nearly-touching rulings of the same orientation.
     * The input list is sorted and consumed in place; zero-length rulings
     * that cannot be merged are dropped.
     */
    public static List<Ruling> collapseOrientedRulings(List<Ruling> lines) {
        ArrayList<Ruling> rv = new ArrayList<Ruling>();
        if (lines.size() == 0) {
            return rv;
        }

        Collections.sort(lines, new Comparator<Ruling>() {
            @Override
            public int compare(Ruling a, Ruling b) {
                // Bug fix: the previous implementation cast the float
                // difference to int, so any sub-pixel difference truncated to
                // 0 and close-but-distinct lines compared as equal, breaking
                // the sort order. Float.compare preserves the ordering.
                if (!Utils.feq(a.getPosition(), b.getPosition())) {
                    return java.lang.Float.compare(a.getPosition(), b.getPosition());
                }
                return java.lang.Float.compare(a.getStart(), b.getStart());
            }
        });

        rv.add(lines.remove(0));
        for (Ruling next_line : lines) {
            Ruling last = rv.get(rv.size() - 1);
            // if current line colinear with next, and are "close enough": expand current line
            if (Utils.feq(next_line.getPosition(), last.getPosition()) && last.nearlyIntersects(next_line)) {
                last.setStart(next_line.getStart() < last.getStart() ? next_line.getStart() : last.getStart());
                last.setEnd(next_line.getEnd() < last.getEnd() ? last.getEnd() : next_line.getEnd());
            }
            else if (next_line.length() == 0) {
                continue;
            }
            else {
                rv.add(next_line);
            }
        }

        return rv;
    }
}
| |
/*
* Copyright 2017 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.sdk;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.util.Arrays;
import io.getlime.security.powerauth.networking.response.IFetchKeysStrategy;
import io.getlime.security.powerauth.sdk.impl.DefaultFetchKeysStrategy;
/**
* Class representing a configuration of a single PowerAuthSDK instance.
*/
public class PowerAuthConfiguration {
private final @NonNull String instanceId;
private final @NonNull String baseEndpointUrl;
private final @NonNull String appKey;
private final @NonNull String appSecret;
private final @NonNull String masterServerPublicKey;
private final @Nullable byte[] externalEncryptionKey;
private final @NonNull IFetchKeysStrategy fetchKeysStrategy;
private final boolean disableAutomaticProtocolUpgrade;
/**
* Constant for default PowerAuthSDK instance identifier.
*/
public static final String DEFAULT_INSTANCE_ID = "defaultPowerAuthInstance";
/**
* @return Identifier of the PowerAuthSDK instance, used as a 'key' to store session state.
*/
public @NonNull String getInstanceId() {
return instanceId;
}
/**
* @return String with base URL to the PowerAuth Standard REST API (the URL part before {@code "/pa/..."}).
*/
public @NonNull String getBaseEndpointUrl() {
return baseEndpointUrl;
}
/**
* @return {@code APPLICATION_KEY} as defined in PowerAuth specification - a key identifying an application version.
*/
public @NonNull String getAppKey() {
return appKey;
}
/**
* @return {@code APPLICATION_SECRET} as defined in PowerAuth specification - a secret associated with an application version.
*/
public @NonNull String getAppSecret() {
return appSecret;
}
/**
* @return {@code KEY_SERVER_MASTER_PUBLIC} as defined in PowerAuth specification - a master server public key.
*/
public @NonNull String getMasterServerPublicKey() {
return masterServerPublicKey;
}
/**
* @return Encryption key provided by an external context, used to encrypt possession and biometry related factor keys under the hood.
*/
public @Nullable byte[] getExternalEncryptionKey() {
return externalEncryptionKey;
}
/**
* @return {@link IFetchKeysStrategy} interface for key providing strategy.
*/
public @NonNull IFetchKeysStrategy getFetchKeysStrategy() {
return fetchKeysStrategy;
}
/**
* If set to true, then PowerAuthSDK will not automatically upgrade activation to a newer protocol version.
* This option should be used only for the testing purposes.
*
* @return If set to {@code true}, then PowerAuthSDK will not automatically upgrade activation to a newer protocol version.
*/
public boolean isAutomaticProtocolUpgradeDisabled() {
return disableAutomaticProtocolUpgrade;
}
/**
* Validate the configuration. Be aware that the method performs just a formal validation, so it cannot detect if you
* provide a wrong cryptographic keys or secrets.
*
* @return {@code true} if configuration appears to be valid.
*/
public boolean validateConfiguration() {
if (externalEncryptionKey != null) {
if (externalEncryptionKey.length != 16) {
return false;
}
}
return true;
}
/**
* Private default constructor. Use {@link Builder} to create a new instance of this class.
*
* @param instanceId Identifier of the PowerAuthSDK instance, used as a 'key' to store session state.
* @param baseEndpointUrl Base URL to the PowerAuth Standard REST API (the URL part before {@code "/pa/..."}).
* @param appKey {@code APPLICATION_KEY} as defined in PowerAuth specification - a key identifying an application version.
* @param appSecret {@code APPLICATION_SECRET} as defined in PowerAuth specification - a secret associated with an application version.
* @param masterServerPublicKey {@code KEY_SERVER_MASTER_PUBLIC} as defined in PowerAuth specification - a master server public key.
* @param externalEncryptionKey Encryption key provided by an external context, used to encrypt possession and biometry related factor keys under the hood.
* @param fetchKeysStrategy {@link IFetchKeysStrategy} interface for key providing strategy.
* @param disableAutomaticProtocolUpgrade If set to {@code true}, then PowerAuthSDK will not automatically upgrade activation to a newer protocol version.
*/
private PowerAuthConfiguration(
@NonNull String instanceId,
@NonNull String baseEndpointUrl,
@NonNull String appKey,
@NonNull String appSecret,
@NonNull String masterServerPublicKey,
@Nullable byte[] externalEncryptionKey,
@NonNull IFetchKeysStrategy fetchKeysStrategy,
boolean disableAutomaticProtocolUpgrade) {
this.instanceId = instanceId;
this.baseEndpointUrl = baseEndpointUrl;
this.appKey = appKey;
this.appSecret = appSecret;
this.masterServerPublicKey = masterServerPublicKey;
this.externalEncryptionKey = externalEncryptionKey;
this.fetchKeysStrategy = fetchKeysStrategy;
this.disableAutomaticProtocolUpgrade = disableAutomaticProtocolUpgrade;
}
/**
* A builder that collects arguments for {@link PowerAuthConfiguration}.
*/
public static class Builder {
// mandatory
private final @NonNull String baseEndpointUrl;
private final @NonNull String appKey;
private final @NonNull String appSecret;
private final @NonNull String masterServerPublicKey;
// optional
private String instanceId;
private IFetchKeysStrategy fetchKeysStrategy = null;
private byte[] externalEncryptionKey = null;
private boolean disableAutomaticProtocolUpgrade = false;
/**
* Creates a builder for {@link PowerAuthConfiguration}.
*
* @param instanceId Identifier of the PowerAuthSDK instance, used as a 'key' to store session state. If {@code null}, then {@link #DEFAULT_INSTANCE_ID} is used.
* @param baseEndpointUrl Base URL to the PowerAuth Standard REST API (the URL part before {@code "/pa/..."}).
* @param appKey {@code APPLICATION_KEY} as defined in PowerAuth specification - a key identifying an application version.
* @param appSecret {@code APPLICATION_SECRET} as defined in PowerAuth specification - a secret associated with an application version.
* @param masterServerPublicKey {@code KEY_SERVER_MASTER_PUBLIC} as defined in PowerAuth specification - a master server public key.
*/
public Builder(@Nullable String instanceId, @NonNull String baseEndpointUrl, @NonNull String appKey, @NonNull String appSecret, @NonNull String masterServerPublicKey) {
this.instanceId = instanceId;
this.appKey = appKey;
this.appSecret = appSecret;
this.masterServerPublicKey = masterServerPublicKey;
if (baseEndpointUrl.endsWith("/")) { // make sure to remove trailing slash
this.baseEndpointUrl = baseEndpointUrl.substring(0, baseEndpointUrl.length() - 1);
} else {
this.baseEndpointUrl = baseEndpointUrl;
}
}
/**
* Set instance identifier.
*
* @param instanceId Identifier of the PowerAuthSDK instance, used as a 'key' to store session state.
* @return {@link Builder}
*/
public @NonNull Builder instanceId(@NonNull String instanceId) {
this.instanceId = instanceId;
return this;
}
/**
* Set
* @param fetchKeysStrategy {@link IFetchKeysStrategy} interface for key providing strategy.
* @return {@link Builder}
*/
public @NonNull Builder fetchKeysStrategy(@NonNull IFetchKeysStrategy fetchKeysStrategy) {
this.fetchKeysStrategy = fetchKeysStrategy;
return this;
}
/**
* Set external encryption key provided by an external context, used to encrypt possession and biometry related factor keys under the hood.
* @param externalEncryptionKey Encryption key provided by an external context, used to encrypt possession and biometry related factor keys under the hood.
* @return {@link Builder}
*/
public @NonNull Builder externalEncryptionKey(@NonNull byte[] externalEncryptionKey) {
this.externalEncryptionKey = externalEncryptionKey;
return this;
}
/**
* Disable automatic protocol upgrade. This option should be used only for the testing purposes.
* @return {@link Builder}
*/
public @NonNull Builder disableAutomaticProtocolUpgrade() {
this.disableAutomaticProtocolUpgrade = true;
return this;
}
/**
* Build a final {@link PowerAuthConfiguration} instance.
* @return New instance of {@link PowerAuthConfiguration}.
*/
public @NonNull PowerAuthConfiguration build() {
    // Resolve optional settings to their defaults before constructing the
    // immutable configuration object.
    final String resolvedInstanceId = instanceId == null ? DEFAULT_INSTANCE_ID : instanceId;
    final byte[] eekCopy = externalEncryptionKey == null
            ? null
            : Arrays.copyOf(externalEncryptionKey, externalEncryptionKey.length);
    final IFetchKeysStrategy strategy = fetchKeysStrategy == null ? new DefaultFetchKeysStrategy() : fetchKeysStrategy;
    return new PowerAuthConfiguration(
            resolvedInstanceId,
            baseEndpointUrl,
            appKey,
            appSecret,
            masterServerPublicKey,
            eekCopy,
            strategy,
            disableAutomaticProtocolUpgrade);
}
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.value.DfaPsiType;
import com.intellij.psi.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.containers.ContainerUtil;
import one.util.streamex.EntryStream;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* Immutable class representing a number of non-primitive type constraints applied to some value.
* There are two types of constrains: value is instance of some type and value is not an instance of some type.
* Unlike usual Java semantics, the {@code null} value is considered to be instanceof any type (non-null instanceof can be expressed
* via additional restriction {@link DfaFactType#NULLABILITY} {@code = NOT_NULL}).
*/
public abstract class TypeConstraint {
  /** Returns a user-visible description of this constraint; parts redundant with the declared {@code type} are omitted. */
  @NotNull
  public abstract String getPresentationText(@Nullable PsiType type);

  /** Adds an "instanceof type" fact; returns {@code null} when the resulting state is contradictory. */
  @Nullable
  public abstract TypeConstraint withInstanceofValue(@NotNull DfaPsiType type);

  /** Adds a "not instanceof type" fact; returns {@code null} when the resulting state is contradictory. */
  @Nullable
  public abstract TypeConstraint withNotInstanceofValue(DfaPsiType type);

  /** Returns a constraint with any fact mentioning the given type removed. */
  @NotNull
  abstract TypeConstraint withoutType(@NotNull DfaPsiType type);

  /** Returns a PSI type describing this constraint, or {@code null} if none applies. */
  @Nullable
  public abstract PsiType getPsiType();

  /** Returns true when every value satisfying {@code other} also satisfies this constraint. */
  abstract boolean isSuperStateOf(@NotNull TypeConstraint other);

  /** Returns a constraint covering both this and {@code other}; may return {@code null}. */
  @Nullable
  public abstract TypeConstraint union(@NotNull TypeConstraint other);

  /** Returns the conjunction of this constraint and {@code right}, or {@code null} when they contradict. */
  @Nullable
  abstract TypeConstraint intersect(@NotNull TypeConstraint right);

  /** Types the value is known to be an instance of. */
  @NotNull
  public abstract Set<DfaPsiType> getInstanceofValues();

  /** Types the value is known not to be an instance of. */
  @NotNull
  public abstract Set<DfaPsiType> getNotInstanceofValues();

  /** True when no facts are recorded at all. */
  public abstract boolean isEmpty();

  /** True when the runtime type is known exactly (see {@link Exact}). */
  public abstract boolean isExact();

  /** True when the runtime type is known to be exactly the type with the given fully-qualified name. */
  public abstract boolean isExact(String typeName);
/**
 * A constraint stating that the value's runtime type is exactly {@code myType},
 * not any subtype. This is the most specific state in the constraint lattice.
 */
static final class Exact extends TypeConstraint {
  final @NotNull DfaPsiType myType;

  Exact(@NotNull DfaPsiType type) {
    myType = type;
  }

  @NotNull
  @Override
  public String getPresentationText(@Nullable PsiType type) {
    // When the exact type coincides with the declared type, there is nothing useful to display.
    return type != null && DfaPsiType.normalizeType(type).equals(myType.getPsiType()) ? "" : "exactly " + myType;
  }

  @Nullable
  @Override
  public TypeConstraint withInstanceofValue(@NotNull DfaPsiType type) {
    // "instanceof S" where S is a supertype of the exact type adds nothing;
    // otherwise the combination is impossible.
    return type.isAssignableFrom(myType) ? this : null;
  }

  @Nullable
  @Override
  public TypeConstraint withNotInstanceofValue(DfaPsiType type) {
    // "not instanceof S" where S is a supertype of the exact type is a contradiction.
    return type.isAssignableFrom(myType) ? null : this;
  }

  @NotNull
  @Override
  TypeConstraint withoutType(@NotNull DfaPsiType type) {
    return myType == type ? Constrained.EMPTY : this;
  }

  @NotNull
  @Override
  public PsiType getPsiType() {
    return myType.getPsiType();
  }

  @Override
  boolean isSuperStateOf(@NotNull TypeConstraint other) {
    // An exact constraint covers no state other than itself.
    return this.equals(other);
  }

  @Nullable
  @Override
  public TypeConstraint union(@NotNull TypeConstraint other) {
    if(isSuperStateOf(other)) return this;
    if(other.isSuperStateOf(this)) return other;
    // Relax "exactly T" to "instanceof T" and union that instead.
    return new Constrained(Collections.singleton(myType), Collections.emptySet()).union(other);
  }

  @Override
  @Nullable
  TypeConstraint intersect(@NotNull TypeConstraint right) {
    if (right instanceof Exact) {
      // Two distinct exact types can never describe the same value.
      return right.equals(this) ? this : null;
    }
    // Re-apply every fact from the right-hand side; any contradiction yields null.
    TypeConstraint result = this;
    for (DfaPsiType type : right.getInstanceofValues()) {
      result = result.withInstanceofValue(type);
      if (result == null) return null;
    }
    for (DfaPsiType type : right.getNotInstanceofValues()) {
      result = result.withNotInstanceofValue(type);
      if (result == null) return null;
    }
    return result;
  }

  @NotNull
  @Override
  public Set<DfaPsiType> getInstanceofValues() {
    // "exactly T" implies "instanceof T".
    return Collections.singleton(myType);
  }

  @NotNull
  @Override
  public Set<DfaPsiType> getNotInstanceofValues() {
    return Collections.emptySet();
  }

  @Override
  public boolean isEmpty() {
    return false;
  }

  @Override
  public boolean isExact() {
    return true;
  }

  @Override
  public boolean isExact(String typeName) {
    return myType.getPsiType().equalsToText(typeName);
  }

  @Override
  public int hashCode() {
    return myType.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    return obj == this || obj instanceof Exact && ((Exact)obj).myType.equals(myType);
  }

  @Override
  public String toString() {
    return "exactly "+myType;
  }
}
/**
 * A constraint expressed as a conjunction of "instanceof T" and "not instanceof T"
 * facts. Both sets are kept minimal: adding a fact removes any member it subsumes.
 */
private static final class Constrained extends TypeConstraint {
  /**
   * An instance representing no constraints
   */
  private static final TypeConstraint EMPTY = new Constrained(Collections.emptySet(), Collections.emptySet());
  @NotNull private final Set<DfaPsiType> myInstanceofValues;
  @NotNull private final Set<DfaPsiType> myNotInstanceofValues;

  Constrained(@NotNull Set<DfaPsiType> instanceofValues, @NotNull Set<DfaPsiType> notInstanceofValues) {
    myInstanceofValues = instanceofValues;
    myNotInstanceofValues = notInstanceofValues;
  }

  @Override
  @NotNull
  public String getPresentationText(@Nullable PsiType type) {
    Set<DfaPsiType> instanceOfTypes = myInstanceofValues;
    if (type != null) {
      // Don't show "instanceof X" when X is the declared type: it is trivially true.
      instanceOfTypes = StreamEx.of(instanceOfTypes)
        .removeBy(DfaPsiType::getPsiType, DfaPsiType.normalizeType(type))
        .toSet();
    }
    return EntryStream.of("instanceof ", instanceOfTypes,
                          "not instanceof ", myNotInstanceofValues)
      .removeValues(Set::isEmpty)
      .mapKeyValue((prefix, set) -> StreamEx.of(set).map(DfaPsiType::toString).sorted().joining(", ", prefix, ""))
      .joining("\n");
  }

  /** Returns true if adding the fact "instanceof dfaType" is consistent with the current facts. */
  boolean checkInstanceofValue(@NotNull DfaPsiType dfaType) {
    if (myInstanceofValues.contains(dfaType)) return true;
    // Contradiction: dfaType is a subtype of something we are known not to be.
    for (DfaPsiType dfaTypeValue : myNotInstanceofValues) {
      if (dfaTypeValue.isAssignableFrom(dfaType)) return false;
    }
    // Contradiction: dfaType is incompatible with a type we are known to be.
    for (DfaPsiType dfaTypeValue : myInstanceofValues) {
      if (!dfaType.isConvertibleFrom(dfaTypeValue)) return false;
    }
    return true;
  }

  @Override
  @Nullable
  public TypeConstraint withInstanceofValue(@NotNull DfaPsiType type) {
    PsiType psiType = type.getPsiType();
    // Primitives and not-yet-inferred lambda types carry no useful instanceof information.
    if (psiType instanceof PsiPrimitiveType || LambdaUtil.notInferredType(psiType)) return this;
    PsiClass psiClass = PsiUtil.resolveClassInClassTypeOnly(psiType);
    if (psiClass != null && psiClass.hasModifierProperty(PsiModifier.FINAL)) {
      // A final class has no subtypes, so "instanceof" is equivalent to "exactly".
      return new Exact(type).intersect(this);
    }
    if (!checkInstanceofValue(type)) {
      return null;
    }
    // Keep the instanceof set minimal: drop supertypes of the new fact,
    // and do nothing if the new fact is implied by an existing one.
    List<DfaPsiType> moreGeneric = new ArrayList<>();
    for (DfaPsiType alreadyInstanceof : myInstanceofValues) {
      if (type.isAssignableFrom(alreadyInstanceof)) {
        return this;
      }
      if (alreadyInstanceof.isAssignableFrom(type)) {
        moreGeneric.add(alreadyInstanceof);
      }
    }
    Set<DfaPsiType> newInstanceof = ContainerUtil.newHashSet(myInstanceofValues);
    newInstanceof.removeAll(moreGeneric);
    newInstanceof.add(type);
    return create(newInstanceof, myNotInstanceofValues);
  }

  @Override
  @Nullable
  public TypeConstraint withNotInstanceofValue(DfaPsiType type) {
    if (myNotInstanceofValues.contains(type)) return this;
    // Contradiction with an existing "instanceof" fact.
    for (DfaPsiType dfaTypeValue : myInstanceofValues) {
      if (type.isAssignableFrom(dfaTypeValue)) return null;
    }
    // Keep the not-instanceof set minimal: drop subtypes implied by the new fact,
    // and do nothing if the new fact is implied by an existing one.
    List<DfaPsiType> moreSpecific = new ArrayList<>();
    for (DfaPsiType alreadyNotInstanceof : myNotInstanceofValues) {
      if (alreadyNotInstanceof.isAssignableFrom(type)) {
        return this;
      }
      if (type.isAssignableFrom(alreadyNotInstanceof)) {
        moreSpecific.add(alreadyNotInstanceof);
      }
    }
    Set<DfaPsiType> newNotInstanceof = ContainerUtil.newHashSet(myNotInstanceofValues);
    newNotInstanceof.removeAll(moreSpecific);
    newNotInstanceof.add(type);
    return create(myInstanceofValues, newNotInstanceof);
  }

  @Override
  @NotNull
  TypeConstraint withoutType(@NotNull DfaPsiType type) {
    if (myInstanceofValues.contains(type)) {
      HashSet<DfaPsiType> newInstanceof = ContainerUtil.newHashSet(myInstanceofValues);
      newInstanceof.remove(type);
      return create(newInstanceof, myNotInstanceofValues);
    }
    if (myNotInstanceofValues.contains(type)) {
      HashSet<DfaPsiType> newNotInstanceof = ContainerUtil.newHashSet(myNotInstanceofValues);
      newNotInstanceof.remove(type);
      return create(myInstanceofValues, newNotInstanceof);
    }
    return this;
  }

  @Override
  @Nullable
  public PsiType getPsiType() {
    // Only positive facts contribute; they form an intersection type like "A & B".
    PsiType[] conjuncts = StreamEx.of(myInstanceofValues).map(DfaPsiType::getPsiType).toArray(PsiType.EMPTY_ARRAY);
    return conjuncts.length == 0 ? null : PsiIntersectionType.createIntersection(true, conjuncts);
  }

  @Override
  boolean isSuperStateOf(@NotNull TypeConstraint other) {
    if (other instanceof Constrained) {
      Constrained that = (Constrained)other;
      // A state with a superset of facts is more restricted, hence a sub-state.
      if (that.myNotInstanceofValues.containsAll(myNotInstanceofValues) && that.myInstanceofValues.containsAll(myInstanceofValues)) {
        return true;
      }
      // Without negative facts, subtyping between the instanceof sets is enough.
      if (this.myNotInstanceofValues.isEmpty() && that.myNotInstanceofValues.isEmpty()) {
        return that.myInstanceofValues.stream().allMatch(
          thatType -> this.myInstanceofValues.stream().allMatch(thisType -> thisType.isAssignableFrom(thatType)));
      }
    } else if (other instanceof Exact) {
      DfaPsiType otherType = ((Exact)other).myType;
      return this.myInstanceofValues.stream().allMatch(otherType::isAssignableFrom) &&
             this.myNotInstanceofValues.stream().noneMatch(otherType::isAssignableFrom);
    }
    return false;
  }

  @Override
  @Nullable
  public TypeConstraint union(@NotNull TypeConstraint other) {
    if(isSuperStateOf(other)) return this;
    if(other.isSuperStateOf(this)) return other;
    if (other instanceof Constrained) {
      return union((Constrained)other);
    }
    if (other instanceof Exact) {
      // Relax "exactly T" to "instanceof T" before unioning.
      return union(new Constrained(Collections.singleton(((Exact)other).myType), Collections.emptySet()));
    }
    return EMPTY;
  }

  @Override
  @Nullable
  TypeConstraint intersect(@NotNull TypeConstraint right) {
    if (right instanceof Exact) {
      // Exact knows how to intersect with a Constrained; delegate to it.
      return right.intersect(this);
    }
    // Fold each of the right-hand side's facts into this state.
    TypeConstraint result = this;
    for (DfaPsiType type : right.getInstanceofValues()) {
      result = result.withInstanceofValue(type);
      if (result == null) return null;
    }
    for (DfaPsiType type : right.getNotInstanceofValues()) {
      result = result.withNotInstanceofValue(type);
      if (result == null) return null;
    }
    return result;
  }

  /** Computes the union of two Constrained states (join in the constraint lattice). */
  private TypeConstraint union(@NotNull Constrained other) {
    // Only negative facts shared by both sides survive the union.
    Set<DfaPsiType> notTypes = new HashSet<>(this.myNotInstanceofValues);
    notTypes.retainAll(other.myNotInstanceofValues);
    Set<DfaPsiType> instanceOfTypes;
    if (this.myInstanceofValues.containsAll(other.myInstanceofValues)) {
      instanceOfTypes = other.myInstanceofValues;
    } else if (other.myInstanceofValues.containsAll(this.myInstanceofValues)) {
      instanceOfTypes = this.myInstanceofValues;
    } else {
      // Otherwise keep the common supertypes of both instanceof sets.
      instanceOfTypes = withSuper(this.myInstanceofValues);
      instanceOfTypes.retainAll(withSuper(other.myInstanceofValues));
    }
    TypeConstraint constraint = EMPTY;
    for (DfaPsiType type: instanceOfTypes) {
      constraint = constraint.withInstanceofValue(type);
      if (constraint == null) {
        // Should not happen normally, but may happen with inconsistent hierarchy (e.g. if final class is extended)
        return EMPTY;
      }
    }
    for (DfaPsiType type: notTypes) {
      constraint = constraint.withNotInstanceofValue(type);
      if (constraint == null) return EMPTY;
    }
    return constraint;
  }

  /** Returns the given types together with all of their supertypes. */
  private static Set<DfaPsiType> withSuper(Set<DfaPsiType> instanceofValues) {
    Set<DfaPsiType> result = new HashSet<>(instanceofValues);
    for (DfaPsiType type : instanceofValues) {
      InheritanceUtil.processSuperTypes(type.getPsiType(), false, t -> result.add(type.getFactory().createDfaType(t)));
    }
    return result;
  }

  @Override
  @NotNull
  public Set<DfaPsiType> getInstanceofValues() {
    return Collections.unmodifiableSet(myInstanceofValues);
  }

  @Override
  @NotNull
  public Set<DfaPsiType> getNotInstanceofValues() {
    return Collections.unmodifiableSet(myNotInstanceofValues);
  }

  @Override
  public boolean isEmpty() {
    return myInstanceofValues.isEmpty() && myNotInstanceofValues.isEmpty();
  }

  @Override
  public boolean isExact() {
    return false;
  }

  @Override
  public boolean isExact(String typeName) {
    return false;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Constrained that = (Constrained)o;
    return Objects.equals(myInstanceofValues, that.myInstanceofValues) &&
           Objects.equals(myNotInstanceofValues, that.myNotInstanceofValues);
  }

  @Override
  public int hashCode() {
    return Objects.hash(myInstanceofValues, myNotInstanceofValues);
  }

  @Override
  public String toString() {
    return EntryStream.of("instanceof ", myInstanceofValues,
                          "not instanceof ", myNotInstanceofValues)
      .removeValues(Set::isEmpty)
      .mapKeyValue((prefix, set) -> StreamEx.of(set).joining(", ", prefix, ""))
      .joining(" ");
  }
}
/**
 * Builds a Constrained instance from the given fact sets, reusing the shared
 * EMPTY instance and canonical empty/singleton set implementations where possible.
 */
private static TypeConstraint create(@NotNull Set<DfaPsiType> instanceofValues, @NotNull Set<DfaPsiType> notInstanceofValues) {
  if (instanceofValues.isEmpty() && notInstanceofValues.isEmpty()) {
    return Constrained.EMPTY;
  }
  return new TypeConstraint.Constrained(compactSet(instanceofValues), compactSet(notInstanceofValues));
}

/** Canonicalizes zero/one-element sets to the shared immutable implementations. */
private static Set<DfaPsiType> compactSet(@NotNull Set<DfaPsiType> types) {
  switch (types.size()) {
    case 0: return Collections.emptySet();
    case 1: return Collections.singleton(types.iterator().next());
    default: return types;
  }
}
/**
 * Adds an "instanceof type" fact to the TYPE_CONSTRAINT entry of the given fact map.
 * Returns the updated map, or {@code null} when the fact contradicts the existing constraint.
 */
@Nullable
public static DfaFactMap withInstanceOf(@NotNull DfaFactMap map, @NotNull DfaPsiType type) {
  TypeConstraint existing = map.get(DfaFactType.TYPE_CONSTRAINT);
  TypeConstraint base = existing == null ? Constrained.EMPTY : existing;
  TypeConstraint updated = base.withInstanceofValue(type);
  return updated == null ? null : map.with(DfaFactType.TYPE_CONSTRAINT, updated);
}
/** Creates a constraint stating the runtime type is exactly the given type. */
public static TypeConstraint exact(@NotNull DfaPsiType type) {
  return new Exact(type);
}
/** Returns the shared constraint instance carrying no facts. */
public static TypeConstraint empty() {
  return Constrained.EMPTY;
}
}
| |
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.jcublas;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.DoubleBuffer;
import org.nd4j.linalg.api.buffer.FloatBuffer;
import org.nd4j.linalg.api.ndarray.BaseNDArray;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.util.List;
/**
* Created by mjk on 8/23/14.
*
* @author mjk
* @author Adam Gibson
*/
public class JCublasNDArray extends BaseNDArray {
    public JCublasNDArray(double[][] data) {
        super(data);
    }

    public JCublasNDArray(double[][] data, char ordering) {
        super(data, ordering);
    }

    public JCublasNDArray(int[] shape, DataBuffer buffer) {
        super(shape, buffer);
    }

    /**
     * Create this JCublasNDArray with the given data and shape and 0 offset
     *
     * @param data the data to use
     * @param shape the shape of the JCublasNDArray
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(float[] data, int[] shape, char ordering) {
        super(data, shape, ordering);
    }

    /**
     * @param data the data to use
     * @param shape the shape of the JCublasNDArray
     * @param offset the desired offset
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(float[] data, int[] shape, int offset, char ordering) {
        super(data, shape, offset, ordering);
    }

    /**
     * Construct an JCublasNDArray of the specified shape
     * with an empty data array
     *
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     * @param offset the desired offset
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(int[] shape, int[] stride, int offset, char ordering) {
        super(shape, stride, offset, ordering);
    }

    /**
     * Create the JCublasNDArray with
     * the specified shape and stride and an offset of 0
     *
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(int[] shape, int[] stride, char ordering) {
        super(shape, stride, ordering);
    }

    public JCublasNDArray(int[] shape, int offset, char ordering) {
        super(shape, offset, ordering);
    }

    public JCublasNDArray(int[] shape) {
        super(shape);
    }

    /**
     * Creates a new <i>n</i> times <i>m</i> <tt>DoubleMatrix</tt>.
     *
     * @param newRows the number of rows (<i>n</i>) of the new matrix.
     * @param newColumns the number of columns (<i>m</i>) of the new matrix.
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(int newRows, int newColumns, char ordering) {
        super(newRows, newColumns, ordering);
    }

    /**
     * Create an JCublasNDArray from the specified slices.
     * This will go through and merge all of the
     * data from each slice in to one JCublasNDArray
     * which will then take the specified shape
     *
     * @param slices the slices to merge
     * @param shape the shape of the JCublasNDArray
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(List<INDArray> slices, int[] shape, char ordering) {
        super(slices, shape, ordering);
    }

    /**
     * Create an JCublasNDArray from the specified slices.
     * This will go through and merge all of the
     * data from each slice in to one JCublasNDArray
     * which will then take the specified shape
     *
     * @param slices the slices to merge
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     * @param ordering the ordering of the JCublasNDArray
     */
    public JCublasNDArray(List<INDArray> slices, int[] shape, int[] stride, char ordering) {
        super(slices, shape, stride, ordering);
    }

    public JCublasNDArray(float[] data, int[] shape, int[] stride, char ordering) {
        super(data, shape, stride, ordering);
    }

    public JCublasNDArray(float[] data, int[] shape, int[] stride, int offset, char ordering) {
        super(data, shape, stride, offset, ordering);
    }

    public JCublasNDArray(DataBuffer data, int[] shape, int[] stride, int offset) {
        super(data, shape, stride, offset);
    }

    public JCublasNDArray(int[] data, int[] shape, int[] strides) {
        super(data, shape, strides);
    }

    public JCublasNDArray(DataBuffer data, int[] shape) {
        super(data, shape);
    }

    public JCublasNDArray(DataBuffer buffer, int[] shape, int offset) {
        super(buffer, shape, offset);
    }

    /**
     * Create this JCublasNDArray with the given data and shape and 0 offset
     *
     * @param data the data to use
     * @param shape the shape of the JCublasNDArray
     */
    public JCublasNDArray(float[] data, int[] shape) {
        super(data, shape);
    }

    public JCublasNDArray(float[] data, int[] shape, int offset) {
        super(data, shape, offset);
    }

    /**
     * Construct an JCublasNDArray of the specified shape
     * with an empty data array
     *
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     * @param offset the desired offset
     */
    public JCublasNDArray(int[] shape, int[] stride, int offset) {
        super(shape, stride, offset);
    }

    /**
     * Create the JCublasNDArray with
     * the specified shape and stride and an offset of 0
     *
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     */
    public JCublasNDArray(int[] shape, int[] stride) {
        super(shape, stride);
    }

    public JCublasNDArray(int[] shape, int offset) {
        super(shape, offset);
    }

    public JCublasNDArray(int[] shape, char ordering) {
        super(shape, ordering);
    }

    /**
     * Creates a new <i>n</i> times <i>m</i> <tt>DoubleMatrix</tt>.
     *
     * @param newRows the number of rows (<i>n</i>) of the new matrix.
     * @param newColumns the number of columns (<i>m</i>) of the new matrix.
     */
    public JCublasNDArray(int newRows, int newColumns) {
        super(newRows, newColumns);
    }

    /**
     * Create an JCublasNDArray from the specified slices.
     * This will go through and merge all of the
     * data from each slice in to one JCublasNDArray
     * which will then take the specified shape
     *
     * @param slices the slices to merge
     * @param shape the shape of the JCublasNDArray
     */
    public JCublasNDArray(List<INDArray> slices, int[] shape) {
        super(slices, shape);
    }

    /**
     * Create an JCublasNDArray from the specified slices.
     * This will go through and merge all of the
     * data from each slice in to one JCublasNDArray
     * which will then take the specified shape
     *
     * @param slices the slices to merge
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     */
    public JCublasNDArray(List<INDArray> slices, int[] shape, int[] stride) {
        super(slices, shape, stride);
    }

    public JCublasNDArray(float[] data, int[] shape, int[] stride) {
        super(data, shape, stride);
    }

    public JCublasNDArray(float[] data, int[] shape, int[] stride, int offset) {
        super(data, shape, stride, offset);
    }

    public JCublasNDArray(float[] data) {
        super(data);
    }

    /**
     * Copy constructor: creates a new array with the same shape as the source
     * and a duplicate of its data.
     *
     * @param doubleMatrix the array to copy
     */
    public JCublasNDArray(JCublasNDArray doubleMatrix) {
        this(new int[]{doubleMatrix.rows, doubleMatrix.columns});
        // Bug fix: duplicate the SOURCE matrix's data. The previous code called
        // dup() on "this" (the freshly created, empty array), so the copy never
        // contained the source's contents.
        this.data = doubleMatrix.dup().data();
    }

    /**
     * Creates an array backed by a new buffer built from the given doubles.
     * Note: unlike the other constructors this one bypasses the super
     * constructor and initializes the fields directly.
     *
     * @param data the data to use
     * @param shape the shape of the JCublasNDArray
     * @param stride the stride of the JCublasNDArray
     * @param offset the desired offset
     */
    public JCublasNDArray(double[] data, int[] shape, int[] stride, int offset) {
        this.data = Nd4j.createBuffer(data);
        this.stride = stride;
        this.offset = offset;
        init(shape);
    }

    public JCublasNDArray(float[][] floats) {
        super(floats);
    }

    public JCublasNDArray(float[][] data, char ordering) {
        super(data, ordering);
    }

    public JCublasNDArray(DataBuffer buffer, int[] shape, int offset, char ordering) {
        super(buffer, shape, offset, ordering);
    }

    public JCublasNDArray() {
    }

    public JCublasNDArray(DataBuffer buffer) {
        super(buffer);
    }

    public JCublasNDArray(DataBuffer buffer, int[] shape, int[] stride, int offset, char ordering) {
        super(buffer, shape, stride, offset, ordering);
    }

    public JCublasNDArray(float[] data, char order) {
        super(data, order);
    }

    public JCublasNDArray(FloatBuffer floatBuffer, char order) {
        super(floatBuffer, order);
    }

    public JCublasNDArray(DataBuffer buffer, int[] shape, int[] strides) {
        super(buffer, shape, strides);
    }

    public JCublasNDArray(double[] data, int[] shape, char ordering) {
        super(data, shape, ordering);
    }

    public JCublasNDArray(double[] data, int[] shape, int[] stride, int offset, char ordering) {
        super(data, shape, stride, offset, ordering);
    }
}
| |
package com.capitalone.dashboard.collector;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
* Bean to hold settings specific to the Feature collector.
*/
@Component
@ConfigurationProperties(prefix = "feature")
public class FeatureSettings {
    private String cron;
    private int pageSize;
    @Value("${feature.firstRunHistoryDays:30}")
    private int firstRunHistoryDays;
    // After this number of hours since the last run, we will refresh boards/teams and projects
    @Value("${feature.refreshTeamAndProjectHours:24}")
    private int refreshTeamAndProjectHours;
    // Jira-connection details
    private String jiraBaseUrl;
    private String jiraQueryEndpoint;
    private String jiraCredentials;
    private String jiraOauthAuthtoken;
    private String jiraOauthRefreshtoken;
    private String jiraOauthRedirecturi;
    private String jiraOauthExpiretime;
    private String jiraProxyUrl;
    private String jiraProxyPort;
    /**
     * In Jira, general IssueType IDs are associated to various "issue"
     * attributes. However, there is one attribute which this collector's
     * queries rely on that change between different instantiations of Jira.
     * Please provide a numerical ID reference to your instance's IssueType for
     * the lowest level of Issues (e.g., "user story") specific to your Jira
     * instance.
     * <p>
     * </p>
     * <strong>Note:</strong> You can retrieve your instance's IssueType ID
     * listings via the following URI:
     * https://[your-jira-domain-name]/rest/api/2/issuetype/
     * Multiple comma-separated values can be specified.
     */
    private String[] jiraIssueTypeNames;
    /**
     * In Jira, your instance will have its own custom field created for "sprint" or "timebox" details, which includes a list of information. This field allows you to specify that data field for your instance of Jira.
     * <p>
     * </p>
     * <strong>Note:</strong> You can retrieve your instance's sprint data field name
     * via the following URI, and look for a package name <em>com.atlassian.greenhopper.service.sprint.Sprint</em>; your custom field name describes the values in this field:
     * https://[your-jira-domain-name]/rest/api/2/issue/[some-issue-name]
     */
    private String jiraSprintDataFieldName;
    /**
     * In Jira, your instance will have its own custom field created for "super story" or "epic" back-end ID, which includes a list of information. This field allows you to specify that data field for your instance of Jira.
     * <p>
     * </p>
     * <strong>Note:</strong> You can retrieve your instance's epic ID field name
     * via the following URI where your queried user story issue has a super issue (e.g., epic) tied to it; your custom field name describes the epic value you expect to see, and is the only field that does this for a given issue:
     * https://[your-jira-domain-name]/rest/api/2/issue/[some-issue-name]
     */
    private String jiraEpicIdFieldName;
    private String jiraStoryPointsFieldName;
    /**
     * Its a custom field in JIRA, set it here
     */
    private String jiraTeamFieldName;
    /**
     * If you want to select boards in the Hygieia UI
     */
    private boolean jiraBoardAsTeam;
    /**
     * Defines the maximum number of features allow per board. If limit is reach collection will not happen for given board
     */
    @Value("${feature.maxNumberOfFeaturesPerBoard:2000}")
    private int maxNumberOfFeaturesPerBoard;
    /**
     * Defines how to update features per board. If true then only update based on enabled collectorItems otherwise full update
     */
    @Value("${feature.collectorItemOnlyUpdate:true}")
    private boolean collectorItemOnlyUpdate;

    public boolean isCollectorItemOnlyUpdate() {
        return collectorItemOnlyUpdate;
    }

    public void setCollectorItemOnlyUpdate(boolean collectorItemOnlyUpdate) {
        this.collectorItemOnlyUpdate = collectorItemOnlyUpdate;
    }

    public int getMaxNumberOfFeaturesPerBoard() {
        return maxNumberOfFeaturesPerBoard;
    }

    public void setMaxNumberOfFeaturesPerBoard(int maxNumberOfFeaturesPerBoard) {
        this.maxNumberOfFeaturesPerBoard = maxNumberOfFeaturesPerBoard;
    }

    public String getCron() {
        return cron;
    }

    public void setCron(String cron) {
        this.cron = cron;
    }

    public int getPageSize() {
        return pageSize;
    }

    public void setPageSize(int pageSize) {
        this.pageSize = pageSize;
    }

    public int getFirstRunHistoryDays() {
        return firstRunHistoryDays;
    }

    public void setFirstRunHistoryDays(int firstRunHistoryDays) {
        this.firstRunHistoryDays = firstRunHistoryDays;
    }

    public String getJiraBaseUrl() {
        return jiraBaseUrl;
    }

    public void setJiraBaseUrl(String jiraBaseUrl) {
        this.jiraBaseUrl = jiraBaseUrl;
    }

    public String getJiraQueryEndpoint() {
        return jiraQueryEndpoint;
    }

    public void setJiraQueryEndpoint(String jiraQueryEndpoint) {
        this.jiraQueryEndpoint = jiraQueryEndpoint;
    }

    public String getJiraCredentials() {
        return jiraCredentials;
    }

    public void setJiraCredentials(String jiraCredentials) {
        this.jiraCredentials = jiraCredentials;
    }

    public String getJiraOauthAuthtoken() {
        return jiraOauthAuthtoken;
    }

    public void setJiraOauthAuthtoken(String jiraOauthAuthtoken) {
        this.jiraOauthAuthtoken = jiraOauthAuthtoken;
    }

    public String getJiraOauthRefreshtoken() {
        return jiraOauthRefreshtoken;
    }

    public void setJiraOauthRefreshtoken(String jiraOauthRefreshtoken) {
        this.jiraOauthRefreshtoken = jiraOauthRefreshtoken;
    }

    public String getJiraOauthRedirecturi() {
        return jiraOauthRedirecturi;
    }

    public void setJiraOauthRedirecturi(String jiraOauthRedirecturi) {
        this.jiraOauthRedirecturi = jiraOauthRedirecturi;
    }

    public String getJiraOauthExpiretime() {
        return jiraOauthExpiretime;
    }

    public void setJiraOauthExpiretime(String jiraOauthExpiretime) {
        this.jiraOauthExpiretime = jiraOauthExpiretime;
    }

    public String getJiraProxyUrl() {
        return jiraProxyUrl;
    }

    public void setJiraProxyUrl(String jiraProxyUrl) {
        this.jiraProxyUrl = jiraProxyUrl;
    }

    public String getJiraProxyPort() {
        return jiraProxyPort;
    }

    public void setJiraProxyPort(String jiraProxyPort) {
        this.jiraProxyPort = jiraProxyPort;
    }

    /**
     * Returns a defensive copy of the configured issue type names, so callers
     * cannot mutate this bean's internal state through the returned array.
     */
    public String[] getJiraIssueTypeNames() {
        return jiraIssueTypeNames == null ? null : jiraIssueTypeNames.clone();
    }

    /**
     * Stores a defensive copy of the given issue type names, so later mutation
     * of the caller's array cannot change this bean's state.
     */
    public void setJiraIssueTypeNames(String[] jiraIssueTypeNames) {
        this.jiraIssueTypeNames = jiraIssueTypeNames == null ? null : jiraIssueTypeNames.clone();
    }

    public String getJiraSprintDataFieldName() {
        return jiraSprintDataFieldName;
    }

    public void setJiraSprintDataFieldName(String jiraSprintDataFieldName) {
        this.jiraSprintDataFieldName = jiraSprintDataFieldName;
    }

    public String getJiraEpicIdFieldName() {
        return jiraEpicIdFieldName;
    }

    public void setJiraEpicIdFieldName(String jiraEpicIdFieldName) {
        this.jiraEpicIdFieldName = jiraEpicIdFieldName;
    }

    public String getJiraStoryPointsFieldName() {
        return jiraStoryPointsFieldName;
    }

    public void setJiraStoryPointsFieldName(String jiraStoryPointsFieldName) {
        this.jiraStoryPointsFieldName = jiraStoryPointsFieldName;
    }

    public String getJiraTeamFieldName() {
        return jiraTeamFieldName;
    }

    public void setJiraTeamFieldName(String jiraTeamFieldName) {
        this.jiraTeamFieldName = jiraTeamFieldName;
    }

    public int getRefreshTeamAndProjectHours() {
        return refreshTeamAndProjectHours;
    }

    public void setRefreshTeamAndProjectHours(int refreshTeamAndProjectHours) {
        this.refreshTeamAndProjectHours = refreshTeamAndProjectHours;
    }

    public boolean isJiraBoardAsTeam() {
        return jiraBoardAsTeam;
    }

    public void setJiraBoardAsTeam(boolean jiraBoardAsTeam) {
        this.jiraBoardAsTeam = jiraBoardAsTeam;
    }
}
| |
package com.chrylis.gjt.transform;
import static org.codehaus.groovy.ast.ClassHelper.isPrimitiveType;
import static org.codehaus.groovy.ast.tools.GeneralUtils.*;
import java.util.List;
import java.util.stream.Collectors;
import org.codehaus.groovy.ast.AnnotatedNode;
import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.PropertyNode;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.ReturnStatement;
import org.codehaus.groovy.ast.stmt.Statement;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.transform.GroovyASTTransformation;
import com.chrylis.gjt.annotation.TwoWaySetter;
@GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION)
public class TwoWaySetterAstTransformation extends AbstractGjtAstTransformation<TwoWaySetter> {
/** The annotation this transformation processes. */
@Override
public Class<TwoWaySetter> annotationClass() {
    return TwoWaySetter.class;
}

/** The annotation targets fields, not whole classes. */
@Override
public boolean canApplyToClassOnly() {
    return false;
}

// ClassNode form of the processed annotation, cached for comparisons.
protected final ClassNode MY_TYPE = annotationType();
/**
* Validates the structure of the bidirectional relationship and creates the code necessary for
* setting the relationship from this side.
*
* The algorithm implemented here is:
*
* <ol>
* <li>Sanity-check the annotated field (it backs a mutable reference property).
* <li>Find the corresponding field representing the inverse side of the relationship.
* <li>Confirm that the corresponding field points back here.
* <li>Add a synthetic setter to be used by the corresponding class to avoid a setter loop.
* <li>Create a public setter for this field's property that sets both sides of the relationship,
* including clearing dangling relationships if any.
* </ol>
*
* Note in particular that this transformation apples only to this side of the relationship,
* but it is required that the same transformation be applied to the other side independently.
*/
@Override
protected void doVisit(AnnotationNode annotationNode, AnnotatedNode annotatedNode) {
    // Step 1: the annotated node must be a sane mutable reference property.
    FieldNode myField = (FieldNode) annotatedNode;
    if (!sane(myField)) {
        return;
    }
    // Step 2: locate the inverse side of the relationship on the other class.
    FieldNode correspondingField = findCorrespondingField(myField);
    if (correspondingField == null) {
        return;
    }
    // Step 3: the inverse side must map back to exactly this field; otherwise
    // report a descriptive error and abort the transformation.
    FieldNode inverse = findCorrespondingField(correspondingField);
    if (!myField.equals(inverse)) {
        StringBuilder sb = new StringBuilder("corresponding field ")
            .append(myField.getType().getNameWithoutPackage()).append('.').append(correspondingField.getName());
        if (inverse == null) {
            sb.append(" does not map to a field on this class");
        } else {
            sb.append(" is already mapped to a different relationship (")
              .append(inverse.getName()).append("); the other field may need an explicit mapping");
        }
        addError(sb.toString(), myField);
        return;
    }
    // Step 4: synthetic setter lets the other side assign this field without
    // re-triggering the managed setter (avoids an infinite setter loop).
    addSyntheticSetterFor(myField);
    // Step 5: replace the public setter with one that maintains both sides.
    PropertyNode property = myField.getDeclaringClass().getProperty(myField.getName());
    property.setSetterBlock(createManagedSetterBody(myField, correspondingField));
}
protected static final int INTERESTING_MODIFIERS = ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL;
protected static final int EXPECTED_MODIFIERS = ACC_PRIVATE;
protected boolean sane(FieldNode field) {
if ((field.getModifiers() & INTERESTING_MODIFIERS) != EXPECTED_MODIFIERS
|| field.getDeclaringClass().getProperty(field.getName()) == null) {
addError(annotationName() + " must be applied to a mutable Groovy property", field);
return false;
}
if (isPrimitiveType(field.getType())) {
addError(annotationName() + " makes no sense on a primitive property", field);
return false;
}
return true;
}
/**
* Finds the "corresponding field" that is the other end of this field's bidirectional relationship.
*
* This method checks to see whether the annotation on the field, which must be present, specifies
* a {@code mappedBy} parameter; if so, then it returns the specified field. If not, it examines
* all the fields on the corresponding class, returning a single matching field and raising an error
* if zero or multiple fields match.
*
* @param field
* the field whose correspondence is to be checked
* @return the field on the other end of the bidirectional relationship, or {@code null} if no single matching field could be
* identified
*/
protected FieldNode findCorrespondingField(FieldNode field) {
// disregard entirely if this field is not annotated correctly, since we need to inspect the annotation
List<AnnotationNode> twses = field.getAnnotations(MY_TYPE);
if (twses.isEmpty()) {
return null;
}
ClassNode correspondingClass = field.getType();
// if the annotation specifies a "mappedBy" field, examine exactly that field
String explicitFieldName = GjtUtils.getAnnotationMemberStringValue(twses.get(0), "mappedBy");
if (!explicitFieldName.isEmpty()) {
FieldNode specified = correspondingClass.getField(explicitFieldName);
if (specified == null) {
addError("the specified field \"" + explicitFieldName + "\" was not found on " + correspondingClass, field);
} else if (specified.getAnnotations(MY_TYPE).isEmpty()) {
addError("the corresponding field " + correspondingClass.getNameWithoutPackage() + "."
+ specified.getName() + " is not annotated with " + annotationName(), field);
specified = null;
}
return specified;
}
// if no field is specified, find all annotated fields on the corresponding class of the owning type
ClassNode owningClass = field.getDeclaringClass();
List<FieldNode> candidates = correspondingClass.getFields().stream()
.filter(fn -> fn.getType().equals(owningClass))
.filter(fn -> !fn.getAnnotations(MY_TYPE).isEmpty())
.collect(Collectors.toList());
// only successful path for unspecified corresponding field
if (candidates.size() == 1) {
return candidates.get(0);
}
StringBuilder sb = new StringBuilder("no explicit field name was provided and ");
if (candidates.size() == 0) {
sb.append("no matching fields were found");
} else {
sb.append(candidates.size()).append(" matching fields were found: ");
sb.append(candidates.stream().map(FieldNode::getName).collect(Collectors.joining(",")));
}
addError(sb.toString(), field);
return null;
}
protected void addSyntheticSetterFor(FieldNode field) {
ClassNode declaringClass = field.getDeclaringClass();
declaringClass.addMethod(
syntheticSetterName(field),
ACC_PUBLIC | ACC_FINAL | ACC_SYNTHETIC,
ClassHelper.VOID_TYPE,
new Parameter[] { setterParam(field) },
new ClassNode[0],
createSyntheticSetterBody(field));
}
protected static String syntheticSetterName(FieldNode field) {
return "$gjt_" + GjtUtils.setterName(field);
}
protected static Parameter setterParam(FieldNode field) {
return param(field.getType(), field.getName());
}
protected static Statement createSyntheticSetterBody(FieldNode field) {
VariableExpression parameter = varX(setterParam(field));
return assignS(fieldX(field), parameter);
}
protected static Statement createManagedSetterBody(FieldNode myField, FieldNode corresponding) {
// The field on this class representing the relationship.
Expression owningField = fieldX(myField);
// The setter parameter (hard-set by Groovy as "value").
Expression newCorrespondingObject = varX(param(myField.getType(), "value"));
// Early exit if the parameter is already associated with this instance.
Statement earlyExit = ifS(sameX(owningField, newCorrespondingObject), ReturnStatement.RETURN_NULL_OR_VOID);
// If this object already has a relationship, null out the other side.
Statement breakUpWithEx = ifS(
notNullX(owningField),
stmt(callX(owningField, syntheticSetterName(corresponding), ConstantExpression.NULL))
);
// Set this end of the relationship. There might not be another end.
Statement iTakeYou = assignS(owningField, newCorrespondingObject);
Statement exitIfNowSingle = (ifS(equalsNullX(newCorrespondingObject), ReturnStatement.RETURN_NULL_OR_VOID));
// See if the new corresponding object already had a relationship. If so, null out the other end of that one.
VariableExpression jilted = varX("jilted", corresponding.getType());
Statement findJilted = declS(jilted, propX(newCorrespondingObject, corresponding.getName()));
Statement stealSignificantOther = ifS(
notNullX(jilted),
assignS(attrX(jilted, constX(myField.getName())), constX(null))
);
// Set both ends of the new relationship.
Statement youTakeMe = stmt(callX(newCorrespondingObject, syntheticSetterName(corresponding), varX("this")));
return block(earlyExit, breakUpWithEx, iTakeYou, exitIfNowSingle, findJilted, stealSignificantOther, youTakeMe);
}
}
| |
// Copyright 2019 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.AliasProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.test.TestProvider;
import com.google.devtools.build.lib.analysis.test.TestProvider.TestParams;
import com.google.devtools.build.lib.analysis.test.TestResult;
import com.google.devtools.build.lib.concurrent.ThreadSafety;
import com.google.devtools.build.lib.exec.TestAttempt;
import com.google.devtools.build.lib.packages.TestSize;
import com.google.devtools.build.lib.packages.TestTimeout;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.view.test.TestStatus.BlazeTestStatus;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/** This class aggregates and reports target-wide test statuses in real-time. */
@ThreadSafety.ThreadSafe
final class TestResultAggregator {

  /**
   * Settings for the aggregator; there are usually many aggregator instances with the same set of
   * settings, so we move them to a separate object.
   */
  static final class AggregationPolicy {
    private final EventBus eventBus;
    private final boolean testCheckUpToDate;
    private final boolean testVerboseTimeoutWarnings;

    AggregationPolicy(
        EventBus eventBus, boolean testCheckUpToDate, boolean testVerboseTimeoutWarnings) {
      this.eventBus = eventBus;
      this.testCheckUpToDate = testCheckUpToDate;
      this.testVerboseTimeoutWarnings = testVerboseTimeoutWarnings;
    }
  }

  private final AggregationPolicy policy;
  private final ConfiguredTarget testTarget;
  // Incrementally-built summary for testTarget; mutated only under the monitor of "this".
  private final TestSummary.Builder summary;
  // Test status artifacts whose runs have not yet completed; the target is finished when empty.
  private final Set<Artifact> remainingRuns;
  // Each test status artifact maps to the single TestResult reported for it.
  private final Map<Artifact, TestResult> statusMap = new HashMap<>();

  public TestResultAggregator(
      ConfiguredTarget target, BuildConfiguration configuration, AggregationPolicy policy) {
    this.testTarget = target;
    this.policy = policy;
    // And create an empty summary suitable for incremental analysis.
    // Also has the nice side effect of mapping labels to RuleConfiguredTargets.
    this.summary = TestSummary.newBuilder();
    this.summary.setTarget(target);
    if (configuration != null) {
      // This can be null for testing.
      this.summary.setConfiguration(configuration);
    }
    this.summary.setStatus(BlazeTestStatus.NO_STATUS);
    this.remainingRuns = new HashSet<>(TestProvider.getTestStatusArtifacts(target));
  }

  /**
   * Records a new test run result and incrementally updates the target status. This event is sent
   * upon completion of executed test runs.
   */
  synchronized void testEvent(TestResult result) {
    ActionOwner testOwner = result.getTestAction().getOwner();
    ConfiguredTargetKey targetLabel =
        ConfiguredTargetKey.of(testOwner.getLabel(), result.getTestAction().getConfiguration());
    Preconditions.checkArgument(targetLabel.equals(asKey(testTarget)));
    // Record the result first; duplicates indicate a protocol violation upstream.
    Preconditions.checkState(
        statusMap.put(result.getTestStatusArtifact(), result) == null,
        "Duplicate result reported for an individual test shard");
    // If a test result was cached, then post the cached attempts to the event bus.
    if (result.isCached()) {
      for (TestAttempt attempt : result.getCachedTestAttempts()) {
        policy.eventBus.post(attempt);
      }
    }
    TestSummary finalTestSummary = null;
    Preconditions.checkNotNull(summary);
    if (!remainingRuns.remove(result.getTestStatusArtifact())) {
      // This can happen if a buildCompleteEvent() was processed before this event reached us.
      // This situation is likely to happen if --notest_keep_going is set with multiple targets.
      return;
    }
    incrementalAnalyze(result);
    // If all runs are processed, the target is finished and ready to report.
    if (remainingRuns.isEmpty()) {
      finalTestSummary = summary.build();
    }
    // Report finished targets.
    if (finalTestSummary != null) {
      policy.eventBus.post(finalTestSummary);
    }
  }

  /**
   * Marks the target as failed (e.g. failed to build) and posts its final summary, unless the
   * target has already been fully reported.
   */
  synchronized void targetFailure(boolean blazeHalted, boolean skipTargetsOnFailure) {
    if (remainingRuns.isEmpty()) {
      // Blaze does not guarantee that BuildResult.getSuccessfulTargets() and posted TestResult
      // events are in sync. Thus, it is possible that a test event was posted, but the target is
      // not present in the set of successful targets.
      return;
    }
    markUnbuilt(blazeHalted, skipTargetsOnFailure);
    // These are never going to run; removing them marks the target complete.
    remainingRuns.clear();
    policy.eventBus.post(summary.build());
  }

  /** Returns the known aggregate results for the given target at the current moment. */
  synchronized TestSummary.Builder getCurrentSummaryForTesting() {
    return summary;
  }

  /**
   * Returns all test status artifacts associated with a given target whose runs have yet to finish.
   */
  synchronized Collection<Artifact> getIncompleteRunsForTesting() {
    return ImmutableSet.copyOf(remainingRuns);
  }

  /** Returns an immutable snapshot of the per-artifact result map (test-only accessor). */
  synchronized Map<Artifact, TestResult> getStatusMapForTesting() {
    return ImmutableMap.copyOf(statusMap);
  }

  /** Builds the ConfiguredTargetKey used to check that an incoming result belongs to this target. */
  private static ConfiguredTargetKey asKey(ConfiguredTarget target) {
    return ConfiguredTargetKey.of(
        // A test is never in the host configuration.
        AliasProvider.getDependencyLabel(target),
        target.getConfigurationKey(),
        /*isHostConfiguration=*/ false);
  }

  // Statuses are ordered by severity via their proto number; the more severe one wins.
  private static BlazeTestStatus aggregateStatus(BlazeTestStatus status, BlazeTestStatus other) {
    return status.getNumber() > other.getNumber() ? status : other;
  }

  /**
   * Helper for differential analysis which aggregates the TestSummary for an individual target,
   * reporting runs on the EventBus if necessary.
   */
  synchronized TestSummary aggregateAndReportSummary(boolean skipTargetsOnFailure) {
    // If already reported by the listener, no work remains for this target.
    if (remainingRuns.isEmpty()) {
      return summary.build();
    }
    // We will get back multiple TestResult instances if test had to be retried several
    // times before passing. Sharding and multiple runs of the same test without retries
    // will be represented by separate artifacts and will produce exactly one TestResult.
    for (Artifact testStatus : TestProvider.getTestStatusArtifacts(testTarget)) {
      // When a build is interrupted ( eg. a broken target with --nokeep_going ) runResult could
      // be null for an unrelated test because we were not able to even try to execute the test.
      // In that case, for tests that were previously passing we return null ( == NO STATUS),
      // because checking if the cached test target is up-to-date would require running the
      // dependency checker transitively.
      TestResult runResult = statusMap.get(testStatus);
      boolean isIncompleteRun = remainingRuns.contains(testStatus);
      if (runResult == null) {
        markIncomplete(skipTargetsOnFailure);
      } else if (isIncompleteRun) {
        incrementalAnalyze(runResult);
      }
    }
    // The target was not posted by the listener and must be posted now.
    TestSummary result = summary.build();
    policy.eventBus.post(result);
    return result;
  }

  /**
   * Incrementally updates a TestSummary given an existing summary and a new TestResult. Only call
   * on built targets.
   *
   * @param result New test result to aggregate into the summary.
   */
  synchronized void incrementalAnalyze(TestResult result) {
    // Cache retrieval should have been performed already.
    Preconditions.checkNotNull(result);
    TestSummary existingSummary = Preconditions.checkNotNull(summary.peek());
    BlazeTestStatus status = existingSummary.getStatus();
    int numCached = existingSummary.numCached();
    int numLocalActionCached = existingSummary.numLocalActionCached();
    // If a test was neither cached locally nor remotely we say action was taken.
    if (!(result.isCached() || result.getData().getRemotelyCached())) {
      summary.setActionRan(true);
    } else {
      numCached++;
    }
    if (result.isCached()) {
      numLocalActionCached++;
    }
    Path coverageData = result.getCoverageData();
    if (coverageData != null) {
      summary.addCoverageFiles(ImmutableList.of(coverageData));
    }
    TransitiveInfoCollection target = existingSummary.getTarget();
    Preconditions.checkNotNull(target, "The existing TestSummary must be associated with a target");
    TestParams testParams = target.getProvider(TestProvider.class).getTestParams();
    if (!testParams.runsDetectsFlakes()) {
      // Simple mode: each run's status is folded directly into the target status.
      status = aggregateStatus(status, result.getData().getStatus());
    } else {
      // Flake-detection mode: per-shard statuses are collected and only judged once all
      // runs-per-test results for that shard have arrived.
      int shardNumber = result.getShardNum();
      int runsPerTestForLabel = testParams.getRuns();
      List<BlazeTestStatus> singleShardStatuses =
          summary.addShardStatus(shardNumber, result.getData().getStatus());
      if (singleShardStatuses.size() == runsPerTestForLabel) {
        BlazeTestStatus shardStatus = BlazeTestStatus.NO_STATUS;
        int passes = 0;
        for (BlazeTestStatus runStatusForShard : singleShardStatuses) {
          shardStatus = aggregateStatus(shardStatus, runStatusForShard);
          if (TestResult.isBlazeTestStatusPassed(runStatusForShard)) {
            passes++;
          }
        }
        // Under the RunsPerTestDetectsFlakes option, return flaky if 1 <= p < n shards pass.
        // If all results pass or fail, aggregate the passing/failing shardStatus.
        if (passes == 0 || passes == runsPerTestForLabel) {
          status = aggregateStatus(status, shardStatus);
        } else {
          status = aggregateStatus(status, BlazeTestStatus.FLAKY);
        }
      }
    }
    if (result.getData().hasPassedLog()) {
      summary.addPassedLog(result.getTestLogPath().getRelative(result.getData().getPassedLog()));
    }
    for (String path : result.getData().getFailedLogsList()) {
      summary.addFailedLog(result.getTestLogPath().getRelative(path));
    }
    summary
        .addTestTimes(result.getData().getTestTimesList())
        .mergeTiming(
            result.getData().getStartTimeMillisEpoch(), result.getData().getRunDurationMillis())
        .addWarnings(result.getData().getWarningList())
        .collectFailedTests(result.getData().getTestCase())
        .countTotalTestCases(result.getData().getTestCase())
        .setRanRemotely(result.getData().getIsRemoteStrategy());
    List<String> warnings = new ArrayList<>();
    if (status == BlazeTestStatus.PASSED
        && shouldEmitTestSizeWarningInSummary(
            policy.testVerboseTimeoutWarnings,
            warnings,
            result.getData().getTestProcessTimesList(),
            target)) {
      summary.setWasUnreportedWrongSize(true);
    }
    summary
        .setStatus(status)
        .setNumCached(numCached)
        .setNumLocalActionCached(numLocalActionCached)
        .addWarnings(warnings);
  }

  /** Downgrades the target's status to reflect a run that never produced a result. */
  private void markIncomplete(boolean skipTargetsOnFailure) {
    // TODO(bazel-team): (2010) Make NotRunTestResult support both tests failed to built and
    // tests with no status and post it here.
    TestSummary peekSummary = summary.peek();
    BlazeTestStatus status = peekSummary.getStatus();
    if (skipTargetsOnFailure) {
      status = BlazeTestStatus.NO_STATUS;
    } else if (status != BlazeTestStatus.NO_STATUS) {
      status = aggregateStatus(status, BlazeTestStatus.INCOMPLETE);
    }
    summary.setStatus(status);
  }

  /** Sets the final status for a target that will never run because it (or Blaze) failed first. */
  private void markUnbuilt(boolean blazeHalted, boolean skipTargetsOnFailure) {
    BlazeTestStatus runStatus =
        blazeHalted
            ? BlazeTestStatus.BLAZE_HALTED_BEFORE_TESTING
            : (policy.testCheckUpToDate || skipTargetsOnFailure
                ? BlazeTestStatus.NO_STATUS
                : BlazeTestStatus.FAILED_TO_BUILD);
    summary.setStatus(runStatus);
  }

  /**
   * Checks whether the specified test timeout could have been smaller or is too small and adds a
   * warning message if verbose is true.
   *
   * <p>Returns true if there was a test with the wrong timeout, but it was not reported.
   */
  private static boolean shouldEmitTestSizeWarningInSummary(
      boolean verbose,
      List<String> warnings,
      List<Long> testTimes,
      TransitiveInfoCollection target) {
    TestTimeout specifiedTimeout =
        target.getProvider(TestProvider.class).getTestParams().getTimeout();
    long maxTimeOfShard = 0;
    for (Long shardTime : testTimes) {
      if (shardTime != null) {
        maxTimeOfShard = Math.max(maxTimeOfShard, shardTime);
      }
    }
    int maxTimeInSeconds = (int) (maxTimeOfShard / 1000);
    if (!specifiedTimeout.isInRangeFuzzy(maxTimeInSeconds)) {
      TestTimeout expectedTimeout = TestTimeout.getSuggestedTestTimeout(maxTimeInSeconds);
      TestSize expectedSize = TestSize.getTestSize(expectedTimeout);
      if (verbose) {
        StringBuilder builder =
            new StringBuilder(
                String.format(
                    "%s: Test execution time (%.1fs excluding execution overhead) outside of "
                        + "range for %s tests. Consider setting timeout=\"%s\"",
                    AliasProvider.getDependencyLabel(target),
                    maxTimeOfShard / 1000.0,
                    specifiedTimeout.prettyPrint(),
                    expectedTimeout));
        if (expectedSize != null) {
          builder.append(" or size=\"").append(expectedSize).append("\"");
        }
        builder.append(".");
        warnings.add(builder.toString());
        return false;
      }
      return true;
    } else {
      return false;
    }
  }
}
| |
/*
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.morphline.avro;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericContainer;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericEnumSymbol;
import org.apache.avro.generic.GenericFixed;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.util.Utf8;
import org.kitesdk.morphline.api.Command;
import org.kitesdk.morphline.api.CommandBuilder;
import org.kitesdk.morphline.api.MorphlineCompilationException;
import org.kitesdk.morphline.api.MorphlineContext;
import org.kitesdk.morphline.api.MorphlineRuntimeException;
import org.kitesdk.morphline.api.Record;
import org.kitesdk.morphline.base.AbstractCommand;
import org.kitesdk.morphline.base.Configs;
import org.kitesdk.morphline.base.Fields;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.typesafe.config.Config;
/**
 * Command that uses zero or more avro path expressions to extract values from an Avro object.
 *
 * The Avro input object is expected to be contained in the {@link Fields#ATTACHMENT_BODY}
 *
 * Each expression consists of a record output field name (on the left side of the colon ':') as
 * well as zero or more path steps (on the right hand side), each path step separated by a '/'
 * slash. Avro arrays are traversed with the '[]' notation.
 *
 * The result of a path expression is a list of objects, each of which is added to the given record
 * output field.
 *
 * The path language supports all Avro concepts, including nested structures, records, arrays, maps,
 * unions, etc, as well as a flatten option that collects the primitives in a subtree into a flat
 * list.
 */
public final class ExtractAvroPathsBuilder implements CommandBuilder {

  @Override
  public Collection<String> getNames() {
    return Collections.singletonList("extractAvroPaths");
  }

  @Override
  public Command build(Config config, Command parent, Command child, MorphlineContext context) {
    return new ExtractAvroPaths(this, config, parent, child, context);
  }

  ///////////////////////////////////////////////////////////////////////////////
  // Nested classes:
  ///////////////////////////////////////////////////////////////////////////////
  private static final class ExtractAvroPaths extends AbstractCommand {

    // If true, subtrees reached by a path are flattened into a list of primitives.
    private final boolean flatten;
    // Maps each output field name to its parsed, ordered list of path steps.
    private final Map<String, Collection<String>> stepMap;

    // Path step that traverses the elements of an Avro array.
    private static final String ARRAY_TOKEN = "[]";

    public ExtractAvroPaths(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
      super(builder, config, parent, child, context);
      ListMultimap<String, String> stepMultiMap = ArrayListMultimap.create();
      this.flatten = getConfigs().getBoolean(config, "flatten", true);
      Config paths = getConfigs().getConfig(config, "paths");
      for (Map.Entry<String, Object> entry : new Configs().getEntrySet(paths)) {
        String fieldName = entry.getKey();
        String path = entry.getValue().toString().trim();
        if (path.contains("//")) {
          throw new MorphlineCompilationException("No support for descendant axis available yet", config);
        }
        // Strip a single leading and trailing slash so split() yields clean steps.
        if (path.startsWith("/")) {
          path = path.substring(1);
        }
        if (path.endsWith("/")) {
          path = path.substring(0, path.length() - 1);
        }
        path = path.trim();
        for (String step : path.split("/")) {
          step = step.trim();
          // "foo[]" is stored as two steps: the field name "foo" followed by the array token.
          if (step.length() > ARRAY_TOKEN.length() && step.endsWith(ARRAY_TOKEN)) {
            step = step.substring(0, step.length() - ARRAY_TOKEN.length());
            stepMultiMap.put(fieldName, normalize(step));
            stepMultiMap.put(fieldName, ARRAY_TOKEN);
          } else {
            stepMultiMap.put(fieldName, normalize(step));
          }
        }
      }
      this.stepMap = stepMultiMap.asMap();
      LOG.debug("stepMap: {}", stepMap);
      validateArguments();
    }

    // Interns the array token so all occurrences share one instance; comparisons below use
    // equals(), so this is purely a minor memory/speed optimization.
    private String normalize(String step) {
      return ARRAY_TOKEN.equals(step) ? ARRAY_TOKEN : step;
    }

    @Override
    protected boolean doProcess(Record inputRecord) {
      // Preconditions.checkState(ReadAvroBuilder.AVRO_MEMORY_MIME_TYPE.equals(inputRecord.getFirstValue(Fields.ATTACHMENT_MIME_TYPE)));
      GenericContainer datum = (GenericContainer) inputRecord.getFirstValue(Fields.ATTACHMENT_BODY);
      Preconditions.checkNotNull(datum);
      Preconditions.checkNotNull(datum.getSchema());
      Record outputRecord = inputRecord.copy();
      for (Map.Entry<String, Collection<String>> entry : stepMap.entrySet()) {
        String fieldName = entry.getKey();
        // Values are Lists because stepMap is the asMap() view of an ArrayListMultimap.
        List<String> steps = (List<String>) entry.getValue();
        extractPath(datum, datum.getSchema(), fieldName, steps, outputRecord, 0);
      }
      // pass record to next command in chain:
      return getChild().process(outputRecord);
    }

    /**
     * Recursively walks {@code datum} along {@code steps}, starting at {@code level}, and adds
     * every value reached by the full path to {@code record} under {@code fieldName}.
     */
    @SuppressWarnings("unchecked")
    private void extractPath(Object datum, Schema schema, String fieldName, List<String> steps, Record record, int level) {
      if (level >= steps.size()) {
        return;
      }
      boolean isLeaf = (level + 1 == steps.size());
      String step = steps.get(level);
      if (ARRAY_TOKEN.equals(step)) {
        if (schema.getType() == Type.ARRAY) {
          if (isLeaf) {
            resolve(datum, schema, record, fieldName);
          } else {
            for (Object element : (Collection<?>) datum) {
              extractPath(element, schema.getElementType(), fieldName, steps, record, level + 1);
            }
          }
        } else if (schema.getType() == Type.UNION) {
          // Resolve the union branch for this datum and retry the same step against it.
          int index = GenericData.get().resolveUnion(schema, datum);
          extractPath(datum, schema.getTypes().get(index), fieldName, steps, record, level);
        }
      } else {
        if (schema.getType() == Type.RECORD) {
          GenericRecord genericAvroRecord = (GenericRecord) datum;
          Object value = genericAvroRecord.get(step);
          if (value != null) {
            Schema childSchema = schema.getField(step).schema();
            if (isLeaf) {
              resolve(value, childSchema, record, fieldName);
            } else {
              extractPath(value, childSchema, fieldName, steps, record, level + 1);
            }
          }
        } else if (schema.getType() == Type.MAP) {
          Map<CharSequence, ?> map = (Map<CharSequence, ?>) datum;
          Object value = map.get(step);
          if (value == null) {
            value = map.get(new Utf8(step)); // TODO: fix performance - maybe fix polymorphic weirdness in upstream avro?
          }
          if (value != null) {
            Schema childSchema = schema.getValueType();
            if (isLeaf) {
              resolve(value, childSchema, record, fieldName);
            } else {
              extractPath(value, childSchema, fieldName, steps, record, level + 1);
            }
          }
        } else if (schema.getType() == Type.UNION) {
          int index = GenericData.get().resolveUnion(schema, datum);
          //String typeName = schema.getTypes().get(index).getName();
          extractPath(datum, schema.getTypes().get(index), fieldName, steps, record, level);
        }
      }
    }

    /**
     * Adds the value reached by a complete path to {@code record} under {@code fieldName},
     * converting Avro-specific types (enum symbols, fixed, bytes, Utf8) to plain Java values.
     * If {@code flatten} is set, the whole subtree is flattened into the field instead.
     */
    private void resolve(Object datum, Schema schema, Record record, String fieldName) {
      if (datum == null) {
        return;
      }
      if (flatten) {
        flatten(datum, schema, record.get(fieldName));
        return;
      }
      // RECORD, ENUM, ARRAY, MAP, UNION, FIXED, STRING, BYTES, INT, LONG, FLOAT,
      // DOUBLE, BOOLEAN, NULL
      switch (schema.getType()) {
        case ENUM: {
          GenericEnumSymbol symbol = (GenericEnumSymbol) datum;
          record.put(fieldName, symbol.toString());
          break;
        }
        case UNION: {
          record.put(fieldName, normalizeUtf8(datum));
          break;
        }
        case FIXED: {
          GenericFixed fixed = (GenericFixed) datum;
          record.put(fieldName, fixed.bytes());
          break;
        }
        case BYTES: {
          record.put(fieldName, toByteArray((ByteBuffer) datum));
          break;
        }
        case STRING: {
          record.put(fieldName, datum.toString());
          break;
        }
        case NULL: {
          break;
        }
        // These types are stored as-is.
        case RECORD:
        case ARRAY:
        case MAP:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case BOOLEAN: {
          record.put(fieldName, datum);
          break;
        }
        default:
          throw new MorphlineRuntimeException("Unknown Avro schema type: " + schema.getType());
      }
    }

    /** Converts Avro's Utf8 to a plain String; any other value passes through unchanged. */
    private Object normalizeUtf8(Object datum) {
      if (datum instanceof Utf8) {
        return ((Utf8) datum).toString();
      } else {
        return datum;
      }
    }

    /** Copies a ByteBuffer's remaining bytes into a new array without advancing its position. */
    private static byte[] toByteArray(ByteBuffer buf) {
      int pos = buf.position();
      byte[] bytes = new byte[buf.remaining()];
      buf.get(bytes);
      buf.position(pos); // undo relative read
      return bytes;
    }

    /**
     * Recursively collects all primitive (leaf) values of the given subtree into {@code list},
     * descending through records, arrays, maps and unions in declaration/iteration order.
     */
    @SuppressWarnings("unchecked")
    private void flatten(Object datum, Schema schema, List list) {
      if (datum == null) {
        return;
      }
      // RECORD, ENUM, ARRAY, MAP, UNION, FIXED, STRING, BYTES, INT, LONG, FLOAT,
      // DOUBLE, BOOLEAN, NULL
      switch (schema.getType()) {
        case RECORD: {
          IndexedRecord avroRecord = (IndexedRecord) datum;
          for (Field field : schema.getFields()) {
            flatten(avroRecord.get(field.pos()), field.schema(), list);
          }
          break;
        }
        case ENUM: {
          GenericEnumSymbol symbol = (GenericEnumSymbol) datum;
          list.add(symbol.toString());
          break;
        }
        case ARRAY: {
          for (Object element : (Collection<?>) datum) {
            flatten(element, schema.getElementType(), list);
          }
          break;
        }
        case MAP: {
          Map<CharSequence, ?> map = (Map<CharSequence, ?>) datum;
          for (Map.Entry<CharSequence, ?> entry : map.entrySet()) {
            flatten(entry.getValue(), schema.getValueType(), list);
          }
          break;
        }
        case UNION: {
          int index = GenericData.get().resolveUnion(schema, datum);
          flatten(datum, schema.getTypes().get(index), list);
          break;
        }
        case FIXED: {
          GenericFixed fixed = (GenericFixed) datum;
          list.add(fixed.bytes());
          break;
        }
        case BYTES: {
          list.add(toByteArray((ByteBuffer) datum));
          break;
        }
        case STRING: {
          list.add(datum.toString());
          break;
        }
        case NULL: {
          break;
        }
        // Primitive values are added as-is.
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case BOOLEAN: {
          list.add(datum);
          break;
        }
        default:
          throw new MorphlineRuntimeException("Unknown Avro schema type: " + schema.getType());
      }
    }
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.internal.psiView.stubtree;
import com.intellij.internal.psiView.PsiViewerDialog;
import com.intellij.internal.psiView.ViewerPsiBasedTree;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.actionSystem.impl.ActionToolbarImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.StubBuilder;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.impl.source.PsiFileWithStubSupport;
import com.intellij.psi.stubs.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.IStubFileElementType;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.JBSplitter;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.tree.AsyncTreeModel;
import com.intellij.ui.tree.StructureTreeModel;
import com.intellij.ui.tree.TreePathUtil;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.containers.BidirectionalMap;
import com.intellij.util.indexing.FileContent;
import com.intellij.util.indexing.FileContentImpl;
import com.intellij.util.indexing.IndexingDataKeys;
import com.intellij.util.ui.StatusText;
import com.intellij.util.ui.components.BorderLayoutPanel;
import com.intellij.util.ui.tree.AbstractTreeModel;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import static com.intellij.internal.psiView.PsiViewerDialog.initTree;
public class StubViewerPsiBasedTree implements ViewerPsiBasedTree {
  public static final Logger LOG = Logger.getInstance(PsiViewerDialog.class);

  /** Tree model currently backing {@link #myStubTree}; null until a stub tree has been built. */
  @Nullable
  private AbstractTreeModel myTreeModel;
  @NotNull
  private final Tree myStubTree;
  @NotNull
  private final StubDetailsViewer myStubDetailsViewer;
  /** Lazily built UI container; created on first {@link #getComponent()} call. */
  @Nullable
  private JPanel myPanel;
  @NotNull
  private final Project myProject;
  @NotNull
  private final PsiTreeUpdater myUpdater;
  /** AST node -> stub built for it. Replaced wholesale on every reload (hence volatile). */
  @NotNull
  private volatile Map<ASTNode, StubElement<?>> myNodeToStubs = new BidirectionalMap<>();
  /** Parent disposable for the current tree model; recreated after each dispose in {@link #resetStubTree()}. */
  Disposable myTreeModelDisposable = Disposer.newDisposable();

  public StubViewerPsiBasedTree(@NotNull Project project, @NotNull PsiTreeUpdater updater) {
    myProject = project;
    myUpdater = updater;
    myStubTree = new Tree(new DefaultTreeModel(new DefaultMutableTreeNode()));
    myStubDetailsViewer = new StubDetailsViewer(this);
  }

  /**
   * Discards any previously shown stub tree and rebuilds it for the given root element.
   *
   * @param rootRootElement the PSI file (or element) to build stubs for; may be null
   * @param text            the text that was parsed to obtain {@code rootRootElement}
   */
  @Override
  public void reloadTree(@Nullable PsiElement rootRootElement, @NotNull String text) {
    resetStubTree();
    buildStubTree(rootRootElement, text);
  }

  /** Clears the tree, disposes the async model, resets the node→stub cache and removes our selection listener. */
  private void resetStubTree() {
    myStubTree.removeAll();
    if (myTreeModel != null) {
      Disposer.dispose(myTreeModelDisposable);
      myTreeModel = null;
      // A disposed Disposable cannot be reused, so allocate a fresh one for the next model.
      myTreeModelDisposable = Disposer.newDisposable();
    }
    myNodeToStubs = new BidirectionalMap<>();
    ViewerPsiBasedTree.removeListenerOfClass(myStubTree, StubTreeSelectionListener.class);
  }

  /**
   * Returns (building on first call) the panel with the stub tree, the details splitter
   * and the toolbar exposing the details-viewer toggle action.
   */
  @NotNull
  @Override
  public JComponent getComponent() {
    if (myPanel != null) return myPanel;
    JBSplitter splitter = new JBSplitter("StubViewer.showPreviewDetails.proportion", 0.7f);
    splitter.setFirstComponent(ScrollPaneFactory.createScrollPane(myStubTree, true));
    AnAction action = myStubDetailsViewer.addComponent(splitter);
    ActionToolbarImpl toolbar = new ActionToolbarImpl("Stub Viewer", new DefaultActionGroup(action), false);
    toolbar.setTargetComponent(splitter);
    BorderLayoutPanel panel = new BorderLayoutPanel();
    panel.addToCenter(splitter).addToRight(toolbar).setBorder(IdeBorderFactory.createBorder());
    initTree(myStubTree);
    myPanel = panel;
    return panel;
  }

  @Override
  public boolean isFocusOwner() {
    return myStubTree.isFocusOwner();
  }

  @Override
  public void focusTree() {
    IdeFocusManager.getInstance(myProject).requestFocus(myStubTree, true);
  }

  /**
   * Builds the stub tree for {@code rootElement} and installs it in the viewer.
   * Shows an explanatory empty-text message when stubs cannot be built
   * (non-file elements, code fragments, languages without stub support).
   */
  private synchronized void buildStubTree(@Nullable PsiElement rootElement, @NotNull String textToParse) {
    if (rootElement == null) {
      myStubTree.setRootVisible(false);
      return;
    }
    if (!(rootElement instanceof PsiFileWithStubSupport)) {
      myStubTree.setRootVisible(false);
      StatusText text = myStubTree.getEmptyText();
      if (rootElement instanceof PsiFile) {
        text.setText("No stubs for " + rootElement.getLanguage().getDisplayName());
      }
      else {
        text.setText("Cannot build stub tree for code fragments");
      }
      return;
    }
    Stub stub = buildStubForElement(myProject, rootElement, textToParse);
    if (stub instanceof PsiFileStub) {
      PsiFileWithStubSupport file = (PsiFileWithStubSupport)rootElement;
      final StubTreeNode rootNode = new StubTreeNode((StubElement<?>)stub, null);
      StructureTreeModel<?> treeModel = new StructureTreeModel<>(new StubTreeStructure(rootNode), myTreeModelDisposable);
      myTreeModel = new AsyncTreeModel(treeModel, myTreeModelDisposable);
      myStubTree.setModel(myTreeModel);
      fillPsiToStubCache(file, (PsiFileStub<?>)stub);
      myStubTree.setRootVisible(true);
      myStubTree.expandRow(0);
      myStubTree.addTreeSelectionListener(new StubTreeSelectionListener());
      treeModel.invalidate();
    }
    else {
      myStubTree.setRootVisible(false);
      StatusText text = myStubTree.getEmptyText();
      text.setText("Cannot build stubs for " + rootElement.getLanguage().getDisplayName());
    }
  }

  @Override
  public void dispose() {
    resetStubTree();
  }

  /**
   * Obtains a stub tree for the element: first from the file's existing stub tree,
   * then from the language's {@link StubBuilder}, and finally by re-indexing a
   * light copy of the text as a last resort.
   *
   * @return the root stub, or null if none could be built
   */
  @Nullable
  private static Stub buildStubForElement(Project project, PsiElement rootElement, @NotNull String textToParse) {
    Stub stub = null;
    PsiFileWithStubSupport psiFile = (PsiFileWithStubSupport)rootElement;
    StubTree tree = psiFile.getStubTree();
    if (tree != null) {
      stub = tree.getRoot();
    }
    else if (rootElement instanceof PsiFileImpl) {
      StubBuilder stubBuilder = getStubBuilder((PsiFileImpl)rootElement);
      stub = stubBuilder == null ? null : stubBuilder.buildStubTree((PsiFile)rootElement);
    }
    if (stub == null) {
      LightVirtualFile file = new LightVirtualFile("stub", rootElement.getLanguage(), textToParse);
      final FileContent fc;
      try {
        fc = FileContentImpl.createByFile(file, project);
        fc.putUserData(IndexingDataKeys.PSI_FILE, psiFile);
        stub = StubTreeBuilder.buildStubTree(fc);
      }
      catch (IOException e) {
        LOG.warn(e.getMessage(), e);
      }
    }
    return stub;
  }

  @Nullable
  private static StubBuilder getStubBuilder(@NotNull PsiFileImpl rootElement) {
    IStubFileElementType<?> builder = rootElement.getElementTypeForStubBuilder();
    return builder == null ? null : builder.getBuilder();
  }

  /** Selects the stub corresponding to the given PSI element, or clears the selection if it has no stub. */
  @Override
  public void selectNodeFromPsi(@Nullable PsiElement element) {
    if (myTreeModel == null || element == null) return;
    final PsiFile file = element.getContainingFile();
    if (!(file instanceof PsiFileWithStubSupport)) return;
    final DefaultMutableTreeNode rootNode = getRoot();
    if (rootNode == null) return;
    StubElement<?> stubElement = myNodeToStubs.get(element.getNode());
    if (stubElement != null) {
      selectStubElement(stubElement);
    }
    else {
      myStubTree.clearSelection();
    }
  }

  /** Walks the visible tree looking for the node whose user object wraps exactly this stub (identity compare). */
  private void selectStubElement(StubElement<?> stubElement) {
    TreeNode node = TreeUtil.treeNodeTraverser(getRoot()).traverse().find(
      (treeNode) -> treeNode instanceof DefaultMutableTreeNode &&
                    ((StubTreeNode)((DefaultMutableTreeNode)treeNode).getUserObject()).getStub() == stubElement
    );
    if (node != null) {
      TreePath path = TreePathUtil.pathToTreeNode(node);
      myStubTree.getSelectionModel().setSelectionPath(path);
    }
  }

  /** Mirrors stub-tree selection back into the PSI tree and the details viewer. */
  private class StubTreeSelectionListener implements TreeSelectionListener {
    StubTreeSelectionListener() {
    }

    @Override
    public void valueChanged(TreeSelectionEvent e) {
      if (myTreeModel == null) return;
      final StubTreeNode rootNode = (StubTreeNode)getRoot().getUserObject();
      StubElement<?> topLevelStub = rootNode == null ? null : rootNode.getStub();
      if (!(topLevelStub instanceof PsiFileStub)) return;
      StubElement<?> stub = getSelectedStub();
      if (stub == null) return;
      PsiElement result = getPsiElementForStub(stub);
      if (result != null) {
        // Only scroll the PSI tree to the range when the user is actively working in the stub tree.
        myUpdater.updatePsiTree(result, myStubTree.hasFocus() ? result.getTextRange() : null);
        myStubDetailsViewer.valueChanged(stub);
      }
    }
  }

  @Nullable StubElement<?> getSelectedStub() {
    TreePath selectionPath = myStubTree.getSelectionPath();
    return selectionPath != null
           ? ((StubTreeNode)((DefaultMutableTreeNode)selectionPath.getLastPathComponent()).getUserObject()).getStub()
           : null;
  }

  private DefaultMutableTreeNode getRoot() {
    return (DefaultMutableTreeNode)myStubTree.getModel().getRoot();
  }

  /** Reverse lookup in {@link #myNodeToStubs}: the AST node mapped to this stub, resolved to its PSI element. */
  public PsiElement getPsiElementForStub(StubElement<?> stub) {
    Ref<PsiElement> result = Ref.create();
    myNodeToStubs.forEach((key, value) -> {
      if (value == stub) {
        result.set(key.getPsi());
      }
    });
    return result.get();
  }

  private void fillPsiToStubCache(@NotNull PsiFileWithStubSupport rootElement, @NotNull PsiFileStub<?> rootStub) {
    fillTreeForStub(rootElement, new StubTree(rootStub));
  }

  /**
   * Pairs each AST node of the file with its stub by walking both structures in parallel,
   * filling {@link #myNodeToStubs}. Logs an error if stubs remain unconsumed afterwards.
   */
  public void fillTreeForStub(@NotNull PsiFileWithStubSupport file, @NotNull StubTree tree) {
    StubBuilder builder = file instanceof PsiFileImpl ? getStubBuilder(((PsiFileImpl)file)) : null;
    if (builder == null) return;
    final Iterator<StubElement<?>> stubs = tree.getPlainList().iterator();
    final StubElement<?> root = stubs.next();
    final ASTNode ast = file.getNode();
    myNodeToStubs.put(ast, root);
    findTreeForStub(builder, ast, stubs);
    if (stubs.hasNext()) {
      LOG.error("Stub mismatch, unprocessed stubs " + stubs.next());
    }
  }

  /** Depth-first walk matching stub-producing AST nodes against the flat stub list. */
  private void findTreeForStub(StubBuilder builder, ASTNode tree, final Iterator<StubElement<?>> stubs) {
    final IElementType type = tree.getElementType();
    if (type instanceof IStubElementType && ((IStubElementType<?, ?>)type).shouldCreateStub(tree)) {
      if (!stubs.hasNext()) {
        LOG.error("Stub mismatch, " + type);
        // FIX: bail out instead of falling through to stubs.next(), which would
        // throw NoSuchElementException on an exhausted iterator.
        return;
      }
      final StubElement<?> curStub = stubs.next();
      myNodeToStubs.put(tree, curStub);
    }
    for (ASTNode node : tree.getChildren(null)) {
      if (!builder.skipChildProcessingWhenBuildingStubs(tree, node)) {
        findTreeForStub(builder, node, stubs);
      }
    }
  }
}
| |
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.rdp.custom;
import java.io.IOException;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.DefaultChannelFuture;
import org.jboss.netty.channel.DownstreamMessageEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lixia.rdp.HexDump;
import com.lixia.rdp.RdesktopException;
import com.lixia.rdp.RdpPacket_Localised;
public class ISOUtils {
    /* this for the ISO Layer */
    private static final Logger logger = LoggerFactory.getLogger(ISOUtils.class);
    // private static final int CONNECTION_REQUEST = 0xE0;
    // private static final int CONNECTION_CONFIRM = 0xD0;
    // private static final int DISCONNECT_REQUEST = 0x80;
    // X.224 TPDU code for a Data Transfer PDU (DT).
    private static final int DATA_TRANSFER = 0xF0;
    // private static final int ERROR = 0x70;
    public static final int DISCONNECT_REQUEST = 0x80;
    // Version byte written at offset 0 of every outgoing packet header.
    private static final int PROTOCOL_VERSION = 0x03;
    // End-of-transmission marker byte appended after the DT code on send.
    private static final int EOT = 0x80;
    // Shared dumper used to hex-log every packet sent/received.
    private static HexDump dump = new HexDump();

    /**
     * Receive a data transfer message from the server
     *
     * Delegates to {@link #receiveMessage} and rejects any PDU whose type
     * is not DATA_TRANSFER (0xF0).
     *
     * @return Packet containing message (as ISO PDU), or null if no complete
     *         packet could be read
     * @throws Exception
     */
    public static RdpPacket_Localised receive(ChannelHandlerContext context, ChannelBuffer channelBuffer, RDPSession session)
            throws Exception {
        int[] type = new int[1];
        RdpPacket_Localised buffer = receiveMessage(context, type, channelBuffer, session);
        if (buffer == null)
            return null;
        if (type[0] != DATA_TRANSFER) {
            throw new RdesktopException("Expected DT got:" + type[0]);
        }
        return buffer;
    }

    /**
     * Receive a message from the server
     *
     * Reads a 4-byte header first; if the version byte is 3 the remaining
     * length is a big-endian 16-bit field (standard ISO-over-TCP framing —
     * presumably TPKT/RFC 1006; confirm against the protocol spec), otherwise
     * the length is encoded in one byte, or two bytes when the high bit of
     * the first is set. Packets with (version & 3) == 0 are handed to the
     * RDP5 fast-path processor and the loop continues with the next packet.
     *
     * @param type
     *            Array containing message type, stored in type[0]
     * @return Packet object containing data of message
     * @throws Exception
     */
    public static RdpPacket_Localised receiveMessage(ChannelHandlerContext context, int[] type, ChannelBuffer buffer,
            RDPSession session) throws Exception {
        // logger.debug("ISO.receiveMessage");
        RdpPacket_Localised s = null;
        int length, version;
        // logger.debug("buffer:{} ", new Object[] {});
        next_packet: while (true) {
            logger.debug("next_packet");
            // Read the fixed 4-byte header.
            s = tcp_recv(null, 4, buffer);
            if (s == null) {
                return null;
            }
            version = s.get8();
            logger.debug("version: {}", version);
            if (version == 3) {
                s.incrementPosition(1); // pad
                length = s.getBigEndian16();
                logger.debug("length: {}", length);
            } else {
                // Non-version-3 framing: 1-byte length, extended to 2 bytes
                // when the 0x80 flag bit is set.
                length = s.get8();
                if ((length & 0x80) != 0) {
                    length &= ~0x80;
                    length = (length << 8) + s.get8();
                }
                logger.debug("length(version != 3): {}", length);
            }
            logger.debug("read again");
            // Read the remainder of the packet (total length minus the 4-byte header).
            s = tcp_recv(s, length - 4, buffer);
            logger.debug("s==null? {}", String.valueOf(s == null));
            if (s == null)
                return null;
            logger.debug("version & 3: {}", String.valueOf(version & 3));
            if ((version & 3) == 0) {
                // RDP5 fast-path packet: process it inline and keep looping.
                logger.debug("Processing rdp5 packet");
                RDPUtils.rdp5_process(context, s, (version & 0x80) != 0, session);
                logger.debug("Processing rdp5 packet end");
                continue next_packet;
            } else
                break;
        }
        s.get8();
        type[0] = s.get8();
        logger.debug("type[0]: {}", type[0]);
        if (type[0] == DATA_TRANSFER) {
            logger.debug("Data Transfer Packet");
            s.incrementPosition(1); // eot
            return s;
        }
        s.incrementPosition(5); // dst_ref, src_ref, class
        return s;
    }

    /**
     * Receive a specified number of bytes from the server, and store in a
     * packet
     *
     * Note: a new RdpPacket_Localised is allocated on every call; when
     * appending, the contents of {@code p} are copied into the new packet
     * ahead of the freshly read bytes.
     *
     * @param p
     *            Packet to append data to, null results in a new packet being
     *            created
     * @param length
     *            Length of data to read
     * @return Packet containing read data, appended to original data if
     *         provided
     * @throws IOException
     */
    private static RdpPacket_Localised tcp_recv(RdpPacket_Localised p, int length, ChannelBuffer input) throws IOException {
        // logger.debug("ISO.tcp_recv");
        RdpPacket_Localised buffer = null;
        byte[] packet = new byte[length];
        // in.readFully(packet, 0, length);
        logger.debug("buffer size: {}", input.readableBytes());
        logger.debug("read byte length: {}", length);
        input.readBytes(packet, 0, length);
        dump.encode(packet, "RECEIVE" /* System.out */);
        // try{ }
        // catch(IOException e){ logger.warn("IOException: " + e.getMessage());
        // return null; }
        if (p == null) {
            logger.debug("p==null");
            buffer = new RdpPacket_Localised(length);
            buffer.copyFromByteArray(packet, 0, 0, packet.length);
            buffer.markEnd(length);
            buffer.setStart(buffer.getPosition());
        } else {
            logger.debug("p!=null");
            // Grow: copy the old packet first, then append the new bytes after it.
            buffer = new RdpPacket_Localised((p.getEnd() - p.getStart()) + length);
            buffer.copyFromPacket(p, p.getStart(), 0, p.getEnd());
            buffer.copyFromByteArray(packet, 0, p.getEnd(), packet.length);
            buffer.markEnd(p.size() + packet.length);
            buffer.setPosition(p.getPosition());
            buffer.setStart(0);
        }
        return buffer;
    }

    /**
     * Initialise an ISO PDU
     *
     * Reserves 7 bytes at the front for the header later written by
     * {@link #send}: 4 bytes of version/pad/length plus a 3-byte X.224
     * data header.
     *
     * @param length
     *            Desired length of PDU
     * @return Packet configured as ISO PDU, ready to write at higher level
     */
    public static RdpPacket_Localised init(int length) {
        logger.debug("init length: {}", length);
        RdpPacket_Localised data = new RdpPacket_Localised(length + 7);// getMemory(length+7);
        data.incrementPosition(7);
        data.setStart(data.getPosition());
        return data;
    }

    /**
     * Send a packet to the server, wrapped in ISO PDU
     *
     * Rewinds the packet to position 0 and fills in the 7-byte header that
     * {@link #init} reserved, then writes the whole buffer downstream on the
     * channel associated with {@code context}.
     *
     * @param buffer
     *            Packet containing data to send to server
     * @throws RdesktopException
     * @throws IOException
     */
    public static void send(RdpPacket_Localised buffer, ChannelHandlerContext context) throws RdesktopException, IOException {
        if (buffer.getEnd() < 0) {
            throw new RdesktopException("No End Mark!");
        } else {
            int length = buffer.getEnd();
            byte[] packet = new byte[length];
            // RdpPacket data = this.getMemory(length+7);
            buffer.setPosition(0);
            buffer.set8(PROTOCOL_VERSION); // Version
            buffer.set8(0); // reserved
            buffer.setBigEndian16(length); // length of packet
            buffer.set8(2); // length of header
            buffer.set8(DATA_TRANSFER);
            buffer.set8(EOT);
            buffer.copyToByteArray(packet, 0, 0, buffer.getEnd());
            dump.encode(packet, "SEND"/* System.out */);
            // buffer.copyToByteArray(packet, 0, 0, packet.length);
            logger.debug("packet length: {}", length);
            ChannelBuffer channelBuffer = ChannelBuffers.copiedBuffer(packet);
            DownstreamMessageEvent messageEvent = new DownstreamMessageEvent(context.getChannel(), new DefaultChannelFuture(
                    context.getChannel(), false), channelBuffer, context.getChannel().getRemoteAddress());
            // new DownstreamMessageEvent(context.getChannel(), evt.getFuture(),
            // channelBuffer, evt.getChannel().getRemoteAddress());
            context.sendDownstream(messageEvent);
        }
    }

    /**
     * Builds and writes an 11-byte control PDU (e.g. a disconnect request)
     * directly to the given channel.
     *
     * @param type the TPDU code to send, e.g. {@link #DISCONNECT_REQUEST}
     * @param toRDPChanel the channel to write the packet to
     * @throws IOException
     */
    public static void sendMessage(int type, Channel toRDPChanel) throws IOException {
        RdpPacket_Localised buffer = new RdpPacket_Localised(11);// getMemory(11);
        byte[] packet = new byte[11];
        buffer.set8(PROTOCOL_VERSION); // send Version Info
        buffer.set8(0); // reserved byte
        buffer.setBigEndian16(11); // Length
        buffer.set8(6); // Length of Header
        buffer.set8(type); // where code = CR or DR
        buffer.setBigEndian16(0); // Destination reference ( 0 at CC and DR)
        buffer.setBigEndian16(0); // source reference should be a reasonable
        // address we use 0
        buffer.set8(0); // service class
        buffer.copyToByteArray(packet, 0, 0, packet.length);
        ChannelBuffer channelBuffer = ChannelBuffers.copiedBuffer(packet);
        // DownstreamMessageEvent messageEvent = new
        // DownstreamMessageEvent(toRDPChanel, new DefaultChannelFuture(
        // toRDPChanel, false), channelBuffer, toRDPChanel.getRemoteAddress());
        // new DownstreamMessageEvent(context.getChannel(), evt.getFuture(),
        // channelBuffer, evt.getChannel().getRemoteAddress());
        toRDPChanel.write(channelBuffer);
        logger.debug("send disconnected");
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.2-hudson-jaxb-ri-2.2-63-
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.08.05 at 03:52:15 PM MESZ
//
package org.iso.mpeg.dash;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
import org.w3c.dom.Element;
/**
* <p>Java class for ContentComponentType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ContentComponentType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Accessibility" type="{urn:mpeg:dash:schema:mpd:2011}DescriptorType" maxOccurs="unbounded" minOccurs="0"/>
* <element name="Role" type="{urn:mpeg:dash:schema:mpd:2011}DescriptorType" maxOccurs="unbounded" minOccurs="0"/>
* <element name="Rating" type="{urn:mpeg:dash:schema:mpd:2011}DescriptorType" maxOccurs="unbounded" minOccurs="0"/>
* <element name="Viewpoint" type="{urn:mpeg:dash:schema:mpd:2011}DescriptorType" maxOccurs="unbounded" minOccurs="0"/>
* <any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute name="id" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
* <attribute name="lang" type="{http://www.w3.org/2001/XMLSchema}language" />
* <attribute name="contentType" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="par" type="{urn:mpeg:dash:schema:mpd:2011}RatioType" />
* <anyAttribute processContents='lax' namespace='##other'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ContentComponentType", propOrder = {
"accessibilities",
"roles",
"ratings",
"viewpoints",
"anies"
})
public class ContentComponentType {

    @XmlElement(name = "Accessibility")
    protected List<DescriptorType> accessibilities;
    @XmlElement(name = "Role")
    protected List<DescriptorType> roles;
    @XmlElement(name = "Rating")
    protected List<DescriptorType> ratings;
    @XmlElement(name = "Viewpoint")
    protected List<DescriptorType> viewpoints;
    @XmlAnyElement
    protected List<Element> anies;
    @XmlAttribute(name = "id")
    @XmlSchemaType(name = "unsignedInt")
    protected Long id;
    @XmlAttribute(name = "lang")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "language")
    protected String lang;
    @XmlAttribute(name = "contentType")
    protected String contentType;
    @XmlAttribute(name = "par")
    protected String par;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Returns the live list of {@code Accessibility} descriptors, creating it
     * lazily on first access. Mutations on the returned list are reflected in
     * this JAXB object, which is why no setter exists.
     *
     * @return never-null, modifiable list of {@link DescriptorType}
     */
    public List<DescriptorType> getAccessibilities() {
        if (accessibilities == null) {
            accessibilities = new ArrayList<DescriptorType>();
        }
        return accessibilities;
    }

    /**
     * Returns the live list of {@code Role} descriptors, creating it lazily on
     * first access. Mutations on the returned list are reflected in this JAXB
     * object, which is why no setter exists.
     *
     * @return never-null, modifiable list of {@link DescriptorType}
     */
    public List<DescriptorType> getRoles() {
        if (roles == null) {
            roles = new ArrayList<DescriptorType>();
        }
        return roles;
    }

    /**
     * Returns the live list of {@code Rating} descriptors, creating it lazily
     * on first access. Mutations on the returned list are reflected in this
     * JAXB object, which is why no setter exists.
     *
     * @return never-null, modifiable list of {@link DescriptorType}
     */
    public List<DescriptorType> getRatings() {
        if (ratings == null) {
            ratings = new ArrayList<DescriptorType>();
        }
        return ratings;
    }

    /**
     * Returns the live list of {@code Viewpoint} descriptors, creating it
     * lazily on first access. Mutations on the returned list are reflected in
     * this JAXB object, which is why no setter exists.
     *
     * @return never-null, modifiable list of {@link DescriptorType}
     */
    public List<DescriptorType> getViewpoints() {
        if (viewpoints == null) {
            viewpoints = new ArrayList<DescriptorType>();
        }
        return viewpoints;
    }

    /**
     * Returns the live list of wildcard ({@code <any>}) child elements,
     * creating it lazily on first access. Mutations on the returned list are
     * reflected in this JAXB object, which is why no setter exists.
     *
     * @return never-null, modifiable list of DOM {@link Element}s
     */
    public List<Element> getAnies() {
        if (anies == null) {
            anies = new ArrayList<Element>();
        }
        return anies;
    }

    /**
     * Returns the {@code id} attribute value.
     *
     * @return possibly-null {@link Long}
     */
    public Long getId() {
        return id;
    }

    /**
     * Sets the {@code id} attribute value.
     *
     * @param value allowed to be null
     */
    public void setId(Long value) {
        id = value;
    }

    /**
     * Returns the {@code lang} attribute value.
     *
     * @return possibly-null {@link String}
     */
    public String getLang() {
        return lang;
    }

    /**
     * Sets the {@code lang} attribute value.
     *
     * @param value allowed to be null
     */
    public void setLang(String value) {
        lang = value;
    }

    /**
     * Returns the {@code contentType} attribute value.
     *
     * @return possibly-null {@link String}
     */
    public String getContentType() {
        return contentType;
    }

    /**
     * Sets the {@code contentType} attribute value.
     *
     * @param value allowed to be null
     */
    public void setContentType(String value) {
        contentType = value;
    }

    /**
     * Returns the {@code par} (picture aspect ratio) attribute value.
     *
     * @return possibly-null {@link String}
     */
    public String getPar() {
        return par;
    }

    /**
     * Sets the {@code par} (picture aspect ratio) attribute value.
     *
     * @param value allowed to be null
     */
    public void setPar(String value) {
        par = value;
    }

    /**
     * Returns the live map of attributes not bound to any typed property,
     * keyed by qualified attribute name. Add entries directly to the returned
     * map; because it is live there is no setter.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.marshal;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.cassandra.cql3.Json;
import org.apache.cassandra.cql3.Sets;
import org.apache.cassandra.cql3.Term;
import org.apache.cassandra.db.rows.Cell;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.SyntaxException;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.serializers.SetSerializer;
import org.apache.cassandra.transport.ProtocolVersion;
public class SetType<T> extends CollectionType<Set<T>>
{
    // interning instances
    // Two separate intern maps: the multi-cell and frozen variants of the same
    // element type are distinct (non-equal) SetType instances.
    private static final ConcurrentHashMap<AbstractType<?>, SetType> instances = new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<AbstractType<?>, SetType> frozenInstances = new ConcurrentHashMap<>();

    // Type of the set's elements; also serves as the name comparator.
    private final AbstractType<T> elements;
    private final SetSerializer<T> serializer;
    // true = non-frozen (each element stored in its own cell); false = frozen blob.
    private final boolean isMultiCell;

    /**
     * Parses "SetType(<element-type>)" and returns the interned multi-cell instance.
     *
     * @throws ConfigurationException if there is not exactly one type parameter
     */
    public static SetType<?> getInstance(TypeParser parser) throws ConfigurationException, SyntaxException
    {
        List<AbstractType<?>> l = parser.getTypeParameters();
        if (l.size() != 1)
            throw new ConfigurationException("SetType takes exactly 1 type parameter");
        return getInstance(l.get(0), true);
    }

    /**
     * Returns the interned SetType for the given element type and multi-cell flag,
     * creating it on first request.
     */
    public static <T> SetType<T> getInstance(AbstractType<T> elements, boolean isMultiCell)
    {
        ConcurrentHashMap<AbstractType<?>, SetType> internMap = isMultiCell ? instances : frozenInstances;
        // Fast-path get before computeIfAbsent to avoid taking the map's bin lock on hits.
        SetType<T> t = internMap.get(elements);
        return null == t
             ? internMap.computeIfAbsent(elements, k -> new SetType<>(k, isMultiCell))
             : t;
    }

    // Note: prefer getInstance() so instances are interned.
    public SetType(AbstractType<T> elements, boolean isMultiCell)
    {
        super(ComparisonType.CUSTOM, Kind.SET);
        this.elements = elements;
        this.serializer = SetSerializer.getInstance(elements.getSerializer(), elements.comparatorSet);
        this.isMultiCell = isMultiCell;
    }

    @Override
    public <V> boolean referencesUserType(V name, ValueAccessor<V> accessor)
    {
        return elements.referencesUserType(name, accessor);
    }

    /**
     * Returns an equivalent type with the given UDT replaced by its updated definition,
     * evicting the stale interned instance first.
     */
    @Override
    public SetType<?> withUpdatedUserType(UserType udt)
    {
        if (!referencesUserType(udt.name))
            return this;

        // Drop the stale interned instance so getInstance() rebuilds it with the new UDT.
        (isMultiCell ? instances : frozenInstances).remove(elements);

        return getInstance(elements.withUpdatedUserType(udt), isMultiCell);
    }

    @Override
    public AbstractType<?> expandUserTypes()
    {
        return getInstance(elements.expandUserTypes(), isMultiCell);
    }

    public AbstractType<T> getElementsType()
    {
        return elements;
    }

    // For sets, the cell name (path) carries the element value.
    public AbstractType<T> nameComparator()
    {
        return elements;
    }

    // Set cells have no value component, hence the empty type.
    public AbstractType<?> valueComparator()
    {
        return EmptyType.instance;
    }

    @Override
    public boolean isMultiCell()
    {
        return isMultiCell;
    }

    @Override
    public AbstractType<?> freeze()
    {
        if (isMultiCell)
            return getInstance(this.elements, false);
        else
            return this;
    }

    @Override
    public List<AbstractType<?>> subTypes()
    {
        return Collections.singletonList(elements);
    }

    @Override
    public AbstractType<?> freezeNestedMulticellTypes()
    {
        if (!isMultiCell())
            return this;

        if (elements.isFreezable() && elements.isMultiCell())
            return getInstance(elements.freeze(), isMultiCell);

        return getInstance(elements.freezeNestedMulticellTypes(), isMultiCell);
    }

    @Override
    public boolean isCompatibleWithFrozen(CollectionType<?> previous)
    {
        assert !isMultiCell;
        return this.elements.isCompatibleWith(((SetType) previous).elements);
    }

    @Override
    public boolean isValueCompatibleWithFrozen(CollectionType<?> previous)
    {
        // because sets are ordered, any changes to the type must maintain the ordering
        return isCompatibleWithFrozen(previous);
    }

    public <VL, VR> int compareCustom(VL left, ValueAccessor<VL> accessorL, VR right, ValueAccessor<VR> accessorR)
    {
        return ListType.compareListOrSet(elements, left, accessorL, right, accessorR);
    }

    public SetSerializer<T> getSerializer()
    {
        return serializer;
    }

    /**
     * Renders the type name for schema storage; wraps it in FrozenType(...)
     * when the frozen marker must be made explicit.
     */
    @Override
    public String toString(boolean ignoreFreezing)
    {
        boolean includeFrozenType = !ignoreFreezing && !isMultiCell();

        StringBuilder sb = new StringBuilder();
        if (includeFrozenType)
            sb.append(FrozenType.class.getName()).append("(");
        sb.append(getClass().getName());
        sb.append(TypeParser.stringifyTypeParameters(Collections.<AbstractType<?>>singletonList(elements), ignoreFreezing || !isMultiCell));
        if (includeFrozenType)
            sb.append(")");
        return sb.toString();
    }

    // Elements live in the cell path (see nameComparator), not the cell value.
    public List<ByteBuffer> serializedValues(Iterator<Cell<?>> cells)
    {
        List<ByteBuffer> bbs = new ArrayList<ByteBuffer>();
        while (cells.hasNext())
            bbs.add(cells.next().path().get(0));
        return bbs;
    }

    /**
     * Parses a JSON array (sets are represented as lists in JSON) into a
     * delayed set value.
     *
     * @throws MarshalException if the input is not a list or contains null
     */
    @Override
    public Term fromJSONObject(Object parsed) throws MarshalException
    {
        if (parsed instanceof String)
            parsed = Json.decodeJson((String) parsed);

        if (!(parsed instanceof List))
            throw new MarshalException(String.format(
                    "Expected a list (representing a set), but got a %s: %s", parsed.getClass().getSimpleName(), parsed));

        List list = (List) parsed;
        Set<Term> terms = new HashSet<>(list.size());
        for (Object element : list)
        {
            if (element == null)
                throw new MarshalException("Invalid null element in set");
            terms.add(elements.fromJSONObject(element));
        }

        return new Sets.DelayedValue(elements, terms);
    }

    @Override
    public String toJSONString(ByteBuffer buffer, ProtocolVersion protocolVersion)
    {
        return ListType.setOrListToJsonString(buffer, elements, protocolVersion);
    }
}
| |
package org.jabref.logic.bst;
import java.util.Locale;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Implements BibTeX's {@code change.case$} behaviour: converts the case of a
 * string while honouring brace protection ({@code {...}}) and giving dedicated
 * treatment to LaTeX special characters (accents, {@code \oe}, {@code \ss}, ...).
 *
 * <p>Instances are single-use; the public entry point is {@link #changeCase(String, FORMAT_MODE)}.
 */
public final class BibtexCaseChanger {

    private static final Logger LOGGER = LoggerFactory.getLogger(BibtexCaseChanger.class);

    // stores whether the char before the current char was a colon
    private boolean prevColon = true;

    // global variable to store the current brace level
    private int braceLevel;

    public enum FORMAT_MODE {
        // First character and character after a ":" as upper case - everything else in lower case. Obey {}.
        TITLE_LOWERS('t'),

        // All characters lower case - Obey {}
        ALL_LOWERS('l'),

        // all characters upper case - Obey {}
        ALL_UPPERS('u');

        // the following would have to be done if the functionality of CaseChangers would be included here
        // However, we decided against it and will probably do the other way round: https://github.com/JabRef/jabref/pull/215#issuecomment-146981624

        // Each word should start with a capital letter
        //EACH_FIRST_UPPERS('f'),

        // Converts all words to upper case, but converts articles, prepositions, and conjunctions to lower case
        // Capitalizes first and last word
        // Does not change words starting with "{"
        // DIFFERENCE to old CaseChangers.TITLE: last word is NOT capitalized in all cases
        //TITLE_UPPERS('T');

        private final char asChar;

        FORMAT_MODE(char asChar) {
            this.asChar = asChar;
        }

        public char asChar() {
            return asChar;
        }

        /**
         * Convert bstFormat char into ENUM
         *
         * @throws IllegalArgumentException if char is not 't', 'l', 'u'
         */
        public static FORMAT_MODE getFormatModeForBSTFormat(final char bstFormat) {
            for (FORMAT_MODE mode : FORMAT_MODE.values()) {
                if (mode.asChar == bstFormat) {
                    return mode;
                }
            }
            // include the offending character so the caller can diagnose the .bst file
            throw new IllegalArgumentException(
                    "Unknown format character '" + bstFormat + "'; expected one of 't', 'l', 'u'");
        }
    }

    private BibtexCaseChanger() {
    }

    /**
     * Changes case of the given string s
     *
     * @param s the string to handle
     * @param format the format
     */
    public static String changeCase(String s, FORMAT_MODE format) {
        return (new BibtexCaseChanger()).doChangeCase(s, format);
    }

    /**
     * Walks the string once, dispatching on braces: special characters are
     * handled by {@link #convertSpecialChar}, brace-level-0 text by
     * {@link #convertCharIfBraceLevelIsZero}, and brace-protected text is
     * copied verbatim.
     */
    private String doChangeCase(String s, FORMAT_MODE format) {
        char[] c = s.toCharArray();
        StringBuilder sb = new StringBuilder();
        int i = 0;
        int n = s.length();
        while (i < n) {
            if (c[i] == '{') {
                braceLevel++;
                // Only "{\..." at brace level 1 with room for a control sequence
                // is a special-character candidate; everything else is copied.
                if ((braceLevel != 1) || ((i + 4) > n) || (c[i + 1] != '\\')) {
                    prevColon = false;
                    sb.append(c[i]);
                    i++;
                    continue;
                }
                // In title mode, a special char at the start of the string (or
                // right after ": ") keeps its case; only the brace is consumed.
                if ((format == FORMAT_MODE.TITLE_LOWERS) && ((i == 0) || (prevColon && Character.isWhitespace(c[i - 1])))) {
                    sb.append('{');
                    i++;
                    prevColon = false;
                    continue;
                }
                i = convertSpecialChar(sb, c, i, format);
                continue;
            }
            if (c[i] == '}') {
                sb.append(c[i]);
                i++;
                if (braceLevel == 0) {
                    LOGGER.warn("Too many closing braces in string: {}", s);
                } else {
                    braceLevel--;
                }
                prevColon = false;
                continue;
            }
            if (braceLevel == 0) {
                i = convertCharIfBraceLevelIsZero(c, i, sb, format);
                continue;
            }
            sb.append(c[i]);
            i++;
        }
        if (braceLevel > 0) {
            LOGGER.warn("Not enough closing braces in string: {}", s);
        }
        return sb.toString();
    }

    /**
     * We're dealing with a special character (usually either an undotted `\i'
     * or `\j', or an accent like one in Table~3.1 of the \LaTeX\ manual, or a
     * foreign character like one in Table~3.2) if the first character after the
     * |left_brace| is a |backslash|; the special character ends with the
     * matching |right_brace|. How we handle what is in between depends on the
     * special character. In general, this code will do reasonably well if there
     * is other stuff, too, between braces, but it doesn't try to do anything
     * special with |colon|s.
     *
     * @param c the string as character array
     * @param start the current position. It points to the opening brace
     * @param format the conversion mode
     * @return the position after the special-character group
     */
    private int convertSpecialChar(StringBuilder sb, char[] c, int start, FORMAT_MODE format) {
        int i = start;
        sb.append(c[i]);
        i++; // skip over open brace
        while ((i < c.length) && (braceLevel > 0)) {
            sb.append(c[i]);
            i++;
            // skip over the |backslash|
            Optional<String> s = BibtexCaseChanger.findSpecialChar(c, i);
            if (s.isPresent()) {
                i = convertAccented(c, i, s.get(), sb, format);
            }
            // copy/convert the rest of the group up to the next control sequence,
            // tracking the brace level as we go
            while ((i < c.length) && (braceLevel > 0) && (c[i] != '\\')) {
                if (c[i] == '}') {
                    braceLevel--;
                } else if (c[i] == '{') {
                    braceLevel++;
                }
                i = convertNonControl(c, i, sb, format);
            }
        }
        return i;
    }

    /**
     * Convert the given string according to the format character (title, lower,
     * up) and append the result to the stringBuffer, return the updated
     * position.
     *
     * @param c the string as character array
     * @param start position of the special character (after the backslash)
     * @param s the detected special-character name (e.g. "oe", "AE", "ss")
     * @param sb output buffer
     * @param format the conversion mode
     * @return the new position
     */
    private int convertAccented(char[] c, int start, String s, StringBuilder sb, FORMAT_MODE format) {
        int pos = start;
        pos += s.length();
        switch (format) {
        case TITLE_LOWERS:
        case ALL_LOWERS:
            if ("L O OE AE AA".contains(s)) {
                sb.append(s.toLowerCase(Locale.ROOT));
            } else {
                sb.append(s);
            }
            break;
        case ALL_UPPERS:
            if ("l o oe ae aa".contains(s)) {
                sb.append(s.toUpperCase(Locale.ROOT));
            } else if ("i j ss".contains(s)) {
                sb.deleteCharAt(sb.length() - 1); // Kill backslash
                sb.append(s.toUpperCase(Locale.ROOT));
                // BibTeX also swallows whitespace following \i, \j and \ss here
                while ((pos < c.length) && Character.isWhitespace(c[pos])) {
                    pos++;
                }
            } else {
                sb.append(s);
            }
            break;
        default:
            LOGGER.info("convertAccented - Unknown format: {}", format);
            break;
        }
        return pos;
    }

    /**
     * Case-converts a single ordinary (non-control) character inside a special
     * group and appends it; returns the advanced position.
     */
    private int convertNonControl(char[] c, int start, StringBuilder sb, FORMAT_MODE format) {
        int pos = start;
        switch (format) {
        case TITLE_LOWERS:
        case ALL_LOWERS:
            sb.append(Character.toLowerCase(c[pos]));
            pos++;
            break;
        case ALL_UPPERS:
            sb.append(Character.toUpperCase(c[pos]));
            pos++;
            break;
        default:
            LOGGER.info("convertNonControl - Unknown format: {}", format);
            break;
        }
        return pos;
    }

    /**
     * Converts one unprotected (brace level 0) character and appends it. In
     * title mode the first character of the string, and the first
     * non-whitespace character after a colon, keep their case; the method also
     * maintains {@link #prevColon} for that rule.
     */
    private int convertCharIfBraceLevelIsZero(char[] c, int start, StringBuilder sb, FORMAT_MODE format) {
        int i = start;
        switch (format) {
        case TITLE_LOWERS:
            if ((i == 0) || (prevColon && Character.isWhitespace(c[i - 1]))) {
                sb.append(c[i]);
            } else {
                sb.append(Character.toLowerCase(c[i]));
            }
            if (c[i] == ':') {
                prevColon = true;
            } else if (!Character.isWhitespace(c[i])) {
                prevColon = false;
            }
            break;
        case ALL_LOWERS:
            sb.append(Character.toLowerCase(c[i]));
            break;
        case ALL_UPPERS:
            sb.append(Character.toUpperCase(c[i]));
            break;
        default:
            LOGGER.info("convertCharIfBraceLevelIsZero - Unknown format: {}", format);
            break;
        }
        i++;
        return i;
    }

    /**
     * Determine whether there starts a special char at pos (e.g., oe, AE). Return it as string.
     * If nothing found, return Optional.empty()
     *
     * Also used by BibtexPurify
     *
     * @param c the current "String"
     * @param pos the position
     * @return the special LaTeX character or Optional.empty()
     */
    public static Optional<String> findSpecialChar(char[] c, int pos) {
        if ((pos + 1) < c.length) {
            if ((c[pos] == 'o') && (c[pos + 1] == 'e')) {
                return Optional.of("oe");
            }
            if ((c[pos] == 'O') && (c[pos + 1] == 'E')) {
                return Optional.of("OE");
            }
            if ((c[pos] == 'a') && (c[pos + 1] == 'e')) {
                return Optional.of("ae");
            }
            if ((c[pos] == 'A') && (c[pos + 1] == 'E')) {
                return Optional.of("AE");
            }
            if ((c[pos] == 's') && (c[pos + 1] == 's')) {
                return Optional.of("ss");
            }
            if ((c[pos] == 'A') && (c[pos + 1] == 'A')) {
                return Optional.of("AA");
            }
            if ((c[pos] == 'a') && (c[pos + 1] == 'a')) {
                return Optional.of("aa");
            }
        }
        if ("ijoOlL".indexOf(c[pos]) >= 0) {
            return Optional.of(String.valueOf(c[pos]));
        }
        return Optional.empty();
    }
}
| |
package org.apache.solr.handler.clustering.carrot2;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.clustering.AbstractClusteringTestCase;
import org.apache.solr.handler.clustering.ClusteringComponent;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.search.DocList;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.SolrPluginUtils;
import org.carrot2.core.LanguageCode;
import org.carrot2.util.attribute.AttributeUtils;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
/**
*
*/
/**
 * Tests for {@link CarrotClusteringEngine}: clustering algorithms, highlighting
 * summaries, lexical resources, language handling and custom-field passing.
 */
public class CarrotClusteringEngineTest extends AbstractClusteringTestCase {
  @Test
  public void testCarrotLingo() throws Exception {
    // Note: the expected number of clusters may change after upgrading Carrot2
    // due to e.g. internal improvements or tuning of Carrot2 clustering.
    final int expectedNumClusters = 10;
    checkEngine(getClusteringEngine("default"), expectedNumClusters);
  }

  @Test
  public void testProduceSummary() throws Exception {
    // We'll make two queries, one with- and another one without summary
    // and assert that documents are shorter when highlighter is in use.
    final List<NamedList<Object>> noSummaryClusters = clusterWithHighlighting(false, 80);
    final List<NamedList<Object>> summaryClusters = clusterWithHighlighting(true, 80);
    assertEquals("Equal number of clusters", noSummaryClusters.size(), summaryClusters.size());
    for (int i = 0; i < noSummaryClusters.size(); i++) {
      assertTrue("Summary shorter than original document",
          getLabels(noSummaryClusters.get(i)).get(1).length() >
          getLabels(summaryClusters.get(i)).get(1).length());
    }
  }

  @Test
  public void testSummaryFragSize() throws Exception {
    // We'll make two queries, one short summaries and another one with longer
    // summaries and will check that the results differ.
    final List<NamedList<Object>> shortSummaryClusters = clusterWithHighlighting(true, 30);
    final List<NamedList<Object>> longSummaryClusters = clusterWithHighlighting(true, 80);
    assertEquals("Equal number of clusters", shortSummaryClusters.size(), longSummaryClusters.size());
    for (int i = 0; i < shortSummaryClusters.size(); i++) {
      assertTrue("Summary shorter than original document",
          getLabels(shortSummaryClusters.get(i)).get(1).length() <
          getLabels(longSummaryClusters.get(i)).get(1).length());
    }
  }

  /** Clusters docs matching "mine" with highlighting on/off; see the 5-arg overload. */
  private List<NamedList<Object>> clusterWithHighlighting(
      boolean enableHighlighting, int fragSize) throws IOException {
    // Some documents don't have mining in the snippet
    return clusterWithHighlighting(enableHighlighting, fragSize, 1, "mine", numberOfDocs - 7);
  }

  /** Runs the echo engine over docs matching {@code term}, optionally producing summaries. */
  private List<NamedList<Object>> clusterWithHighlighting(
      boolean enableHighlighting, int fragSize, int summarySnippets,
      String term, int expectedNumDocuments) throws IOException {
    final TermQuery query = new TermQuery(new Term("snippet", term));

    final ModifiableSolrParams summaryParams = new ModifiableSolrParams();
    summaryParams.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet");
    summaryParams.add(CarrotParams.PRODUCE_SUMMARY,
        Boolean.toString(enableHighlighting));
    summaryParams
        .add(CarrotParams.SUMMARY_FRAGSIZE, Integer.toString(fragSize));
    summaryParams
        .add(CarrotParams.SUMMARY_SNIPPETS, Integer.toString(summarySnippets));
    final List<NamedList<Object>> summaryClusters = checkEngine(
        getClusteringEngine("echo"), expectedNumDocuments,
        expectedNumDocuments, query, summaryParams);

    return summaryClusters;
  }

  @Test
  public void testCarrotStc() throws Exception {
    checkEngine(getClusteringEngine("stc"), 1);
  }

  @Test
  public void testWithoutSubclusters() throws Exception {
    checkClusters(checkEngine(getClusteringEngine("mock"), AbstractClusteringTestCase.numberOfDocs),
        1, 1, 0);
  }

  @Test
  public void testWithSubclusters() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(CarrotParams.OUTPUT_SUB_CLUSTERS, true);
    // Fix: params were created but never handed to the engine, so the
    // OUTPUT_SUB_CLUSTERS setting had no effect on the request being tested.
    checkClusters(checkEngine(getClusteringEngine("mock"),
        AbstractClusteringTestCase.numberOfDocs, params), 1, 1, 2);
  }

  @Test
  public void testNumDescriptions() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "labels"), 5);
    params.set(CarrotParams.NUM_DESCRIPTIONS, 3);
    checkClusters(checkEngine(getClusteringEngine("mock"), AbstractClusteringTestCase.numberOfDocs,
        params), 1, 3, 0);
  }

  @Test
  public void testClusterScores() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "depth"), 1);
    List<NamedList<Object>> clusters = checkEngine(getClusteringEngine("mock"),
        AbstractClusteringTestCase.numberOfDocs, params);
    int i = 1;
    for (NamedList<Object> cluster : clusters) {
      final Double score = getScore(cluster);
      assertNotNull(score);
      assertEquals(0.25 * i++, score, 0);
    }
  }

  @Test
  public void testOtherTopics() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "depth"), 1);
    params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "otherTopicsModulo"), 2);
    List<NamedList<Object>> clusters = checkEngine(getClusteringEngine("mock"),
        AbstractClusteringTestCase.numberOfDocs, params);
    int i = 1;
    for (NamedList<Object> cluster : clusters) {
      // every second cluster is marked as "other topics"; the rest have no flag
      assertEquals(i++ % 2 == 0 ? true : null, isOtherTopics(cluster));
    }
  }

  @Test
  public void testCarrotAttributePassing() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "depth"), 1);
    params.set(AttributeUtils.getKey(MockClusteringAlgorithm.class, "labels"), 3);
    checkClusters(checkEngine(getClusteringEngine("mock"), AbstractClusteringTestCase.numberOfDocs,
        params), 1, 3, 0);
  }

  @Test
  public void testLexicalResourcesFromSolrConfigDefaultDir() throws Exception {
    checkLexicalResourcesFromSolrConfig("lexical-resource-check",
        "online,customsolrstopword,customsolrstoplabel");
  }

  @Test
  public void testLexicalResourcesFromSolrConfigCustomDir() throws Exception {
    checkLexicalResourcesFromSolrConfig("lexical-resource-check-custom-resource-dir",
        "online,customsolrstopwordcustomdir,customsolrstoplabelcustomdir");
  }

  /** Asserts that only "online" survives clustering when the given words are stop-listed. */
  private void checkLexicalResourcesFromSolrConfig(String engineName, String wordsToCheck)
      throws IOException {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("merge-resources", false);
    params.set(AttributeUtils.getKey(
        LexicalResourcesCheckClusteringAlgorithm.class, "wordsToCheck"),
        wordsToCheck);

    // "customsolrstopword" is in stopwords.en, "customsolrstoplabel" is in
    // stoplabels.mt, so we're expecting only one cluster with label "online".
    final List<NamedList<Object>> clusters = checkEngine(
        getClusteringEngine(engineName), 1, params);
    // expected value first, actual second (JUnit convention)
    assertEquals(ImmutableList.of("online"), getLabels(clusters.get(0)));
  }

  @Test
  public void testSolrStopWordsUsedInCarrot2Clustering() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("merge-resources", false);
    params.set(AttributeUtils.getKey(
        LexicalResourcesCheckClusteringAlgorithm.class, "wordsToCheck"),
        "online,solrownstopword");

    // "solrownstopword" is in stopwords.txt, so we're expecting
    // only one cluster with label "online".
    final List<NamedList<Object>> clusters = checkEngine(
        getClusteringEngine("lexical-resource-check"), 1, params);
    // expected value first, actual second (JUnit convention)
    assertEquals(ImmutableList.of("online"), getLabels(clusters.get(0)));
  }

  @Test
  public void testSolrStopWordsNotDefinedOnAFieldForClustering() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    // Force string fields to be used for clustering. Does not make sense
    // in a real word, but does the job in the test.
    params.set(CarrotParams.TITLE_FIELD_NAME, "url");
    params.set(CarrotParams.SNIPPET_FIELD_NAME, "url");
    params.set("merge-resources", false);
    params.set(AttributeUtils.getKey(
        LexicalResourcesCheckClusteringAlgorithm.class, "wordsToCheck"),
        "online,solrownstopword");

    final List<NamedList<Object>> clusters = checkEngine(
        getClusteringEngine("lexical-resource-check"), 2, params);
    assertEquals(ImmutableList.of("online"), getLabels(clusters.get(0)));
    assertEquals(ImmutableList.of("solrownstopword"),
        getLabels(clusters.get(1)));
  }

  @Test
  public void testHighlightingOfMultiValueField() throws Exception {
    final String snippetWithoutSummary = getLabels(clusterWithHighlighting(
        false, 30, 3, "multi", 1).get(0)).get(1);
    assertTrue("Snippet contains first value", snippetWithoutSummary.contains("First"));
    assertTrue("Snippet contains second value", snippetWithoutSummary.contains("Second"));
    assertTrue("Snippet contains third value", snippetWithoutSummary.contains("Third"));

    final String snippetWithSummary = getLabels(clusterWithHighlighting(
        true, 30, 3, "multi", 1).get(0)).get(1);
    assertTrue("Snippet with summary shorter than full snippet",
        snippetWithoutSummary.length() > snippetWithSummary.length());
    assertTrue("Summary covers first value", snippetWithSummary.contains("First"));
    assertTrue("Summary covers second value", snippetWithSummary.contains("Second"));
    assertTrue("Summary covers third value", snippetWithSummary.contains("Third"));
  }

  @Test
  public void testConcatenatingMultipleFields() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.TITLE_FIELD_NAME, "title,heading");
    params.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet,body");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, new TermQuery(new Term("body",
            "snippet")), params).get(0));
    assertTrue("Snippet contains third value", labels.get(0).contains("Title field"));
    assertTrue("Snippet contains third value", labels.get(0).contains("Heading field"));
    assertTrue("Snippet contains third value", labels.get(1).contains("Snippet field"));
    assertTrue("Snippet contains third value", labels.get(1).contains("Body field"));
  }

  @Test
  public void testHighlightingMultipleFields() throws Exception {
    final TermQuery query = new TermQuery(new Term("snippet", "content"));

    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.TITLE_FIELD_NAME, "title,heading");
    params.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet,body");
    params.add(CarrotParams.PRODUCE_SUMMARY, Boolean.toString(false));

    final String snippetWithoutSummary = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, query, params).get(0)).get(1);
    assertTrue("Snippet covers snippet field", snippetWithoutSummary.contains("snippet field"));
    assertTrue("Snippet covers body field", snippetWithoutSummary.contains("body field"));

    params.set(CarrotParams.PRODUCE_SUMMARY, Boolean.toString(true));
    params.add(CarrotParams.SUMMARY_FRAGSIZE, Integer.toString(30));
    params.add(CarrotParams.SUMMARY_SNIPPETS, Integer.toString(2));

    final String snippetWithSummary = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, query, params).get(0)).get(1);
    assertTrue("Snippet with summary shorter than full snippet",
        snippetWithoutSummary.length() > snippetWithSummary.length());
    assertTrue("Snippet covers snippet field", snippetWithSummary.contains("snippet field"));
    assertTrue("Snippet covers body field", snippetWithSummary.contains("body field"));
  }

  @Test
  public void testOneCarrot2SupportedLanguage() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.LANGUAGE_FIELD_NAME, "lang");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, new TermQuery(new Term("url",
            "one_supported_language")), params).get(0));
    assertEquals(3, labels.size());
    assertEquals("Correct Carrot2 language", LanguageCode.CHINESE_SIMPLIFIED.name(), labels.get(2));
  }

  @Test
  public void testOneCarrot2SupportedLanguageOfMany() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.LANGUAGE_FIELD_NAME, "lang");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, new TermQuery(new Term("url",
            "one_supported_language_of_many")), params).get(0));
    assertEquals(3, labels.size());
    assertEquals("Correct Carrot2 language", LanguageCode.GERMAN.name(), labels.get(2));
  }

  @Test
  public void testLanguageCodeMapping() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.LANGUAGE_FIELD_NAME, "lang");
    params.add(CarrotParams.LANGUAGE_CODE_MAP, "POLISH:pl");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, new TermQuery(new Term("url",
            "one_supported_language_of_many")), params).get(0));
    assertEquals(3, labels.size());
    assertEquals("Correct Carrot2 language", LanguageCode.POLISH.name(), labels.get(2));
  }

  @Test
  public void testPassingOfCustomFields() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.CUSTOM_FIELD_NAME, "intfield_i:intfield");
    params.add(CarrotParams.CUSTOM_FIELD_NAME, "floatfield_f:floatfield");
    params.add(CarrotParams.CUSTOM_FIELD_NAME, "heading:multi");

    // Let the echo mock clustering algorithm know which custom field to echo
    params.add("custom-fields", "intfield,floatfield,multi");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("echo"), 1, 1, new TermQuery(new Term("url",
            "custom_fields")), params).get(0));
    assertEquals(5, labels.size());
    assertEquals("Integer field", "10", labels.get(2));
    assertEquals("Float field", "10.5", labels.get(3));
    assertEquals("List field", "[first, second]", labels.get(4));
  }

  @Test
  public void testCustomTokenizer() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.TITLE_FIELD_NAME, "title");
    params.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("custom-duplicating-tokenizer"), 1, 16, new TermQuery(new Term("title",
            "field")), params).get(0));

    // The custom test tokenizer duplicates each token's text
    assertTrue("First token", labels.get(0).contains("TitleTitle"));
  }

  @Test
  public void testCustomStemmer() throws Exception {
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(CarrotParams.TITLE_FIELD_NAME, "title");
    params.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet");

    final List<String> labels = getLabels(checkEngine(
        getClusteringEngine("custom-duplicating-stemmer"), 1, 12, new TermQuery(new Term("title",
            "field")), params).get(0));

    // The custom test stemmer duplicates and lowercases each token's text
    assertTrue("First token", labels.get(0).contains("titletitle"));
  }

  /** Looks up the named engine registered on the core's clustering component. */
  private CarrotClusteringEngine getClusteringEngine(String engineName) {
    ClusteringComponent comp = (ClusteringComponent) h.getCore()
        .getSearchComponent("clustering");
    assertNotNull("clustering component should not be null", comp);
    CarrotClusteringEngine engine = (CarrotClusteringEngine) comp
        .getSearchClusteringEngines().get(engineName);
    assertNotNull("clustering engine for name: " + engineName
        + " should not be null", engine);
    return engine;
  }

  private List<NamedList<Object>> checkEngine(CarrotClusteringEngine engine,
      int expectedNumClusters) throws IOException {
    return checkEngine(engine, numberOfDocs, expectedNumClusters, new MatchAllDocsQuery(), new ModifiableSolrParams());
  }

  private List<NamedList<Object>> checkEngine(CarrotClusteringEngine engine,
      int expectedNumClusters, SolrParams clusteringParams) throws IOException {
    return checkEngine(engine, numberOfDocs, expectedNumClusters, new MatchAllDocsQuery(), clusteringParams);
  }

  /**
   * Clusters all docs matching {@code query} with the given engine and params,
   * asserting the expected document and cluster counts; returns the clusters.
   */
  private List<NamedList<Object>> checkEngine(CarrotClusteringEngine engine, int expectedNumDocs,
                           int expectedNumClusters, Query query, SolrParams clusteringParams) throws IOException {
    // Get all documents to cluster
    RefCounted<SolrIndexSearcher> ref = h.getCore().getSearcher();

    DocList docList;
    try {
      SolrIndexSearcher searcher = ref.get();
      docList = searcher.getDocList(query, (Query) null, new Sort(), 0,
          numberOfDocs);
      assertEquals("docList size", expectedNumDocs, docList.matches());

      ModifiableSolrParams solrParams = new ModifiableSolrParams();
      solrParams.add(clusteringParams);

      // Perform clustering
      LocalSolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), solrParams);
      Map<SolrDocument,Integer> docIds = new HashMap<>(docList.size());
      SolrDocumentList solrDocList = SolrPluginUtils.docListToSolrDocumentList( docList, searcher, engine.getFieldsToLoad(req), docIds );

      @SuppressWarnings("unchecked")
      List<NamedList<Object>> results = (List<NamedList<Object>>) engine.cluster(query, solrDocList, docIds, req);
      req.close();
      assertEquals("number of clusters: " + results, expectedNumClusters, results.size());
      checkClusters(results, false);
      return results;
    } finally {
      ref.decref();
    }
  }

  private void checkClusters(List<NamedList<Object>> results, int expectedDocCount,
      int expectedLabelCount, int expectedSubclusterCount) {
    for (int i = 0; i < results.size(); i++) {
      NamedList<Object> cluster = results.get(i);
      checkCluster(cluster, expectedDocCount, expectedLabelCount,
          expectedSubclusterCount);
    }
  }

  private void checkClusters(List<NamedList<Object>> results, boolean hasSubclusters) {
    for (int i = 0; i < results.size(); i++) {
      checkCluster(results.get(i), hasSubclusters);
    }
  }

  /** Asserts basic structural invariants of a single cluster entry. */
  private void checkCluster(NamedList<Object> cluster, boolean hasSubclusters) {
    List<Object> docs = getDocs(cluster);
    assertNotNull("docs is null and it shouldn't be", docs);
    for (int j = 0; j < docs.size(); j++) {
      Object id = docs.get(j);
      assertNotNull("id is null and it shouldn't be", id);
    }

    List<String> labels = getLabels(cluster);
    assertNotNull("labels is null but it shouldn't be", labels);

    if (hasSubclusters) {
      List<NamedList<Object>> subclusters = getSubclusters(cluster);
      assertNotNull("subclusters is null but it shouldn't be", subclusters);
    }
  }

  private void checkCluster(NamedList<Object> cluster, int expectedDocCount,
      int expectedLabelCount, int expectedSubclusterCount) {
    checkCluster(cluster, expectedSubclusterCount > 0);
    assertEquals("number of docs in cluster", expectedDocCount,
        getDocs(cluster).size());
    assertEquals("number of labels in cluster", expectedLabelCount,
        getLabels(cluster).size());

    if (expectedSubclusterCount > 0) {
      List<NamedList<Object>> subclusters = getSubclusters(cluster);
      // (was asserted twice with identical arguments; once is enough)
      assertEquals("number of subclusters in cluster",
          expectedSubclusterCount, subclusters.size());
    }
  }

  @SuppressWarnings("unchecked")
  private List<NamedList<Object>> getSubclusters(NamedList<Object> cluster) {
    return (List<NamedList<Object>>) cluster.get("clusters");
  }

  @SuppressWarnings("unchecked")
  private List<String> getLabels(NamedList<Object> cluster) {
    return (List<String>) cluster.get("labels");
  }

  private Double getScore(NamedList<Object> cluster) {
    return (Double) cluster.get("score");
  }

  private Boolean isOtherTopics(NamedList<Object> cluster) {
    return (Boolean) cluster.get("other-topics");
  }

  @SuppressWarnings("unchecked")
  private List<Object> getDocs(NamedList<Object> cluster) {
    return (List<Object>) cluster.get("docs");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.stream;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.prepare.RelOptTableImpl;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.core.Union;
import org.apache.calcite.rel.core.Values;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalJoin;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.logical.LogicalSort;
import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.logical.LogicalUnion;
import org.apache.calcite.schema.StreamableTable;
import org.apache.calcite.schema.Table;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.List;
/**
* Rules and relational operators for streaming relational expressions.
*/
public class StreamRules {
  // Utility class: holds planner rules only, never instantiated.
  private StreamRules() {}

  /** All stream-related planner rules, in the order they should be registered. */
  public static final ImmutableList<RelOptRule> RULES =
      ImmutableList.of(
          new DeltaProjectTransposeRule(),
          new DeltaFilterTransposeRule(),
          new DeltaAggregateTransposeRule(),
          new DeltaSortTransposeRule(),
          new DeltaUnionTransposeRule(),
          new DeltaJoinTransposeRule(),
          new DeltaTableScanRule(),
          new DeltaTableScanToEmptyRule());
/** Planner rule that pushes a {@link Delta} through a {@link Project}. */
public static class DeltaProjectTransposeRule extends RelOptRule {
private DeltaProjectTransposeRule() {
super(
operand(Delta.class,
operand(Project.class, any())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Project project = call.rel(1);
final LogicalDelta newDelta = LogicalDelta.create(project.getInput());
final LogicalProject newProject =
LogicalProject.create(newDelta, project.getProjects(),
project.getRowType().getFieldNames());
call.transformTo(newProject);
}
}
/** Planner rule that pushes a {@link Delta} through a {@link Filter}. */
public static class DeltaFilterTransposeRule extends RelOptRule {
private DeltaFilterTransposeRule() {
super(
operand(Delta.class,
operand(Filter.class, any())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Filter filter = call.rel(1);
final LogicalDelta newDelta = LogicalDelta.create(filter.getInput());
final LogicalFilter newFilter =
LogicalFilter.create(newDelta, filter.getCondition());
call.transformTo(newFilter);
}
}
/** Planner rule that pushes a {@link Delta} through an {@link Aggregate}. */
public static class DeltaAggregateTransposeRule extends RelOptRule {
private DeltaAggregateTransposeRule() {
super(
operand(Delta.class,
operand(Aggregate.class, any())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Aggregate aggregate = call.rel(1);
final LogicalDelta newDelta =
LogicalDelta.create(aggregate.getInput());
final LogicalAggregate newAggregate =
LogicalAggregate.create(newDelta, aggregate.indicator,
aggregate.getGroupSet(), aggregate.groupSets,
aggregate.getAggCallList());
call.transformTo(newAggregate);
}
}
/** Planner rule that pushes a {@link Delta} through an {@link Sort}. */
public static class DeltaSortTransposeRule extends RelOptRule {
private DeltaSortTransposeRule() {
super(
operand(Delta.class,
operand(Sort.class, any())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Sort sort = call.rel(1);
final LogicalDelta newDelta =
LogicalDelta.create(sort.getInput());
final LogicalSort newSort =
LogicalSort.create(newDelta, sort.collation, sort.offset, sort.fetch);
call.transformTo(newSort);
}
}
/** Planner rule that pushes a {@link Delta} through an {@link Union}. */
public static class DeltaUnionTransposeRule extends RelOptRule {
private DeltaUnionTransposeRule() {
super(
operand(Delta.class,
operand(Union.class, any())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Union union = call.rel(1);
final List<RelNode> newInputs = Lists.newArrayList();
for (RelNode input : union.getInputs()) {
final LogicalDelta newDelta =
LogicalDelta.create(input);
newInputs.add(newDelta);
}
final LogicalUnion newUnion = LogicalUnion.create(newInputs, union.all);
call.transformTo(newUnion);
}
}
/** Planner rule that pushes a {@link Delta} into a {@link TableScan} of a
* {@link org.apache.calcite.schema.StreamableTable}.
*
* <p>Very likely, the stream was only represented as a table for uniformity
* with the other relations in the system. The Delta disappears and the stream
* can be implemented directly. */
public static class DeltaTableScanRule extends RelOptRule {
private DeltaTableScanRule() {
super(
operand(Delta.class,
operand(TableScan.class, none())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
final TableScan scan = call.rel(1);
final RelOptCluster cluster = delta.getCluster();
final RelOptTable relOptTable = scan.getTable();
final StreamableTable streamableTable =
relOptTable.unwrap(StreamableTable.class);
if (streamableTable != null) {
final Table table1 = streamableTable.stream();
final RelOptTable relOptTable2 =
RelOptTableImpl.create(relOptTable.getRelOptSchema(),
relOptTable.getRowType(), table1,
ImmutableList.<String>builder()
.addAll(relOptTable.getQualifiedName())
.add("(STREAM)").build());
final LogicalTableScan newScan =
LogicalTableScan.create(cluster, relOptTable2);
call.transformTo(newScan);
}
}
}
/**
* Planner rule that converts {@link Delta} over a {@link TableScan} of
* a table other than {@link org.apache.calcite.schema.StreamableTable} to
* an empty {@link Values}.
*/
public static class DeltaTableScanToEmptyRule extends RelOptRule {
private DeltaTableScanToEmptyRule() {
super(
operand(Delta.class,
operand(TableScan.class, none())));
}
@Override public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
final TableScan scan = call.rel(1);
final RelOptTable relOptTable = scan.getTable();
final StreamableTable streamableTable =
relOptTable.unwrap(StreamableTable.class);
final RelBuilder builder = call.builder();
if (streamableTable == null) {
call.transformTo(builder.values(delta.getRowType()).build());
}
}
}
/**
* Planner rule that pushes a {@link Delta} through a {@link Join}.
*
* <p>We apply something analogous to the
* <a href="https://en.wikipedia.org/wiki/Product_rule">product rule of
* differential calculus</a> to implement the transpose:
*
* <blockquote><code>stream(x join y) →
* x join stream(y) union all stream(x) join y</code></blockquote>
*/
public static class DeltaJoinTransposeRule extends RelOptRule {
public DeltaJoinTransposeRule() {
super(
operand(Delta.class,
operand(Join.class, any())));
}
public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Join join = call.rel(1);
final RelNode left = join.getLeft();
final RelNode right = join.getRight();
final LogicalDelta rightWithDelta = LogicalDelta.create(right);
final LogicalJoin joinL = LogicalJoin.create(left, rightWithDelta,
join.getCondition(), join.getVariablesSet(), join.getJoinType(),
join.isSemiJoinDone(),
ImmutableList.copyOf(join.getSystemFieldList()));
final LogicalDelta leftWithDelta = LogicalDelta.create(left);
final LogicalJoin joinR = LogicalJoin.create(leftWithDelta, right,
join.getCondition(), join.getVariablesSet(), join.getJoinType(),
join.isSemiJoinDone(),
ImmutableList.copyOf(join.getSystemFieldList()));
List<RelNode> inputsToUnion = Lists.newArrayList();
inputsToUnion.add(joinL);
inputsToUnion.add(joinR);
final LogicalUnion newNode = LogicalUnion.create(inputsToUnion, true);
call.transformTo(newNode);
}
}
}
// End StreamRules.java
| |
/*
* Copyright (c) 2001, 2007, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.management;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.security.BasicPermission;
import java.security.Permission;
import java.security.PermissionCollection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Set;
import java.util.StringTokenizer;
/** A Permission to perform actions related to MBeanServers.
The <em>name</em> of the permission specifies the operation requested
or granted by the permission. For a granted permission, it can be
<code>*</code> to allow all of the MBeanServer operations specified below.
Otherwise, for a granted or requested permission, it must be one of the
following:
<dl>
<dt>createMBeanServer</dt>
<dd>Create a new MBeanServer object using the method
{@link MBeanServerFactory#createMBeanServer()} or
{@link MBeanServerFactory#createMBeanServer(java.lang.String)}.
<dt>findMBeanServer</dt>
<dd>Find an MBeanServer with a given name, or all MBeanServers in this
JVM, using the method {@link MBeanServerFactory#findMBeanServer}.
<dt>newMBeanServer</dt>
<dd>Create a new MBeanServer object without keeping a reference to it,
using the method {@link MBeanServerFactory#newMBeanServer()} or
{@link MBeanServerFactory#newMBeanServer(java.lang.String)}.
<dt>releaseMBeanServer</dt>
<dd>Remove the MBeanServerFactory's reference to an MBeanServer,
using the method {@link MBeanServerFactory#releaseMBeanServer}.
</dl>
The <em>name</em> of the permission can also denote a list of one or more
comma-separated operations. Spaces are allowed at the beginning and
end of the <em>name</em> and before and after commas.
<p>
<code>MBeanServerPermission("createMBeanServer")</code> implies
<code>MBeanServerPermission("newMBeanServer")</code>.
*
* @since 1.5
*/
public class MBeanServerPermission extends BasicPermission {
    private static final long serialVersionUID = -5661980843569388590L;

    // Bit positions of the four operation names, plus the number of names.
    private final static int
        CREATE = 0,
        FIND = 1,
        NEW = 2,
        RELEASE = 3,
        N_NAMES = 4;

    // Operation names, indexed by the bit-position constants above.
    private final static String[] names = {
        "createMBeanServer",
        "findMBeanServer",
        "newMBeanServer",
        "releaseMBeanServer",
    };

    // Single-bit masks for each operation, and the union of all of them.
    private final static int
        CREATE_MASK = 1<<CREATE,
        FIND_MASK = 1<<FIND,
        NEW_MASK = 1<<NEW,
        RELEASE_MASK = 1<<RELEASE,
        ALL_MASK = CREATE_MASK|FIND_MASK|NEW_MASK|RELEASE_MASK;

    /*
     * Map from permission masks to canonical names. This array is
     * filled in on demand.
     *
     * This isn't very scalable. If we have more than five or six
     * permissions, we should consider doing this differently,
     * e.g. with a Map.
     */
    private final static String[] canonicalNames = new String[1 << N_NAMES];

    /*
     * The target names mask. This is not private to avoid having to
     * generate accessor methods for accesses from the collection class.
     *
     * This mask includes implied bits. So if it has CREATE_MASK then
     * it necessarily has NEW_MASK too.
     */
    transient int mask;

    /** <p>Create a new MBeanServerPermission with the given name.</p>
        <p>This constructor is equivalent to
        <code>MBeanServerPermission(name,null)</code>.</p>
        @param name the name of the granted permission. It must
        respect the constraints spelt out in the description of the
        {@link MBeanServerPermission} class.
        @exception NullPointerException if the name is null.
        @exception IllegalArgumentException if the name is not
        <code>*</code> or one of the allowed names or a comma-separated
        list of the allowed names.
    */
    public MBeanServerPermission(String name) {
        this(name, null);
    }

    /** <p>Create a new MBeanServerPermission with the given name.</p>
        @param name the name of the granted permission. It must
        respect the constraints spelt out in the description of the
        {@link MBeanServerPermission} class.
        @param actions the associated actions. This parameter is not
        currently used and must be null or the empty string.
        @exception NullPointerException if the name is null.
        @exception IllegalArgumentException if the name is not
        <code>*</code> or one of the allowed names or a comma-separated
        list of the allowed names, or if <code>actions</code> is a non-null
        non-empty string.
     *
     * @throws NullPointerException if <code>name</code> is <code>null</code>.
     * @throws IllegalArgumentException if <code>name</code> is empty or
     * if arguments are invalid.
     */
    public MBeanServerPermission(String name, String actions) {
        // The superclass stores the canonical name, so equal permissions
        // always report the same getName().
        super(getCanonicalName(parseMask(name)), actions);

        /* It's annoying to have to parse the name twice, but since
           Permission.getName() is final and since we can't access "this"
           until after the call to the superclass constructor, there
           isn't any very clean way to do this. MBeanServerPermission
           objects aren't constructed very often, luckily. */
        mask = parseMask(name);

        /* Check that actions is a null or empty string */
        if (actions != null && actions.length() > 0)
            throw new IllegalArgumentException("MBeanServerPermission " +
                                               "actions must be null: " +
                                               actions);
    }

    // Package-private constructor used by MBeanServerPermissionCollection
    // to build a single permission representing the union of several masks.
    MBeanServerPermission(int mask) {
        super(getCanonicalName(mask));
        this.mask = impliedMask(mask);
    }

    // Recompute the transient mask after deserialization; parseMask also
    // re-validates the name read from the stream.
    private void readObject(ObjectInputStream in)
            throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        mask = parseMask(getName());
    }

    // Remove redundant bits for display: createMBeanServer implies
    // newMBeanServer, so the NEW bit is dropped when CREATE is present.
    static int simplifyMask(int mask) {
        if ((mask & CREATE_MASK) != 0)
            mask &= ~NEW_MASK;
        return mask;
    }

    // Add implied bits: createMBeanServer implies newMBeanServer.
    static int impliedMask(int mask) {
        if ((mask & CREATE_MASK) != 0)
            mask |= NEW_MASK;
        return mask;
    }

    // Return the canonical comma-separated name for a mask, caching results
    // in canonicalNames. The full mask is spelt "*".
    static String getCanonicalName(int mask) {
        if (mask == ALL_MASK)
            return "*";

        mask = simplifyMask(mask);

        synchronized (canonicalNames) {
            if (canonicalNames[mask] == null)
                canonicalNames[mask] = makeCanonicalName(mask);
        }
        return canonicalNames[mask];
    }

    // Build the comma-separated canonical name for an already-simplified
    // mask, listing names in the fixed order of the names[] array.
    private static String makeCanonicalName(int mask) {
        final StringBuilder buf = new StringBuilder();
        for (int i = 0; i < N_NAMES; i++) {
            if ((mask & (1<<i)) != 0) {
                if (buf.length() > 0)
                    buf.append(',');
                buf.append(names[i]);
            }
        }
        return buf.toString().intern();
        /* intern() avoids duplication when the mask has only
           one bit, so is equivalent to the string constants
           we have for the names[] array. */
    }

    /* Convert the string into a bitmask, including bits that
       are implied by the permissions in the string. */
    private static int parseMask(String name) {
        /* Check that target name is a non-null non-empty string */
        if (name == null) {
            throw new NullPointerException("MBeanServerPermission: " +
                                           "target name can't be null");
        }

        name = name.trim();
        if (name.equals("*"))
            return ALL_MASK;

        /* If the name is empty, nameIndex will barf. */
        if (name.indexOf(',') < 0)
            return impliedMask(1 << nameIndex(name.trim()));

        // Comma-separated list: OR together the bit for each element.
        int mask = 0;
        StringTokenizer tok = new StringTokenizer(name, ",");
        while (tok.hasMoreTokens()) {
            String action = tok.nextToken();
            int i = nameIndex(action.trim());
            mask |= (1 << i);
        }

        return impliedMask(mask);
    }

    // Map an operation name to its index in names[]; rejects anything that
    // is not exactly one of the four known names.
    private static int nameIndex(String name)
            throws IllegalArgumentException {
        for (int i = 0; i < N_NAMES; i++) {
            if (names[i].equals(name))
                return i;
        }
        final String msg =
            "Invalid MBeanServerPermission name: \"" + name + "\"";
        throw new IllegalArgumentException(msg);
    }

    // The mask fully determines the permission (see equals below), so it
    // doubles as the hash code.
    public int hashCode() {
        return mask;
    }

    /**
     * <p>Checks if this MBeanServerPermission object "implies" the specified
     * permission.</p>
     *
     * <p>More specifically, this method returns true if:</p>
     *
     * <ul>
     * <li> <i>p</i> is an instance of MBeanServerPermission,</li>
     * <li> <i>p</i>'s target names are a subset of this object's target
     * names</li>
     * </ul>
     *
     * <p>The <code>createMBeanServer</code> permission implies the
     * <code>newMBeanServer</code> permission.</p>
     *
     * @param p the permission to check against.
     * @return true if the specified permission is implied by this object,
     * false if not.
     */
    public boolean implies(Permission p) {
        if (!(p instanceof MBeanServerPermission))
            return false;

        MBeanServerPermission that = (MBeanServerPermission) p;

        // Subset test: every bit of that.mask must be present in this.mask.
        // Both masks already include implied bits (see impliedMask).
        return ((this.mask & that.mask) == that.mask);
    }

    /**
     * Checks two MBeanServerPermission objects for equality. Checks that
     * <i>obj</i> is an MBeanServerPermission, and represents the same
     * list of allowable actions as this object.
     * <P>
     * @param obj the object we are testing for equality with this object.
     * @return true if the objects are equal.
     */
    public boolean equals(Object obj) {
        if (obj == this)
            return true;

        if (! (obj instanceof MBeanServerPermission))
            return false;

        MBeanServerPermission that = (MBeanServerPermission) obj;

        return (this.mask == that.mask);
    }

    // Returns a collection that knows createMBeanServer implies
    // newMBeanServer; the one inherited from BasicPermission does not.
    public PermissionCollection newPermissionCollection() {
        return new MBeanServerPermissionCollection();
    }
}
/**
* Class returned by {@link MBeanServerPermission#newPermissionCollection()}.
*
* @serial include
*/
/*
* Since every collection of MBSP can be represented by a single MBSP,
* that is what our PermissionCollection does. We need to define a
* PermissionCollection because the one inherited from BasicPermission
* doesn't know that createMBeanServer implies newMBeanServer.
*
* Though the serial form is defined, the TCK does not check it. We do
* not require independent implementations to duplicate it. Even though
* PermissionCollection is Serializable, instances of this class will
* hardly ever be serialized, and different implementations do not
* typically exchange serialized permission collections.
*
* If we did require that a particular form be respected here, we would
* logically also have to require it for
* MBeanPermission.newPermissionCollection, which would preclude an
* implementation from defining a PermissionCollection there with an
* optimized "implies" method.
*/
class MBeanServerPermissionCollection extends PermissionCollection {
    /** @serial Null if no permissions in collection, otherwise a
        single permission that is the union of all permissions that
        have been added. */
    private MBeanServerPermission collectionPermission;

    private static final long serialVersionUID = -5661980843569388590L;

    public synchronized void add(Permission permission) {
        if (!(permission instanceof MBeanServerPermission)) {
            throw new IllegalArgumentException(
                "Permission not an MBeanServerPermission: " + permission);
        }
        if (isReadOnly())
            throw new SecurityException("Read-only permission collection");
        final MBeanServerPermission p = (MBeanServerPermission) permission;
        if (collectionPermission == null) {
            collectionPermission = p;
        } else if (!collectionPermission.implies(p)) {
            // Fold the new permission's bits into the single representative
            // permission that stands for the whole collection.
            collectionPermission =
                new MBeanServerPermission(collectionPermission.mask | p.mask);
        }
    }

    public synchronized boolean implies(Permission permission) {
        if (collectionPermission == null)
            return false;
        return collectionPermission.implies(permission);
    }

    public synchronized Enumeration<Permission> elements() {
        // At most one element: the representative union permission.
        final Set<Permission> contents =
            (collectionPermission == null)
                ? Collections.<Permission>emptySet()
                : Collections.<Permission>singleton(collectionPermission);
        return Collections.enumeration(contents);
    }
}
| |
package org.springframework.gradle.antora;
import org.apache.commons.io.IOUtils;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIOException;
/**
 * Tests for the Antora version check plugin: verifies how the plugin derives the
 * expected Antora version/prerelease from the project version, and how the check
 * task compares those expectations against the {@code antora.yml} file.
 */
class CheckAntoraVersionPluginTests {

	@Test
	void defaultsPropertiesWhenSnapshot() {
		String expectedVersion = "1.0.0-SNAPSHOT";
		Project project = ProjectBuilder.builder().build();
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// A SNAPSHOT project version splits into base version + "-SNAPSHOT" prerelease.
		assertThat(task.getAntoraVersion().get()).isEqualTo("1.0.0");
		assertThat(task.getAntoraPrerelease().get()).isEqualTo("-SNAPSHOT");
		assertThat(task.getAntoraYmlFile().getAsFile().get()).isEqualTo(project.file("antora.yml"));
	}

	@Test
	void defaultsPropertiesWhenMilestone() {
		String expectedVersion = "1.0.0-M1";
		Project project = ProjectBuilder.builder().build();
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// Milestones keep the full version and use prerelease "true".
		assertThat(task.getAntoraVersion().get()).isEqualTo("1.0.0-M1");
		assertThat(task.getAntoraPrerelease().get()).isEqualTo("true");
		assertThat(task.getAntoraYmlFile().getAsFile().get()).isEqualTo(project.file("antora.yml"));
	}

	@Test
	void defaultsPropertiesWhenRc() {
		String expectedVersion = "1.0.0-RC1";
		Project project = ProjectBuilder.builder().build();
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// Release candidates keep the full version and use prerelease "true".
		assertThat(task.getAntoraVersion().get()).isEqualTo("1.0.0-RC1");
		assertThat(task.getAntoraPrerelease().get()).isEqualTo("true");
		assertThat(task.getAntoraYmlFile().getAsFile().get()).isEqualTo(project.file("antora.yml"));
	}

	@Test
	void defaultsPropertiesWhenRelease() {
		String expectedVersion = "1.0.0";
		Project project = ProjectBuilder.builder().build();
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// GA releases have no prerelease component at all.
		assertThat(task.getAntoraVersion().get()).isEqualTo("1.0.0");
		assertThat(task.getAntoraPrerelease().isPresent()).isFalse();
		assertThat(task.getAntoraYmlFile().getAsFile().get()).isEqualTo(project.file("antora.yml"));
	}

	@Test
	void explicitProperties() {
		Project project = ProjectBuilder.builder().build();
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// Explicitly set properties override the defaults derived from the project version.
		task.getAntoraVersion().set("1.0.0");
		task.getAntoraPrerelease().set("-SNAPSHOT");
		assertThat(task.getAntoraVersion().get()).isEqualTo("1.0.0");
		assertThat(task.getAntoraPrerelease().get()).isEqualTo("-SNAPSHOT");
		assertThat(task.getAntoraYmlFile().getAsFile().get()).isEqualTo(project.file("antora.yml"));
	}

	@Test
	void versionNotDefined() throws Exception {
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0'");
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// No project version was set, so the check cannot succeed.
		assertThatExceptionOfType(GradleException.class).isThrownBy(() -> task.check());
	}

	@Test
	void antoraFileNotFound() throws Exception {
		String expectedVersion = "1.0.0-SNAPSHOT";
		Project project = ProjectBuilder.builder().build();
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// antora.yml was never written, so reading it fails with an IOException.
		assertThatIOException().isThrownBy(() -> task.check());
	}

	@Test
	void actualAntoraPrereleaseNull() throws Exception {
		String expectedVersion = "1.0.0-SNAPSHOT";
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0'");
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		// A SNAPSHOT build expects a prerelease entry, but the yml has none.
		assertThatExceptionOfType(GradleException.class).isThrownBy(() -> task.check());
	}

	@Test
	void matchesWhenSnapshot() throws Exception {
		String expectedVersion = "1.0.0-SNAPSHOT";
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0'\nprerelease: '-SNAPSHOT'");
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		antoraCheckVersionTask(project).check();
	}

	@Test
	void matchesWhenMilestone() throws Exception {
		String expectedVersion = "1.0.0-M1";
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0-M1'\nprerelease: 'true'");
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		antoraCheckVersionTask(project).check();
	}

	@Test
	void matchesWhenRc() throws Exception {
		String expectedVersion = "1.0.0-RC1";
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0-RC1'\nprerelease: 'true'");
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		antoraCheckVersionTask(project).check();
	}

	@Test
	void matchesWhenReleaseAndPrereleaseUndefined() throws Exception {
		String expectedVersion = "1.0.0";
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0'");
		project.setVersion(expectedVersion);
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		antoraCheckVersionTask(project).check();
	}

	@Test
	void matchesWhenExplicitRelease() throws Exception {
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0'");
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		task.getAntoraVersion().set("1.0.0");
		task.check();
	}

	@Test
	void matchesWhenExplicitPrerelease() throws Exception {
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "version: '1.0.0'\nprerelease: '-SNAPSHOT'");
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		task.getAntoraVersion().set("1.0.0");
		task.getAntoraPrerelease().set("-SNAPSHOT");
		task.check();
	}

	@Test
	void matchesWhenMissingPropertyDefined() throws Exception {
		Project project = ProjectBuilder.builder().build();
		writeAntoraYml(project, "name: 'ROOT'\nversion: '1.0.0'");
		project.getPluginManager().apply(CheckAntoraVersionPlugin.class);
		CheckAntoraVersionTask task = antoraCheckVersionTask(project);
		task.getAntoraVersion().set("1.0.0");
		task.check();
	}

	/**
	 * Looks up the plugin's check task on the given project, asserting it is a
	 * {@link CheckAntoraVersionTask}. Centralizes the lookup/cast that every
	 * test repeated.
	 */
	private CheckAntoraVersionTask antoraCheckVersionTask(Project project) {
		Task task = project.getTasks().findByName(CheckAntoraVersionPlugin.ANTORA_CHECK_VERSION_TASK_NAME);
		assertThat(task).isInstanceOf(CheckAntoraVersionTask.class);
		return (CheckAntoraVersionTask) task;
	}

	/**
	 * Writes {@code content} to {@code antora.yml} in the project root.
	 * Fix: the original tests passed a bare {@code new FileOutputStream(...)}
	 * to {@code IOUtils.write} and never closed it, leaking a file handle per
	 * test; try-with-resources guarantees the stream is closed.
	 */
	private void writeAntoraYml(Project project, String content) throws Exception {
		File antoraYml = new File(project.getRootDir(), "antora.yml");
		try (FileOutputStream out = new FileOutputStream(antoraYml)) {
			IOUtils.write(content, out, StandardCharsets.UTF_8);
		}
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.test;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.tez.client.TezClient;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.client.DAGClient;
import org.apache.tez.dag.api.client.DAGStatus;
import org.apache.tez.runtime.api.AbstractLogicalIOProcessor;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.TaskFailureType;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.api.LogicalOutput;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests, using Tez local mode, that task-level errors (fatal failure,
 * non-fatal failure, self-kill) are correctly reported in the vertex progress
 * counters of the resulting DAG status.
 */
public class TestTaskErrorsUsingLocalMode {

  private static final Logger LOG = LoggerFactory.getLogger(TestTaskErrorsUsingLocalMode.class);

  private static final String VERTEX_NAME = "vertex1";

  @Test(timeout = 20000)
  public void testFatalErrorReported() throws IOException, TezException, InterruptedException {
    TezClient tezClient = getTezClient("testFatalErrorReported");
    DAGClient dagClient = null;
    try {
      FailingProcessor.configureForFatalFail();
      DAG dag = DAG.create("testFatalErrorReportedDag").addVertex(
          Vertex
              .create(VERTEX_NAME, ProcessorDescriptor.create(FailingProcessor.class.getName()), 1));

      dagClient = tezClient.submitDAG(dag);
      dagClient.waitForCompletion();
      assertEquals(DAGStatus.State.FAILED, dagClient.getDAGStatus(null).getState());
      // A FATAL failure must not be retried: exactly one failed attempt.
      assertEquals(1, dagClient.getVertexStatus(VERTEX_NAME, null).getProgress().getFailedTaskAttemptCount());
    } finally {
      cleanup(dagClient, tezClient);
    }
  }

  @Test(timeout = 20000)
  public void testNonFatalErrorReported() throws IOException, TezException, InterruptedException {
    TezClient tezClient = getTezClient("testNonFatalErrorReported");
    DAGClient dagClient = null;
    try {
      FailingProcessor.configureForNonFatalFail();
      DAG dag = DAG.create("testNonFatalErrorReported").addVertex(
          Vertex
              .create(VERTEX_NAME, ProcessorDescriptor.create(FailingProcessor.class.getName()), 1));

      dagClient = tezClient.submitDAG(dag);
      dagClient.waitForCompletion();
      assertEquals(DAGStatus.State.FAILED, dagClient.getDAGStatus(null).getState());
      // NON_FATAL failures are retried up to the default attempt limit,
      // so 4 failed attempts are expected before the DAG fails.
      assertEquals(4, dagClient.getVertexStatus(VERTEX_NAME, null).getProgress().getFailedTaskAttemptCount());
    } finally {
      cleanup(dagClient, tezClient);
    }
  }

  @Test(timeout = 20000)
  public void testSelfKillReported() throws IOException, TezException, InterruptedException {
    TezClient tezClient = getTezClient("testSelfKillReported");
    DAGClient dagClient = null;
    try {
      // Attempts 0..9 kill themselves; attempt 10 succeeds.
      FailingProcessor.configureForKilled(10);
      DAG dag = DAG.create("testSelfKillReported").addVertex(
          Vertex
              .create(VERTEX_NAME, ProcessorDescriptor.create(FailingProcessor.class.getName()), 1));

      dagClient = tezClient.submitDAG(dag);
      dagClient.waitForCompletion();
      assertEquals(DAGStatus.State.SUCCEEDED, dagClient.getDAGStatus(null).getState());
      assertEquals(10, dagClient.getVertexStatus(VERTEX_NAME, null).getProgress().getKilledTaskAttemptCount());
    } finally {
      cleanup(dagClient, tezClient);
    }
  }

  /**
   * Closes the DAG client (if any) and then stops the Tez client.
   * Fix: the original finally blocks called {@code dagClient.close()} followed
   * by {@code tezClient.stop()} sequentially, so an exception from close()
   * skipped stop() and leaked the local client; the nested try/finally
   * guarantees stop() always runs.
   */
  private void cleanup(DAGClient dagClient, TezClient tezClient)
      throws IOException, TezException {
    try {
      if (dagClient != null) {
        dagClient.close();
      }
    } finally {
      tezClient.stop();
    }
  }

  /**
   * Creates and starts a local-mode TezClient with settings suited to fast
   * single-JVM tests (local FS, local fetch optimization, short AM linger).
   */
  private TezClient getTezClient(String name) throws IOException, TezException {
    TezConfiguration tezConf1 = new TezConfiguration();
    tezConf1.setBoolean(TezConfiguration.TEZ_LOCAL_MODE, true);
    tezConf1.set("fs.defaultFS", "file:///");
    tezConf1.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH, true);
    tezConf1.setLong(TezConfiguration.TEZ_AM_SLEEP_TIME_BEFORE_EXIT_MILLIS, 500);
    TezClient tezClient1 = TezClient.create(name, tezConf1, true);
    tezClient1.start();
    return tezClient1;
  }

  /**
   * Processor whose behavior is driven by static flags set by the test before
   * DAG submission: report a fatal/non-fatal failure, kill itself until a
   * chosen attempt number, or succeed. Fields are volatile because the
   * processor runs on a different thread than the configuring test.
   */
  public static class FailingProcessor extends AbstractLogicalIOProcessor {

    private static final String FAIL_STRING_NON_FATAL = "non-fatal-fail";
    private static final String FAIL_STRING_FATAL = "fatal-fail";
    private static final String KILL_STRING = "kill-self";

    private static volatile boolean shouldFail;
    private static volatile boolean fatalError;
    private static volatile boolean shouldKill;
    private static volatile int killModeAttemptNumberToSucceed;

    static {
      reset();
    }

    // Restore the default "succeed" behavior before each configuration.
    static void reset() {
      shouldFail = false;
      fatalError = false;
      shouldKill = false;
      killModeAttemptNumberToSucceed = -1;
    }

    static void configureForNonFatalFail() {
      reset();
      shouldFail = true;
    }

    static void configureForFatalFail() {
      reset();
      shouldFail = true;
      fatalError = true;
    }

    // Attempts below attemptNumber kill themselves; that attempt succeeds.
    static void configureForKilled(int attemptNumber) {
      reset();
      shouldKill = true;
      killModeAttemptNumberToSucceed = attemptNumber;
    }

    public FailingProcessor(ProcessorContext context) {
      super(context);
    }

    @Override
    public void initialize() throws Exception {
    }

    @Override
    public void handleEvents(List<Event> processorEvents) {
    }

    @Override
    public void close() throws Exception {
    }

    @Override
    public void run(Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs) throws
        Exception {
      LOG.info("Running Failing processor");
      if (shouldFail) {
        if (fatalError) {
          LOG.info("Reporting fatal error");
          getContext().reportFailure(TaskFailureType.FATAL, null, FAIL_STRING_FATAL);
        } else {
          LOG.info("Reporting non-fatal error");
          getContext().reportFailure(TaskFailureType.NON_FATAL, null, FAIL_STRING_NON_FATAL);
        }
      } else if (shouldKill) {
        if (getContext().getTaskAttemptNumber() != killModeAttemptNumberToSucceed) {
          LOG.info("Reporting self-kill for attempt=" + getContext().getTaskAttemptNumber());
          getContext().killSelf(null, KILL_STRING);
        }
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.query.functional;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.query.CacheUtils;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.Portfolio;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.test.junit.categories.OQLQueryTest;
@Category({OQLQueryTest.class})
@Category({OQLQueryTest.class})
public class SelectToDateJUnitTest {

  /** Region created by each test and destroyed at the end of executeQueryTest. */
  private static String regionName = "test";
  /** Number of distinct Portfolio ids; each id is put twice, so the region holds numElem * 2. */
  private static int numElem = 120;
  /** Date pattern shared by the data loader and the OQL to_date() calls. */
  private static String format = "MMddyyyyHHmmss";
  /** Reference date (May 20 2012 10:05:59) that every query compares against. */
  private static String mayDate = "05202012100559";
  // Generated months span Jan..Dec; 4 months sort before May and 7 after it.
  private static int numMonthsBeforeMay = 4;
  private static int numMonthsAfterMay = 7;
  // 120 ids * 2 puts spread evenly over 12 months = 20 entries per month.
  private static int numElementsExpectedPerMonth = numElem * 2 / 12;

  @Before
  public void setUp() throws Exception {
    System.setProperty(DistributionConfig.GEMFIRE_PREFIX + "Query.VERBOSE", "true");
    CacheUtils.startCache();
  }

  @After
  public void tearDown() throws Exception {
    CacheUtils.closeCache();
  }

  private static String[] toDateQueries = new String[] {
      "select * from /test p where p.createDate = to_date('" + mayDate + "', '" + format + "')",
      "select * from /test p where p.createDate < to_date('" + mayDate + "', '" + format + "')",
      "select * from /test p where p.createDate > to_date('" + mayDate + "', '" + format + "')",
      "select * from /test p where p.createDate <= to_date('" + mayDate + "', '" + format + "')",
      "select * from /test p where p.createDate >= to_date('" + mayDate + "', '" + format + "')"};

  // the test will be validating against the May date, so expected values revolve around month of
  // May
  private static int[] toDateExpectedResults =
      new int[] {numElementsExpectedPerMonth, numMonthsBeforeMay * numElementsExpectedPerMonth,
          numMonthsAfterMay * numElementsExpectedPerMonth,
          (numMonthsBeforeMay + 1) * numElementsExpectedPerMonth,
          (numMonthsAfterMay + 1) * numElementsExpectedPerMonth};

  private static String[] projectionQueries = new String[] {
      "select p.createDate from /test p where p.createDate = to_date('" + mayDate + "', '" + format
          + "')",
      "select p.createDate from /test p where p.createDate < to_date('" + mayDate + "', '" + format
          + "')",
      "select p.createDate from /test p where p.createDate > to_date('" + mayDate + "', '" + format
          + "')",
      "select p.createDate from /test p where p.createDate <= to_date('" + mayDate + "', '" + format
          + "')",
      "select p.createDate from /test p where p.createDate >= to_date('" + mayDate + "', '" + format
          + "')",};

  /**
   * Runs every query in {@code queries} and asserts its result size against the matching entry in
   * {@code expectedResults}. Destroys the test region afterwards so other tests can recreate it.
   *
   * <p>Fix: the loop previously always executed and validated {@code queries[0]} /
   * {@code expectedResults[0]}, leaving queries 1..n untested.
   */
  private void executeQueryTest(Cache cache, String[] queries, int[] expectedResults) {
    CacheUtils.log("********Execute Query Test********");
    QueryService queryService = cache.getQueryService();
    Query query = null;
    String queryString = null;
    int numQueries = queries.length;
    try {
      for (int i = 0; i < numQueries; i++) {
        queryString = queries[i];
        query = queryService.newQuery(queryString);
        SelectResults result = (SelectResults) query.execute();
        assertEquals(queryString, expectedResults[i], result.size());
      }
    } catch (Exception e) {
      e.printStackTrace();
      fail("Query " + queryString + ":" + query + " Execution Failed!");
    }
    CacheUtils.log("********Completed Executing Query Test********");
    // Destroy current Region for other tests
    cache.getRegion(regionName).destroyRegion();
  }

  /** Debug helper: dumps each Portfolio and its createDate to the cache log. */
  private void printoutResults(SelectResults results) {
    Iterator iterator = results.iterator();
    while (iterator.hasNext()) {
      Portfolio p = (Portfolio) iterator.next();
      CacheUtils.log("->" + p + ";" + p.createDate);
    }
  }

  /**
   * Test on Local Region data
   */
  @Test
  public void testQueriesOnLocalRegion() throws Exception {
    Cache cache = CacheUtils.getCache();
    createLocalRegion();
    assertNotNull(cache.getRegion(regionName));
    assertEquals(numElem * 2, cache.getRegion(regionName).size());
    executeQueryTest(cache, toDateQueries, toDateExpectedResults);
  }

  /**
   * Test on Replicated Region data
   */
  @Test
  public void testQueriesOnReplicatedRegion() throws Exception {
    Cache cache = CacheUtils.getCache();
    createReplicatedRegion();
    assertNotNull(cache.getRegion(regionName));
    assertEquals(numElem * 2, cache.getRegion(regionName).size());
    executeQueryTest(cache, toDateQueries, toDateExpectedResults);
  }

  /**
   * Test on Partitioned Region data
   */
  @Test
  public void testQueriesOnPartitionedRegion() throws Exception {
    Cache cache = CacheUtils.getCache();
    createPartitionedRegion();
    assertNotNull(cache.getRegion(regionName));
    assertEquals(numElem * 2, cache.getRegion(regionName).size());
    executeQueryTest(cache, toDateQueries, toDateExpectedResults);
  }

  /**
   * Test on Replicated Region data with the projected attribute equal to the queried attribute
   */
  @Test
  public void testQueriesOnReplicatedRegionWithSameProjAttr() throws Exception {
    Cache cache = CacheUtils.getCache();
    createReplicatedRegion();
    assertNotNull(cache.getRegion(regionName));
    assertEquals(numElem * 2, cache.getRegion(regionName).size());
    executeQueryTest(cache, projectionQueries, toDateExpectedResults);
  }

  /**
   * Test on Partitioned Region data with the projected attribute equal to the queried attribute
   */
  @Test
  public void testQueriesOnPartitionedRegionWithSameProjAttr() throws Exception {
    Cache cache = CacheUtils.getCache();
    createPartitionedRegion();
    assertNotNull(cache.getRegion(regionName));
    assertEquals(numElem * 2, cache.getRegion(regionName).size());
    executeQueryTest(cache, projectionQueries, toDateExpectedResults);
  }

  /******** Region Creation Helper Methods *********/
  /**
   * Each month will have exactly 20 entries with a matching date Code borrowed from shobhit's test
   *
   */
  private void createLocalRegion() throws ParseException {
    Cache cache = CacheUtils.getCache();
    AttributesFactory attributesFactory = new AttributesFactory();
    attributesFactory.setDataPolicy(DataPolicy.NORMAL);
    RegionAttributes regionAttributes = attributesFactory.create();
    Region region = cache.createRegion(regionName, regionAttributes);
    for (int i = 1; i <= numElem; i++) {
      putData(i, region);
    }
  }

  private void createReplicatedRegion() throws ParseException {
    Cache cache = CacheUtils.getCache();
    AttributesFactory attributesFactory = new AttributesFactory();
    attributesFactory.setDataPolicy(DataPolicy.REPLICATE);
    RegionAttributes regionAttributes = attributesFactory.create();
    Region region = cache.createRegion(regionName, regionAttributes);
    for (int i = 1; i <= numElem; i++) {
      putData(i, region);
    }
  }

  private void createPartitionedRegion() throws ParseException {
    Cache cache = CacheUtils.getCache();
    PartitionAttributesFactory prAttFactory = new PartitionAttributesFactory();
    AttributesFactory attributesFactory = new AttributesFactory();
    attributesFactory.setPartitionAttributes(prAttFactory.create());
    RegionAttributes regionAttributes = attributesFactory.create();
    Region region = cache.createRegion(regionName, regionAttributes);
    for (int i = 1; i <= numElem; i++) {
      putData(i, region);
    }
  }

  // creates a portfolio object and puts it into the specified region (twice, under id and
  // id + numElem, so every month ends up with numElementsExpectedPerMonth entries)
  private void putData(int id, Region region) throws ParseException {
    Portfolio obj = new Portfolio(id);
    obj.createDate = getCreateDate(id);
    region.put(id, obj);
    region.put(id + numElem, obj);
    CacheUtils.log("Added object " + obj.createDate);
  }

  // creates a date object whose month cycles through 1..12 as i increases
  private Date getCreateDate(int i) throws ParseException {
    int month = (i % 12) + 1;
    // Zero-pad the month so it matches the two-digit "MM" portion of the shared pattern.
    String dateString = String.format("%02d202012100559", month);
    return new SimpleDateFormat(format).parse(dateString);
  }
}
| |
/*
* Copyright (c) 2017 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.extension.siddhi.io.http.sink;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.base64.Base64;
import org.apache.log4j.Logger;
import org.wso2.carbon.messaging.CarbonMessage;
import org.wso2.carbon.messaging.DefaultCarbonMessage;
import org.wso2.carbon.messaging.Header;
import org.wso2.carbon.messaging.exceptions.ClientConnectorException;
import org.wso2.carbon.transport.http.netty.common.Constants;
import org.wso2.carbon.transport.http.netty.config.SenderConfiguration;
import org.wso2.carbon.transport.http.netty.config.TransportProperty;
import org.wso2.carbon.transport.http.netty.sender.HTTPClientConnector;
import org.wso2.extension.siddhi.io.http.sink.util.HttpPayloadDataSource;
import org.wso2.extension.siddhi.io.http.sink.util.HttpSinkUtil;
import org.wso2.extension.siddhi.io.http.util.HttpConstants;
import org.wso2.siddhi.annotation.Example;
import org.wso2.siddhi.annotation.Extension;
import org.wso2.siddhi.annotation.Parameter;
import org.wso2.siddhi.annotation.SystemParameter;
import org.wso2.siddhi.annotation.util.DataType;
import org.wso2.siddhi.core.config.SiddhiAppContext;
import org.wso2.siddhi.core.exception.ConnectionUnavailableException;
import org.wso2.siddhi.core.stream.output.sink.Sink;
import org.wso2.siddhi.core.util.config.ConfigReader;
import org.wso2.siddhi.core.util.transport.DynamicOptions;
import org.wso2.siddhi.core.util.transport.Option;
import org.wso2.siddhi.core.util.transport.OptionHolder;
import org.wso2.siddhi.query.api.definition.StreamDefinition;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* {@code HttpSink } Handle the HTTP publishing tasks.
*/
@Extension(name = "http", namespace = "sink",
description = "This extension publish the HTTP events in any HTTP method POST, GET, PUT, DELETE via HTTP " +
"or https protocols. As the additional features this component can provide basic authentication " +
"as well as user can publish events using custom client truststore files when publishing events " +
"via https protocol. And also user can add any number of headers including HTTP_METHOD header for " +
"each event dynamically.",
parameters = {
@Parameter(
name = "publisher.url",
description = "The URL to which the outgoing events should be published via HTTP. " +
"This is a mandatory parameter and if this is not specified, an error is logged in " +
"the CLI. If user wants to enable SSL for the events, use `https` instead of `http` " +
"in the publisher.url." +
"e.g., " +
"`http://localhost:8080/endpoint`, "
+ "`https://localhost:8080/endpoint`",
type = {DataType.STRING}),
@Parameter(
name = "basic.auth.username",
description = "The username to be included in the authentication header of the basic " +
"authentication enabled events. It is required to specify both username and " +
"password to enable basic authentication. If one of the parameter is not given " +
"by user then an error is logged in the CLI." ,
type = {DataType.STRING},
optional = true,
defaultValue = " "),
@Parameter(
name = "basic.auth.password",
description = "The password to include in the authentication header of the basic " +
"authentication enabled events. It is required to specify both username and " +
"password to enable basic authentication. If one of the parameter is not given " +
"by user then an error is logged in the CLI.",
type = {DataType.STRING},
optional = true, defaultValue = " "),
@Parameter(
name = "client.truststore.path",
description = "The file path to the location of the truststore of the client that sends " +
"the HTTP events through 'https' protocol. A custom client-truststore can be " +
"specified if required.",
type = {DataType.STRING},
optional = true, defaultValue = "${carbon.home}/resources/security/client-truststore.jks"),
@Parameter(
name = "client.truststore.password",
description = "The password for the client-truststore. A custom password can be specified " +
"if required. If no custom password is specified and the protocol of URL is 'https' " +
"then, the system uses default password.",
type = {DataType.STRING},
optional = true, defaultValue = "wso2carbon"),
@Parameter(
name = "headers",
description = "The headers that should be included as a HTTP request headers. There can be " +
"any number of headers concatenated on following format. " +
"header1:value1#header2:value2. User can include content-type header if he need to " +
"any specific type for payload if not system get the mapping type as the content-Type" +
" header (ie. @map(xml):application/xml,@map(json):application/json,@map(text)" +
":plain/text ) and if user does not include any mapping type then system gets the " +
"'plain/text' as default Content-Type header. If user does not include " +
"Content-Length header then system calculate the bytes size of payload and include it" +
" as content-length header.",
type = {DataType.STRING},
optional = true,
dynamic = true, defaultValue = " "),
@Parameter(
name = "method",
description = "For HTTP events, HTTP_METHOD header should be included as a request header." +
" If the parameter is null then system uses 'POST' as a default header.",
type = {DataType.STRING},
optional = true,
dynamic = true, defaultValue = "POST")
},
examples = {
@Example(syntax =
"@sink(type='http',publisher.url='http://localhost:8009/foo', method='{{method}}',"
+ "headers='{{headers}}', "
+ "@map(type='xml' , @payload('{{payloadBody}}')))"
+ "define stream FooStream (payloadBody String, method string, headers string);\n",
description =
"If it is xml mapping expected input should be in following format for FooStream:"
+ "{"
+ "<events>"
+ " <event>"
+ " <symbol>WSO2</symbol>"
+ " <price>55.6</price>"
+ " <volume>100</volume>"
+ " </event>"
+ "</events>,"
+ "POST,"
+ "Content-Length:24#Content-Location:USA#Retry-After:120"
+ "}"
+ "Above event will generate output as below."
+ "~Output http event payload"
+ "<events>\n"
+ " <event>\n"
+ " <symbol>WSO2</symbol>\n"
+ " <price>55.6</price>\n"
+ " <volume>100</volume>\n"
+ " </event>\n"
+ "</events>\n"
+ "~Output http event headers"
+ "Content-Length:24,"
+ "Content-Location:'USA',"
+ "Retry-After:120,"
+ "Content-Type:'application/xml',"
+ "HTTP_METHOD:'POST',"
+ "~Output http event properties"
+ "HTTP_METHOD:'POST',"
+ "HOST:'localhost',"
+ "PORT:8009"
+ "PROTOCOL:'http'"
+ "TO:'/foo'"
)},
systemParameter = {
@SystemParameter(
name = "latency.metrics.enabled",
description = "Property to enable metrics logs to monitor transport latency for netty.",
defaultValue = "true",
possibleParameters = "Any Integer"
),
@SystemParameter(
name = "server.bootstrap.socket.timeout",
description = "Property to configure specified timeout in milliseconds which server " +
"socket will block for this amount of time for http message content to be received.",
defaultValue = "15",
possibleParameters = "Any Integer"
),
@SystemParameter(
name = "server.bootstrap.boss.group.size",
description = "Property to configure number of boss threads, which accepts incoming " +
"connections until the ports are unbound. Once connection accepts successfully, " +
"boss thread passes the accepted channel to one of the worker threads.",
defaultValue = "4",
possibleParameters = "Any integer"
),
@SystemParameter(
name = "server.bootstrap.worker.group.size",
description = "Property to configure number of worker threads, which performs non " +
"blocking read and write for one or more channels in non-blocking mode.",
defaultValue = "8",
possibleParameters = "Any integer"
),
@SystemParameter(
name = "default.protocol",
description = "The default protocol.",
defaultValue = "http",
possibleParameters = {"http" , "https"}
),
@SystemParameter(
name = "https.truststore.file",
description = "The default truststore file path.",
defaultValue = "${carbon.home}/resources/security/client-truststore.jks",
possibleParameters = "Path to client-truststore.jks"
),
@SystemParameter(
name = "https.truststore.password",
description = "The default truststore password.",
defaultValue = "wso2carbon",
possibleParameters = "Truststore password"
)
}
)
public class HttpSink extends Sink {
    private static final Logger log = Logger.getLogger(HttpSink.class);
    // Identifier used in log/error messages: "<siddhi app name>:<stream definition>".
    private String streamID;
    private HTTPClientConnector clientConnector;
    private Set<SenderConfiguration> senderConfig;
    // Mapping type taken from the stream's @map annotation (xml, json, text, ...).
    private String mapType;
    // Protocol, host, port and context parsed out of publisher.url.
    private Map<String, String> httpURLProperties;
    private Set<TransportProperty> nettyTransportProperty;
    private Option httpHeaderOption;
    private Option httpMethodOption;
    // Pre-computed "Basic ..." header value; only set when both username and password are given.
    private String authorizationHeader;
    private String userName;
    private String userPassword;
    private String publisherURL;

    /**
     * Returns the list of classes which this sink can consume.
     * Based on the type of the sink, it may be limited to being able to publish specific type of classes.
     * For example, a sink of type file can only write objects of type String .
     * @return array of supported classes , if extension can support of any types of classes
     * then return empty array .
     */
    @Override
    public Class[] getSupportedInputEventClasses() {
        return new Class[]{String.class};
    }

    /**
     * Returns a list of supported dynamic options (that means for each event value of the option can change) by
     * the transport
     *
     * @return the list of supported dynamic option keys
     */
    @Override
    public String[] getSupportedDynamicOptions() {
        return new String[]{HttpConstants.HEADERS, HttpConstants.METHOD};
    }

    /**
     * The initialization method for {@link Sink}, which will be called before other methods and validates
     * all configuration and reads the initial values.
     * @param outputStreamDefinition containing stream definition bind to the {@link Sink}
     * @param optionHolder Option holder containing static and dynamic configuration related
     * to the {@link Sink}
     * @param configReader to read the sink related system configuration.
     * @param siddhiAppContext the context of the {@link org.wso2.siddhi.query.api.SiddhiApp} used to
     * get siddhi related utility functions.
     */
    @Override
    protected void init(StreamDefinition outputStreamDefinition, OptionHolder optionHolder,
                        ConfigReader configReader, SiddhiAppContext siddhiAppContext) {
        this.streamID = siddhiAppContext.getName() + ":" + outputStreamDefinition.toString();
        this.mapType = outputStreamDefinition.getAnnotations().get(0).getAnnotations().get(0).getElements().get(0)
                .getValue();
        this.publisherURL = optionHolder.validateAndGetStaticValue(HttpConstants.PUBLISHER_URL);
        this.httpHeaderOption = optionHolder.getOrCreateOption(HttpConstants.HEADERS , HttpConstants.DEFAULT_HEADER);
        this.httpMethodOption = optionHolder.getOrCreateOption(HttpConstants.METHOD , HttpConstants.DEFAULT_METHOD);
        this.userName = optionHolder.validateAndGetStaticValue(HttpConstants.RECEIVER_USERNAME,
                HttpConstants.EMPTY_STRING);
        this.userPassword = optionHolder.validateAndGetStaticValue(HttpConstants.RECEIVER_PASSWORD,
                HttpConstants.EMPTY_STRING);
        String clientStoreFile = optionHolder.validateAndGetStaticValue(HttpConstants.CLIENT_TRUSTSTORE_PATH, new
                HttpSinkUtil().trustStorePath(configReader));
        String clientStorePass = optionHolder.validateAndGetStaticValue(HttpConstants.CLIENT_TRUSTSTORE_PASSWORD,
                new HttpSinkUtil().trustStorePassword(configReader));
        String scheme = configReader.readConfig(HttpConstants.DEFAULT_SINK_SCHEME, HttpConstants
                .DEFAULT_SINK_SCHEME_VALUE);
        if (HttpConstants.SCHEME_HTTPS.equals(scheme) && ((clientStoreFile == null) || (clientStorePass == null))) {
            throw new ExceptionInInitializerError("Client truststore file path or password are empty while " +
                    "default scheme is 'https'. Please provide client " +
                    "truststore file path and password in" + streamID);
        }
        if (HttpConstants.EMPTY_STRING.equals(publisherURL)) {
            throw new ExceptionInInitializerError("Receiver URL found empty but it is Mandatory field in " +
                    "" + HttpConstants.HTTP_SINK_ID + "in" + streamID);
        }
        // XOR: exactly one of username/password was supplied — basic auth needs both.
        if ((HttpConstants.EMPTY_STRING.equals(userName) ^
                HttpConstants.EMPTY_STRING.equals(userPassword))) {
            throw new ExceptionInInitializerError("Please provide user name and password in " +
                    HttpConstants.HTTP_SINK_ID + "in" + streamID);
        } else if (!(HttpConstants.EMPTY_STRING.equals(userName) || HttpConstants.EMPTY_STRING.equals
                (userPassword))) {
            byte[] val = (userName + HttpConstants.AUTH_USERNAME_PASSWORD_SEPARATOR + userPassword).getBytes(Charset
                    .defaultCharset());
            // NOTE(review): Base64.encode returns a ByteBuf whose toString() is used here via string
            // concatenation — confirm this yields the intended base64 text rather than the buffer's
            // debug representation.
            this.authorizationHeader = HttpConstants.AUTHORIZATION_METHOD + Base64.encode
                    (Unpooled.copiedBuffer(val));
        }
        //to separate protocol, host, port and context
        this.httpURLProperties = new HttpSinkUtil().getURLProperties(publisherURL);
        this.senderConfig = new HttpSinkUtil().getSenderConfigurations(httpURLProperties, clientStoreFile,
                clientStorePass);
        this.nettyTransportProperty = new HttpSinkUtil().getTransportConfigurations(configReader);
    }

    /**
     * This method will be called when events need to be published via this sink
     * @param payload payload of the event based on the supported event class exported by the extensions
     * @param dynamicOptions holds the dynamic options of this sink and Use this object to obtain dynamic options.
     * @throws ConnectionUnavailableException if end point is unavailable the ConnectionUnavailableException thrown
     * such that the system will take care retrying for connection
     */
    @Override
    public void publish(Object payload, DynamicOptions dynamicOptions) throws ConnectionUnavailableException {
        String headers = httpHeaderOption.getValue(dynamicOptions);
        String httpMethod = HttpConstants.EMPTY_STRING.equals(httpMethodOption.getValue(dynamicOptions)) ?
                HttpConstants.METHOD_DEFAULT : httpMethodOption.getValue(dynamicOptions);
        List<Header> headersList = new HttpSinkUtil().getHeaders(headers);
        String contentType = new HttpSinkUtil().getContentType(mapType, headersList);
        String messageBody = (String) payload;
        CarbonMessage cMessage = new DefaultCarbonMessage();
        HttpPayloadDataSource messageDataSource = new HttpPayloadDataSource(messageBody, cMessage.getOutputStream());
        messageDataSource.setOutputStream(cMessage.getOutputStream());
        cMessage = generateCarbonMessage(headersList, messageDataSource, contentType, httpMethod, cMessage,
                messageBody);
        try {
            clientConnector.send(cMessage, new HttpSinkCallback(messageBody), httpURLProperties);
        } catch (ClientConnectorException e) {
            // NOTE(review): the failure is only logged, not rethrown as
            // ConnectionUnavailableException, so the framework's retry mechanism is not
            // triggered — confirm this best-effort behavior is intended.
            log.error("Error sending the HTTP message with payload " + payload + " in " +
                    HttpConstants.HTTP_SINK_ID + streamID, e);
        }
    }

    /**
     * This method will be called before the processing method.
     * Intention to establish connection to publish event.
     * @throws ConnectionUnavailableException if end point is unavailable the ConnectionUnavailableException thrown
     * such that the system will take care retrying for connection
     */
    @Override
    public void connect() throws ConnectionUnavailableException {
        this.clientConnector = new HTTPClientConnector(senderConfig, nettyTransportProperty);
        log.info(streamID + " has successfully connected to " + publisherURL);
    }

    /**
     * Called after all publishing is done, or when {@link ConnectionUnavailableException} is thrown
     * Implementation of this method should contain the steps needed to disconnect from the sink.
     */
    @Override
    public void disconnect() {
        if (clientConnector != null) {
            clientConnector = null;
            log.info("Server connector for url " + publisherURL + " disconnected.");
        }
    }

    /**
     * The method can be called when removing an event receiver.
     * The cleanups that has to be done when removing the receiver has to be done here.
     */
    @Override
    public void destroy() {
        if (clientConnector != null) {
            clientConnector = null;
            log.info("Server connector for url " + publisherURL + " disconnected.");
        }
    }

    /**
     * Used to collect the serializable state of the processing element, that need to be
     * persisted for reconstructing the element to the same state on a different point of time
     * This is also used to identify the internal states and debugging
     * @return all internal states should be return as an map with meaningful keys
     */
    @Override
    public Map<String, Object> currentState() {
        //no current state.
        return null;
    }

    /**
     * Used to restore serialized state of the processing element, for reconstructing
     * the element to the same state as if was on a previous point of time.
     *
     * @param state the stateful objects of the processing element as a map.
     * This map will have the same keys that is created upon calling currentState() method.
     */
    @Override
    public void restoreState(Map<String, Object> state) {
        //no need to maintain.
    }

    /**
     * The method is responsible of generating carbon message to send.
     *
     * @param headers the headers set.
     * @param payload the payload.
     * @param contentType the content type. Value is if user has to given it as a header or if not it is map type.
     * @param httpMethod http method type.
     * @param cMessage carbon message to be send to the endpoint.
     * @param payloadString raw payload text, used to compute the Content-Length header.
     * @return generated carbon message.
     */
    private CarbonMessage generateCarbonMessage(List<Header> headers, HttpPayloadDataSource payload, String contentType,
                                                String httpMethod, CarbonMessage cMessage, String payloadString) {
        //if Authentication enabled
        if (!(userName.equals(HttpConstants.EMPTY_STRING) || userPassword.equals
                (HttpConstants.EMPTY_STRING))) {
            cMessage.setHeader(HttpConstants.AUTHORIZATION_HEADER, authorizationHeader);
        }
        /*
         * set carbon message properties which is to be used in carbon transport.
         */
        // Set protocol type http or https
        cMessage.setProperty(Constants.PROTOCOL, httpURLProperties.get(HttpConstants.SCHEME));
        // Set uri
        cMessage.setProperty(Constants.TO, httpURLProperties.get(HttpConstants.TO));
        // set Host
        cMessage.setProperty(Constants.HOST, httpURLProperties.get(HttpConstants.HOST));
        //set port
        cMessage.setProperty(Constants.PORT, Integer.valueOf(httpURLProperties.get(HttpConstants.PORT)));
        // Set method
        cMessage.setProperty(HttpConstants.HTTP_METHOD, httpMethod);
        /*
         *set request headers.
         */
        // Set user given Headers
        if (headers != null) {
            cMessage.setHeaders(headers);
        }
        // Set content type if content type is not included in headers
        if (contentType.contains(mapType)) {
            cMessage.setHeader(HttpConstants.HTTP_CONTENT_TYPE, contentType);
        }
        //set content length header
        if (cMessage.getHeaders().get(HttpConstants.CONTENT_LENGTH_HEADER) == null) {
            try {
                cMessage.setHeader(HttpConstants.CONTENT_LENGTH_HEADER, String.valueOf(payloadString.getBytes
                        (HttpConstants.DEFAULT_ENCODING).length));
            } catch (UnsupportedEncodingException e) {
                // Fix: preserve the cause instead of logging the message alone.
                log.error("Unsupported content encoding.", e);
            }
        }
        //set method-type header
        cMessage.setHeader(HttpConstants.HTTP_METHOD, httpMethod);
        /*
         * set request payload.
         */
        // Set message body
        if (payload != null) {
            payload.setOutputStream(cMessage.getOutputStream());
            cMessage.setMessageDataSource(payload);
            cMessage.setAlreadyRead(true);
        }
        //Handle Empty Messages
        if (cMessage.isEmpty() && cMessage.getMessageDataSource() == null) {
            cMessage.setEndOfMsgAdded(true);
        }
        return cMessage;
    }
}
| |
package triaina.webview;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URLDecoder;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import android.webkit.JavascriptInterface;
import triaina.commons.exception.InvocationRuntimeException;
import triaina.commons.exception.JSONConvertException;
import triaina.commons.exception.NotFoundRuntimeException;
import triaina.commons.json.JSONConverter;
import triaina.commons.utils.ClassUtils;
import triaina.commons.utils.FloatUtils;
import triaina.commons.utils.JSONObjectUtils;
import triaina.webview.bridge.BridgeLifecyclable;
import triaina.webview.config.BridgeMethodConfig;
import triaina.webview.config.BridgeObjectConfig;
import triaina.webview.entity.Error;
import triaina.webview.entity.Params;
import triaina.webview.entity.Result;
import org.json.JSONObject;
import android.os.Handler;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.Log;
public class DeviceBridgeProxy {
private static final String TAG = DeviceBridgeProxy.class.getSimpleName();
// Dispatches success/error callbacks back to the WebView side.
private CallbackHelper mCallbackHelper = new CallbackHelper();
private WebViewBridge mWebViewBridge;
// Handler used to move JS-thread notifications onto the target (UI) thread.
private Handler mHandler;
// Aggregated method configs for all registered bridge objects, keyed by destination.
private BridgeObjectConfig mConfigSet = new BridgeObjectConfig();
// Maps a bridge method destination name to the object that owns it.
private Map<String, Object> mReceiverMap = new HashMap<String, Object>();

/**
 * Creates a proxy that routes JavaScript notifications to registered bridge objects.
 *
 * @param webViewBridge the bridge used to return results/errors to the web side
 * @param handler handler on whose thread bridge methods are invoked
 */
public DeviceBridgeProxy(WebViewBridge webViewBridge, Handler handler) {
    mWebViewBridge = webViewBridge;
    mHandler = handler;
}
/**
 * Registers a bridge object: records its method configuration and maps every
 * destination it handles back to the object for later dispatch.
 */
public void addBridgeObjectConfig(Object bridgeObject, BridgeObjectConfig config) {
    mConfigSet.add(config);
    for (BridgeMethodConfig methodConfig : config.getMethodConfigs()) {
        mReceiverMap.put(methodConfig.getDest(), bridgeObject);
    }
}
/** Returns the aggregated method configuration of all registered bridge objects. */
public BridgeObjectConfig getBridgeConfigSet() {
    return mConfigSet;
}
/** Propagates the host's resume event to every lifecycle-aware bridge object. */
public void resume() {
    for (Object bridge : mReceiverMap.values()) {
        if (bridge instanceof BridgeLifecyclable) {
            ((BridgeLifecyclable) bridge).onResume();
        }
    }
}
/** Propagates the host's pause event to every lifecycle-aware bridge object. */
public void pause() {
    for (Object bridge : mReceiverMap.values()) {
        if (bridge instanceof BridgeLifecyclable) {
            ((BridgeLifecyclable) bridge).onPause();
        }
    }
}
/**
 * Propagates the host's destroy event to every lifecycle-aware bridge object.
 * A failure in one bridge is logged and does not prevent the rest from being destroyed.
 */
public void destroy() {
    for (Object bridge : mReceiverMap.values()) {
        try {
            if (bridge instanceof BridgeLifecyclable) {
                ((BridgeLifecyclable) bridge).onDestroy();
            }
        } catch (Exception exp) {
            Log.w(TAG, exp.getMessage() + "", exp);
        }
    }
}
/**
 * Entry point called from JavaScript. Decodes the URL-encoded JSON payload and, on the
 * handler's thread, either invokes a registered bridge method (request with "params") or
 * completes a pending device-to-web callback (response with "result"/"error").
 * Failures are reported back to the web side with an {@link ErrorCode}-specific Error.
 */
@JavascriptInterface
public void notifyToDevice(String data) {
    final String jsonText = decode(data);
    logging("notified", jsonText);
    // Hop from the WebView's JS thread to the configured handler thread before touching state.
    mHandler.post(new Runnable() {
        @Override
        public void run() {
            String id = null;
            String dest = null;
            try {
                final JSONObject json = JSONObjectUtils.parse(jsonText);
                validateParamsVersion(json);
                if (json.has("params")) {
                    // Request from web to device: "id" is optional (absent for notifications).
                    if (json.has("id"))
                        id = json.optString("id");
                    dest = JSONObjectUtils.getString(json, "dest");
                    invoke(id, dest, JSONObjectUtils.getJSONObject(json, "params"));
                } else {
                    // Response to an earlier device-to-web call; look up its pending callback.
                    id = JSONObjectUtils.getString(json, "id");
                    @SuppressWarnings("unchecked")
                    Callback<Result> callback = (Callback<Result>) mWebViewBridge.getCallback(id);
                    if (callback == null)
                        return;
                    // Remove before invoking so the callback fires at most once.
                    mWebViewBridge.removeCallback(id);
                    if (json.has("result"))
                        mCallbackHelper.invokeSucceed(mWebViewBridge, callback,
                                JSONObjectUtils.getJSONObject(json, "result"));
                    else if (json.has("error"))
                        mCallbackHelper.invokeFail(mWebViewBridge, callback,
                                JSONObjectUtils.getJSONObject(json, "error"));
                }
            } catch (NotFoundRuntimeException exp) {
                // No bridge method registered for "dest".
                Log.w(TAG, exp.getMessage() + "", exp);
                mWebViewBridge.returnToWeb(id, dest,
                        new Error(ErrorCode.NOT_FOUND_BRIDGE_ERROR.getCode(), exp.getMessage() + "", dest));
            } catch (JSONConvertException exp) {
                // Payload was not valid JSON or did not match the expected shape.
                Log.e(TAG, exp.getMessage() + "", exp);
                mWebViewBridge.returnToWeb(id, dest,
                        new Error(ErrorCode.JSON_PARSE_ERROR.getCode(), exp.getMessage() + "", dest));
            } catch (InvocationTargetException exp) {
                // The bridge method itself threw.
                Log.e(TAG, exp.getMessage() + "", exp);
                mWebViewBridge.returnToWeb(id, dest,
                        new Error(ErrorCode.INVOCATION_BRIDGE_ERROR.getCode(), exp.getMessage() + "", dest));
            } catch (Exception exp) {
                Log.e(TAG, exp.getMessage() + "", exp);
                mWebViewBridge.returnToWeb(id, dest, new Error(ErrorCode.UNKNOWN_ERROR.getCode(), exp.getMessage()
                        + "", dest));
            }
        }
    });
}
/**
 * URL-decodes the payload sent by JavaScript.
 *
 * @param data URL-encoded text
 * @return decoded text, or null if UTF-8 is unsupported (never on Android)
 */
private String decode(String data) {
    try {
        return URLDecoder.decode(data, "UTF-8");
    } catch (UnsupportedEncodingException exp) {
        Log.e(TAG, exp.getMessage() + "", exp);
        return null;
    }
}
/**
 * Looks up the bridge method registered under {@code dest} and invokes it via
 * reflection, converting {@code json} into the method's Params argument and
 * supplying a callback that routes the result back to the web page.
 *
 * Supported signatures: (Params, Callback), (Callback), (Params), ().
 * For the (Params) and () forms the callback succeeds immediately with null.
 *
 * @param id   request id from the page; empty/null means fire-and-forget
 * @param dest registered destination (method) name
 * @param json request parameters converted into the first argument
 * @throws NotFoundRuntimeException if no method is registered for dest
 */
private void invoke(String id, String dest, JSONObject json) throws InvocationTargetException,
IllegalArgumentException, IllegalAccessException, JSONConvertException {
BridgeMethodConfig config = mConfigSet.get(dest);
if (config == null)
throw new NotFoundRuntimeException("cannot find " + dest + " bridge method");
final Method method = config.getMethod();
final Class<?>[] argTypes = method.getParameterTypes();
Params params = null;
Callback<?> callback = null;
// Only convert JSON when the first parameter actually implements Params.
if (argTypes.length > 0 && ClassUtils.isImplement(argTypes[0], Params.class))
params = (Params) JSONConverter.toObject(json, argTypes[0]);
// Without an id the page expects no reply; use a logging-only callback.
if (TextUtils.isEmpty(id))
callback = new DummyCallback(dest);
else
callback = new WebViewBridgeCallback(id, dest);
// Object receiver = config.getReceiver();
Object receiver = mReceiverMap.get(dest);
if (argTypes.length == 2)
method.invoke(receiver, params, callback);
else if (argTypes.length == 1) {
if (Callback.class.equals(argTypes[0]))
method.invoke(receiver, callback);
else {
method.invoke(receiver, params);
// The method takes only Params: report completion right away.
callback.succeed(mWebViewBridge, null);
}
} else
method.invoke(receiver);
}
/**
 * Validates that the major version of the "bridge" field in the incoming
 * message equals {@link WebViewBridge#COMPATIBLE_VERSION}.
 *
 * @param json parsed bridge message containing a "bridge" version string
 * @throws InvocationRuntimeException if the major version does not match
 */
protected void validateParamsVersion(JSONObject json) {
    double version = Math.floor(FloatUtils.parse(JSONObjectUtils.getString(json, "bridge")));
    if (version != WebViewBridge.COMPATIBLE_VERSION) {
        // Fixed message: the requirement is equality with COMPATIBLE_VERSION;
        // the old text claimed a version "newer than COMPATIBLE_VERSION + 1",
        // which misstated the actual check above.
        throw new InvocationRuntimeException("Need WebViewBridge version "
                + WebViewBridge.COMPATIBLE_VERSION + " but message declared " + version);
    }
}
/**
 * Debug-logs a bridge event together with its raw JSON payload
 * ("null" when the payload is absent).
 */
protected void logging(String event, String jsonText) {
    Log.d(TAG, event + " from Web with " + (jsonText == null ? "null" : jsonText));
}
/*
* private Method findMethod(String dest) { Method method =
* mConfig.getMethodMap().get(dest); if (method == null) throw new
* NotFoundRuntimeException("cannot find " + dest + " bridge method");
* return method; }
*/
/**
 * Callback bound to one web request: results and errors are routed back to
 * the page using the request id and destination method name. Parcelable so a
 * pending callback can cross process/instance-state boundaries.
 */
public static class WebViewBridgeCallback implements Callback<Result> {

    public static final Parcelable.Creator<WebViewBridgeCallback> CREATOR = new Parcelable.Creator<WebViewBridgeCallback>() {
        @Override
        public WebViewBridgeCallback createFromParcel(Parcel source) {
            return new WebViewBridgeCallback(source);
        }

        @Override
        public WebViewBridgeCallback[] newArray(int size) {
            return new WebViewBridgeCallback[size];
        }
    };

    /** Request id issued by the page; echoed back with the result. */
    private final String mId;
    /** Destination (bridge method) name the request targeted. */
    private final String mDest;

    public WebViewBridgeCallback(String id, String dest) {
        mId = id;
        mDest = dest;
    }

    /** Restores a callback parceled by {@link #writeToParcel}: id, then dest. */
    public WebViewBridgeCallback(Parcel source) {
        this(source.readString(), source.readString());
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(mId);
        dest.writeString(mDest);
    }

    @Override
    public void succeed(final WebViewBridge bridge, final Result result) {
        bridge.returnToWeb(mId, mDest, result);
    }

    @Override
    public void fail(final WebViewBridge bridge, final String code, final String msg) {
        bridge.returnToWeb(mId, mDest, new Error(code, msg, mDest));
    }

    @Override
    public int describeContents() {
        return 0; // no special contents (e.g. file descriptors)
    }
}
/**
 * No-op callback used for fire-and-forget requests (no request id from the
 * page). Any invocation is unexpected and is merely logged.
 */
public static class DummyCallback implements Callback<Result> {
    private static final String TAG = "DummyCallback";

    public static final Parcelable.Creator<DummyCallback> CREATOR = new Parcelable.Creator<DummyCallback>() {
        @Override
        public DummyCallback createFromParcel(Parcel source) {
            return new DummyCallback(source);
        }

        @Override
        public DummyCallback[] newArray(int size) {
            return new DummyCallback[size];
        }
    };

    /** Destination name, kept only for diagnostic logging. */
    private final String mDest;

    public DummyCallback(String dest) {
        mDest = dest;
    }

    public DummyCallback(Parcel source) {
        this(source.readString());
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(mDest);
    }

    @Override
    public void succeed(WebViewBridge bridge, Result result) {
        warnUnexpected();
    }

    @Override
    public void fail(WebViewBridge bridge, String code, String msg) {
        warnUnexpected();
    }

    // Both paths log the identical message the original emitted.
    private void warnUnexpected() {
        Log.w(TAG, "unnecessary callback is called with " + mDest);
    }

    @Override
    public int describeContents() {
        return 0;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.metadata.extractors;
import org.apache.oodt.cas.metadata.Metadata;
import org.apache.oodt.cas.metadata.exceptions.MetExtractionException;
import org.apache.oodt.cas.metadata.exceptions.MetExtractorConfigReaderException;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Assigns a ProductType based on a filename pattern, while simultaneously assigning values to metadata elements
* embedded in the filename pattern.
* <p/>
* Suppose I have files in the staging area ready to be ingested. These files usually have information encoded into the
* filename in order to distinguish the contents of one file from other files. For example book-1234567890.txt might be
* the contents of a book with ISBN 1234567890. Or page-1234567890-12.txt might be the text on page 12 of book with ISBN
* 1234567890.
* <p/>
* It would be useful to generate metadata from the information encoded in the filename (think: filename => metadata).
* The {@link ProdTypePatternMetExtractor} allows this in a flexible manner using regular expressions. Let's take a look
* at the config file for this met extractor.
* <p/>
* <pre>
* product-type-patterns.xml
*
* {@code
* <config>
* <!-- <element> MUST be defined before <product-type> so their patterns can be resolved -->
* <!-- name MUST be an element defined in elements.xml (also only upper and lower case alpha chars) -->
* <!-- regexp MUST be valid input to java.util.regex.Pattern.compile() -->
* <element name="ISBN" regexp="[0-9]{10}"/>
* <element name="Page" regexp="[0-9]*"/>
*
* <!-- name MUST be a ProductType name defined in product-types.xml -->
* <!-- metadata elements inside brackets MUST be mapped to the ProductType,
* as defined in product-type-element-map.xml -->
* <product-type name="Book" template="book-[ISBN].txt"/>
* <product-type name="BookPage" template="page-[ISBN]-[Page].txt"/>
* </config>
* }
* </pre>
* <p/>
* <p/>
* This file defines a regular expression for the "ISBN" metadata element, in this case, a 10-digit number. Also, the
* "Page" metadata element is defined as a sequence of 0 or more digits.
* <p/>
* Next, the file defines a filename pattern for the "Book" product type. The pattern is compiled into a regular
* expression, substituting the previously defined regexes as capture groups. For example, "book-[ISBN].txt" compiles to
* "book-([0-9]{10}).txt", and the ISBN met element is assigned to capture group 1. When the filename matches this
* pattern, 2 metadata assignments occur: (1) the ISBN met element is set to the matched regex group, and (2) the
* ProductType met element is set to "Book".
* <p/>
* Similarly, the second pattern sets ISBN, Page, and ProductType for files matching "page-([0-9]{10})-([0-9]*).txt".
* <p/>
* This achieves several things: <ol> <li>assigning met elements based on regular expressions</li> <li>assigning product
* type based on easy-to-understand pattern with met elements clearly indicated</li> <li>reuse of met element regular
* expressions</li> </ol>
* <p/>
* Differences from {@link FilenameTokenMetExtractor}:
* <ol>
* <li>Allows dynamic length metadata (does not rely on offset and length of metadata)</li>
* <li>Assigns ProductType</li>
* </ol>
* <p/>
* Differences from {@link org.apache.oodt.cas.crawl.AutoDetectProductCrawler}:
* <ol>
* <li>Does not require definition of custom MIME type and MIME-type regex. Really, all you want is to assign a
* ProductType, rather than indirectly assigning a custom MIME type that maps to a Product Type.</li>
* </ol>
* <p/>
* Differences from {@link org.apache.oodt.cas.filemgr.metadata.extractors.examples.FilenameRegexMetExtractor}:
* <ol>
* <li>Assigns ProductType. FilenameRegexMetExtractor runs after ProductType is already determined.</li>
* <li>Runs on the client-side (crawler). FilenameRegexMetExtractor runs on the server-side (filemgr).</li>
* <li>Different patterns for different ProductTypes. FilenameRegexMetExtractor config applies the same pattern to
* all files.</li>
* </ol>
* <p/>
* Prerequisites:
* <ol>
* <li>{@code <element>} tag occurs before {@code <product-type>} tag</li>
* <li>{@code <element> @name} attribute <strong>MUST</strong> be defined in FileManager policy elements.xml</li>
* <li>{@code <element> @regexp} attribute <strong>MUST</strong> be valid input to
* {@link java.util.regex.Pattern#compile(String)}</li>
* <li>{@code <product-type> @name} attribute <strong>MUST</strong> be a ProductType name (not ID) defined in
* product-types.xml</li>
* <li>met elements used in {@code <product-type> @template} attribute <strong>MUST</strong> be
* mapped to the ProductType, as defined in product-type-element-map.xml</li>
* </ol>
* <p/>
* <strong>Words of Caution</strong>
* <ul>
* <li><strong>Does not support nested met elements.</strong></li>
* <li><strong>Each pattern should map to one product type.</strong> Watch out for similar patterns. Don't do this:
* <pre>
* {@code
* <element name="Page" regexp="[0-9]*"/>
* <element name="Chapter" regexp="[0-9]*"/>
*
* <product-type name="Page" template="data-[Page].txt"/>
* <product-type name="Chapter" template="data-[Chapter].txt"/>
* }</pre>
* Instead, encode the product type information into the filename, for example:
* <pre>
* {@code
* <element name="Page" regexp="[0-9]*"/>
* <element name="Chapter" regexp="[0-9]*"/>
*
* <product-type name="Page" template="page-[Page].txt"/>
* <product-type name="Chapter" template="chapter-[Chapter].txt"/>
* }</pre>
* </li>
* </ul>
*
* @author rickdn (Ricky Nguyen)
*/
public class ProdTypePatternMetExtractor extends CmdLineMetExtractor {

    /**
     * SAX handler for product-type-patterns.xml: {@code <element>} tags define
     * named sub-regexes, {@code <product-type>} tags define filename templates
     * whose [Element] tokens are expanded into capture groups.
     */
    static class ConfigReader extends AbstractSAXConfigReader {
        private static final String ELEMENT_TAG = "element";
        private static final String ELEMENT_NAME_ATTR = "name";
        private static final String ELEMENT_REGEXP_ATTR = "regexp";
        private static final String PRODUCT_TYPE_TAG = "product-type";
        private static final String PRODUCT_TYPE_NAME_ATTR = "name";
        private static final String PRODUCT_TYPE_TEMPLATE_ATTR = "template";

        /** Matches one [MetElementName] token inside a template. */
        private static final Pattern MET_TOKEN = Pattern.compile("\\[([A-Za-z]*)\\]");

        /* full file name reg exp => prod type */
        private final Map<Pattern, String> prodTypePatterns = new HashMap<Pattern, String>();
        /* prod type => met elements in template order (one per capture group) */
        private final Map<String, List<String>> prodTypeElements = new HashMap<String, List<String>>();
        /* met element name => element reg exp pattern */
        private final Map<String, Pattern> elementPatterns = new HashMap<String, Pattern>();

        Map<Pattern, String> getProdTypePatterns() {
            return prodTypePatterns;
        }

        Map<String, List<String>> getProdTypeElements() {
            return prodTypeElements;
        }

        /**
         * Compiles a filename template (e.g. "book-[ISBN].txt") into a regex whose
         * capture groups correspond, in order, to the [Element] tokens.
         *
         * Fixes over the previous version: (1) literal template text is quoted with
         * {@link Pattern#quote(String)} -- the old code escaped only ".", so any
         * other regex metacharacter in a template silently altered matching; and
         * (2) referencing an element that was never declared now fails with a
         * descriptive exception instead of a NullPointerException.
         *
         * @param id       product type name
         * @param template filename template with [Element] tokens
         * @throws IllegalArgumentException if the template uses an undeclared element
         */
        void addProductType(String id, String template) {
            List<String> elemList = prodTypeElements.get(id);
            if (elemList == null) {
                elemList = new ArrayList<String>();
                prodTypeElements.put(id, elemList);
            }
            Matcher m = MET_TOKEN.matcher(template);
            StringBuilder regex = new StringBuilder();
            int literalStart = 0;
            while (m.find()) {
                appendLiteral(regex, template.substring(literalStart, m.start()));
                String elem = m.group(1);
                Pattern elemPattern = elementPatterns.get(elem);
                if (elemPattern == null) {
                    throw new IllegalArgumentException("product-type '" + id
                            + "' template references undeclared element '" + elem
                            + "'; <element> must appear before <product-type>");
                }
                // Each token becomes a capture group; group i maps to elemList.get(i-1).
                regex.append('(').append(elemPattern.pattern()).append(')');
                elemList.add(elem);
                literalStart = m.end();
            }
            appendLiteral(regex, template.substring(literalStart));
            prodTypePatterns.put(Pattern.compile(regex.toString()), id);
        }

        /** Appends template text quoted so regex metacharacters match literally. */
        private static void appendLiteral(StringBuilder sb, String literal) {
            if (!literal.isEmpty()) {
                sb.append(Pattern.quote(literal));
            }
        }

        /** Registers a named element regex, e.g. name="ISBN", regexp="[0-9]{10}". */
        void addElement(String name, String regexp) {
            elementPatterns.put(name, Pattern.compile(regexp));
        }

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
            if (qName.equals(ELEMENT_TAG)) {
                String name = attributes.getValue(ELEMENT_NAME_ATTR);
                String regexp = attributes.getValue(ELEMENT_REGEXP_ATTR);
                addElement(name, regexp);
            } else if (qName.equals(PRODUCT_TYPE_TAG)) {
                String id = attributes.getValue(PRODUCT_TYPE_NAME_ATTR);
                String template = attributes.getValue(PRODUCT_TYPE_TEMPLATE_ATTR);
                addProductType(id, template);
            }
        }

        @Override
        public AbstractSAXConfigReader parseConfigFile(File configFile) throws MetExtractorConfigReaderException {
            // Reset internal state so re-parsing does not accumulate stale patterns.
            prodTypePatterns.clear();
            prodTypeElements.clear();
            elementPatterns.clear();
            return super.parseConfigFile(configFile);
        }
    }

    private static final String PRODUCT_TYPE_MET_KEY = "ProductType";

    public ProdTypePatternMetExtractor() {
        super(new ConfigReader());
    }

    /**
     * Matches the file's name against every configured product-type pattern;
     * on a match, sets "ProductType" and one met element per capture group.
     * NOTE(review): if several patterns match, each appends its own ProductType
     * value -- keep patterns mutually exclusive (see class javadoc).
     */
    @Override
    protected Metadata extrMetadata(File file) throws MetExtractionException {
        Metadata met = new Metadata();
        ConfigReader mConfig = (ConfigReader) config;
        for (Pattern p : mConfig.getProdTypePatterns().keySet()) {
            Matcher m = p.matcher(file.getName());
            if (m.matches()) {
                String prodType = mConfig.getProdTypePatterns().get(p);
                met.addMetadata(PRODUCT_TYPE_MET_KEY, prodType);
                List<String> elemList = mConfig.getProdTypeElements().get(prodType);
                for (int i = 0; i < m.groupCount(); i++) {
                    met.addMetadata(elemList.get(i), m.group(i + 1));
                }
            }
        }
        return met;
    }

    public static void main(String[] args) throws Exception {
        processMain(args, new ProdTypePatternMetExtractor());
    }
}
| |
package mdb.com.data.api.entity;
import android.content.ContentValues;
import android.database.Cursor;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import mdb.com.data.db.MoviesContract;
import static mdb.com.data.db.MoviesContract.MovieEntry;
import static mdb.com.data.db.MoviesContract.getColumnDouble;
import static mdb.com.data.db.MoviesContract.getColumnInt;
import static mdb.com.data.db.MoviesContract.getColumnString;
/**
 * TMDb movie model: deserialized from the API via Gson, persisted through
 * {@link MovieEntry} columns, and passed between Android components as a
 * Parcelable. All boxed fields are nullable.
 */
public class MovieEntity implements Parcelable {
    @SerializedName("vote_count")
    @Expose
    public Integer voteCount;
    @SerializedName("id")
    @Expose
    public Integer id;
    @SerializedName("video")
    @Expose
    public Boolean video;
    @SerializedName("vote_average")
    @Expose
    public Double voteAverage;
    @SerializedName("title")
    @Expose
    public String title;
    @SerializedName("popularity")
    @Expose
    public Double popularity;
    @SerializedName("poster_path")
    @Expose
    public String posterPath;
    @SerializedName("original_language")
    @Expose
    public String originalLanguage;
    @SerializedName("original_title")
    @Expose
    public String originalTitle;
    @SerializedName("backdrop_path")
    @Expose
    public String backdropPath;
    @SerializedName("adult")
    @Expose
    public Boolean adult;
    @SerializedName("overview")
    @Expose
    public String overview;
    @SerializedName("release_date")
    @Expose
    public String releaseDate;

    /** Empty entity; fields are populated by Gson or via setters. */
    public MovieEntity() {
    }

    /** Populates every field from the current row of a movies-table cursor. */
    public MovieEntity(Cursor cursor) {
        this.id = getColumnInt(cursor, MovieEntry._ID);
        this.title = getColumnString(cursor, MovieEntry.TITLE);
        this.posterPath = getColumnString(cursor, MovieEntry.POSTER_PATH);
        this.adult = getColumnInt(cursor, MovieEntry.IS_ADULT) == 1;
        this.backdropPath = getColumnString(cursor, MovieEntry.BACKDROP_PATH);
        this.originalLanguage = getColumnString(cursor, MovieEntry.ORIGINAL_LANGUAGE);
        this.overview = getColumnString(cursor, MovieEntry.OVERVIEW);
        this.popularity = getColumnDouble(cursor, MovieEntry.POPULARITY);
        this.originalTitle = getColumnString(cursor, MovieEntry.ORIGINAL_TITLE);
        this.releaseDate = getColumnString(cursor, MovieEntry.RELEASE_DATE);
        this.video = getColumnInt(cursor, MovieEntry.HAS_VIDEO) == 1;
        this.voteAverage = getColumnDouble(cursor, MovieEntry.VOTE_AVERAGE);
        this.voteCount = getColumnInt(cursor, MovieEntry.VOTE_COUNT);
    }

    /**
     * Restores an instance written by {@link #writeToParcel}. Layout: nullable
     * numerics are prefixed with a presence byte; Booleans use a single byte
     * (0 = null, 1 = true, 2 = false); strings use Parcel's native encoding.
     */
    protected MovieEntity(Parcel in) {
        if (in.readByte() == 0) {
            voteCount = null;
        } else {
            voteCount = in.readInt();
        }
        if (in.readByte() == 0) {
            id = null;
        } else {
            id = in.readInt();
        }
        byte tmpVideo = in.readByte();
        video = tmpVideo == 0 ? null : tmpVideo == 1;
        if (in.readByte() == 0) {
            voteAverage = null;
        } else {
            voteAverage = in.readDouble();
        }
        title = in.readString();
        if (in.readByte() == 0) {
            popularity = null;
        } else {
            popularity = in.readDouble();
        }
        posterPath = in.readString();
        originalLanguage = in.readString();
        originalTitle = in.readString();
        backdropPath = in.readString();
        byte tmpAdult = in.readByte();
        adult = tmpAdult == 0 ? null : tmpAdult == 1;
        overview = in.readString();
        releaseDate = in.readString();
        // BUG FIX: the previous version read a trailing "isFavorite" byte here
        // that writeToParcel() never wrote, breaking the read/write symmetry
        // the Parcelable contract requires. The value was unused, so the stray
        // read is simply removed.
    }

    /** Writes fields in exactly the order read by {@link #MovieEntity(Parcel)}. */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        if (voteCount == null) {
            dest.writeByte((byte) 0);
        } else {
            dest.writeByte((byte) 1);
            dest.writeInt(voteCount);
        }
        if (id == null) {
            dest.writeByte((byte) 0);
        } else {
            dest.writeByte((byte) 1);
            dest.writeInt(id);
        }
        // 0 = null, 1 = true, 2 = false
        dest.writeByte((byte) (video == null ? 0 : video ? 1 : 2));
        if (voteAverage == null) {
            dest.writeByte((byte) 0);
        } else {
            dest.writeByte((byte) 1);
            dest.writeDouble(voteAverage);
        }
        dest.writeString(title);
        if (popularity == null) {
            dest.writeByte((byte) 0);
        } else {
            dest.writeByte((byte) 1);
            dest.writeDouble(popularity);
        }
        dest.writeString(posterPath);
        dest.writeString(originalLanguage);
        dest.writeString(originalTitle);
        dest.writeString(backdropPath);
        dest.writeByte((byte) (adult == null ? 0 : adult ? 1 : 2));
        dest.writeString(overview);
        dest.writeString(releaseDate);
    }

    @Override
    public int describeContents() {
        return 0; // no file descriptors or other special content
    }

    public static final Creator<MovieEntity> CREATOR = new Creator<MovieEntity>() {
        @Override
        public MovieEntity createFromParcel(Parcel in) {
            return new MovieEntity(in);
        }

        @Override
        public MovieEntity[] newArray(int size) {
            return new MovieEntity[size];
        }
    };

    /** Maps every field onto ContentValues keyed by MovieEntry column names. */
    public ContentValues convertToContentValues() {
        ContentValues movieContent = new ContentValues();
        movieContent.put(MovieEntry._ID, id);
        movieContent.put(MovieEntry.TITLE, title);
        movieContent.put(MovieEntry.POSTER_PATH, posterPath);
        movieContent.put(MovieEntry.IS_ADULT, adult);
        movieContent.put(MovieEntry.BACKDROP_PATH, backdropPath);
        movieContent.put(MovieEntry.ORIGINAL_LANGUAGE, originalLanguage);
        movieContent.put(MovieEntry.OVERVIEW, overview);
        movieContent.put(MovieEntry.POPULARITY, popularity);
        movieContent.put(MovieEntry.ORIGINAL_TITLE, originalTitle);
        movieContent.put(MovieEntry.RELEASE_DATE, releaseDate);
        movieContent.put(MovieEntry.HAS_VIDEO, video);
        movieContent.put(MovieEntry.VOTE_AVERAGE, voteAverage);
        movieContent.put(MovieEntry.VOTE_COUNT, voteCount);
        return movieContent;
    }

    /**
     * Builds an entity from a cursor row. Delegates to {@link #MovieEntity(Cursor)}
     * so the column-mapping logic lives in exactly one place (the previous
     * version duplicated every assignment via setters).
     */
    public static MovieEntity fromCursor(Cursor cursor) {
        return new MovieEntity(cursor);
    }

    public Integer getVoteCount() {
        return voteCount;
    }

    public void setVoteCount(Integer voteCount) {
        this.voteCount = voteCount;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Boolean getVideo() {
        return video;
    }

    public void setVideo(Boolean video) {
        this.video = video;
    }

    public Double getVoteAverage() {
        return voteAverage;
    }

    public void setVoteAverage(Double voteAverage) {
        this.voteAverage = voteAverage;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public Double getPopularity() {
        return popularity;
    }

    public void setPopularity(Double popularity) {
        this.popularity = popularity;
    }

    public String getPosterPath() {
        return posterPath;
    }

    public void setPosterPath(String posterPath) {
        this.posterPath = posterPath;
    }

    public String getOriginalLanguage() {
        return originalLanguage;
    }

    public void setOriginalLanguage(String originalLanguage) {
        this.originalLanguage = originalLanguage;
    }

    public String getOriginalTitle() {
        return originalTitle;
    }

    public void setOriginalTitle(String originalTitle) {
        this.originalTitle = originalTitle;
    }

    public String getBackdropPath() {
        return backdropPath;
    }

    public void setBackdropPath(String backdropPath) {
        this.backdropPath = backdropPath;
    }

    public Boolean getAdult() {
        return adult;
    }

    public void setAdult(Boolean adult) {
        this.adult = adult;
    }

    public String getOverview() {
        return overview;
    }

    public void setOverview(String overview) {
        this.overview = overview;
    }

    public String getReleaseDate() {
        return releaseDate;
    }

    public void setReleaseDate(String releaseDate) {
        this.releaseDate = releaseDate;
    }

    public static Creator<MovieEntity> getCREATOR() {
        return CREATOR;
    }
}
| |
package edu.utah.ece.async.sboldesigner.sbol;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.HashSet;
import java.util.concurrent.ThreadLocalRandom;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.filechooser.FileNameExtensionFilter;
import org.apache.commons.io.FilenameUtils;
import org.sbolstandard.core2.AccessType;
import org.sbolstandard.core2.Collection;
import org.sbolstandard.core2.CombinatorialDerivation;
import org.sbolstandard.core2.Component;
import org.sbolstandard.core2.ComponentDefinition;
import org.sbolstandard.core2.OperatorType;
import org.sbolstandard.core2.RestrictionType;
import org.sbolstandard.core2.SBOLDocument;
import org.sbolstandard.core2.SBOLValidationException;
import org.sbolstandard.core2.SequenceAnnotation;
import org.sbolstandard.core2.SequenceConstraint;
import org.sbolstandard.core2.StrategyType;
import org.sbolstandard.core2.TopLevel;
import org.sbolstandard.core2.VariableComponent;
import edu.utah.ece.async.sboldesigner.sbol.editor.SBOLEditorPreferences;
import edu.utah.ece.async.sboldesigner.sbol.editor.dialog.CombinatorialDerivationInputDialog;
public class CombinatorialExpansionUtil {
/**
 * Interactively expands a user-picked combinatorial derivation into a new
 * SBOLDocument holding either every enumerated design (ENUMERATE) or one
 * randomly sampled design (SAMPLE). Optionally exports the enumeration to a
 * CSV file (one row per design, comma-separated component display ids).
 *
 * @param panel parent component for the derivation picker dialog
 * @param doc   document containing the derivation and its variants
 * @return the generated document, or null if the user cancelled or there was
 *         nothing to enumerate
 * @throws FileNotFoundException if the chosen CSV file cannot be created
 */
public static SBOLDocument createCombinatorialDesign(java.awt.Component panel, SBOLDocument doc)
        throws SBOLValidationException, FileNotFoundException {
    CombinatorialDerivation derivation =
            CombinatorialDerivationInputDialog.pickCombinatorialDerivation(panel, doc, null);
    if (derivation == null) {
        JOptionPane.showMessageDialog(null, "There are no combinatorial designs");
        return null;
    }
    HashSet<ComponentDefinition> enumeration = enumerate(doc, derivation);
    if (enumeration.isEmpty()) {
        JOptionPane.showMessageDialog(null, "There are no variants to enumerate");
        return null;
    }
    if (!derivation.isSetStrategy()) {
        // Ask the user to choose a strategy when the derivation leaves it open.
        int choice = JOptionPane.showOptionDialog(null,
                "The strategy property is not set. Would you like to enumerate or sample?",
                "Combinatorial Design Strategy", JOptionPane.DEFAULT_OPTION, JOptionPane.QUESTION_MESSAGE, null,
                StrategyType.values(), StrategyType.values()[0]);
        if (choice == JOptionPane.CLOSED_OPTION) {
            return null;
        }
        derivation.setStrategy(StrategyType.values()[choice]);
    }
    int tocsv = JOptionPane.showConfirmDialog(null,
            "Would you like to export the enumerated design to a .csv file?",
            "Export to CSV", JOptionPane.YES_NO_OPTION);
    if (tocsv == JOptionPane.YES_OPTION) {
        JFileChooser chooser = new JFileChooser();
        chooser.setDialogTitle("choosertitle");
        chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        chooser.setAcceptAllFileFilterUsed(false);
        if (chooser.showSaveDialog(null) == JFileChooser.APPROVE_OPTION) {
            File f = chooser.getSelectedFile();
            // Append ".csv" unless the chosen name already carries that extension.
            if (!FilenameUtils.getExtension(f.getName()).equalsIgnoreCase("csv")) {
                f = new File(f.toString() + ".csv");
            }
            // try-with-resources: the previous version leaked the writer when a
            // write threw before close().
            try (PrintWriter writer = new PrintWriter(f)) {
                for (ComponentDefinition cd : enumeration) {
                    StringBuilder row = new StringBuilder();
                    for (Component comp : cd.getSortedComponents()) {
                        row.append(comp.getDefinition().getDisplayId()).append(", ");
                    }
                    writer.println(row);
                }
            }
        }
    }
    SBOLDocument generated = new SBOLDocument();
    generated.setDefaultURIprefix(SBOLEditorPreferences.INSTANCE.getUserInfo().getURI().toString());
    if (derivation.getStrategy() == StrategyType.SAMPLE) {
        // Pick one enumerated design uniformly at random.
        ComponentDefinition[] a = enumeration.toArray(new ComponentDefinition[0]);
        ComponentDefinition sample = a[ThreadLocalRandom.current().nextInt(a.length)];
        ProvenanceUtil.createProvenance(doc, sample, derivation);
        doc.createRecursiveCopy(generated, sample);
    } else if (derivation.getStrategy() == StrategyType.ENUMERATE) {
        ProvenanceUtil.createProvenance(doc, enumeration.iterator().next(), derivation);
        for (ComponentDefinition CD : enumeration) {
            doc.createRecursiveCopy(generated, CD);
        }
    } else {
        throw new IllegalArgumentException();
    }
    return generated;
}
/**
 * Creates a fresh ComponentDefinition copied from the derivation's template:
 * same types and roles, one component per template component (in sorted
 * order), chained together with PRECEDES sequence constraints. Provenance
 * (wasDerivedFrom) is recorded against the template, the derivation, and
 * each source component.
 *
 * @return the new template copy added to {@code doc}
 * @throws SBOLValidationException if any SBOL object creation fails
 */
private static ComponentDefinition createTemplateCopy(SBOLDocument doc, CombinatorialDerivation derivation)
throws SBOLValidationException {
ComponentDefinition template = derivation.getTemplate();
String uniqueId = SBOLUtils.getUniqueDisplayId(null, null, template.getDisplayId() + "_GeneratedInstance",
template.getVersion(), "CD", doc);
//ComponentDefinition copy = (ComponentDefinition) doc.createCopy(template, uniqueId, template.getVersion());
ComponentDefinition copy = doc.createComponentDefinition(uniqueId, template.getVersion(), template.getTypes());
copy.setRoles(template.getRoles());
Component prev = null;
Component curr;
for(Component c : template.getSortedComponents())
{
curr = copy.createComponent(c.getDisplayId(), c.getAccess(), c.getDefinitionURI());
if(prev != null)
{
// Preserve ordering: the previous component PRECEDES the current one.
uniqueId = SBOLUtils.getUniqueDisplayId(copy, null,
copy.getDisplayId() + "_SequenceConstraint", null, "SequenceConstraint", null);
copy.createSequenceConstraint(uniqueId, RestrictionType.PRECEDES, prev.getIdentity(),
curr.getIdentity());
}
prev = curr;
}
copy.addWasDerivedFrom(template.getIdentity());
copy.addWasDerivedFrom(derivation.getIdentity());
for (Component component : copy.getComponents()) {
component.addWasDerivedFrom(template.getComponent(component.getDisplayId()).getIdentity());
}
//copy.clearSequenceAnnotations();
return copy;
}
/**
 * Enumerates concrete designs for the derivation: starts from a single copy
 * of the template, then for each variable component replaces every current
 * parent with one copy per permitted group of variant children (cartesian
 * product across variable components; groups are produced by group()).
 *
 * @return the set of fully-expanded ComponentDefinitions
 * @throws SBOLValidationException if copying or constraint creation fails
 */
private static HashSet<ComponentDefinition> enumerate(SBOLDocument doc, CombinatorialDerivation derivation)
throws SBOLValidationException {
HashSet<ComponentDefinition> parents = new HashSet<>();
parents.add(createTemplateCopy(doc, derivation));
for (VariableComponent vc : derivation.getVariableComponents()) {
HashSet<ComponentDefinition> newParents = new HashSet<>();
for (ComponentDefinition parent : parents) {
for (HashSet<ComponentDefinition> children : group(collectVariants(doc, vc), vc.getOperator())) {
// create copy of parent
String uniqueId = SBOLUtils.getUniqueDisplayId(null, null, parent.getDisplayId(),
parent.getVersion(), "CD", doc);
ComponentDefinition newParent = (ComponentDefinition) doc.createCopy(parent, uniqueId, "1");
// add children
ComponentDefinition template = derivation.getTemplate();
addChildren(template, template.getComponent(vc.getVariableURI()), newParent, children);
// add to newParents
newParents.add(newParent);
}
}
parents = newParents;
}
return parents;
}
/**
 * Substitutes the template component inside {@code newParent} with the given
 * set of variant children. An empty set removes the component entirely,
 * splicing or dropping the sequence constraints and annotations that
 * referenced it. Otherwise the first child reuses the existing component
 * slot and each additional child gets a new Component plus PRECEDES
 * constraints mirroring the original component's neighbors in the template.
 *
 * @throws SBOLValidationException if component/constraint creation fails
 */
private static void addChildren(ComponentDefinition originalTemplate, Component originalComponent,
ComponentDefinition newParent, HashSet<ComponentDefinition> children) throws SBOLValidationException {
Component newComponent = newParent.getComponent(originalComponent.getDisplayId());
newComponent.addWasDerivedFrom(originalComponent.getIdentity());
if (children.isEmpty()) {
// No variant chosen (ZERO* operators): delete the component and fix up
// the constraints/annotations that referenced it.
removeConstraintReferences(newParent, newComponent);
for (SequenceAnnotation sa : newParent.getSequenceAnnotations()) {
if (sa.isSetComponent() && sa.getComponentURI().equals(newComponent.getIdentity())) {
newParent.removeSequenceAnnotation(sa);
}
}
newParent.removeComponent(newComponent);
return;
}
boolean first = true;
for (ComponentDefinition child : children) {
if (first) {
// take over the definition of newParent's version of the
// original component
newComponent.setDefinition(child.getIdentity());
first = false;
} else {
// create a new component
String uniqueId = SBOLUtils.getUniqueDisplayId(newParent, null, child.getDisplayId() + "_Component",
"1", "Component", null);
Component link = newParent.createComponent(uniqueId, AccessType.PUBLIC, child.getIdentity());
link.addWasDerivedFrom(originalComponent.getIdentity());
// create a new 'prev precedes link' constraint
Component oldPrev = getBeforeComponent(originalTemplate, originalComponent);
if (oldPrev != null) {
Component newPrev = newParent.getComponent(oldPrev.getDisplayId());
if (newPrev != null) {
uniqueId = SBOLUtils.getUniqueDisplayId(newParent, null,
newParent.getDisplayId() + "_SequenceConstraint", null, "SequenceConstraint", null);
newParent.createSequenceConstraint(uniqueId, RestrictionType.PRECEDES, newPrev.getIdentity(),
link.getIdentity());
}
}
// create a new 'link precedes next' constraint
Component oldNext = getAfterComponent(originalTemplate, originalComponent);
if (oldNext != null) {
Component newNext = newParent.getComponent(oldNext.getDisplayId());
if (newNext != null) {
uniqueId = SBOLUtils.getUniqueDisplayId(newParent, null,
newParent.getDisplayId() + "_SequenceConstraint", null, "SequenceConstraint", null);
newParent.createSequenceConstraint(uniqueId, RestrictionType.PRECEDES, link.getIdentity(),
newNext.getIdentity());
}
}
}
}
}
/**
 * Fixes up sequence constraints when {@code newComponent} is about to be
 * removed: if some X PRECEDES newComponent and newComponent PRECEDES some Y,
 * the pair is spliced into X PRECEDES Y; constraints that cannot be spliced
 * are removed.
 *
 * NOTE(review): the splice relies on iteration order pairing a subject-hit
 * with a later object-hit (or vice versa), and removes constraints while
 * iterating -- presumably getSequenceConstraints() returns a defensive copy;
 * confirm against the libSBOLj implementation.
 */
private static void removeConstraintReferences(ComponentDefinition newParent, Component newComponent) throws SBOLValidationException {
Component subject = null;
Component object = null;
for (SequenceConstraint sc : newParent.getSequenceConstraints()) {
if (sc.getSubject().equals(newComponent)) {
object = sc.getObject();
//If we know what the new subject of this sequence constraint should be, modify it
if(subject != null) {
sc.setSubject(subject.getIdentity());
object = null;
subject = null;
}else {//else remove it
newParent.removeSequenceConstraint(sc);
}
}
if(sc.getObject().equals(newComponent)){
subject = sc.getSubject();
//If we know what the new object of this sequence constraint should be, modify it
if(object != null) {
sc.setObject(object.getIdentity());
object = null;
subject = null;
}else {//else remove it
newParent.removeSequenceConstraint(sc);
}
}
}
}
private static Component getBeforeComponent(ComponentDefinition template, Component component) {
for (SequenceConstraint sc : template.getSequenceConstraints()) {
if (sc.getRestriction().equals(RestrictionType.PRECEDES) && sc.getObject().equals(component)) {
return sc.getSubject();
}
}
return null;
}
private static Component getAfterComponent(ComponentDefinition template, Component component) {
for (SequenceConstraint sc : template.getSequenceConstraints()) {
if (sc.getRestriction().equals(RestrictionType.PRECEDES) && sc.getSubject().equals(component)) {
return sc.getObject();
}
}
return null;
}
private static HashSet<HashSet<ComponentDefinition>> group(HashSet<ComponentDefinition> variants,
OperatorType operator) {
HashSet<HashSet<ComponentDefinition>> groups = new HashSet<>();
for (ComponentDefinition CD : variants) {
HashSet<ComponentDefinition> group = new HashSet<>();
group.add(CD);
groups.add(group);
}
if (operator == OperatorType.ONE) {
return groups;
}
if (operator == OperatorType.ZEROORONE) {
groups.add(new HashSet<>());
return groups;
}
groups.clear();
generateCombinations(groups, variants.toArray(new ComponentDefinition[0]), 0, new HashSet<>());
if (operator == OperatorType.ONEORMORE) {
return groups;
}
if (operator == OperatorType.ZEROORMORE) {
groups.add(new HashSet<>());
return groups;
}
throw new IllegalArgumentException(operator.toString() + " operator not supported");
}
/**
* Generates all combinations except the empty set.
*/
private static void generateCombinations(HashSet<HashSet<ComponentDefinition>> groups,
ComponentDefinition[] variants, int i, HashSet<ComponentDefinition> set) {
if (i == variants.length) {
if (!set.isEmpty()) {
groups.add(set);
}
return;
}
HashSet<ComponentDefinition> no = new HashSet<>(set);
generateCombinations(groups, variants, i + 1, no);
HashSet<ComponentDefinition> yes = new HashSet<>(set);
yes.add(variants[i]);
generateCombinations(groups, variants, i + 1, yes);
}
private static HashSet<ComponentDefinition> collectVariants(SBOLDocument doc, VariableComponent vc)
throws SBOLValidationException {
HashSet<ComponentDefinition> variants = new HashSet<>();
//Recursively collect variants from possible nested VariantDerivations
// for(CombinatorialDerivation cd : vc.getVariantDerivations())
// {
// for (VariableComponent v : cd.getVariableComponents()) {
// variants.addAll(collectVariants(doc, v));
//
// }
// }
// add all variants
variants.addAll(vc.getVariants());
// add all variants from variantCollections
for (Collection c : vc.getVariantCollections()) {
for (TopLevel tl : c.getMembers()) {
if (tl instanceof ComponentDefinition) {
variants.add((ComponentDefinition) tl);
}
}
}
// add all variants from variantDerivations
for (CombinatorialDerivation derivation : vc.getVariantDerivations()) {
variants.addAll(enumerate(doc, derivation));
}
return variants;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import org.apache.lucene.util.Version;
import org.apache.solr.common.SolrException;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.SystemIdResolver;
import org.apache.solr.common.util.XMLErrorLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.apache.commons.io.IOUtils;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
/**
*
*/
/**
 * Wrapper around a parsed XML configuration resource (e.g. solrconfig.xml)
 * providing XPath-based, typed accessors with optional system/core property
 * substitution and an optional XPath prefix applied to relative paths.
 */
public class Config {
  public static final Logger log = LoggerFactory.getLogger(Config.class);
  private static final XMLErrorLogger xmllog = new XMLErrorLogger(log);

  // Shared factory used to mint a fresh XPath per query.
  // NOTE(review): XPathFactory is not specified to be thread-safe for
  // concurrent newXPath() calls — confirm callers serialize access.
  static final XPathFactory xpathFactory = XPathFactory.newInstance();

  private final Document doc;              // parsed DOM of the config resource
  private final String prefix;             // optional xpath prefix, normalized to end with '/'
  private final String name;               // resource name, used for logging and error reporting
  private final SolrResourceLoader loader; // resolves the resource and supplies substitution properties

  /**
   * Builds a config from a resource name with no xpath prefix.
   */
  public Config(SolrResourceLoader loader, String name) throws ParserConfigurationException, IOException, SAXException
  {
    this( loader, name, null, null );
  }

  /**
   * For the transition from using solr.xml to solr.properties, see SOLR-4196. Remove
   * for 5.0, thus it's already deprecated
   * @param loader - Solr resource loader
   * @param cfg - SolrConfig, for backwards compatibility with the solr.xml layer.
   * @throws TransformerException if the XML file is mal-formed
   */
  @Deprecated
  public Config(SolrResourceLoader loader, Config cfg) throws TransformerException {
    // Copies the other config's DOM so the two instances do not share state.
    this(loader, null, ConfigSolrXml.copyDoc(cfg.getDocument()));
  }

  /** Builds a config with property substitution enabled. */
  public Config(SolrResourceLoader loader, String name, InputSource is, String prefix) throws ParserConfigurationException, IOException, SAXException
  {
    this(loader, name, is, prefix, true);
  }

  /**
   * Builds a config:
   * <p>
   * Note that the 'name' parameter is used to obtain a valid input stream if no valid one is provided through 'is'.
   * If no valid stream is provided, a valid SolrResourceLoader instance should be provided through 'loader' so
   * the resource can be opened (@see SolrResourceLoader#openResource); if no SolrResourceLoader instance is provided, a default one
   * will be created.
   * </p>
   * <p>
   * Consider passing a non-null 'name' parameter in all use-cases since it is used for logging &amp; exception reporting.
   * </p>
   * @param loader the resource loader used to obtain an input stream if 'is' is null
   * @param name the resource name used if the input stream 'is' is null
   * @param is the resource as a SAX InputSource
   * @param prefix an optional prefix that will be prepended to all non-absolute xpath expressions
   * @param subProps whether to substitute core properties into the parsed document
   */
  public Config(SolrResourceLoader loader, String name, InputSource is, String prefix, boolean subProps) throws ParserConfigurationException, IOException, SAXException
  {
    if( loader == null ) {
      loader = new SolrResourceLoader( null );
    }
    this.loader = loader;
    this.name = name;
    // Normalize the prefix so relative xpaths can be concatenated directly.
    this.prefix = (prefix != null && !prefix.endsWith("/"))? prefix + '/' : prefix;
    try {
      // NOTE(review): DTD/external-entity processing is not disabled here;
      // config files are presumed trusted local resources — confirm.
      javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      if (is == null) {
        // No stream supplied: open the named resource through the loader.
        is = new InputSource(loader.openConfig(name));
        is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(name));
      }
      // only enable xinclude, if a SystemId is available
      if (is.getSystemId() != null) {
        try {
          dbf.setXIncludeAware(true);
          dbf.setNamespaceAware(true);
        } catch(UnsupportedOperationException e) {
          log.warn(name + " XML parser doesn't support XInclude option");
        }
      }
      final DocumentBuilder db = dbf.newDocumentBuilder();
      db.setEntityResolver(new SystemIdResolver(loader));
      db.setErrorHandler(xmllog);
      try {
        doc = db.parse(is);
      } finally {
        // some XML parsers are broken and don't close the byte stream (but they should according to spec)
        IOUtils.closeQuietly(is.getByteStream());
      }
      if (subProps) {
        // Replace ${...} property references with values from the core properties.
        DOMUtil.substituteProperties(doc, loader.getCoreProperties());
      }
    } catch (ParserConfigurationException e) {
      SolrException.log(log, "Exception during parsing file: " + name, e);
      throw e;
    } catch (SAXException e) {
      SolrException.log(log, "Exception during parsing file: " + name, e);
      throw e;
    } catch( SolrException e ){
      SolrException.log(log,"Error in "+name,e);
      throw e;
    }
  }

  /** Wraps an already-parsed document; no prefix, no property substitution. */
  public Config(SolrResourceLoader loader, String name, Document doc) {
    this.prefix = null;
    this.doc = doc;
    this.name = name;
    this.loader = loader;
  }

  /**
   * @since solr 1.3
   */
  public SolrResourceLoader getResourceLoader()
  {
    return loader;
  }

  /**
   * @since solr 1.3
   */
  public String getResourceName() {
    return name;
  }

  public String getName() {
    return name;
  }

  public Document getDocument() {
    return doc;
  }

  /** Returns a fresh (non-thread-safe) XPath instance for ad-hoc queries. */
  public XPath getXPath() {
    return xpathFactory.newXPath();
  }

  // Prepends the configured prefix to relative xpath expressions; absolute
  // paths (starting with '/') are used verbatim.
  private String normalize(String path) {
    return (prefix==null || path.startsWith("/")) ? path : prefix+path;
  }

  /** Re-runs ${...} property substitution over the live document. */
  public void substituteProperties() {
    DOMUtil.substituteProperties(doc, loader.getCoreProperties());
  }

  /**
   * Evaluates the (possibly prefixed) xpath against the document, returning a
   * result of the requested XPathConstants type.
   * @throws SolrException wrapping any XPathExpressionException
   */
  public Object evaluate(String path, QName type) {
    XPath xpath = xpathFactory.newXPath();
    try {
      String xstr=normalize(path);
      // TODO: instead of prepending /prefix/, we could do the search rooted at /prefix...
      Object o = xpath.evaluate(xstr, doc, type);
      return o;
    } catch (XPathExpressionException e) {
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + path +" for " + name,e);
    }
  }

  /**
   * Returns the single node matched by the xpath, or null when absent and
   * {@code errIfMissing} is false; throws when absent and errIfMissing is true.
   */
  public Node getNode(String path, boolean errIfMissing) {
    XPath xpath = xpathFactory.newXPath();
    Node nd = null;
    String xstr = normalize(path);
    try {
      nd = (Node)xpath.evaluate(xstr, doc, XPathConstants.NODE);
      if (nd==null) {
        if (errIfMissing) {
          throw new RuntimeException(name + " missing "+path);
        } else {
          log.debug(name + " missing optional " + path);
          return null;
        }
      }
      log.trace(name + ":" + path + "=" + nd);
      return nd;
    } catch (XPathExpressionException e) {
      SolrException.log(log,"Error in xpath",e);
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr + " for " + name,e);
    } catch (SolrException e) {
      // Already a SolrException (e.g. from the missing-node branch); rethrow untouched.
      throw(e);
    } catch (Throwable e) {
      SolrException.log(log,"Error in xpath",e);
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr+ " for " + name,e);
    }
  }

  /**
   * Returns the node list matched by the xpath, or null when absent and
   * {@code errIfMissing} is false; throws when absent and errIfMissing is true.
   */
  public NodeList getNodeList(String path, boolean errIfMissing) {
    XPath xpath = xpathFactory.newXPath();
    String xstr = normalize(path);
    try {
      NodeList nodeList = (NodeList)xpath.evaluate(xstr, doc, XPathConstants.NODESET);
      if (null == nodeList) {
        if (errIfMissing) {
          throw new RuntimeException(name + " missing "+path);
        } else {
          log.debug(name + " missing optional " + path);
          return null;
        }
      }
      log.trace(name + ":" + path + "=" + nodeList);
      return nodeList;
    } catch (XPathExpressionException e) {
      SolrException.log(log,"Error in xpath",e);
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr + " for " + name,e);
    } catch (SolrException e) {
      throw(e);
    } catch (Throwable e) {
      SolrException.log(log,"Error in xpath",e);
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr+ " for " + name,e);
    }
  }

  /**
   * Returns the set of attributes on the given element that are not among the given knownAttributes,
   * or null if all attributes are known.
   */
  public Set<String> getUnknownAttributes(Element element, String... knownAttributes) {
    Set<String> knownAttributeSet = new HashSet<String>(Arrays.asList(knownAttributes));
    Set<String> unknownAttributeSet = null;
    NamedNodeMap attributes = element.getAttributes();
    for (int i = 0 ; i < attributes.getLength() ; ++i) {
      final String attributeName = attributes.item(i).getNodeName();
      if ( ! knownAttributeSet.contains(attributeName)) {
        // Lazily allocate: the common case is "no unknown attributes".
        if (null == unknownAttributeSet) {
          unknownAttributeSet = new HashSet<String>();
        }
        unknownAttributeSet.add(attributeName);
      }
    }
    return unknownAttributeSet;
  }

  /**
   * Logs an error and throws an exception if any of the element(s) at the given elementXpath
   * contains an attribute name that is not among knownAttributes.
   */
  public void complainAboutUnknownAttributes(String elementXpath, String... knownAttributes) {
    // element name -> sorted set of unknown attribute names found on it
    SortedMap<String,SortedSet<String>> problems = new TreeMap<String,SortedSet<String>>();
    NodeList nodeList = getNodeList(elementXpath, false);
    for (int i = 0 ; i < nodeList.getLength() ; ++i) {
      Element element = (Element)nodeList.item(i);
      Set<String> unknownAttributes = getUnknownAttributes(element, knownAttributes);
      if (null != unknownAttributes) {
        String elementName = element.getNodeName();
        SortedSet<String> allUnknownAttributes = problems.get(elementName);
        if (null == allUnknownAttributes) {
          allUnknownAttributes = new TreeSet<String>();
          problems.put(elementName, allUnknownAttributes);
        }
        allUnknownAttributes.addAll(unknownAttributes);
      }
    }
    if (problems.size() > 0) {
      // Build a message like: <elem attr="..." attr2="...">, <other attr="...">
      StringBuilder message = new StringBuilder();
      for (Map.Entry<String,SortedSet<String>> entry : problems.entrySet()) {
        if (message.length() > 0) {
          message.append(", ");
        }
        message.append('<');
        message.append(entry.getKey());
        for (String attributeName : entry.getValue()) {
          message.append(' ');
          message.append(attributeName);
          message.append("=\"...\"");
        }
        message.append('>');
      }
      message.insert(0, "Unknown attribute(s) on element(s): ");
      String msg = message.toString();
      SolrException.log(log, msg);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
    }
  }

  /**
   * Returns the text value at the given xpath, or null when the node is absent
   * and {@code errIfMissing} is false.
   */
  public String getVal(String path, boolean errIfMissing) {
    Node nd = getNode(path,errIfMissing);
    if (nd==null) return null;

    String txt = DOMUtil.getText(nd);

    log.debug(name + ' '+path+'='+txt);
    return txt;

    /******
    short typ = nd.getNodeType();
    if (typ==Node.ATTRIBUTE_NODE || typ==Node.TEXT_NODE) {
      return nd.getNodeValue();
    }
    return nd.getTextContent();
    ******/
  }

  /** Returns the required text value at the given xpath. */
  public String get(String path) {
    return getVal(path,true);
  }

  /** Returns the text value at the given xpath, or {@code def} when absent or empty. */
  public String get(String path, String def) {
    String val = getVal(path, false);
    if (val == null || val.length() == 0) {
      return def;
    }
    return val;
  }

  public int getInt(String path) {
    return Integer.parseInt(getVal(path, true));
  }

  public int getInt(String path, int def) {
    String val = getVal(path, false);
    return val!=null ? Integer.parseInt(val) : def;
  }

  public boolean getBool(String path) {
    return Boolean.parseBoolean(getVal(path, true));
  }

  public boolean getBool(String path, boolean def) {
    String val = getVal(path, false);
    return val!=null ? Boolean.parseBoolean(val) : def;
  }

  public float getFloat(String path) {
    return Float.parseFloat(getVal(path, true));
  }

  public float getFloat(String path, float def) {
    String val = getVal(path, false);
    return val!=null ? Float.parseFloat(val) : def;
  }

  public double getDouble(String path){
    return Double.parseDouble(getVal(path, true));
  }

  public double getDouble(String path, double def) {
    String val = getVal(path, false);
    return val!=null ? Double.parseDouble(val) : def;
  }

  /** Parses the required value at the given xpath as a Lucene {@link Version}. */
  public Version getLuceneVersion(String path) {
    return parseLuceneVersionString(getVal(path, true));
  }

  /** Parses the value at the given xpath as a Lucene {@link Version}, or returns {@code def} when absent. */
  public Version getLuceneVersion(String path, Version def) {
    String val = getVal(path, false);
    return val!=null ? parseLuceneVersionString(val) : def;
  }

  // Guards the LUCENE_CURRENT warning so it is logged at most once per JVM.
  private static final AtomicBoolean versionWarningAlreadyLogged = new AtomicBoolean(false);

  /**
   * Leniently parses a luceneMatchVersion string into a {@link Version},
   * translating parse failures into a SERVER_ERROR SolrException and warning
   * (once) when the moving-target LUCENE_CURRENT value is used.
   */
  public static final Version parseLuceneVersionString(final String matchVersion) {
    final Version version;
    try {
      version = Version.parseLeniently(matchVersion);
    } catch (IllegalArgumentException iae) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Invalid luceneMatchVersion '" + matchVersion +
          "', valid values are: " + Arrays.toString(Version.values()) +
          " or a string in format 'V.V'", iae);
    }
    if (version == Version.LUCENE_CURRENT && !versionWarningAlreadyLogged.getAndSet(true)) {
      log.warn(
          "You should not use LUCENE_CURRENT as luceneMatchVersion property: "+
          "if you use this setting, and then Solr upgrades to a newer release of Lucene, "+
          "sizable changes may happen. If precise back compatibility is important "+
          "then you should instead explicitly specify an actual Lucene version."
      );
    }
    return version;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty.http;
import java.net.URI;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.Processor;
import org.apache.camel.component.netty.NettyComponent;
import org.apache.camel.component.netty.NettyConfiguration;
import org.apache.camel.component.netty.NettyServerBootstrapConfiguration;
import org.apache.camel.component.netty.http.handlers.HttpServerMultiplexChannelHandler;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.spi.HeaderFilterStrategyAware;
import org.apache.camel.spi.RestApiConsumerFactory;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.RestConsumerFactory;
import org.apache.camel.util.FileUtil;
import org.apache.camel.util.HostUtils;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ServiceHelper;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Netty HTTP based component.
*/
public class NettyHttpComponent extends NettyComponent implements HeaderFilterStrategyAware, RestConsumerFactory, RestApiConsumerFactory {

    private static final Logger LOG = LoggerFactory.getLogger(NettyHttpComponent.class);

    // Factories created by this component, whose lifecycles it therefore manages.
    // Multiplex handlers are keyed by port; bootstrap factories by consumer address.
    private final Map<Integer, HttpServerConsumerChannelFactory> multiplexChannelHandlers = new HashMap<Integer, HttpServerConsumerChannelFactory>();
    private final Map<String, HttpServerBootstrapFactory> bootstrapFactories = new HashMap<String, HttpServerBootstrapFactory>();
    private NettyHttpBinding nettyHttpBinding;
    private HeaderFilterStrategy headerFilterStrategy;
    private NettyHttpSecurityConfiguration securityConfiguration;

    public NettyHttpComponent() {
        // use the http configuration and filter strategy
        super(NettyHttpEndpoint.class);
        setConfiguration(new NettyHttpConfiguration());
        setHeaderFilterStrategy(new NettyHttpHeaderFilterStrategy());
        // use the binding that supports Rest DSL
        setNettyHttpBinding(new RestNettyHttpBinding(getHeaderFilterStrategy()));
    }

    /**
     * Creates a {@link NettyHttpEndpoint} for the given uri: copies the component
     * configuration, merges bootstrap/security reference parameters, resolves an
     * optional shared http server, and applies binding/filter-strategy defaults.
     */
    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        NettyConfiguration config;
        if (getConfiguration() != null) {
            // copy so per-endpoint changes do not leak back into the component config
            config = getConfiguration().copy();
        } else {
            config = new NettyHttpConfiguration();
        }

        // NOTE: this local intentionally shadows the component field; a per-endpoint
        // strategy from the uri parameters takes precedence over the component default.
        HeaderFilterStrategy headerFilterStrategy = resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class);

        // merge any custom bootstrap configuration on the config
        NettyServerBootstrapConfiguration bootstrapConfiguration = resolveAndRemoveReferenceParameter(parameters, "bootstrapConfiguration", NettyServerBootstrapConfiguration.class);
        if (bootstrapConfiguration != null) {
            Map<String, Object> options = new HashMap<String, Object>();
            if (IntrospectionSupport.getProperties(bootstrapConfiguration, options, null, false)) {
                IntrospectionSupport.setProperties(getCamelContext().getTypeConverter(), config, options);
            }
        }

        // any custom security configuration
        NettyHttpSecurityConfiguration securityConfiguration = resolveAndRemoveReferenceParameter(parameters, "securityConfiguration", NettyHttpSecurityConfiguration.class);
        Map<String, Object> securityOptions = IntrospectionSupport.extractProperties(parameters, "securityConfiguration.");

        config = parseConfiguration(config, remaining, parameters);
        setProperties(config, parameters);

        // validate config
        config.validateConfiguration();

        // are we using a shared http server?
        NettySharedHttpServer shared = resolveAndRemoveReferenceParameter(parameters, "nettySharedHttpServer", NettySharedHttpServer.class);
        if (shared != null) {
            // use port number from the shared http server
            LOG.debug("Using NettySharedHttpServer: {} with port: {}", shared, shared.getPort());
            config.setPort(shared.getPort());
        }

        // create the address uri which includes the remainder parameters (which is not configuration parameters for this component)
        URI u = new URI(UnsafeUriCharactersEncoder.encodeHttpURI(remaining));
        String addressUri = URISupport.createRemainingURI(u, parameters).toString();

        NettyHttpEndpoint answer = new NettyHttpEndpoint(addressUri, this, config);
        answer.setTimer(getTimer());

        // must use a copy of the binding on the endpoint to avoid sharing same instance that can cause side-effects
        if (answer.getNettyHttpBinding() == null) {
            Object binding = getNettyHttpBinding();
            if (binding instanceof RestNettyHttpBinding) {
                NettyHttpBinding copy = ((RestNettyHttpBinding) binding).copy();
                answer.setNettyHttpBinding(copy);
            } else if (binding instanceof DefaultNettyHttpBinding) {
                NettyHttpBinding copy = ((DefaultNettyHttpBinding) binding).copy();
                answer.setNettyHttpBinding(copy);
            }
        }
        // endpoint-specific strategy wins, then component default as fallback
        if (headerFilterStrategy != null) {
            answer.setHeaderFilterStrategy(headerFilterStrategy);
        } else if (answer.getHeaderFilterStrategy() == null) {
            answer.setHeaderFilterStrategy(getHeaderFilterStrategy());
        }
        if (securityConfiguration != null) {
            answer.setSecurityConfiguration(securityConfiguration);
        } else if (answer.getSecurityConfiguration() == null) {
            answer.setSecurityConfiguration(getSecurityConfiguration());
        }

        // configure any security options
        if (securityOptions != null && !securityOptions.isEmpty()) {
            securityConfiguration = answer.getSecurityConfiguration();
            if (securityConfiguration == null) {
                securityConfiguration = new NettyHttpSecurityConfiguration();
                answer.setSecurityConfiguration(securityConfiguration);
            }
            setProperties(securityConfiguration, securityOptions);
            validateParameters(uri, securityOptions, null);
        }

        answer.setNettySharedHttpServer(shared);
        return answer;
    }

    /**
     * Parses the endpoint uri into the configuration, forcing tcp transport and
     * non-textline mode since HTTP is framed by the Netty pipeline instead.
     */
    @Override
    protected NettyConfiguration parseConfiguration(NettyConfiguration configuration, String remaining, Map<String, Object> parameters) throws Exception {
        // ensure uri is encoded to be valid
        String safe = UnsafeUriCharactersEncoder.encodeHttpURI(remaining);
        URI uri = new URI(safe);
        configuration.parseURI(uri, parameters, this, "http", "https");

        // force using tcp as the underlying transport
        configuration.setProtocol("tcp");
        configuration.setTextline(false);

        if (configuration instanceof NettyHttpConfiguration) {
            ((NettyHttpConfiguration) configuration).setPath(uri.getPath());
        }

        return configuration;
    }

    public NettyHttpBinding getNettyHttpBinding() {
        return nettyHttpBinding;
    }

    /**
     * To use a custom org.apache.camel.component.netty.http.NettyHttpBinding for binding to/from Netty and Camel Message API.
     */
    public void setNettyHttpBinding(NettyHttpBinding nettyHttpBinding) {
        this.nettyHttpBinding = nettyHttpBinding;
    }

    public HeaderFilterStrategy getHeaderFilterStrategy() {
        return headerFilterStrategy;
    }

    /**
     * To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter headers.
     */
    public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }

    public NettyHttpSecurityConfiguration getSecurityConfiguration() {
        return securityConfiguration;
    }

    /**
     * Refers to a org.apache.camel.component.netty.http.NettyHttpSecurityConfiguration for configuring secure web resources.
     */
    public void setSecurityConfiguration(NettyHttpSecurityConfiguration securityConfiguration) {
        this.securityConfiguration = securityConfiguration;
    }

    /**
     * Returns the (lazily created) channel factory that multiplexes all consumers
     * sharing the given port onto one Netty server channel.
     */
    public synchronized HttpServerConsumerChannelFactory getMultiplexChannelHandler(int port) {
        HttpServerConsumerChannelFactory answer = multiplexChannelHandlers.get(port);
        if (answer == null) {
            answer = new HttpServerMultiplexChannelHandler();
            answer.init(port);
            multiplexChannelHandlers.put(port, answer);
        }
        return answer;
    }

    /**
     * Returns the (lazily created) bootstrap factory for the consumer's address,
     * so consumers on the same address share one server bootstrap.
     */
    protected synchronized HttpServerBootstrapFactory getOrCreateHttpNettyServerBootstrapFactory(NettyHttpConsumer consumer) {
        String key = consumer.getConfiguration().getAddress();
        HttpServerBootstrapFactory answer = bootstrapFactories.get(key);
        if (answer == null) {
            HttpServerConsumerChannelFactory channelFactory = getMultiplexChannelHandler(consumer.getConfiguration().getPort());
            answer = new HttpServerBootstrapFactory(channelFactory);
            answer.init(getCamelContext(), consumer.getConfiguration(), new HttpServerPipelineFactory(consumer));
            bootstrapFactories.put(key, answer);
        }
        return answer;
    }

    @Override
    public Consumer createConsumer(CamelContext camelContext, Processor processor, String verb, String basePath, String uriTemplate,
                                   String consumes, String produces, RestConfiguration configuration, Map<String, Object> parameters) throws Exception {
        return doCreateConsumer(camelContext, processor, verb, basePath, uriTemplate, consumes, produces, configuration, parameters, false);
    }

    @Override
    public Consumer createApiConsumer(CamelContext camelContext, Processor processor, String contextPath,
                                      RestConfiguration configuration, Map<String, Object> parameters) throws Exception {
        // reuse the createConsumer method we already have. The api need to use GET and match on uri prefix
        return doCreateConsumer(camelContext, processor, "GET", contextPath, null, null, null, configuration, parameters, true);
    }

    /**
     * Builds a netty-http endpoint uri from the rest-dsl configuration (scheme,
     * host, port, context-path) and creates a consumer on it.
     *
     * @param api when true the endpoint matches on uri prefix for the api-doc service
     */
    Consumer doCreateConsumer(CamelContext camelContext, Processor processor, String verb, String basePath, String uriTemplate,
                              String consumes, String produces, RestConfiguration configuration, Map<String, Object> parameters, boolean api) throws Exception {

        String path = basePath;
        if (uriTemplate != null) {
            // make sure to avoid double slashes
            if (uriTemplate.startsWith("/")) {
                path = path + uriTemplate;
            } else {
                path = path + "/" + uriTemplate;
            }
        }
        path = FileUtil.stripLeadingSeparator(path);

        String scheme = "http";
        String host = "";
        int port = 0;

        // if no explicit port/host configured, then use port from rest configuration
        RestConfiguration config = configuration;
        if (config == null) {
            // NOTE(review): uses getCamelContext() rather than the camelContext
            // parameter — presumably equivalent here; confirm.
            config = getCamelContext().getRestConfiguration("netty-http", true);
        }
        if (config.getScheme() != null) {
            scheme = config.getScheme();
        }
        if (config.getHost() != null) {
            host = config.getHost();
        }
        int num = config.getPort();
        if (num > 0) {
            port = num;
        }

        // prefix path with context-path if configured in rest-dsl configuration
        String contextPath = config.getContextPath();
        if (ObjectHelper.isNotEmpty(contextPath)) {
            contextPath = FileUtil.stripTrailingSeparator(contextPath);
            contextPath = FileUtil.stripLeadingSeparator(contextPath);
            if (ObjectHelper.isNotEmpty(contextPath)) {
                path = contextPath + "/" + path;
            }
        }

        // if no explicit hostname set then resolve the hostname
        if (ObjectHelper.isEmpty(host)) {
            if (config.getRestHostNameResolver() == RestConfiguration.RestHostNameResolver.localHostName) {
                host = HostUtils.getLocalHostName();
            } else if (config.getRestHostNameResolver() == RestConfiguration.RestHostNameResolver.localIp) {
                host = HostUtils.getLocalIp();
            }
        }

        Map<String, Object> map = new HashMap<String, Object>();
        // build query string, and append any endpoint configuration properties
        if (config.getComponent() == null || config.getComponent().equals("netty-http")) {
            // setup endpoint options
            if (config.getEndpointProperties() != null && !config.getEndpointProperties().isEmpty()) {
                map.putAll(config.getEndpointProperties());
            }
        }

        String query = URISupport.createQueryString(map);

        String url;
        if (api) {
            url = "netty-http:%s://%s:%s/%s?matchOnUriPrefix=true&httpMethodRestrict=%s";
        } else {
            url = "netty-http:%s://%s:%s/%s?httpMethodRestrict=%s";
        }

        // must use upper case for restrict
        String restrict = verb.toUpperCase(Locale.US);

        // get the endpoint
        url = String.format(url, scheme, host, port, path, restrict);
        if (!query.isEmpty()) {
            url = url + "&" + query;
        }

        NettyHttpEndpoint endpoint = camelContext.getEndpoint(url, NettyHttpEndpoint.class);
        setProperties(endpoint, parameters);

        // configure consumer properties
        Consumer consumer = endpoint.createConsumer(processor);
        if (config.getConsumerProperties() != null && !config.getConsumerProperties().isEmpty()) {
            setProperties(consumer, config.getConsumerProperties());
        }

        return consumer;
    }

    /**
     * Stops and clears the managed bootstrap factories and multiplex channel handlers.
     */
    @Override
    protected void doStop() throws Exception {
        super.doStop();

        ServiceHelper.stopServices(bootstrapFactories.values());
        bootstrapFactories.clear();

        // NOTE(review): stopService (singular) vs stopServices above — Camel's
        // ServiceHelper.stopService(Object) also iterates collections; confirm
        // the asymmetry is intentional.
        ServiceHelper.stopService(multiplexChannelHandlers.values());
        multiplexChannelHandlers.clear();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Date;
import java.util.concurrent.CompletableFuture;
import org.apache.commons.lang3.NotImplementedException;
import org.junit.Before;
import org.junit.Test;
import org.apache.geode.management.api.ClusterManagementException;
import org.apache.geode.management.api.ClusterManagementGetResult;
import org.apache.geode.management.api.ClusterManagementListOperationsResult;
import org.apache.geode.management.api.ClusterManagementListResult;
import org.apache.geode.management.api.ClusterManagementOperation;
import org.apache.geode.management.api.ClusterManagementOperationResult;
import org.apache.geode.management.api.ClusterManagementRealizationResult;
import org.apache.geode.management.api.ClusterManagementServiceTransport;
import org.apache.geode.management.api.CommandType;
import org.apache.geode.management.cluster.client.internal.ClientClusterManagementService;
import org.apache.geode.management.configuration.AbstractConfiguration;
import org.apache.geode.management.operation.RebalanceOperation;
import org.apache.geode.management.runtime.OperationResult;
import org.apache.geode.management.runtime.RebalanceResult;
import org.apache.geode.management.runtime.RuntimeInfo;
/**
 * Unit tests for {@link ClientClusterManagementService}. Each test verifies that the service
 * delegates to the underlying {@link ClusterManagementServiceTransport} and propagates the
 * transport's result unchanged, or translates a failed / unparseable response into a
 * {@link ClusterManagementException}.
 */
public class ClientClusterManagementServiceTest {
  private ClientClusterManagementService service;
  private ClusterManagementServiceTransport serviceTransport;
  private AbstractConfiguration<RuntimeInfo> configuration;
  private ClusterManagementRealizationResult successRealizationResult;
  private ClusterManagementOperationResult<RebalanceOperation, RebalanceResult> successOperationResult;
  private ClusterManagementOperation<OperationResult> operation;

  @SuppressWarnings("unchecked")
  @Before
  public void init() {
    serviceTransport = mock(ClusterManagementServiceTransport.class);
    configuration = mock(AbstractConfiguration.class);
    service = new ClientClusterManagementService(serviceTransport);
    successRealizationResult = mock(ClusterManagementRealizationResult.class);
    when(successRealizationResult.isSuccessful()).thenReturn(true);
    successOperationResult = mock(ClusterManagementOperationResult.class);
    when(successOperationResult.isSuccessful()).thenReturn(true);
    operation = mock(ClusterManagementOperation.class);
  }

  @Test
  public void createCallsSubmitMessageAndReturnsResult() {
    when(serviceTransport.submitMessage(any(), any())).thenReturn(successRealizationResult);
    when(configuration.getCreationCommandType()).thenReturn(CommandType.CREATE);

    ClusterManagementRealizationResult realizationResult = service.create(configuration);

    assertThat(realizationResult).isSameAs(successRealizationResult);
    verify(serviceTransport).submitMessage(same(configuration), same(CommandType.CREATE));
  }

  @Test
  public void deleteCallsSubmitMessageAndReturnsResult() {
    when(serviceTransport.submitMessage(any(), any())).thenReturn(successRealizationResult);

    ClusterManagementRealizationResult realizationResult = service.delete(configuration);

    assertThat(realizationResult).isSameAs(successRealizationResult);
    verify(serviceTransport).submitMessage(same(configuration), same(CommandType.DELETE));
  }

  @Test
  public void updateNotImplemented() {
    assertThatThrownBy(() -> service.update(null))
        .isInstanceOf(NotImplementedException.class)
        .hasMessageContaining("Not Implemented");
  }

  @Test
  public void listCallsSubmitMessageAndReturnsResult() {
    @SuppressWarnings("unchecked")
    ClusterManagementListResult<AbstractConfiguration<RuntimeInfo>, RuntimeInfo> successListResult =
        mock(ClusterManagementListResult.class);
    when(successListResult.isSuccessful()).thenReturn(true);
    when(serviceTransport.submitMessageForList(any())).thenReturn(successListResult);

    ClusterManagementListResult<AbstractConfiguration<RuntimeInfo>, RuntimeInfo> listResult =
        service.list(configuration);

    assertThat(listResult).isSameAs(successListResult);
    verify(serviceTransport).submitMessageForList(same(configuration));
  }

  @Test
  public void getCallsSubmitMessageAndReturnsResult() {
    @SuppressWarnings("unchecked")
    ClusterManagementGetResult<AbstractConfiguration<RuntimeInfo>, RuntimeInfo> successGetResult =
        mock(ClusterManagementGetResult.class);
    when(successGetResult.isSuccessful()).thenReturn(true);
    when(serviceTransport.submitMessageForGet(any())).thenReturn(successGetResult);

    ClusterManagementGetResult<AbstractConfiguration<RuntimeInfo>, RuntimeInfo> getResult =
        service.get(configuration);

    assertThat(getResult).isSameAs(successGetResult);
    verify(serviceTransport).submitMessageForGet(same(configuration));
  }

  @Test
  public void startCallsSubmitMessageAndReturnsResult() {
    RebalanceOperation rebalanceOperation = new RebalanceOperation();
    doReturn(successOperationResult).when(serviceTransport)
        .submitMessageForStart(any(RebalanceOperation.class));

    ClusterManagementOperationResult<RebalanceOperation, RebalanceResult> operationResult =
        service.start(rebalanceOperation);

    assertThat(operationResult).isSameAs(successOperationResult);
  }

  @Test
  public void getOperationCallsSubmitMessageAndReturnsResult() {
    String opId = "opId";
    RebalanceOperation opType = new RebalanceOperation();
    doReturn(successOperationResult).when(serviceTransport)
        .submitMessageForGetOperation(same(opType), same(opId));

    ClusterManagementOperationResult<RebalanceOperation, RebalanceResult> operationResult =
        service.get(opType, opId);

    assertThat(operationResult).isSameAs(successOperationResult);
    verify(serviceTransport).submitMessageForGetOperation(same(opType), same(opId));
  }

  @Test
  public void getOperationCallsSubmitMessageAndReturnsFuture() {
    String opId = "opId";
    RebalanceOperation opType = new RebalanceOperation();
    doReturn(successOperationResult).when(serviceTransport)
        .submitMessageForGetOperation(same(opType), same(opId));

    CompletableFuture<ClusterManagementOperationResult<RebalanceOperation, RebalanceResult>> future =
        service.getFuture(opType, opId);

    // The future polls the transport until the operation reports an end time; since
    // getOperationEnd() is not stubbed here, the future must keep polling and stay incomplete.
    await().untilAsserted(
        () -> verify(serviceTransport, atLeastOnce()).submitMessageForGetOperation(same(opType),
            same(opId)));
    assertThat(future.isDone()).isFalse();
    future.cancel(true);
  }

  @Test
  public void getOperationCallsSubmitMessageAndReturnsFutureThatCompletes() throws Exception {
    String opId = "opId";
    RebalanceOperation opType = new RebalanceOperation();
    doReturn(successOperationResult).when(serviceTransport)
        .submitMessageForGetOperation(same(opType), same(opId));

    CompletableFuture<ClusterManagementOperationResult<RebalanceOperation, RebalanceResult>> future =
        service.getFuture(opType, opId);

    await().untilAsserted(
        () -> verify(serviceTransport, atLeastOnce()).submitMessageForGetOperation(same(opType),
            same(opId)));
    assertThat(future.isDone()).isFalse();

    // Once the operation reports an end date the polling future should complete.
    when(successOperationResult.getOperationEnd()).thenReturn(new Date());
    // Bug fix: the original used await().untilAsserted(future::isDone), which passes a
    // ThrowingRunnable that discards the boolean from isDone() and therefore "succeeds"
    // immediately without waiting. until(Callable<Boolean>) actually waits for completion.
    await().until(future::isDone);
    assertThat(future.get()).isSameAs(successOperationResult);
  }

  @Test
  public void listOperationCallsSubmitMessageAndReturnsResult() {
    @SuppressWarnings("unchecked")
    ClusterManagementListOperationsResult<ClusterManagementOperation<OperationResult>, OperationResult> successListOperationsResult =
        mock(ClusterManagementListOperationsResult.class);
    when(successListOperationsResult.isSuccessful()).thenReturn(true);
    doReturn(successListOperationsResult).when(serviceTransport)
        .submitMessageForListOperation(any());

    ClusterManagementListOperationsResult<ClusterManagementOperation<OperationResult>, OperationResult> operationResult =
        service.list(operation);

    assertThat(operationResult).isSameAs(successListOperationsResult);
    verify(serviceTransport).submitMessageForListOperation(same(operation));
  }

  @Test
  public void createWithNullResultThrows() {
    // A null transport response means the server reply could not be deserialized.
    when(serviceTransport.submitMessage(any(), any())).thenReturn(null);

    assertThatThrownBy(() -> service.create(configuration))
        .hasMessageContaining("Unable to parse server response.");
  }

  @Test
  public void createWithFailedResult() {
    ClusterManagementRealizationResult realizationResult =
        mock(ClusterManagementRealizationResult.class);
    when(realizationResult.isSuccessful()).thenReturn(false);
    when(serviceTransport.submitMessage(any(), any())).thenReturn(realizationResult);

    Throwable throwable = catchThrowable(() -> service.create(configuration));

    // The failed result must be preserved inside the thrown exception for the caller.
    assertThat(throwable).isInstanceOf(ClusterManagementException.class);
    assertThat(((ClusterManagementException) throwable).getResult()).isSameAs(realizationResult);
  }
}
| |
/**
* $Id: mxGraphLayout.java,v 1.19 2010/01/13 10:43:46 gaudenz Exp $
* Copyright (c) 2008-2009, JGraph Ltd
*/
package com.mxgraph.layout;
import java.util.List;
import java.util.Map;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxPoint;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.view.mxCellState;
import com.mxgraph.view.mxGraph;
/**
* Abstract bass class for layouts
*/
public abstract class mxGraphLayout implements mxIGraphLayout
{
	/**
	 * Holds the enclosing graph.
	 */
	protected mxGraph graph;
	/**
	 * Boolean indicating if the bounding box of the label should be used if
	 * it is available. Default is true.
	 */
	protected boolean useBoundingBox = true;
	/**
	 * Constructs a new layout for the specified graph.
	 *
	 * @param graph Graph that this layout operates on.
	 */
	public mxGraphLayout(mxGraph graph)
	{
		this.graph = graph;
	}
	/* (non-Javadoc)
	 * @see com.mxgraph.layout.mxIGraphLayout#move(java.lang.Object, double, double)
	 */
	public void moveCell(Object cell, double x, double y)
	{
		// Intentionally a no-op in the base class; subclasses may override.
		// TODO: Map the position to a child index for
		// the cell to be placed closest to the position
	}
	/**
	 * Returns the associated graph.
	 */
	public mxGraph getGraph()
	{
		return graph;
	}
	/**
	 * Returns the constraint for the given key and cell. This implementation
	 * always returns the value for the given key in the style of the given
	 * cell.
	 * 
	 * @param key Key of the constraint to be returned.
	 * @param cell Cell whose constraint should be returned.
	 */
	public Object getConstraint(Object key, Object cell)
	{
		return getConstraint(key, cell, null, false);
	}
	/**
	 * Returns the constraint for the given key and cell. The optional edge and
	 * source arguments are used to return inbound and outgoing routing-
	 * constraints for the given edge and vertex. This implementation always
	 * returns the value for the given key in the style of the given cell.
	 * 
	 * @param key Key of the constraint to be returned.
	 * @param cell Cell whose constraint should be returned.
	 * @param edge Optional cell that represents the connection whose constraint
	 * should be returned. Default is null.
	 * @param source Optional boolean that specifies if the connection is incoming
	 * or outgoing. Default is false.
	 */
	public Object getConstraint(Object key, Object cell, Object edge,
			boolean source)
	{
		// Prefer the resolved view state's style; fall back to the raw cell
		// style when the cell has no state (e.g. not yet validated).
		mxCellState state = graph.getView().getState(cell);
		Map<String, Object> style = (state != null) ? state.getStyle() : graph
				.getCellStyle(cell);
		return (style != null) ? style.get(key) : null;
	}
	/**
	 * @return the useBoundingBox
	 */
	public boolean isUseBoundingBox()
	{
		return useBoundingBox;
	}
	/**
	 * @param useBoundingBox the useBoundingBox to set
	 */
	public void setUseBoundingBox(boolean useBoundingBox)
	{
		this.useBoundingBox = useBoundingBox;
	}
	/**
	 * Returns true if the given vertex may be moved by the layout.
	 * 
	 * @param vertex Object that represents the vertex to be tested.
	 * @return Returns true if the vertex can be moved.
	 */
	public boolean isVertexMovable(Object vertex)
	{
		return graph.isCellMovable(vertex);
	}
	/**
	 * Returns true if the given vertex should be ignored by the layout, that
	 * is, if it is not a vertex in the model or not visible.
	 * 
	 * @param vertex Object that represents the vertex to be tested.
	 * @return Returns true if the vertex should be ignored.
	 */
	public boolean isVertexIgnored(Object vertex)
	{
		return !graph.getModel().isVertex(vertex)
				|| !graph.isCellVisible(vertex);
	}
	/**
	 * Returns true if the given edge has no source or target terminal.
	 * 
	 * @param edge Object that represents the edge to be tested.
	 * @return Returns true if the edge should be ignored.
	 */
	public boolean isEdgeIgnored(Object edge)
	{
		mxIGraphModel model = graph.getModel();
		return !model.isEdge(edge) || !graph.isCellVisible(edge)
				|| model.getTerminal(edge, true) == null
				|| model.getTerminal(edge, false) == null;
	}
	/**
	 * Disables or enables the edge style of the given edge.
	 */
	public void setEdgeStyleEnabled(Object edge, boolean value)
	{
		// Note the inversion: the style key is NOEDGESTYLE, so enabling the
		// edge style means setting NOEDGESTYLE to "0".
		graph.setCellStyles(mxConstants.STYLE_NOEDGESTYLE, (value) ? "0" : "1",
				new Object[] { edge });
	}
	/**
	 * Sets the control points of the given edge to the given
	 * list of mxPoints. Set the points to null to remove all
	 * existing points for an edge.
	 */
	public void setEdgePoints(Object edge, List<mxPoint> points)
	{
		mxIGraphModel model = graph.getModel();
		mxGeometry geometry = model.getGeometry(edge);
		if (geometry == null)
		{
			geometry = new mxGeometry();
			geometry.setRelative(true);
		}
		else
		{
			// Clone before mutating so the model sees a new geometry instance
			// and can fire the corresponding change event.
			geometry = (mxGeometry) geometry.clone();
		}
		geometry.setPoints(points);
		model.setGeometry(edge, geometry);
	}
	/**
	 * Returns an <mxRectangle> that defines the bounds of the given cell
	 * or the bounding box if <useBoundingBox> is true.
	 */
	public mxRectangle getVertexBounds(Object vertex)
	{
		mxRectangle geo = graph.getModel().getGeometry(vertex);
		// Checks for oversize label bounding box and corrects
		// the return value accordingly
		if (useBoundingBox)
		{
			mxCellState state = graph.getView().getState(vertex);
			if (state != null)
			{
				double scale = graph.getView().getScale();
				mxRectangle tmp = state.getBoundingBox();
				// dx0/dy0: how far the label box extends beyond the cell's
				// top-left corner, converted back to unscaled model units
				// (negative or zero). dx1/dy1: the overhang past the
				// bottom-right corner (positive or zero).
				double dx0 = (tmp.getX() - state.getX()) / scale;
				double dy0 = (tmp.getY() - state.getY()) / scale;
				double dx1 = (tmp.getX() + tmp.getWidth() - state.getX() - state
						.getWidth())
						/ scale;
				double dy1 = (tmp.getY() + tmp.getHeight() - state.getY() - state
						.getHeight())
						/ scale;
				// Grow the geometry by the overhang on each side.
				geo = new mxRectangle(geo.getX() + dx0, geo.getY() + dy0, geo
						.getWidth()
						- dx0 + dx1, geo.getHeight() + -dy0 + dy1);
			}
		}
		// Defensive copy so callers cannot mutate the model's geometry.
		return new mxRectangle(geo);
	}
	/**
	 * Sets the new position of the given cell taking into account the size of
	 * the bounding box if <useBoundingBox> is true. The change is only carried
	 * out if the new location is not equal to the existing location, otherwise
	 * the geometry is not replaced with an updated instance. The new or old
	 * bounds are returned (including overlapping labels).
	 * 
	 * Parameters:
	 * 
	 * cell - <mxCell> whose geometry is to be set.
	 * x - Integer that defines the x-coordinate of the new location.
	 * y - Integer that defines the y-coordinate of the new location.
	 */
	public mxRectangle setVertexLocation(Object vertex, double x, double y)
	{
		mxIGraphModel model = graph.getModel();
		mxGeometry geometry = model.getGeometry(vertex);
		mxRectangle result = null;
		if (geometry != null)
		{
			result = new mxRectangle(x, y, geometry.getWidth(), geometry
					.getHeight());
			// Checks for oversize labels and offset the result
			if (useBoundingBox)
			{
				mxCellState state = graph.getView().getState(vertex);
				// Only compensates when the label box extends left of the
				// cell; x is shifted so the label rather than the cell lands
				// at the requested position.
				if (state != null
						&& state.getBoundingBox().getX() < state.getX())
				{
					double scale = graph.getView().getScale();
					mxRectangle box = state.getBoundingBox();
					x += (state.getX() - box.getX()) / scale;
					result.setWidth(box.getWidth());
				}
			}
			// Clone-and-replace only when the location actually changed, so
			// the model does not fire spurious geometry change events.
			if (geometry.getX() != x || geometry.getY() != y)
			{
				geometry = (mxGeometry) geometry.clone();
				geometry.setX(x);
				geometry.setY(y);
				model.setGeometry(vertex, geometry);
			}
		}
		return result;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.common.concurrent;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.bookkeeper.common.stats.OpStatsListener;
import org.apache.bookkeeper.common.util.OrderedScheduler;
import org.apache.bookkeeper.stats.OpStatsLogger;
/**
* Future related utils.
*/
@Slf4j
public final class FutureUtils {
    // Utility class; not instantiable.
    private FutureUtils() {}
    // Default mapping from a future's failure cause to a checked Exception:
    // pass Exceptions through unchanged, wrap any other Throwable.
    private static final Function<Throwable, Exception> DEFAULT_EXCEPTION_HANDLER = cause -> {
        if (cause instanceof Exception) {
            return (Exception) cause;
        } else {
            return new Exception(cause);
        }
    };
    /**
     * Returns an already-completed future with a {@code null} Void value.
     *
     * @return a completed {@code CompletableFuture<Void>}
     */
    public static CompletableFuture<Void> Void() {
        return value(null);
    }
    /**
     * Blocks until <i>future</i> completes and returns its value, unwrapping
     * {@link ExecutionException} via the default exception handler.
     *
     * @param future future to wait on
     * @return the future's value
     * @throws Exception the unwrapped failure cause
     */
    public static <T> T result(CompletableFuture<T> future) throws Exception {
        return FutureUtils.result(future, DEFAULT_EXCEPTION_HANDLER);
    }
    /**
     * Blocks up to the given timeout for <i>future</i> to complete, unwrapping
     * {@link ExecutionException} via the default exception handler.
     *
     * @param future future to wait on
     * @param timeout maximum time to wait
     * @param timeUnit unit of {@code timeout}
     * @return the future's value
     * @throws Exception the unwrapped failure cause (or {@link TimeoutException})
     */
    public static <T> T result(CompletableFuture<T> future, long timeout, TimeUnit timeUnit) throws Exception {
        return FutureUtils.result(future, DEFAULT_EXCEPTION_HANDLER, timeout, timeUnit);
    }
    // @SneakyThrows lets the checked InterruptedException propagate without being
    // declared; the interrupt flag is restored first so callers can observe it.
    @SneakyThrows(InterruptedException.class)
    public static <T, ExceptionT extends Throwable> T result(
        CompletableFuture<T> future, Function<Throwable, ExceptionT> exceptionHandler) throws ExceptionT {
        try {
            return future.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw e;
        } catch (ExecutionException e) {
            // A handler returning null means "swallow the failure"; the caller
            // then receives null as the result.
            ExceptionT cause = exceptionHandler.apply(e.getCause());
            if (null == cause) {
                return null;
            } else {
                throw cause;
            }
        }
    }
    // Timed variant of the handler-based result(); same interrupt and
    // null-handler semantics as above.
    @SneakyThrows(InterruptedException.class)
    public static <T, ExceptionT extends Throwable> T result(
        CompletableFuture<T> future,
        Function<Throwable, ExceptionT> exceptionHandler,
        long timeout,
        TimeUnit timeUnit) throws ExceptionT, TimeoutException {
        try {
            return future.get(timeout, timeUnit);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw e;
        } catch (ExecutionException e) {
            ExceptionT cause = exceptionHandler.apply(e.getCause());
            if (null == cause) {
                return null;
            } else {
                throw cause;
            }
        }
    }
    /**
     * Creates a new, incomplete future.
     */
    public static <T> CompletableFuture<T> createFuture() {
        return new CompletableFuture<T>();
    }
    /**
     * Returns a future already completed with <i>value</i>.
     */
    public static <T> CompletableFuture<T> value(T value) {
        return CompletableFuture.completedFuture(value);
    }
    /**
     * Returns a future already completed exceptionally with <i>cause</i>.
     */
    public static <T> CompletableFuture<T> exception(Throwable cause) {
        CompletableFuture<T> future = FutureUtils.createFuture();
        future.completeExceptionally(cause);
        return future;
    }
    /**
     * Completes <i>result</i> with <i>value</i>; a null future is silently ignored.
     */
    public static <T> void complete(CompletableFuture<T> result,
                                    T value) {
        if (null == result) {
            return;
        }
        result.complete(value);
    }
    /**
     * Completes <i>result</i> exceptionally with <i>cause</i>; a null future is
     * silently ignored.
     */
    public static <T> void completeExceptionally(CompletableFuture<T> result,
                                                 Throwable cause) {
        if (null == result) {
            return;
        }
        result.completeExceptionally(cause);
    }
    /**
     * Completing the {@code future} in the thread in the scheduler identified by
     * the {@code scheduleKey}.
     *
     * @param future future to complete
     * @param action action to execute when complete
     * @param scheduler scheduler to execute the action.
     * @param scheduleKey key to choose the thread to execute the action
     * @param <T>
     * @return
     */
    public static <T> CompletableFuture<T> whenCompleteAsync(
        CompletableFuture<T> future,
        BiConsumer<? super T, ? super Throwable> action,
        OrderedScheduler scheduler,
        Object scheduleKey) {
        return future.whenCompleteAsync(action, scheduler.chooseThread(scheduleKey));
    }
    /**
     * Returns a future that completes with the list of all values once every
     * future in <i>futureList</i> succeeds, or fails if any of them fails.
     */
    public static <T> CompletableFuture<List<T>> collect(List<CompletableFuture<T>> futureList) {
        CompletableFuture<Void> finalFuture =
            CompletableFuture.allOf(futureList.toArray(new CompletableFuture[futureList.size()]));
        // allOf has already completed every element, so join() cannot block here.
        return finalFuture.thenApply(result ->
            futureList
                .stream()
                .map(CompletableFuture::join)
                .collect(Collectors.toList()));
    }
    /**
     * Propagates the outcome (value or failure) of <i>src</i> to <i>target</i>.
     */
    public static <T> void proxyTo(CompletableFuture<T> src,
                                   CompletableFuture<T> target) {
        src.whenComplete((value, cause) -> {
            if (null == cause) {
                target.complete(value);
            } else {
                target.completeExceptionally(cause);
            }
        });
    }
    //
    // Process futures
    //
    // Processes a list of items strictly one at a time: the next item is only
    // submitted after the previous item's future completes, so `results` is
    // never mutated concurrently even though it is a plain ArrayList.
    private static class ListFutureProcessor<T, R>
        implements FutureEventListener<R>, Runnable {
        // Set once on failure or completion; checked in run() to stop processing.
        private volatile boolean done = false;
        private final Iterator<T> itemsIter;
        private final Function<T, CompletableFuture<R>> processFunc;
        private final CompletableFuture<List<R>> promise;
        private final List<R> results;
        private final ExecutorService callbackExecutor;
        ListFutureProcessor(List<T> items,
                            Function<T, CompletableFuture<R>> processFunc,
                            ExecutorService callbackExecutor) {
            this.itemsIter = items.iterator();
            this.processFunc = processFunc;
            this.promise = new CompletableFuture<>();
            this.results = Lists.newArrayListWithExpectedSize(items.size());
            this.callbackExecutor = callbackExecutor;
        }
        @Override
        public void onSuccess(R value) {
            results.add(value);
            // Continue with the next item, either inline or on the callback executor.
            if (null == callbackExecutor) {
                run();
            } else {
                callbackExecutor.submit(this);
            }
        }
        @Override
        public void onFailure(final Throwable cause) {
            // First failure aborts the whole list.
            done = true;
            if (null == callbackExecutor) {
                promise.completeExceptionally(cause);
            } else {
                callbackExecutor.submit((Runnable) () -> promise.completeExceptionally(cause));
            }
        }
        @Override
        public void run() {
            if (done) {
                log.debug("ListFutureProcessor is interrupted.");
                return;
            }
            if (!itemsIter.hasNext()) {
                promise.complete(results);
                done = true;
                return;
            }
            processFunc.apply(itemsIter.next()).whenComplete(this);
        }
    }
    /**
     * Process the list of items one by one using the process function <i>processFunc</i>.
     * The process will be stopped immediately if it fails on processing any one.
     *
     * @param collection list of items
     * @param processFunc process function
     * @param callbackExecutor executor to process the item
     * @return future presents the list of processed results
     */
    public static <T, R> CompletableFuture<List<R>> processList(List<T> collection,
                                                                Function<T, CompletableFuture<R>> processFunc,
                                                                @Nullable ExecutorService callbackExecutor) {
        ListFutureProcessor<T, R> processor =
            new ListFutureProcessor<T, R>(collection, processFunc, callbackExecutor);
        if (null != callbackExecutor) {
            callbackExecutor.submit(processor);
        } else {
            processor.run();
        }
        return processor.promise;
    }
    /**
     * Raise an exception to the <i>promise</i> within a given <i>timeout</i> period.
     * If the promise has been satisfied before raising, it won't change the state of the promise.
     *
     * @param promise promise to raise exception
     * @param timeout timeout period
     * @param unit timeout period unit
     * @param cause cause to raise
     * @param scheduler scheduler to execute raising exception
     * @param key the submit key used by the scheduler
     * @return the promise applied with the raise logic
     */
    public static <T> CompletableFuture<T> within(final CompletableFuture<T> promise,
                                                  final long timeout,
                                                  final TimeUnit unit,
                                                  final Throwable cause,
                                                  final OrderedScheduler scheduler,
                                                  final Object key) {
        // Negative timeout means "no timeout"; an already-done promise needs none.
        if (timeout < 0 || promise.isDone()) {
            return promise;
        }
        // schedule a timeout to raise timeout exception
        final java.util.concurrent.ScheduledFuture<?> task = scheduler.scheduleOrdered(key, () -> {
            if (!promise.isDone() && promise.completeExceptionally(cause)) {
                log.info("Raise exception", cause);
            }
        }, timeout, unit);
        // when the promise is satisfied, cancel the timeout task
        promise.whenComplete((value, throwable) -> {
            if (!task.cancel(true)) {
                log.debug("Failed to cancel the timeout task");
            }
        }
        );
        return promise;
    }
    /**
     * Ignore exception from the <i>future</i>.
     *
     * @param future the original future
     * @return a transformed future ignores exceptions
     */
    public static <T> CompletableFuture<Void> ignore(CompletableFuture<T> future) {
        return ignore(future, null);
    }
    /**
     * Ignore exception from the <i>future</i> and log <i>errorMsg</i> on exceptions.
     *
     * @param future the original future
     * @param errorMsg the error message to log on exceptions
     * @return a transformed future ignores exceptions
     */
    public static <T> CompletableFuture<Void> ignore(CompletableFuture<T> future,
                                                     final String errorMsg) {
        final CompletableFuture<Void> promise = new CompletableFuture<Void>();
        future.whenComplete(new FutureEventListener<T>() {
            @Override
            public void onSuccess(T value) {
                promise.complete(null);
            }
            @Override
            public void onFailure(Throwable cause) {
                if (null != errorMsg) {
                    log.error(errorMsg, cause);
                }
                // Failures are deliberately converted into successful completion.
                promise.complete(null);
            }
        });
        return promise;
    }
    /**
     * Runs <i>ensureBlock</i> when <i>future</i> completes, regardless of outcome
     * (analogous to a finally block).
     */
    public static <T> CompletableFuture<T> ensure(CompletableFuture<T> future,
                                                  Runnable ensureBlock) {
        return future.whenComplete((value, cause) -> {
            ensureBlock.run();
        });
    }
    /**
     * Returns a future that mirrors <i>future</i> on success, but on failure is
     * completed from the future produced by <i>rescueFuc</i> applied to the cause.
     */
    public static <T> CompletableFuture<T> rescue(CompletableFuture<T> future,
                                                  Function<Throwable, CompletableFuture<T>> rescueFuc) {
        CompletableFuture<T> result = FutureUtils.createFuture();
        future.whenComplete((value, cause) -> {
            if (null == cause) {
                result.complete(value);
                return;
            }
            proxyTo(rescueFuc.apply(cause), result);
        });
        return result;
    }
    /**
     * Add a event listener over <i>result</i> for collecting the operation stats.
     *
     * @param result result to listen on
     * @param opStatsLogger stats logger to record operations stats
     * @param stopwatch stop watch to time operation
     * @param <T>
     * @return result after registered the event listener
     */
    public static <T> CompletableFuture<T> stats(CompletableFuture<T> result,
                                                 OpStatsLogger opStatsLogger,
                                                 Stopwatch stopwatch) {
        return result.whenComplete(new OpStatsListener<T>(opStatsLogger, stopwatch));
    }
}
| |
/*
* Copyright 2015, Yahoo! Inc.
* Licensed under the terms of the Apache License 2.0. See LICENSE file at the project root for terms.
*/
package com.yahoo.sketches.theta;
import static com.yahoo.sketches.Util.DEFAULT_NOMINAL_ENTRIES;
import static com.yahoo.sketches.Util.DEFAULT_UPDATE_SEED;
import static com.yahoo.sketches.Util.checkIfPowerOf2;
import static com.yahoo.sketches.Util.LS;
import static com.yahoo.sketches.Util.TAB;
import com.yahoo.sketches.Family;
import com.yahoo.sketches.Util;
import com.yahoo.sketches.memory.Memory;
/**
* For building a new SetOperation.
*
* @author Lee Rhodes
*/
public class SetOperationBuilder {
  private int bLgNomLongs;
  private long bSeed;
  private ResizeFactor bRF;
  private float bP;
  private Memory bDstMem;

  /**
   * Constructor for building a new SetOperation.  The default configuration is
   * <ul>
   * <li>Nominal Entries: {@value com.yahoo.sketches.Util#DEFAULT_NOMINAL_ENTRIES}</li>
   * <li>Seed: {@value com.yahoo.sketches.Util#DEFAULT_UPDATE_SEED}</li>
   * <li>{@link com.yahoo.sketches.theta.ResizeFactor#X8}</li>
   * <li>Input Sampling Probability: 1.0</li>
   * <li>Memory: null</li>
   * </ul>
   */
  public SetOperationBuilder() {
    bLgNomLongs = Integer.numberOfTrailingZeros(DEFAULT_NOMINAL_ENTRIES);
    bSeed = DEFAULT_UPDATE_SEED;
    bP = (float) 1.0;
    bRF = ResizeFactor.X8;
    bDstMem = null;
  }

  /**
   * Sets the Nominal Entries for this set operation. Must be a power of 2.
   * @param nomEntries <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>
   * @return this SetOperationBuilder
   */
  public SetOperationBuilder setNominalEntries(int nomEntries) {
    // Uses the statically imported check for consistency with build(int, Family).
    checkIfPowerOf2(nomEntries, "nomEntries");
    // Store log-base-2 of the (power-of-2) value.
    bLgNomLongs = Integer.numberOfTrailingZeros(nomEntries);
    return this;
  }

  /**
   * Returns Log-base 2 Nominal Entries
   * @return Log-base 2 Nominal Entries
   */
  public int getLgNominalEntries() {
    return bLgNomLongs;
  }

  /**
   * Sets the long seed value that is required by the hashing function.
   * @param seed <a href="{@docRoot}/resources/dictionary.html#seed">See seed</a>
   * @return this SetOperationBuilder
   */
  public SetOperationBuilder setSeed(long seed) {
    bSeed = seed;
    return this;
  }

  /**
   * Returns the seed
   * @return the seed
   */
  public long getSeed() {
    return bSeed;
  }

  /**
   * Sets the upfront uniform sampling probability, <i>p</i>. Although this functionality is
   * implemented for Unions only, it rarely makes sense to use it. The proper use of upfront
   * sampling is when building the sketches.
   * @param p <a href="{@docRoot}/resources/dictionary.html#p">See Sampling Probability, <i>p</i></a>
   * @return this SetOperationBuilder
   */
  public SetOperationBuilder setP(float p) {
    if ((p <= 0.0) || (p > 1.0)) {
      throw new IllegalArgumentException("p must be > 0 and <= 1.0: "+p);
    }
    bP = p;
    return this;
  }

  /**
   * Returns the pre-sampling probability <i>p</i>
   * @return the pre-sampling probability <i>p</i>
   */
  public float getP() {
    return bP;
  }

  /**
   * Sets the cache Resize Factor
   * @param rf <a href="{@docRoot}/resources/dictionary.html#resizeFactor">See Resize Factor</a>
   * @return this SetOperationBuilder
   */
  public SetOperationBuilder setResizeFactor(ResizeFactor rf) {
    bRF = rf;
    return this;
  }

  /**
   * Returns the Resize Factor
   * @return the Resize Factor
   */
  public ResizeFactor getResizeFactor() {
    return bRF;
  }

  /**
   * Initializes the backing Memory store.
   * @param dstMem The destination Memory.
   * <a href="{@docRoot}/resources/dictionary.html#dstMem">See Destination Memory</a>
   * @return this SetOperationBuilder
   */
  public SetOperationBuilder initMemory(Memory dstMem) {
    bDstMem = dstMem;
    return this;
  }

  /**
   * Returns the Destination Memory
   * <a href="{@docRoot}/resources/dictionary.html#dstMem">See Destination Memory</a>.
   * @return the Destination Memory
   */
  public Memory getMemory() {
    return bDstMem;
  }

  /**
   * Returns a SetOperation with the current configuration of this Builder and the given Family.
   * A heap-backed implementation is built when no destination Memory was configured, otherwise
   * a direct (off-heap) implementation is built.
   * @param family the chosen SetOperation family
   * @return a SetOperation
   */
  public SetOperation build(Family family) {
    SetOperation setOp = null;
    switch (family) {
      case UNION: {
        if (bDstMem == null) {
          setOp = new HeapUnion(bLgNomLongs, bSeed, bP, bRF);
        }
        else {
          setOp = new DirectUnion(bLgNomLongs, bSeed, bP, bRF, bDstMem);
        }
        break;
      }
      case INTERSECTION: {
        if (bDstMem == null) {
          setOp = new HeapIntersection(bLgNomLongs, bSeed);
        }
        else {
          setOp = new DirectIntersection(bLgNomLongs, bSeed, bDstMem);
        }
        break;
      }
      case A_NOT_B: {
        // AnotB keeps no persistent state, so only a heap implementation exists.
        if (bDstMem == null) {
          setOp = new HeapAnotB(bSeed);
        }
        else throw new IllegalArgumentException(
            "AnotB is a stateless operation and cannot be persisted.");
        break;
      }
      default:
        throw new IllegalArgumentException(
            "Given Family cannot be built as a SetOperation: "+family.toString());
    }
    return setOp;
  }

  /**
   * Returns a SetOperation with the current configuration of this Builder and the given
   * <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a> and Family.
   * @param nomEntries <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>
   * @param family build this SetOperation family
   * @return a SetOperation
   */
  public SetOperation build(int nomEntries, Family family) {
    // Delegate instead of duplicating the power-of-2 check and log2 computation.
    setNominalEntries(nomEntries);
    return build(family);
  }

  /**
   * Convenience method, returns a configured SetOperation Union with
   * <a href="{@docRoot}/resources/dictionary.html#defaultNomEntries">Default Nominal Entries</a>
   * @return a Union object
   */
  public Union buildUnion() {
    return (Union) build(Family.UNION);
  }

  /**
   * Convenience method, returns a configured SetOperation Union with the given
   * <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>.
   * @param nomEntries <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>
   * @return a Union object
   */
  public Union buildUnion(int nomEntries) {
    return (Union) build(nomEntries, Family.UNION);
  }

  /**
   * Convenience method, returns a configured SetOperation Intersection with
   * <a href="{@docRoot}/resources/dictionary.html#defaultNomEntries">Default Nominal Entries</a>
   * @return an Intersection object
   */
  public Intersection buildIntersection() {
    return (Intersection) build(Family.INTERSECTION);
  }

  /**
   * Convenience method, returns a configured SetOperation Intersection with the given
   * <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>.
   * @param nomEntries <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>
   * @return an Intersection object
   */
  public Intersection buildIntersection(int nomEntries) {
    return (Intersection) build(nomEntries, Family.INTERSECTION);
  }

  /**
   * Convenience method, returns a configured SetOperation ANotB with
   * <a href="{@docRoot}/resources/dictionary.html#defaultNomEntries">Default Nominal Entries</a>
   * @return an ANotB object
   */
  public AnotB buildANotB() {
    return (AnotB) build(Family.A_NOT_B);
  }

  /**
   * Convenience method, returns a configured SetOperation ANotB with the given
   * <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>.
   * @param nomEntries <a href="{@docRoot}/resources/dictionary.html#nomEntries">Nominal Entries</a>
   * @return an ANotB object
   */
  public AnotB buildANotB(int nomEntries) {
    return (AnotB) build(nomEntries, Family.A_NOT_B);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("SetOperationBuilder configuration:").append(LS).
      append("LgK:").append(TAB).append(bLgNomLongs).append(LS).
      append("K:").append(TAB).append(1 << bLgNomLongs).append(LS).
      append("Seed:").append(TAB).append(bSeed).append(LS).
      append("p:").append(TAB).append(bP).append(LS).
      append("ResizeFactor:").append(TAB).append(bRF).append(LS).
      append("DstMemory:").append(TAB).append(bDstMem != null).append(LS);
    return sb.toString();
  }

}
| |
package com.communote.server.web.api.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.params.HttpClientParams;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.node.ArrayNode;
import org.testng.Assert;
import com.communote.server.api.util.JsonHelper;
/**
* @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
*/
public class AbstractApiTest {
// Keys of the envelope object that wraps every API response.
private static final String API_RESULT_RESULT = "result";
private static final String API_RESULT_MESSAGE = "message";
private static final String API_RESULT_STATUS = "status";
private final static Logger LOG = Logger.getLogger(AbstractApiTest.class);
// Connection settings for the locally running Communote instance under test.
private final String protocol = "http";
private final String host = "localhost";
private final int port = 8080;
// Empty servlet context — the app is deployed at the server root.
private final String servlet = "";
private final String moduleId = "microblog";
private final String clientId = "global";
private final String apiPart = "api";
// API version segment of the URL; also switches the JSON checks between v1.0 and v1.0.1 layouts.
private final String apiVersion = "v1.0.1";
// Credentials of the default test user.
private final String defaultUsername = "communote";
private final String defaultPassword = "123456";
/**
 * Check if all fields are set correctly for the JSON object representing a blog
 *
 * @param blog
 * the blog to check
 * @param prefix
 * the prefix used for logging only
 */
protected void checkJSONBlogResult(JsonNode blog, String prefix) {
Assert.assertNotNull(blog, prefix + " should not be null!");
Assert.assertTrue(blog.isObject());
// The three mandatory blog fields; asText() on a missing field would NPE first,
// so these also implicitly assert the fields exist.
Assert.assertNotNull(blog.get("blogId").asText(), "id should not be null!");
Assert.assertNotNull(blog.get("title").asText(), "title should not be null!");
Assert.assertNotNull(blog.get("lastModificationDate").asText(),
"lastModificationDate should not be null!");
}
/**
 * Validates the JSON of a detailed post list item, delegating to the check that matches the
 * configured API version.
 *
 * @param post
 *            the post to check
 * @param prefix
 *            the prefix used in assertion messages
 */
protected void checkJSONDetailPostListItemResult(JsonNode post, String prefix) {
    boolean legacyApi = "1.0".equals(getApiVersion());
    if (legacyApi) {
        checkJSONv10DetailPostListItemResult(post, prefix);
    } else {
        checkJSONv101DetailPostListItemResult(post, prefix);
    }
}
/**
 * Check if all fields are set correctly for the JSON object representing a blog with
 * user-detail information (permissions, tags, member lists).
 *
 * @param blog
 *            the json object to check
 * @param prefix
 *            the prefix for error messages only
 */
protected void checkJSONUserDetailBlogResult(JsonNode blog, String prefix) {
    Assert.assertNotNull(blog, prefix + " should not be null!");
    Assert.assertTrue(blog.isObject());
    // First run the basic blog checks, then the detail-specific fields.
    checkJSONBlogResult(blog, "blog");
    // Removed dead statement `prefix += ".";` — prefix was never used after that point.
    Assert.assertTrue(blog.get("allCanRead").isBoolean(), "allCanRead should not be null!");
    Assert.assertTrue(blog.get("allCanWrite").isBoolean(), "allCanWrite should not be null!");
    Assert.assertNotNull(blog.get("tags"), "tags should not be null!");
    Assert.assertNotNull(blog.get("description"), "description should not be null!");
    Assert.assertNotNull(blog.get("userRole"), "userRole should not be null!");
    Assert.assertNotNull(blog.get("blogEmail"), "blogEmail should not be null!");
    Assert.assertNotNull(blog.get("readingUserIds"), "readingUserIds should not be null!");
    Assert.assertNotNull(blog.get("writingUserIds"), "writingUserIds should not be null!");
    Assert.assertNotNull(blog.get("managingUserIds"), "managingUserIds should not be null!");
}
/**
 * Validates the JSON of a user, delegating to the check that matches the configured API
 * version.
 *
 * @param user
 *            the user to check
 * @param prefix
 *            the prefix used in assertion messages
 */
protected void checkJSONUserResult(JsonNode user, String prefix) {
    boolean legacyApi = "1.0".equals(getApiVersion());
    if (legacyApi) {
        checkJSONv10UserResult(user, prefix);
    } else {
        checkJSONv101UserResult(user, prefix);
    }
}
/**
 * Check if all fields are set correctly for the JSON object representing a detailed post
 *
 * @param post
 * the post to check
 * @param prefix
 * the prefix used for logging only
 */
protected void checkJSONv101DetailPostListItemResult(JsonNode post, String prefix) {
Assert.assertNotNull(post, prefix + " should not be null!");
Assert.assertTrue(post.isObject());
prefix += ".";
// In v1.0.1 the post references blog and user by plain numeric ids, not nested objects.
Assert.assertTrue(post.get("postId").isNumber(), prefix + "postId should not be null!");
Assert.assertNotNull(post.get("text").asText(), prefix + "text should not be null!");
Assert.assertTrue(post.get("blogId").isNumber(), prefix + "blogId should not be null!");
Assert.assertTrue(post.get("userId").isNumber(), prefix + "userId should not be null!");
// Cross-check: parentUserId and parentPostId must be set (or absent) together.
// NOTE(review): the first branch keys off parentUserId but asserts parentPostId —
// presumably intentional symmetry with the else-if below; confirm against the API spec.
if (post.has("parentUserId") && !post.get("parentUserId").isNull()) {
Assert.assertTrue(post.get("parentPostId").isNumber(), prefix
+ "parentPostId should not be null!");
} else if (post.has("parentPostId") && !post.get("parentPostId").isNull()) {
Assert.fail("parentPostId=" + post.get("parentPostId")
+ " hence parentUserId should be set but is null!");
}
// TODO date check
Assert.assertTrue(post.get("creationDate").isNumber(), "creationDate should not be null!");
// TODO date check
Assert.assertTrue(post.get("lastModificationDate").isNumber(),
"lastModificationDate should not be null!");
// TODO on client side
// Assert.assertNotNull(result.get("canEdit"), "canEdit should not be null!");
}
/**
 * Check if all fields are set correctly for the v1.0.1 JSON object representing a user.
 *
 * @param user
 * the user to check
 * @param prefix
 * the prefix to check
 */
private void checkJSONv101UserResult(JsonNode user, String prefix) {
Assert.assertNotNull(user, prefix + " should not be null!");
Assert.assertTrue(user.isObject());
prefix += ".";
Assert.assertTrue(user.get("userId").isNumber(), prefix + "userId should not be null!");
Assert.assertNotNull(user.get("firstName").asText(), prefix
+ "firstName should not be null!");
Assert.assertNotNull(user.get("lastName").asText(), prefix + "lastName should not be null!");
Assert.assertNotNull(user.get("email").asText(), prefix + "email should not be null!");
Assert.assertNotNull(user.get("salutation"), prefix + "salutation should not be null!");
Assert.assertNotNull(user.get("alias").asText(), prefix + "alias should not be null!");
// v1.0.1 additionally exposes modification timestamps (the v1.0 check does not).
Assert.assertTrue(user.get("lastModificationDate").isNumber(), prefix
+ "lastModifcationDate should not be null!");
// date can be null
if (user.has("lastPhotoModificationDate")
&& !user.get("lastPhotoModificationDate").isNull()) {
Assert.assertTrue(user.get("lastPhotoModificationDate").isNumber(), prefix
+ "lastPhotoModifcationDate should not be null!");
}
}
/**
 * Check if all fields are set correctly for the v1.0 JSON object representing a detailed
 * post. In v1.0 the blog, author and parent-post author are nested objects (v1.0.1 uses
 * plain ids instead).
 *
 * @param post
 *            the post to check
 * @param prefix
 *            the prefix used for logging only
 */
protected void checkJSONv10DetailPostListItemResult(JsonNode post, String prefix) {
    Assert.assertNotNull(post, prefix + " should not be null!");
    Assert.assertTrue(post.isObject());
    prefix += ".";
    Assert.assertTrue(post.get("postId").isNumber(), prefix + "postId should not be null!");
    Assert.assertTrue(post.get("text").isTextual(), prefix + "text should not be null!");
    JsonNode blog = post.get("blog");
    Assert.assertTrue(blog.isObject());
    checkJSONBlogResult(blog, prefix + "blog");
    JsonNode user = post.get("user");
    checkJSONUserResult(user, prefix + "user");
    // parentPostAuthor and parentPostId must be set (or null) together.
    if (!post.get("parentPostAuthor").isNull()) {
        JsonNode parentUser = post.get("parentPostAuthor");
        Assert.assertTrue(post.get("parentPostId").isNumber(),
                "parentPostId should not be null if parentUser is set!");
        checkJSONUserResult(parentUser, prefix + "parentPostAuthor");
    } else if (post.has("parentPostId") && !post.get("parentPostId").isNull()) {
        Assert.fail("parentPostId=" + post.get("parentPostId")
                + " hence parentPostUser should be set but is null!");
    }
    Assert.assertTrue(post.get("creationDate").isNumber(), "creationDate should not be null!");
    // Bug fix: this was assertNotNull on an autoboxed Boolean, which can never be null
    // and therefore always passed. assertTrue actually verifies the field is numeric.
    Assert.assertTrue(post.get("lastModificationDate").isNumber(),
            "lastModificationDate should not be null!");
    // TODO on client side
    // Assert.assertNotNull(result.get("canEdit"), "canEdit should not be null!");
}
/**
 * Check if all fields are set correctly for the v1.0 JSON object representing a user.
 *
 * @param user
 * the user to check
 * @param prefix
 * the prefix to check
 */
private void checkJSONv10UserResult(JsonNode user, String prefix) {
Assert.assertNotNull(user, prefix + " should not be null!");
Assert.assertTrue(user.isObject());
prefix += ".";
Assert.assertTrue(user.get("userId").isNumber(), prefix + "userId should not be null!");
Assert.assertNotNull(user.get("firstName").asText(), prefix
+ "firstName should not be null!");
Assert.assertNotNull(user.get("lastName").asText(), prefix + "lastName should not be null!");
Assert.assertNotNull(user.get("email").asText(), prefix + "email should not be null!");
Assert.assertNotNull(user.get("salutation"), prefix + "salutation should not be null!");
Assert.assertNotNull(user.get("alias").asText(), prefix + "alias should not be null!");
}
/**
 * @param username
 * the username to use, can be null to avoid adding user name and password
 * @param password
 * the password to use
 * @return the http client configured with username and password
 */
private HttpClient configureHttpClient(String username, String password) {
DefaultHttpClient httpClient = new DefaultHttpClient();
if (username != null) {
// Register basic-auth credentials for any realm on the test host/port.
Credentials defaultcreds = new UsernamePasswordCredentials(username, password);
httpClient.getCredentialsProvider().setCredentials(
new AuthScope(host, port, AuthScope.ANY_REALM), defaultcreds);
HttpClientParams.setAuthenticating(httpClient.getParams(), true);
}
// Redirects are disabled so tests see the raw status code instead of a followed 30x.
HttpClientParams.setRedirecting(httpClient.getParams(), false);
return httpClient;
}
/**
 * Do a get request and return the result of api result object. Asserts HTTP 200 and an
 * api status of "OK".
 *
 * @param apiUri
 *            the uri of the api after the version
 * @param username
 *            username for authentication, if null username and password will be ignored
 * @param password
 *            password for authentication
 * @return the api result (with status, message) as json object
 * @throws Exception
 *             in case of an error
 */
protected JsonNode doApiGetRequest(String apiUri, String username, String password)
        throws Exception {
    HttpClient httpClient = configureHttpClient(username, password);
    try {
        HttpGet httpGet = new HttpGet(getBasicUri() + apiUri);
        HttpResponse response = httpClient.execute(httpGet);
        int statusCode = response.getStatusLine().getStatusCode();
        LOG.debug("Status=" + statusCode + " for uri=" + httpGet.getURI());
        Assert.assertEquals(statusCode, HttpStatus.SC_OK, "Status must be " + HttpStatus.SC_OK);
        String responseString = EntityUtils.toString(response.getEntity());
        if (LOG.isDebugEnabled()) {
            for (Header header : response.getAllHeaders()) {
                LOG.debug(header.getName() + "=" + header.getValue());
            }
        }
        JsonNode json = JsonHelper.getSharedObjectMapper().readTree(responseString);
        String apiStatus = json.get(API_RESULT_STATUS).asText();
        Assert.assertEquals(apiStatus, "OK",
                "Api Result Status must be ok!. message=" + json.get(API_RESULT_MESSAGE));
        // Null-safe message logging, consistent with doApiPostRequest; the old check
        // dereferenced a possibly absent field and could NPE.
        if (LOG.isDebugEnabled() && json.has(API_RESULT_MESSAGE)
                && !json.get(API_RESULT_MESSAGE).isNull()) {
            LOG.debug(json.get(API_RESULT_MESSAGE));
        }
        return json;
    } finally {
        // Always release the connection — previously skipped when an assertion failed.
        httpClient.getConnectionManager().shutdown();
    }
}
/**
 * Do an post request to the api. Fails if the api result object does not contain an OK status
 *
 * @param apiUri
 *            the url, relative to the api (starting after .../api/v1.0)
 * @param username
 *            username for authentication, if null username and password will be ignored
 * @param password
 *            the password to use
 * @param parameters
 *            the parameters for the post (can be null or empty)
 * @return the api result object
 * @throws Exception
 *             in case of an error
 */
protected JsonNode doApiPostRequest(String apiUri, String username, String password,
        Map<String, String> parameters) throws Exception {
    // Delegate to the overload, expecting the standard success status.
    final String expectedStatus = "OK";
    return doApiPostRequest(apiUri, username, password, parameters, expectedStatus);
}
/**
 * Do an post request to the api. Fails if the api result object does not contain an OK status
 *
 * @param apiUri
 * the url, relative to the api (starting after .../api/v1.0)
 * @param username
 * username for authentication, if null username and password will be ignored
 * @param password
 * the password to use
 * @param parameters
 * the parameters for the post (can be null or empty)
 * @param expectedResultStatus
 * the expected result status (OK, WARNING or ERROR)
 * @return the api result object
 * @throws Exception
 * in case of an error
 */
protected JsonNode doApiPostRequest(String apiUri, String username, String password,
Map<String, String> parameters, String expectedResultStatus) throws Exception {
String requestUrl = getBasicUri() + apiUri;
// The raw POST always expects HTTP 200; the API-level status is checked separately below.
String responseString = doPostRequest(requestUrl, username, password, parameters,
HttpStatus.SC_OK);
if (LOG.isDebugEnabled()) {
LOG.debug("apiUrl= '" + requestUrl + "' responseString= '" + responseString + "'");
}
JsonNode json = JsonHelper.getSharedObjectMapper().readTree(responseString);
String apiStatus = json.get(API_RESULT_STATUS).asText();
Assert.assertEquals(apiStatus, expectedResultStatus, "Api Result Status must be "
+ expectedResultStatus + "!. message=" + json.get(API_RESULT_MESSAGE));
// Message is optional; guard against absent/null before logging it.
if (LOG.isDebugEnabled() && json.has(API_RESULT_MESSAGE)
&& !json.get(API_RESULT_MESSAGE).isNull()) {
LOG.debug(json.get(API_RESULT_MESSAGE));
}
return json;
}
/**
 * Do a get request and return the result member of the api result object, asserting it is a
 * JSON array.
 *
 * @param apiUri
 *            the uri of the api after the version
 * @param username
 *            username for authentication, if null username and password will be ignored
 * @param password
 *            password for authentication
 * @return the result of the call
 * @throws Exception
 *             in case of an error
 */
protected ArrayNode doGetRequestAsJSONArray(String apiUri, String username, String password)
        throws Exception {
    JsonNode apiResult = doApiGetRequest(apiUri, username, password);
    JsonNode result = apiResult.get(API_RESULT_RESULT);
    // instanceof is null-safe and already implies isArray(); the old
    // `result.isArray() && result instanceof ArrayNode` NPE'd on a missing result
    // member instead of failing with the message below.
    if (result instanceof ArrayNode) {
        return (ArrayNode) result;
    }
    Assert.fail("response is not an array");
    return null;
}
/**
 * Do a get request and return the result of api result object
 *
 * @param apiUri
 * the uri of the api after the version
 * @param username
 * username for authentication, if null username and password will be ignored
 * @param password
 * password for authentication
 * @return the result of the call
 * @throws Exception
 * in case of an error
 */
protected JsonNode doGetRequestAsJSONObject(String apiUri, String username, String password)
throws Exception {
JsonNode apiResult = doApiGetRequest(apiUri, username, password);
JsonNode result = apiResult.get(API_RESULT_RESULT);
// NOTE(review): the message is misleading — this asserts the result is a JSON object,
// not merely non-null.
Assert.assertTrue(result.isObject(), "Result should not be null!");
return result;
}
/**
 * Send a form-encoded POST request and return the response body, asserting the expected
 * HTTP status code.
 *
 * @param requestUrl
 *            the complete request URL
 * @param username
 *            username for authentication, if null username and password will be ignored
 * @param password
 *            the password to use
 * @param parameters
 *            the parameters for the post (can be null or empty)
 * @param expectedStatusCode
 *            the status code the request should have
 * @return the response of the request as string
 * @throws Exception
 *             in case of an error
 */
protected String doPostRequest(String requestUrl, String username, String password,
        Map<String, String> parameters, int expectedStatusCode) throws Exception {
    HttpClient httpClient = configureHttpClient(username, password);
    try {
        HttpPost httpPost = new HttpPost(requestUrl);
        HttpProtocolParams.setContentCharset(httpPost.getParams(), HTTP.UTF_8);
        if (parameters != null) {
            List<NameValuePair> pairs = new ArrayList<NameValuePair>();
            for (Map.Entry<String, String> parameterEntry : parameters.entrySet()) {
                pairs.add(new BasicNameValuePair(parameterEntry.getKey(),
                        parameterEntry.getValue()));
            }
            httpPost.setEntity(new UrlEncodedFormEntity(pairs, HTTP.UTF_8));
        }
        HttpResponse response = httpClient.execute(httpPost);
        int status = response.getStatusLine().getStatusCode();
        // Merged the two identical isDebugEnabled blocks into one.
        if (LOG.isDebugEnabled()) {
            LOG.debug("Status=" + status + " for uri=" + httpPost.getURI());
            for (Header header : response.getAllHeaders()) {
                LOG.debug(header.getName() + "=" + header.getValue());
            }
        }
        Assert.assertEquals(status, expectedStatusCode, "Status must be " + expectedStatusCode);
        return EntityUtils.toString(response.getEntity());
    } finally {
        // Always release the connection — previously skipped when the status assertion failed.
        httpClient.getConnectionManager().shutdown();
    }
}
/**
 * Returns the API version segment used in request URLs (e.g. "v1.0.1").
 *
 * @return the apiVersion
 */
public String getApiVersion() {
    return this.apiVersion;
}
/**
 * @return the base uri for the api calls
 */
private String getBasicUri() {
// Joins host:port / servlet / module / client / "api" / version with "/".
// NOTE(review): with servlet == "" the join produces a double slash
// (http://localhost:8080//microblog/...) — apparently tolerated; verify if requests fail.
String[] segments = { host + ":" + port, servlet, moduleId, clientId, apiPart, apiVersion };
return protocol + "://" + StringUtils.join(segments, "/");
}
/**
 * Use the api to get the blog and validate its JSON structure.
 *
 * @param blogId
 *            the blog id to get
 * @return the json objects of the blog
 * @throws Exception
 *             in case of an error
 */
protected JsonNode getBlog(Long blogId) throws Exception {
    String uri = "/blogs/" + blogId + ".json";
    JsonNode blog = doGetRequestAsJSONObject(uri, getDefaultUsername(), getDefaultPassword());
    if (LOG.isDebugEnabled()) {
        LOG.debug("Checking blog=" + blog);
    }
    checkJSONUserDetailBlogResult(blog, "blog");
    return blog;
}
/**
 * Returns the password of the default test user.
 *
 * @return the default password
 */
public String getDefaultPassword() {
    return this.defaultPassword;
}
/**
 * Returns the login name of the default test user.
 *
 * @return the default username
 */
public String getDefaultUsername() {
    return this.defaultUsername;
}
/**
 * Uses the api to get an id of a blog that can be managed
 *
 * @param username
 *            the username
 * @param password
 *            the password
 * @param index
 *            index of element to return (0-based; negative or out-of-range indices result
 *            in the exception below)
 * @return an id of an blog that can be managed by the user
 * @throws Exception
 *             in case of an error or when no manageable blog exists at the index
 */
public Long getManageableBlogId(String username, String password, int index) throws Exception {
    ArrayNode array = doGetRequestAsJSONArray("/blogs.json?blogListType=MANAGER", username,
            password);
    // Bounds check both ends: the old `array.size() > 0 && index <= array.size() - 1`
    // let a negative index through to array.get() and caused an NPE instead of the
    // intended exception.
    if (index >= 0 && index < array.size()) {
        JsonNode blog = array.get(index);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Checking blog=" + blog);
        }
        return blog.get("blogId").asLong();
    }
    throw new Exception("No blogId for management found!");
}
/**
 * Uses the api to get an id of a post that can be managed (delete, edit)
 *
 * @param blogId
 * the blog id to use, must be one of a manageable blog, e.g. retrieved by
 * {@link #getManageableBlogId(String, String, int)}
 * @param username
 * the username
 * @param password
 * the password
 * @return the post id
 * @throws Exception
 * in case of an error
 */
public Long getManageablePostId(Long blogId, String username, String password) throws Exception {
ArrayNode array = doGetRequestAsJSONArray("/filter/posts.json?blogIds=" + blogId, username,
password);
if (array.size() > 0) {
// Any post of a manageable blog will do — take the first one.
JsonNode post = array.get(0);
if (LOG.isDebugEnabled()) {
LOG.debug("Checking blog=" + post);
}
return post.get("postId").asLong();
}
throw new Exception("No postId for blogId=" + blogId + " found!");
}
/**
 * Get a post id for whatever you want to do with it.
 *
 * @param username
 *            the user name to use
 * @param password
 *            the password to use
 * @return the post id
 * @throws Exception
 *             in case something goes wrong
 */
public Long getPostId(String username, String password) throws Exception {
    ArrayNode posts = doGetRequestAsJSONArray("/filter/posts.json?maxCount=1", username,
            password);
    Assert.assertEquals(posts.size(), 1, "Result length must match!");
    JsonNode firstPost = posts.get(0);
    checkJSONDetailPostListItemResult(firstPost, "result[" + 0 + "].post");
    return firstPost.get("postId").asLong();
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.lib;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.streamsets.pipeline.api.ConfigDef;
import com.streamsets.pipeline.api.Dependency;
import com.streamsets.pipeline.api.ListBeanModel;
import com.streamsets.pipeline.api.ProtoConfigurableEntity;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.ValueChooserModel;
import com.streamsets.pipeline.common.DataFormatConstants;
import com.streamsets.pipeline.config.AvroSchemaLookupMode;
import com.streamsets.pipeline.config.CharsetChooserValues;
import com.streamsets.pipeline.config.Compression;
import com.streamsets.pipeline.config.CompressionChooserValues;
import com.streamsets.pipeline.config.CsvHeader;
import com.streamsets.pipeline.config.CsvHeaderChooserValues;
import com.streamsets.pipeline.config.CsvMode;
import com.streamsets.pipeline.config.CsvModeChooserValues;
import com.streamsets.pipeline.config.CsvRecordType;
import com.streamsets.pipeline.config.CsvRecordTypeChooserValues;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.config.DatagramMode;
import com.streamsets.pipeline.config.DatagramModeChooserValues;
import com.streamsets.pipeline.config.ExcelHeader;
import com.streamsets.pipeline.config.ExcelHeaderChooserValues;
import com.streamsets.pipeline.config.JsonMode;
import com.streamsets.pipeline.config.JsonModeChooserValues;
import com.streamsets.pipeline.config.LogMode;
import com.streamsets.pipeline.config.LogModeChooserValues;
import com.streamsets.pipeline.config.OnParseError;
import com.streamsets.pipeline.config.OnParseErrorChooserValues;
import com.streamsets.pipeline.config.OriginAvroSchemaLookupModeChooserValues;
import com.streamsets.pipeline.config.OriginAvroSchemaSource;
import com.streamsets.pipeline.config.OriginAvroSchemaSourceChooserValues;
import com.streamsets.pipeline.lib.el.DataUnitsEL;
import com.streamsets.pipeline.lib.el.RecordEL;
import com.streamsets.pipeline.lib.el.TimeEL;
import com.streamsets.pipeline.lib.parser.DataParserFactory;
import com.streamsets.pipeline.lib.parser.DataParserFactoryBuilder;
import com.streamsets.pipeline.lib.parser.DataParserFormat;
import com.streamsets.pipeline.lib.parser.log.LogDataFormatValidator;
import com.streamsets.pipeline.lib.parser.log.LogDataParserFactory;
import com.streamsets.pipeline.lib.parser.log.RegExConfig;
import com.streamsets.pipeline.lib.parser.net.netflow.NetflowDataParserFactory;
import com.streamsets.pipeline.lib.parser.net.netflow.OutputValuesMode;
import com.streamsets.pipeline.lib.parser.net.netflow.OutputValuesModeChooserValues;
import com.streamsets.pipeline.lib.parser.text.TextDataParserFactory;
import com.streamsets.pipeline.lib.parser.udp.DatagramParserFactory;
import com.streamsets.pipeline.lib.parser.xml.XmlDataParserFactory;
import com.streamsets.pipeline.lib.util.DelimitedDataConstants;
import com.streamsets.pipeline.lib.util.ProtobufConstants;
import com.streamsets.pipeline.lib.xml.Constants;
import com.streamsets.pipeline.lib.xml.xpath.XPathValidatorUtil;
import com.streamsets.pipeline.stage.common.DataFormatConfig;
import com.streamsets.pipeline.stage.common.DataFormatErrors;
import com.streamsets.pipeline.stage.common.DataFormatGroups;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.streamsets.pipeline.lib.util.AvroSchemaHelper.*;
import static com.streamsets.pipeline.stage.common.DataFormatErrors.DATA_FORMAT_11;
/**
* Instances of this object must be called 'dataFormatConfig' exactly for error
* messages to be placed in the correct location on the UI.
*/
public class DataParserFormatConfig implements DataFormatConfig {
// Bug fix: the logger was bound to DataParserFormat.class (a copy-paste slip) so log
// lines appeared under the wrong category; bind it to the declaring class.
private static final Logger LOG = LoggerFactory.getLogger(DataParserFormatConfig.class);
// Default log-parsing patterns for the LOG data format choices.
private static final String DEFAULT_REGEX =
    "^(\\S+) (\\S+) (\\S+) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"(\\S+) (\\S+) (\\S+)\" (\\d{3}) (\\d+)";
private static final String DEFAULT_APACHE_CUSTOM_LOG_FORMAT = "%h %l %u %t \"%r\" %>s %b";
private static final String DEFAULT_GROK_PATTERN = "%{COMMONAPACHELOG}";
private static final String DEFAULT_LOG4J_CUSTOM_FORMAT = "%r [%t] %-5p %c %x - %m%n";
// Lazily initialized during init()/validation (not visible in this chunk).
private LogDataFormatValidator logDataFormatValidator;
private DataParserFactory parserFactory;
/* Compression always shown immediately after Data Format */
// Compression/archive format applied to the input before parsing.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Compression Format",
description = "Compression formats gzip, bzip2, xz, lzma, Pack200, DEFLATE and Z are supported. " +
"Archive formats 7z, ar, arj, cpio, dump, tar and zip are supported.",
defaultValue = "NONE",
dependsOn = "dataFormat^",
// Show for all except Avro, Datagram, Whole File
triggeredByValue = {"TEXT", "JSON", "DELIMITED", "XML", "SDC_JSON", "LOG", "BINARY", "PROTOBUF"},
displayPosition = 2,
group = "DATA_FORMAT"
)
@ValueChooserModel(CompressionChooserValues.class)
public Compression compression = Compression.NONE;
// Glob selecting which entries inside an archive are read; only shown for archive modes.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "File Name Pattern within Compressed Directory",
description = "A glob pattern that defines the pattern of the file names within the compressed " +
"directory.",
defaultValue = "*",
displayPosition = 3,
group = "DATA_FORMAT",
dependsOn = "compression",
triggeredByValue = {"ARCHIVE", "COMPRESSED_ARCHIVE"}
)
public String filePatternInArchive = "*";
/* Charset Related -- Shown last */
// Character encoding used to decode text-based formats.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "UTF-8",
label = "Charset",
displayPosition = 999,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = {"TEXT", "JSON", "DELIMITED", "XML", "LOG", "DATAGRAM"}
)
@ValueChooserModel(CharsetChooserValues.class)
public String charset = "UTF-8";
// Optionally strip control characters before parsing; costs an extra pass over the data.
@ConfigDef(
required = true,
type = ConfigDef.Type.BOOLEAN,
defaultValue = "false",
label = "Ignore Control Characters",
description = "Use only if required as it impacts reading performance",
dependsOn = "dataFormat^",
triggeredByValue = {"TEXT", "JSON", "DELIMITED", "XML", "LOG", "DATAGRAM"},
displayPosition = 1000,
group = "DATA_FORMAT"
)
public boolean removeCtrlChars = false;
/* End Charset Related */
// TEXT format: maximum characters kept per line; longer lines are truncated, not errored.
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
defaultValue = "1024",
label = "Max Line Length",
description = "Longer lines are truncated",
displayPosition = 340,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "TEXT",
min = 1,
max = Integer.MAX_VALUE
)
public int textMaxLineLen = 1024;
// TEXT format: toggle record splitting on a custom delimiter instead of newlines.
@ConfigDef(
required = true,
type = ConfigDef.Type.BOOLEAN,
defaultValue = "false",
label = "Use Custom Delimiter",
description = "Use custom delimiters to create records",
displayPosition = 342,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "TEXT"
)
public boolean useCustomDelimiter = TextDataParserFactory.USE_CUSTOM_DELIMITER_DEFAULT;
// TEXT format: the delimiter string itself; only shown when useCustomDelimiter is true.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
defaultValue = TextDataParserFactory.CUSTOM_DELIMITER_DEFAULT,
label = "Custom Delimiter",
description = "One or more characters. Leading and trailing spaces are stripped.",
displayPosition = 344,
group = "DATA_FORMAT",
dependsOn = "useCustomDelimiter",
triggeredByValue = "true"
)
public String customDelimiter = TextDataParserFactory.CUSTOM_DELIMITER_DEFAULT;
// TEXT format: whether the custom delimiter is kept in the emitted record text.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    // Bug fix: defaultValue was "\r\n" — a leftover from a delimiter-string default —
    // which is not a valid boolean literal for a BOOLEAN config. "false" matches the
    // field initializer (INCLUDE_CUSTOM_DELIMITER_IN_TEXT_DEFAULT).
    defaultValue = "false",
    label = "Include Custom Delimiter",
    description = "Include custom delimiters in the data",
    displayPosition = 346,
    group = "DATA_FORMAT",
    dependsOn = "useCustomDelimiter",
    triggeredByValue = "true"
)
public boolean includeCustomDelimiterInTheText = TextDataParserFactory.INCLUDE_CUSTOM_DELIMITER_IN_TEXT_DEFAULT;
// JSON format: whether the input is a stream of objects or a single JSON array.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "MULTIPLE_OBJECTS",
label = "JSON Content",
description = "",
displayPosition = 350,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "JSON"
)
@ValueChooserModel(JsonModeChooserValues.class)
public JsonMode jsonContent = JsonMode.MULTIPLE_OBJECTS;
// JSON format: size guard; objects above this character count are rejected.
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
defaultValue = "4096",
label = "Max Object Length (chars)",
description = "Larger objects are not processed",
displayPosition = 360,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "JSON",
min = 1,
max = Integer.MAX_VALUE
)
public int jsonMaxObjectLen = 4096;
// DELIMITED format: CSV dialect (CSV, TSV, custom single char, multi-character, ...).
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "CSV",
label = "Delimiter Format Type",
description = "",
displayPosition = 370,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "DELIMITED"
)
@ValueChooserModel(CsvModeChooserValues.class)
public CsvMode csvFileFormat = CsvMode.CSV;
// DELIMITED format: how the first line is treated (header, skipped, or data).
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "NO_HEADER",
label = "Header Line",
description = "",
displayPosition = 380,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "DELIMITED"
)
@ValueChooserModel(CsvHeaderChooserValues.class)
public CsvHeader csvHeader = CsvHeader.NO_HEADER;
// DELIMITED format: accept rows wider than the header instead of sending them to error.
// Only meaningful when a header line is present.
@ConfigDef(
required = true,
type = ConfigDef.Type.BOOLEAN,
defaultValue = "false",
label = "Allow Extra Columns",
description = "When false, rows with more columns than the header are sent to error.",
displayPosition = 385,
group = "DATA_FORMAT",
dependencies = {
@Dependency(configName = "dataFormat^", triggeredByValues = "DELIMITED"),
@Dependency(configName = "csvHeader", triggeredByValues = "WITH_HEADER")
}
)
public boolean csvAllowExtraColumns = false;
// DELIMITED format: naming prefix for columns beyond the header width.
@ConfigDef(
required = false,
type = ConfigDef.Type.STRING,
defaultValue = DelimitedDataConstants.DEFAULT_EXTRA_COLUMN_PREFIX,
label = "Extra Column Prefix",
description = "Each extra column is labeled with this prefix followed by an integer",
displayPosition = 386,
group = "DATA_FORMAT",
dependencies = {
@Dependency(configName = "dataFormat^", triggeredByValues = "DELIMITED"),
@Dependency(configName = "csvAllowExtraColumns", triggeredByValues = "true")
}
)
public String csvExtraColumnPrefix = DelimitedDataConstants.DEFAULT_EXTRA_COLUMN_PREFIX;
// DELIMITED format: size guard; records above this character count are rejected.
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
defaultValue = "1024",
label = "Max Record Length (chars)",
description = "Larger objects are not processed",
displayPosition = 390,
group = "DATA_FORMAT",
dependsOn = "dataFormat^",
triggeredByValue = "DELIMITED",
min = 1,
max = Integer.MAX_VALUE
)
public int csvMaxObjectLen = 1024;
// DELIMITED/CUSTOM dialect: the single field-separator character.
@ConfigDef(
required = false,
type = ConfigDef.Type.CHARACTER,
defaultValue = "|",
label = "Delimiter Character",
displayPosition = 400,
group = "DATA_FORMAT",
dependsOn = "csvFileFormat",
triggeredByValue = "CUSTOM"
)
public char csvCustomDelimiter = '|';
// DELIMITED/MULTI_CHARACTER dialect: multi-char separator between fields.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
defaultValue = DelimitedDataConstants.DEFAULT_MULTI_CHARACTER_FIELD_DELIMITER,
label = "Multi Character Field Delimiter",
description = "Delimiter between fields in multi-character delimited mode.",
displayPosition = 405,
group = "DATA_FORMAT",
dependsOn = "csvFileFormat",
triggeredByValue = "MULTI_CHARACTER"
)
public String multiCharacterFieldDelimiter = DelimitedDataConstants.DEFAULT_MULTI_CHARACTER_FIELD_DELIMITER;
// DELIMITED/MULTI_CHARACTER dialect: multi-char separator between records.
// NOTE(review): the field initializer wraps the EL constant in str:unescapeJava while the
// annotation's defaultValue uses the raw constant — presumably intentional so the UI shows
// the escaped form; confirm the constant is not already a full EL expression.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
defaultValue = DelimitedDataConstants.DEFAULT_MULTI_CHARACTER_LINE_DELIMITER_EL,
label = "Multi Character Line Delimiter",
description = "Delimiter between lines (i.e. different records) in multi-character delimited mode.",
displayPosition = 406,
group = "DATA_FORMAT",
dependsOn = "csvFileFormat",
triggeredByValue = "MULTI_CHARACTER"
)
public String multiCharacterLineDelimiter = String.format(
"${str:unescapeJava('%s')}",
DelimitedDataConstants.DEFAULT_MULTI_CHARACTER_LINE_DELIMITER_EL
);
@ConfigDef(
required = false,
type = ConfigDef.Type.CHARACTER,
defaultValue = "\\",
label = "Escape Character",
displayPosition = 410,
group = "DATA_FORMAT",
dependsOn = "csvFileFormat",
triggeredByValue = {"CUSTOM", "MULTI_CHARACTER"}
)
public char csvCustomEscape = '\\';
@ConfigDef(
required = false,
type = ConfigDef.Type.CHARACTER,
defaultValue = "\"",
label = "Quote Character",
displayPosition = 420,
group = "DATA_FORMAT",
dependsOn = "csvFileFormat",
triggeredByValue = {"CUSTOM", "MULTI_CHARACTER"}
)
public char csvCustomQuote = '\"';
@ConfigDef(
required = false,
type = ConfigDef.Type.BOOLEAN,
defaultValue = "false",
label = "Enable comments",
displayPosition = 425,
group = "DATA_FORMAT",
dependsOn = "csvFileFormat",
triggeredByValue = "CUSTOM"
)
public boolean csvEnableComments = false;
// Marker character that introduces a comment line; only shown when comments are enabled.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.CHARACTER,
    defaultValue = "#",
    label = "Comment marker",
    displayPosition = 426,
    group = "DATA_FORMAT",
    dependsOn = "csvEnableComments",
    triggeredByValue = "true"
)
public char csvCommentMarker;

// When true, empty lines are skipped rather than producing empty records (CUSTOM mode).
@ConfigDef(
    required = false,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "true",
    label = "Ignore empty lines",
    displayPosition = 427,
    group = "DATA_FORMAT",
    dependencies = {
        @Dependency(configName = "csvFileFormat", triggeredByValues = {"CUSTOM"})
    }
)
public boolean csvIgnoreEmptyLines = true;

// Root field type produced for each delimited record (LIST_MAP preserves column order with named access).
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    defaultValue = "LIST_MAP",
    label = "Root Field Type",
    description = "",
    displayPosition = 430,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "DELIMITED"
)
@ValueChooserModel(CsvRecordTypeChooserValues.class)
public CsvRecordType csvRecordType = CsvRecordType.LIST_MAP;

// Number of leading lines to discard before parsing begins.
@ConfigDef(
    required = false,
    type = ConfigDef.Type.NUMBER,
    defaultValue = "0",
    label = "Lines to Skip",
    description = "Number of lines to skip before reading",
    displayPosition = 435,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "DELIMITED",
    min = 0
)
public int csvSkipStartLines;

// When true, occurrences of nullConstant in the data are converted to NULL fields.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "false",
    label = "Parse NULLs",
    description = "When checked, configured string constant will be converted into NULL field.",
    displayPosition = 436,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "DELIMITED"
)
public boolean parseNull;

// The literal that represents NULL in the input (default is the escaped form of "\N").
@ConfigDef(
    required = false,
    type = ConfigDef.Type.STRING,
    defaultValue = "\\\\N",
    label = "NULL constant",
    description = "String constant that should be converted to a NULL rather then passed as it is.",
    displayPosition = 437,
    group = "DATA_FORMAT",
    dependsOn = "parseNull",
    triggeredByValue = "true"
)
public String nullConstant;
// --- XML data format configuration ---

// XPath-like expression selecting the element that delimits records; empty means whole document.
@ConfigDef(
    required = false,
    type = ConfigDef.Type.STRING,
    label = "Delimiter Element",
    defaultValue = "",
    description = Constants.XML_RECORD_ELEMENT_DESCRIPTION,
    displayPosition = 440,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "XML"
)
public String xmlRecordElement = "";

// When true, each parsed field carries an attribute recording its source XPath.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    label = "Include Field XPaths",
    defaultValue = ""+XmlDataParserFactory.INCLUDE_FIELD_XPATH_ATTRIBUTES_DEFAULT,
    description = Constants.INCLUDE_FIELD_XPATH_ATTRIBUTES_DESCRIPTION,
    displayPosition = 442,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "XML"
)
public boolean includeFieldXpathAttributes = XmlDataParserFactory.INCLUDE_FIELD_XPATH_ATTRIBUTES_DEFAULT;

// Prefix-to-URI namespace bindings used when evaluating the record element expression.
@ConfigDef(
    required = false,
    type = ConfigDef.Type.MAP,
    label = "Namespaces",
    description = Constants.XPATH_NAMESPACE_CONTEXT_DESCRIPTION,
    defaultValue = "{}",
    displayPosition = 445,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "XML"
)
public Map<String, String> xPathNamespaceContext = new HashMap<>();

// When true, XML attributes are emitted as field attributes instead of extra fields.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    label = "Output Field Attributes",
    description = Constants.OUTPUT_FIELD_ATTRIBUTES_DESCRIPTION,
    defaultValue = ""+XmlDataParserFactory.USE_FIELD_ATTRIBUTES_DEFAULT,
    displayPosition = 448,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "XML"
)
public boolean outputFieldAttributes = XmlDataParserFactory.USE_FIELD_ATTRIBUTES_DEFAULT;

// Upper bound on characters per XML record; oversized records are skipped.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = "4096",
    label = "Max Record Length (chars)",
    description = "Larger records are not processed",
    displayPosition = 450,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "XML",
    min = 1,
    max = Integer.MAX_VALUE
)
public int xmlMaxObjectLen = 4096;
// --- LOG data format configuration ---

// Which log dialect to parse (Common Log Format, Grok, Log4j, regex, etc.).
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    defaultValue = "COMMON_LOG_FORMAT",
    label = "Log Format",
    description = "",
    displayPosition = 460,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "LOG"
)
@ValueChooserModel(LogModeChooserValues.class)
public LogMode logMode = LogMode.COMMON_LOG_FORMAT;

// Maximum characters per log line; longer lines are truncated.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = "1024",
    label = "Max Line Length",
    description = "Longer lines are truncated",
    displayPosition = 470,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "LOG",
    min = 1,
    max = Integer.MAX_VALUE
)
public int logMaxObjectLen = 1024;

// When true, the raw unparsed line is kept on the record alongside the parsed fields.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "false",
    label = "Retain Original Line",
    description = "Indicates if the original line of log should be retained in the record",
    displayPosition = 480,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "LOG"
)
public boolean retainOriginalLine = false;

// APACHE_CUSTOM_LOG_FORMAT: the Apache LogFormat string to parse against.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = DEFAULT_APACHE_CUSTOM_LOG_FORMAT,
    label = "Custom Log Format",
    description = "",
    displayPosition = 490,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "APACHE_CUSTOM_LOG_FORMAT"
)
public String customLogFormat = DEFAULT_APACHE_CUSTOM_LOG_FORMAT;

// REGEX mode: pattern applied to each line.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = DEFAULT_REGEX,
    label = "Regular Expression",
    description = "The regular expression which is used to parse the log line.",
    displayPosition = 500,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "REGEX"
)
public String regex = DEFAULT_REGEX;

// REGEX mode: maps capture-group numbers to output field paths.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    defaultValue = "",
    label = "Field Path To RegEx Group Mapping",
    description = "Map groups in the regular expression to field paths",
    displayPosition = 510,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "REGEX"
)
@ListBeanModel
public List<RegExConfig> fieldPathsToGroupName = new ArrayList<>();

// GROK mode: optional user-defined pattern library.
@ConfigDef(
    required = false,
    type = ConfigDef.Type.TEXT,
    defaultValue = "",
    label = "Grok Pattern Definition",
    description = "Define your own grok patterns which will be used to parse the logs",
    displayPosition = 520,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "GROK",
    mode = ConfigDef.Mode.PLAIN_TEXT
)
public String grokPatternDefinition = "";

// GROK mode: the top-level pattern applied to each line.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = DEFAULT_GROK_PATTERN,
    label = "Grok Pattern",
    description = "The grok pattern which is used to parse the log line",
    displayPosition = 530,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "GROK"
)
public String grokPattern = DEFAULT_GROK_PATTERN;

// LOG4J mode: what to do with lines that do not match the layout.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    defaultValue = "ERROR",
    label = "On Parse Error",
    description = "",
    displayPosition = 540,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "LOG4J"
)
@ValueChooserModel(OnParseErrorChooserValues.class)
public OnParseError onParseError = OnParseError.ERROR;

// Only relevant when unmatched lines are folded in as a stack trace.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = "50",
    label = "Trim Stack Trace to Length",
    description = "Any line that does not match the expected pattern will be treated as a Stack trace as long as it " +
        "is part of the same message. The stack trace will be trimmed to the specified number of lines.",
    displayPosition = 550,
    group = "DATA_FORMAT",
    dependsOn = "onParseError",
    triggeredByValue = "INCLUDE_AS_STACK_TRACE",
    min = 0,
    max = Integer.MAX_VALUE
)
public int maxStackTraceLines = 50;

// LOG4J mode: opt in to a user-supplied conversion pattern instead of the default layout.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "false",
    label = "Use Custom Log Format",
    description = "",
    displayPosition = 560,
    group = "DATA_FORMAT",
    dependsOn = "logMode",
    triggeredByValue = "LOG4J"
)
public boolean enableLog4jCustomLogFormat = false;

// LOG4J mode: the custom conversion pattern, shown only when the toggle above is on.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = DEFAULT_LOG4J_CUSTOM_FORMAT,
    label = "Custom Log4J Format",
    description = "Specify your own custom log4j format.",
    displayPosition = 570,
    group = "DATA_FORMAT",
    dependsOn = "enableLog4jCustomLogFormat",
    triggeredByValue = "true"
)
public String log4jCustomLogFormat = DEFAULT_LOG4J_CUSTOM_FORMAT;
// --- AVRO data format configuration ---

// Where the Avro schema comes from: embedded in the data, inline config, or a schema registry.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    label = "Avro Schema Location",
    description = "Where to load the Avro Schema from.",
    displayPosition = 400,
    dependsOn = "dataFormat^",
    triggeredByValue = "AVRO",
    group = "DATA_FORMAT"
)
@ValueChooserModel(OriginAvroSchemaSourceChooserValues.class)
public OriginAvroSchemaSource avroSchemaSource;

// Inline schema text; only shown when the schema source is INLINE.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.TEXT,
    label = "Avro Schema",
    description = "Overrides the schema included in the data (if any). Optionally use the runtime:loadResource " +
        "function to use a schema stored in a file.",
    displayPosition = 410,
    group = "DATA_FORMAT",
    dependencies = {
        @Dependency(configName = "dataFormat^", triggeredByValues = "AVRO"),
        @Dependency(configName = "avroSchemaSource", triggeredByValues = "INLINE")
    },
    mode = ConfigDef.Mode.JSON
)
public String avroSchema;

// Confluent Schema Registry endpoints; only shown when the schema source is REGISTRY.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.LIST,
    label = "Schema Registry URLs",
    description = "List of Confluent Schema Registry URLs",
    dependencies = {
        @Dependency(configName = "dataFormat^", triggeredByValues = "AVRO"),
        @Dependency(configName = "avroSchemaSource", triggeredByValues = "REGISTRY")
    },
    displayPosition = 420,
    group = "DATA_FORMAT"
)
public List<String> schemaRegistryUrls = new ArrayList<>();
// How to resolve the schema from the registry: latest schema for a subject, or a fixed schema ID.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    label = "Lookup Schema By",
    description = "Whether to look up the Avro Schema by ID or fetch the latest schema for a Subject.",
    defaultValue = "SUBJECT",
    dependsOn = "avroSchemaSource",
    triggeredByValue = "REGISTRY",
    displayPosition = 430,
    group = "DATA_FORMAT"
)
@ValueChooserModel(OriginAvroSchemaLookupModeChooserValues.class)
// Fixed: the Java-level default previously was AUTO while the annotation declared "SUBJECT",
// so programmatic defaults and UI defaults disagreed. Keep both in sync on SUBJECT.
public AvroSchemaLookupMode schemaLookupMode = AvroSchemaLookupMode.SUBJECT;
// Registry subject to fetch the latest schema for (SUBJECT lookup mode).
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    label = "Schema Subject",
    dependencies = {
        @Dependency(configName = "dataFormat^", triggeredByValues = "AVRO"),
        @Dependency(configName = "avroSchemaSource", triggeredByValues = "REGISTRY"),
        @Dependency(configName = "schemaLookupMode", triggeredByValues = "SUBJECT"),
    },
    displayPosition = 440,
    group = "DATA_FORMAT"
)
public String subject;

// Explicit registry schema ID (ID lookup mode).
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    label = "Schema ID",
    min = 1,
    dependencies = {
        @Dependency(configName = "dataFormat^", triggeredByValues = "AVRO"),
        @Dependency(configName = "avroSchemaSource", triggeredByValues = "REGISTRY"),
        @Dependency(configName = "schemaLookupMode", triggeredByValues = "ID"),
    },
    displayPosition = 450,
    group = "DATA_FORMAT"
)
public int schemaId;
// --- PROTOBUF data format configuration ---

// Path to the compiled .desc file, relative to the SDC resources directory.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = "",
    label = "Protobuf Descriptor File",
    description = "Protobuf Descriptor File (.desc) path relative to SDC resources directory",
    displayPosition = 600,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "PROTOBUF"
)
public String protoDescriptorFile = "";

// Fully qualified message type within the descriptor, e.g. my.pkg.MyMessage.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = "",
    description = "Fully Qualified Message Type name. Use format <packageName>.<messageTypeName>",
    label = "Message Type",
    displayPosition = 610,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "PROTOBUF"
)
public String messageType = "";

// True when messages are size-prefixed (varint-delimited stream); false for a single bare message.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "true",
    label = "Delimited Messages",
    description = "Should be checked when the input data is prepended with the message size. When unchecked " +
        "only a single message can be present in the source file/Kafka message, etc.",
    displayPosition = 620,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "PROTOBUF"
)
public boolean isDelimited = true;

// --- BINARY data format configuration ---

// Upper bound in bytes for a binary object; larger payloads are skipped.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = "1024",
    label = "Max Data Size (bytes)",
    description = "Larger objects are not processed",
    displayPosition = 700,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "BINARY",
    min = 1,
    max = Integer.MAX_VALUE
)
public int binaryMaxObjectLen;
// --- DATAGRAM data format configuration ---

// Payload format carried inside each datagram packet (syslog, collectd, netflow, ...).
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    label = "Datagram Packet Format",
    defaultValue = "SYSLOG",
    group = "DATA_FORMAT",
    displayPosition = 800,
    dependsOn = "dataFormat^",
    triggeredByValue = "DATAGRAM"
)
@ValueChooserModel(DatagramModeChooserValues.class)
public DatagramMode datagramMode;

// COLLECTD: optional custom TypesDB file replacing the bundled one.
@ConfigDef(
    required = false,
    type = ConfigDef.Type.STRING,
    label = "TypesDB File Path",
    description = "User-specified TypesDB file. Overrides the included version.",
    displayPosition = 820,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "COLLECTD"
)
public String typesDbPath;

// COLLECTD: convert high-resolution time/interval values to unix milliseconds.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "false",
    label = "Convert Hi-Res Time & Interval",
    description = "Converts high resolution time format interval and timestamp to unix time in (ms).",
    displayPosition = 830,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "COLLECTD"
)
public boolean convertTime;

// COLLECTD: drop the interval field from emitted records.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "true",
    label = "Exclude Interval",
    description = "Excludes the interval field from output records.",
    displayPosition = 840,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "COLLECTD"
)
public boolean excludeInterval;

// COLLECTD: optional auth file for signed/encrypted packets.
@ConfigDef(
    required = false,
    type = ConfigDef.Type.STRING,
    label = "Auth File",
    description = "",
    displayPosition = 850,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "COLLECTD"
)
public String authFilePath;
// --- Netflow v9 configuration (top-level NETFLOW data format) ---

// Whether to emit raw values, interpreted values, or both for NetFlow v9 fields.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    defaultValue = NetflowDataParserFactory.DEFAULT_OUTPUT_VALUES_MODE_STR,
    label = NetflowDataParserFactory.OUTPUT_VALUES_MODE_LABEL,
    description = NetflowDataParserFactory.OUTPUT_VALUES_MODE_TOOLTIP,
    displayPosition = 870,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "NETFLOW"
)
@ValueChooserModel(OutputValuesModeChooserValues.class)
public OutputValuesMode netflowOutputValuesMode = NetflowDataParserFactory.DEFAULT_OUTPUT_VALUES_MODE;

// Maximum number of NetFlow v9 templates cached.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = NetflowDataParserFactory.DEFAULT_MAX_TEMPLATE_CACHE_SIZE_STR,
    label = NetflowDataParserFactory.MAX_TEMPLATE_CACHE_SIZE_LABEL,
    description = NetflowDataParserFactory.MAX_TEMPLATE_CACHE_SIZE_TOOLTIP,
    displayPosition = 880,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "NETFLOW"
)
public int maxTemplateCacheSize = NetflowDataParserFactory.DEFAULT_MAX_TEMPLATE_CACHE_SIZE;

// Expiry for cached NetFlow v9 templates, in milliseconds.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = NetflowDataParserFactory.DEFAULT_TEMPLATE_CACHE_TIMEOUT_MS_STR,
    label = NetflowDataParserFactory.TEMPLATE_CACHE_TIMEOUT_MS_LABEL,
    description = NetflowDataParserFactory.TEMPLATE_CACHE_TIMEOUT_MS_TOOLTIP,
    displayPosition = 890,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "NETFLOW"
)
public int templateCacheTimeoutMs = NetflowDataParserFactory.DEFAULT_TEMPLATE_CACHE_TIMEOUT_MS;

// Duplicate Netflow v9 configs for when NetFlow arrives inside a Datagram packet
// (the config framework does not support logical-OR dependencies).
// TODO: remove duplicate fields once API-149 is implemented
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    defaultValue = NetflowDataParserFactory.DEFAULT_OUTPUT_VALUES_MODE_STR,
    label = NetflowDataParserFactory.OUTPUT_VALUES_MODE_LABEL,
    description = NetflowDataParserFactory.OUTPUT_VALUES_MODE_TOOLTIP,
    displayPosition = 870,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "NETFLOW"
)
@ValueChooserModel(OutputValuesModeChooserValues.class)
public OutputValuesMode netflowOutputValuesModeDatagram = NetflowDataParserFactory.DEFAULT_OUTPUT_VALUES_MODE;

@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = NetflowDataParserFactory.DEFAULT_MAX_TEMPLATE_CACHE_SIZE_STR,
    label = NetflowDataParserFactory.MAX_TEMPLATE_CACHE_SIZE_LABEL,
    description = NetflowDataParserFactory.MAX_TEMPLATE_CACHE_SIZE_TOOLTIP,
    displayPosition = 880,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "NETFLOW"
)
public int maxTemplateCacheSizeDatagram = NetflowDataParserFactory.DEFAULT_MAX_TEMPLATE_CACHE_SIZE;

@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = NetflowDataParserFactory.DEFAULT_TEMPLATE_CACHE_TIMEOUT_MS_STR,
    label = NetflowDataParserFactory.TEMPLATE_CACHE_TIMEOUT_MS_LABEL,
    description = NetflowDataParserFactory.TEMPLATE_CACHE_TIMEOUT_MS_TOOLTIP,
    displayPosition = 890,
    group = "DATA_FORMAT",
    dependsOn = "datagramMode",
    triggeredByValue = "NETFLOW"
)
public int templateCacheTimeoutMsDatagram = NetflowDataParserFactory.DEFAULT_TEMPLATE_CACHE_TIMEOUT_MS;
// --- WHOLE_FILE data format configuration ---

// Copy buffer size; 8KB is a good default for most filesystems.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.NUMBER,
    defaultValue = "8192",
    label = "Buffer Size (bytes)",
    description = "Size of the Buffer used to copy the file.",
    displayPosition = 900,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "WHOLE_FILE",
    min = 1,
    max = Integer.MAX_VALUE
)
//Optimal 8KB
public int wholeFileMaxObjectLen = 8 * 1024;

// Optional transfer-rate throttle; an EL expression evaluated explicitly, <= 0 disables throttling.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.STRING,
    defaultValue = "-1",
    label = "Rate per second",
    description = "Rate / sec to manipulate bandwidth requirements for File Transfer." +
        " Use <= 0 to opt out. Default unit is B/sec",
    displayPosition = 920,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "WHOLE_FILE",
    elDefs = {DataUnitsEL.class},
    evaluation = ConfigDef.Evaluation.EXPLICIT
)
public String rateLimit = "-1";

// When true, the stream's checksum is verified while the file is read.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "false",
    label = "Verify Checksum",
    description = "When checked verifies the checksum of the stream during read.",
    displayPosition = 1000,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "WHOLE_FILE"
)
public boolean verifyChecksum = false;

// --- EXCEL data format configuration ---

// How to treat the first row of each sheet (header, ignore, no header).
@ConfigDef(
    required = true,
    type = ConfigDef.Type.MODEL,
    label = "Excel Header Option",
    description = "Excel headers",
    displayPosition = 1000,
    group = "DATA_FORMAT",
    dependsOn = "dataFormat^",
    triggeredByValue = "EXCEL"
)
@ValueChooserModel(ExcelHeaderChooserValues.class)
public ExcelHeader excelHeader;

// Size of StringBuilder pool maintained by Text and Log Data Parser Factories.
// The default value is 1 for regular origins. Multithreaded origins should override this value as required.
public int stringBuilderPoolSize = DataFormatConstants.STRING_BUILDER_POOL_SIZE;
/**
 * Validates this data format configuration and builds the parser factory,
 * using the default overrun limit and single-line parsing.
 *
 * @return true when the configuration is valid and the factory was created
 */
@Override
public boolean init(
    ProtoConfigurableEntity.Context context,
    DataFormat dataFormat,
    String stageGroup,
    String configPrefix,
    List<Stage.ConfigIssue> issues
) {
  // Delegate to the fully-parameterized overload with defaults.
  return init(context, dataFormat, stageGroup, configPrefix, DataFormatConstants.MAX_OVERRUN_LIMIT, false, issues);
}
/**
 * Validates this data format configuration and builds the parser factory,
 * using the default overrun limit with an explicit multi-line setting.
 *
 * @param multiLines whether Text/Log records may span multiple lines
 * @return true when the configuration is valid and the factory was created
 */
public boolean init(
    ProtoConfigurableEntity.Context context,
    DataFormat dataFormat,
    String stageGroup,
    String configPrefix,
    boolean multiLines,
    List<Stage.ConfigIssue> issues
) {
  // Delegate to the fully-parameterized overload with the default overrun limit.
  return init(context, dataFormat, stageGroup, configPrefix, DataFormatConstants.MAX_OVERRUN_LIMIT, multiLines, issues);
}
/**
 * Validates this data format configuration and builds the parser factory,
 * using an explicit overrun limit and single-line parsing.
 *
 * @param overrunLimit maximum bytes a single record may occupy before the reader aborts it
 * @return true when the configuration is valid and the factory was created
 */
public boolean init(
    ProtoConfigurableEntity.Context context,
    DataFormat dataFormat,
    String stageGroup,
    String configPrefix,
    int overrunLimit,
    List<Stage.ConfigIssue> issues
) {
  // Delegate to the fully-parameterized overload with multi-line parsing disabled.
  return init(context, dataFormat, stageGroup, configPrefix, overrunLimit, false, issues);
}
/**
 * Fully-parameterized entry point: runs the per-format validation for the selected
 * data format, then configures and builds the parser factory.
 *
 * @param overrunLimit maximum bytes a single record may occupy before the reader aborts it
 * @param multiLines   whether Text/Log records may span multiple lines
 * @param issues       collector for configuration problems; one issue added per failure
 * @return true when all validation passed and the parser factory was created
 */
public boolean init(
    ProtoConfigurableEntity.Context context,
    DataFormat dataFormat,
    String stageGroup,
    String configPrefix,
    int overrunLimit,
    boolean multiLines,
    List<Stage.ConfigIssue> issues
) {
  boolean valid = true;
  // A null data format is unrecoverable: report and bail out before the switch.
  if (dataFormat == null) {
    issues.add(context.createConfigIssue(
        stageGroup,
        configPrefix + "dataFormat",
        DataFormatErrors.DATA_FORMAT_12,
        dataFormat
    ));
    return false;
  }
  // Per-format validation. Note: LOG and PROTOBUF validators report via `issues`
  // only and do not flip `valid` directly.
  switch (dataFormat) {
    case JSON:
      valid = validateJson(context, configPrefix, issues);
      break;
    case TEXT:
      valid = validateText(context, configPrefix, issues);
      break;
    case DELIMITED:
      valid = validateDelimited(context, configPrefix, issues);
      break;
    case XML:
      valid = validateXml(context, configPrefix, issues);
      break;
    case LOG:
      validateLogFormat(context, configPrefix, issues);
      break;
    case PROTOBUF:
      validateProtobuf(context, configPrefix, issues);
      break;
    case DATAGRAM:
      // Datagram payloads have their own sub-format; validate the selected one.
      if (datagramMode == DatagramMode.COLLECTD) {
        checkCollectdParserConfigs(context, configPrefix, issues);
      } else if (datagramMode == DatagramMode.NETFLOW) {
        // Uses the datagram-specific duplicate fields (see the field declarations).
        NetflowDataParserFactory.validateConfigs(
            context,
            issues,
            stageGroup,
            configPrefix + ".",
            maxTemplateCacheSizeDatagram,
            templateCacheTimeoutMsDatagram,
            "maxTemplateCacheSizeDatagram",
            "templateCacheTimeoutMsDatagram"
        );
      }
      break;
    case WHOLE_FILE:
      valid = validateWholeFile(context, configPrefix, issues);
      break;
    case NETFLOW:
      NetflowDataParserFactory.validateConfigs(
          context,
          issues,
          stageGroup,
          configPrefix + ".",
          maxTemplateCacheSize,
          templateCacheTimeoutMs
      );
      break;
    case EXCEL:
      valid = validateWorkbook(context, configPrefix, issues);
      break;
    case SDC_JSON:
    case BINARY:
    case AVRO:
    case SYSLOG:
      // nothing to validate for these formats
      break;
    default:
      issues.add(context.createConfigIssue(
          stageGroup,
          configPrefix + "dataFormat",
          DataFormatErrors.DATA_FORMAT_04,
          dataFormat
      ));
      valid = false;
      break;
  }
  // Always attempt to build the factory so charset/builder problems are also reported.
  valid &= validateDataParser(
      context,
      dataFormat,
      stageGroup,
      configPrefix,
      overrunLimit,
      multiLines,
      issues
  );
  return valid;
}
/**
 * Validates the Excel configuration: an Excel header option must be selected.
 *
 * @return true when the configuration is valid
 */
private boolean validateWorkbook(ProtoConfigurableEntity.Context context, String configPrefix, List<Stage.ConfigIssue> issues) {
  if (excelHeader != null) {
    return true;
  }
  issues.add(
      context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "excelHeader",
          DataFormatErrors.DATA_FORMAT_200
      )
  );
  return false;
}
/**
 * Validates the JSON configuration: the max object length must be positive.
 *
 * @return true when the configuration is valid
 */
private boolean validateJson(ProtoConfigurableEntity.Context context, String configPrefix, List<Stage.ConfigIssue> issues) {
  if (jsonMaxObjectLen >= 1) {
    return true;
  }
  issues.add(
      context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "jsonMaxObjectLen",
          DataFormatErrors.DATA_FORMAT_01
      )
  );
  return false;
}
/**
 * Validates the Text configuration: the max line length must be positive, and when a
 * custom delimiter is enabled it must be non-empty.
 *
 * @return true when the configuration is valid
 */
private boolean validateText(ProtoConfigurableEntity.Context context, String configPrefix, List<Stage.ConfigIssue> issues) {
  boolean valid = true;
  if (textMaxLineLen < 1) {
    issues.add(
        context.createConfigIssue(
            DataFormatGroups.DATA_FORMAT.name(),
            configPrefix + "textMaxLineLen",
            DataFormatErrors.DATA_FORMAT_01
        )
    );
    valid = false;
  }
  // Fixed: also treat a null custom delimiter as missing. Previously a null value
  // threw a NullPointerException here instead of surfacing a config issue.
  if (useCustomDelimiter && (customDelimiter == null || customDelimiter.isEmpty())) {
    issues.add(
        context.createConfigIssue(
            DataFormatGroups.DATA_FORMAT.name(),
            configPrefix + "customDelimiter",
            DataFormatErrors.DATA_FORMAT_200
        )
    );
    valid = false;
  }
  return valid;
}
/**
 * Validates the Delimited configuration: the max record length must be positive.
 *
 * @return true when the configuration is valid
 */
private boolean validateDelimited(ProtoConfigurableEntity.Context context, String configPrefix, List<Stage.ConfigIssue> issues) {
  if (csvMaxObjectLen >= 1) {
    return true;
  }
  issues.add(
      context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "csvMaxObjectLen",
          DataFormatErrors.DATA_FORMAT_01
      )
  );
  return false;
}
/**
 * Validates the XML configuration: the max record length must be positive and the
 * record delimiter element, when set, must be a valid XPath expression.
 *
 * @return true when the configuration is valid
 */
private boolean validateXml(ProtoConfigurableEntity.Context context, String configPrefix, List<Stage.ConfigIssue> issues) {
  boolean ok = true;
  if (xmlMaxObjectLen < 1) {
    issues.add(context.createConfigIssue(
        DataFormatGroups.DATA_FORMAT.name(),
        configPrefix + "xmlMaxObjectLen",
        DataFormatErrors.DATA_FORMAT_01
    ));
    ok = false;
  }
  // An empty delimiter element is allowed; validate the XPath only when one is set.
  if (xmlRecordElement != null && !xmlRecordElement.isEmpty()) {
    String xpathError = XPathValidatorUtil.getXPathValidationError(xmlRecordElement);
    if (!Strings.isNullOrEmpty(xpathError)) {
      issues.add(context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "xmlRecordElement",
          DataFormatErrors.DATA_FORMAT_03,
          xmlRecordElement,
          xpathError
      ));
      ok = false;
    }
  }
  return ok;
}
/**
 * Validates the Whole File configuration: the copy buffer size must be positive.
 *
 * @return true when the configuration is valid
 */
private boolean validateWholeFile(
    ProtoConfigurableEntity.Context context,
    String configPrefix,
    List<Stage.ConfigIssue> issues
) {
  if (wholeFileMaxObjectLen >= 1) {
    return true;
  }
  issues.add(
      context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "wholeFileMaxObjectLen",
          DataFormatErrors.DATA_FORMAT_01
      )
  );
  return false;
}
/**
 * Validates the Protobuf configuration: a descriptor file must be configured and exist
 * under the resources directory, and a message type must be set. Reports via {@code issues}.
 */
private void validateProtobuf(
    ProtoConfigurableEntity.Context context,
    String configPrefix,
    List<Stage.ConfigIssue> issues
) {
  if (protoDescriptorFile == null || protoDescriptorFile.isEmpty()) {
    issues.add(
        context.createConfigIssue(
            DataFormatGroups.DATA_FORMAT.name(),
            configPrefix + "protoDescriptorFile",
            DataFormatErrors.DATA_FORMAT_07
        )
    );
  } else {
    // The descriptor path is resolved relative to the SDC resources directory.
    File file = new File(context.getResourcesDirectory(), protoDescriptorFile);
    if (!file.exists()) {
      issues.add(
          context.createConfigIssue(
              DataFormatGroups.DATA_FORMAT.name(),
              configPrefix + "protoDescriptorFile",
              DataFormatErrors.DATA_FORMAT_09,
              file.getAbsolutePath()
          )
      );
    }
  }
  // Fixed: the message type is required independently of the descriptor file. Previously
  // this check lived inside the else branch, so a missing descriptor file masked a
  // missing message type and the user only saw the second issue on a later attempt.
  if (messageType == null || messageType.isEmpty()) {
    issues.add(
        context.createConfigIssue(
            DataFormatGroups.DATA_FORMAT.name(),
            configPrefix + "messageType",
            DataFormatErrors.DATA_FORMAT_08
        )
    );
  }
}
/**
 * Builds the {@link LogDataFormatValidator} from the LOG-related config fields and
 * runs its validation, reporting problems via {@code issues}. The validator instance
 * is kept in {@code logDataFormatValidator} so {@code buildLogParser} can reuse it.
 *
 * NOTE: the constructor arguments are positional — keep this order in sync with
 * LogDataFormatValidator's constructor.
 */
private void validateLogFormat(
    ProtoConfigurableEntity.Context context,
    String configPrefix,
    List<Stage.ConfigIssue> issues
) {
  logDataFormatValidator = new LogDataFormatValidator(
      logMode,
      logMaxObjectLen,
      retainOriginalLine,
      customLogFormat,
      regex,
      grokPatternDefinition,
      grokPattern,
      enableLog4jCustomLogFormat,
      log4jCustomLogFormat,
      onParseError,
      maxStackTraceLines,
      DataFormatGroups.DATA_FORMAT.name(),
      getFieldPathToGroupMap(fieldPathsToGroupName)
  );
  logDataFormatValidator.validateLogFormatConfig(context, configPrefix, issues);
}
/**
 * Validates the collectd datagram settings: the optional TypesDB and auth file paths,
 * when configured, must point to readable regular files. Reports via {@code issues}.
 */
void checkCollectdParserConfigs(
    ProtoConfigurableEntity.Context context,
    String configPrefix,
    List<Stage.ConfigIssue> issues
) {
  if (typesDbPath != null && !typesDbPath.isEmpty()) {
    File typesDb = new File(typesDbPath);
    if (!(typesDb.isFile() && typesDb.canRead())) {
      issues.add(context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "typesDbPath",
          DataFormatErrors.DATA_FORMAT_400, typesDbPath
      ));
    }
  }
  if (authFilePath != null && !authFilePath.isEmpty()) {
    File authFile = new File(authFilePath);
    if (!(authFile.isFile() && authFile.canRead())) {
      issues.add(context.createConfigIssue(
          DataFormatGroups.DATA_FORMAT.name(),
          configPrefix + "authFilePath",
          DataFormatErrors.DATA_FORMAT_401, authFilePath
      ));
    }
  }
}
/**
 * Configures a {@link DataParserFactoryBuilder} for the selected format and builds
 * the factory, storing it in {@code parserFactory}.
 *
 * @return true when the charset was valid and the factory built successfully
 */
private boolean validateDataParser(
    ProtoConfigurableEntity.Context context,
    DataFormat dataFormat,
    String stageGroup,
    String configPrefix,
    int overrunLimit,
    boolean multiLines,
    List<Stage.ConfigIssue> issues
) {
  boolean valid = true;
  DataParserFactoryBuilder builder = new DataParserFactoryBuilder(context, dataFormat.getParserFormat());
  Charset fileCharset;
  try {
    fileCharset = Charset.forName(charset);
  } catch (UnsupportedCharsetException ignored) { // NOSONAR
    // setting it to a valid one so the parser factory can be configured and tested for more errors
    fileCharset = StandardCharsets.UTF_8;
    issues.add(
        context.createConfigIssue(
            stageGroup,
            configPrefix + "charset",
            DataFormatErrors.DATA_FORMAT_05,
            charset
        )
    );
    valid = false;
  }
  // Settings common to all formats.
  builder.setCharset(fileCharset);
  builder.setOverRunLimit(overrunLimit);
  builder.setRemoveCtrlChars(removeCtrlChars);
  builder.setCompression(compression);
  builder.setFilePatternInArchive(filePatternInArchive);
  // Per-format builder configuration.
  switch (dataFormat) {
    case TEXT:
      buildTextParser(builder, multiLines);
      break;
    case JSON:
      builder.setMaxDataLen(jsonMaxObjectLen).setMode(jsonContent);
      break;
    case DELIMITED:
      buildDelimitedParser(builder);
      break;
    case XML:
      builder.setMaxDataLen(xmlMaxObjectLen).setConfig(XmlDataParserFactory.RECORD_ELEMENT_KEY, xmlRecordElement)
          .setConfig(XmlDataParserFactory.INCLUDE_FIELD_XPATH_ATTRIBUTES_KEY, includeFieldXpathAttributes)
          .setConfig(XmlDataParserFactory.RECORD_ELEMENT_XPATH_NAMESPACES_KEY, xPathNamespaceContext)
          .setConfig(XmlDataParserFactory.USE_FIELD_ATTRIBUTES, outputFieldAttributes);
      break;
    case SDC_JSON:
      // -1 means unlimited record size for the internal SDC record format.
      builder.setMaxDataLen(-1);
      break;
    case BINARY:
      builder.setMaxDataLen(binaryMaxObjectLen);
      break;
    case LOG:
      buildLogParser(builder, multiLines);
      break;
    case AVRO:
      buildAvroParser(builder);
      break;
    case PROTOBUF:
      buildProtobufParser(builder);
      break;
    case DATAGRAM:
      buildDatagramParser(builder);
      break;
    case WHOLE_FILE:
      // Whole-file transfers never decompress; the file is streamed verbatim.
      builder.setCompression(Compression.NONE);
      builder.setMaxDataLen(wholeFileMaxObjectLen);
      break;
    case SYSLOG:
      buildSyslogParser(builder);
      break;
    case NETFLOW:
      buildNetflowParser(builder);
      break;
    case EXCEL:
      buildWorkbookParser(builder);
      break;
    default:
      throw new IllegalStateException("Unexpected data format" + dataFormat);
  }
  try {
    parserFactory = builder.build();
  } catch (Exception ex) {
    LOG.error("Can't create parserFactory", ex);
    issues.add(context.createConfigIssue(null, null, DataFormatErrors.DATA_FORMAT_06, ex.toString(), ex));
    valid = false;
  }
  return valid;
}
/**
 * Configures the builder for Avro parsing: schema source, inline schema, registry URLs,
 * and either the subject or the schema ID depending on the lookup mode.
 */
private void buildAvroParser(DataParserFactoryBuilder builder) {
  builder.setMaxDataLen(-1);
  builder.setConfig(SCHEMA_KEY, avroSchema);
  builder.setConfig(SCHEMA_SOURCE_KEY, avroSchemaSource);
  builder.setConfig(SCHEMA_REPO_URLS_KEY, schemaRegistryUrls);
  if (schemaLookupMode == AvroSchemaLookupMode.SUBJECT) {
    // Latest schema for the configured subject.
    builder.setConfig(SUBJECT_KEY, subject);
  } else {
    // Fixed schema looked up by registry ID.
    builder.setConfig(SCHEMA_ID_KEY, schemaId);
  }
}
/**
 * Configures the builder for delimited parsing: format/header/record-type modes plus
 * all delimiter, quoting, comment, null-handling, and extra-column settings.
 */
private void buildDelimitedParser(DataParserFactoryBuilder builder) {
  builder.setMaxDataLen(csvMaxObjectLen);
  builder.setMode(csvFileFormat);
  builder.setMode(csvHeader);
  builder.setMode(csvRecordType);
  builder.setConfig(DelimitedDataConstants.SKIP_START_LINES, csvSkipStartLines);
  builder.setConfig(DelimitedDataConstants.DELIMITER_CONFIG, csvCustomDelimiter);
  builder.setConfig(DelimitedDataConstants.ESCAPE_CONFIG, csvCustomEscape);
  builder.setConfig(DelimitedDataConstants.QUOTE_CONFIG, csvCustomQuote);
  builder.setConfig(DelimitedDataConstants.PARSE_NULL, parseNull);
  builder.setConfig(DelimitedDataConstants.NULL_CONSTANT, nullConstant);
  builder.setConfig(DelimitedDataConstants.COMMENT_ALLOWED_CONFIG, csvEnableComments);
  builder.setConfig(DelimitedDataConstants.COMMENT_MARKER_CONFIG, csvCommentMarker);
  builder.setConfig(DelimitedDataConstants.IGNORE_EMPTY_LINES_CONFIG, csvIgnoreEmptyLines);
  builder.setConfig(DelimitedDataConstants.ALLOW_EXTRA_COLUMNS, csvAllowExtraColumns);
  builder.setConfig(DelimitedDataConstants.EXTRA_COLUMN_PREFIX, csvExtraColumnPrefix);
  builder.setConfig(DelimitedDataConstants.MULTI_CHARACTER_FIELD_DELIMITER_CONFIG, multiCharacterFieldDelimiter);
  builder.setConfig(DelimitedDataConstants.MULTI_CHARACTER_LINE_DELIMITER_CONFIG, multiCharacterLineDelimiter);
}
/**
 * Configures the builder for Protobuf parsing: descriptor file, message type, and
 * whether messages are size-delimited. Record size is unlimited.
 */
private void buildProtobufParser(DataParserFactoryBuilder builder) {
  builder.setMaxDataLen(-1);
  builder.setConfig(ProtobufConstants.PROTO_DESCRIPTOR_FILE_KEY, protoDescriptorFile);
  builder.setConfig(ProtobufConstants.MESSAGE_TYPE_KEY, messageType);
  builder.setConfig(ProtobufConstants.DELIMITED_KEY, isDelimited);
}
/**
 * Configures the builder for datagram parsing: collectd options, the datagram-specific
 * NetFlow settings, and the selected packet sub-format. Record size is unlimited.
 */
void buildDatagramParser(DataParserFactoryBuilder builder) {
  builder.setMaxDataLen(-1);
  builder.setMode(datagramMode);
  // collectd-specific options (ignored by other sub-formats).
  builder.setConfig(DatagramParserFactory.CONVERT_TIME_KEY, convertTime);
  builder.setConfig(DatagramParserFactory.EXCLUDE_INTERVAL_KEY, excludeInterval);
  builder.setConfig(DatagramParserFactory.AUTH_FILE_PATH_KEY, authFilePath);
  builder.setConfig(DatagramParserFactory.TYPES_DB_PATH_KEY, typesDbPath);
  // NetFlow-over-datagram options use the duplicated *Datagram fields.
  builder.setConfig(NetflowDataParserFactory.OUTPUT_VALUES_MODE_KEY, netflowOutputValuesModeDatagram);
  builder.setConfig(NetflowDataParserFactory.MAX_TEMPLATE_CACHE_SIZE_KEY, maxTemplateCacheSizeDatagram);
  builder.setConfig(NetflowDataParserFactory.TEMPLATE_CACHE_TIMEOUT_MS_KEY, templateCacheTimeoutMsDatagram);
}
/**
 * Configures {@code builder} for plain-text parsing: max line length,
 * string-builder pooling, multi-line support and the optional custom
 * record delimiter.
 *
 * @param multiLines whether a single record may span multiple lines
 */
private void buildTextParser(DataParserFactoryBuilder builder, boolean multiLines) {
    builder.setMaxDataLen(textMaxLineLen);
    builder.setStringBuilderPoolSize(stringBuilderPoolSize);
    builder.setConfig(TextDataParserFactory.MULTI_LINE_KEY, multiLines);
    builder.setConfig(TextDataParserFactory.USE_CUSTOM_DELIMITER_KEY, useCustomDelimiter);
    builder.setConfig(TextDataParserFactory.CUSTOM_DELIMITER_KEY, customDelimiter);
    builder.setConfig(TextDataParserFactory.INCLUDE_CUSTOM_DELIMITER_IN_TEXT_KEY, includeCustomDelimiterInTheText);
}
/**
 * Configures {@code builder} for log parsing and delegates the log-format
 * specific settings to {@code logDataFormatValidator}.
 *
 * @param multiLines whether a single log record may span multiple lines
 */
private void buildLogParser(DataParserFactoryBuilder builder, boolean multiLines) {
    builder.setStringBuilderPoolSize(stringBuilderPoolSize);
    builder.setConfig(LogDataParserFactory.MULTI_LINES_KEY, multiLines);
    // The validator carries the per-log-format configuration (pattern, fields, ...).
    logDataFormatValidator.populateBuilder(builder);
}
/**
 * Configures {@code builder} for syslog parsing: unlimited record length and
 * the configured character set.
 */
private void buildSyslogParser(DataParserFactoryBuilder builder) {
    Charset messageCharset = Charset.forName(charset);
    builder.setMaxDataLen(-1);
    builder.setCharset(messageCharset);
}
/**
 * Configures {@code builder} for NetFlow parsing: output-values mode and the
 * template cache size/timeout used for template-based NetFlow versions.
 */
private void buildNetflowParser(DataParserFactoryBuilder builder) {
    builder.setMaxDataLen(-1);
    builder.setConfig(NetflowDataParserFactory.OUTPUT_VALUES_MODE_KEY, netflowOutputValuesMode);
    builder.setConfig(NetflowDataParserFactory.MAX_TEMPLATE_CACHE_SIZE_KEY, maxTemplateCacheSize);
    builder.setConfig(NetflowDataParserFactory.TEMPLATE_CACHE_TIMEOUT_MS_KEY, templateCacheTimeoutMs);
}
/**
 * Configures {@code builder} for Excel workbook parsing: header handling mode
 * and unlimited record length.
 */
private void buildWorkbookParser(DataParserFactoryBuilder builder) {
    builder.setMode(excelHeader);
    builder.setMaxDataLen(-1);
}
/**
 * Returns the {@link DataParserFactory} built by this configuration bean.
 *
 * <p>The DataParserFactory instance is not thread safe.
 * To improve performance the DataParserFactory instance may share a buffer among the data parser
 * instances that it creates.
 *
 * @return the parser factory; presumably populated during init and null before
 *     that — confirm against the initializing code, which is outside this view
 */
public DataParserFactory getParserFactory() {
    return parserFactory;
}
/**
 * Flattens the {@code RegExConfig} list into a field-path → group-number map.
 * A null input yields an empty map; duplicate field paths keep the last entry.
 */
private static Map<String, Integer> getFieldPathToGroupMap(List<RegExConfig> fieldPathsToGroupName) {
    Map<String, Integer> fieldPathToGroup = new HashMap<>();
    if (fieldPathsToGroupName != null) {
        for (RegExConfig regExConfig : fieldPathsToGroupName) {
            fieldPathToGroup.put(regExConfig.fieldPath, regExConfig.group);
        }
    }
    return fieldPathToGroup;
}
/**
 * This is used to make sure AUTO schema lookup mode is not used for
 * stages that do not support it (e.g. file based ones).
 * This includes HDFS, Directory, S3, SFTP.
 *
 * @param dataFormat DataFormat selected in the config
 * @param configBeanPrefix prefix for the config bean so the validation errors are shown in the correct place
 * @param context stage context
 * @param issues list of issues to add any validation errors to
 */
public void checkForInvalidAvroSchemaLookupMode(
    DataFormat dataFormat,
    String configBeanPrefix,
    ProtoConfigurableEntity.Context context,
    List<Stage.ConfigIssue> issues
) {
    // Only the AVRO + schema-registry + AUTO lookup combination is invalid here.
    boolean invalidCombination = dataFormat == DataFormat.AVRO
        && avroSchemaSource == OriginAvroSchemaSource.REGISTRY
        && schemaLookupMode == AvroSchemaLookupMode.AUTO;
    if (invalidCombination) {
        issues.add(context.createConfigIssue(
            "AVRO",
            Joiner.on(".").join(configBeanPrefix, "schemaLookupMode"),
            DATA_FORMAT_11
        ));
    }
}
}
| |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.mixed;
import static org.junit.Assert.*;
import java.util.List;
import org.junit.Test;
import io.reactivex.*;
import io.reactivex.disposables.Disposables;
import io.reactivex.exceptions.*;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.Functions;
import io.reactivex.observers.TestObserver;
import io.reactivex.plugins.RxJavaPlugins;
import io.reactivex.subjects.*;
/**
 * Tests for {@code Observable.switchMapCompletable} and
 * {@code Observable.switchMapCompletableDelayError}: normal completion,
 * error propagation from either side, switching behavior, disposal and
 * onNext/terminal race conditions.
 */
public class ObservableSwitchMapCompletableTest {

    /** A finite source whose inner Completables all complete: the result completes. */
    @Test
    public void normal() {
        Observable.range(1, 10)
        .switchMapCompletable(new Function<Integer, CompletableSource>() {
            @Override
            public CompletableSource apply(Integer v) throws Exception {
                return Completable.complete();
            }
        })
        .test()
        .assertResult();
    }

    /** An error from the main source is forwarded to the downstream observer. */
    @Test
    public void mainError() {
        Observable.<Integer>error(new TestException())
        .switchMapCompletable(new Function<Integer, CompletableSource>() {
            @Override
            public CompletableSource apply(Integer v) throws Exception {
                return Completable.complete();
            }
        })
        .test()
        .assertFailure(TestException.class);
    }

    /** An error from the active inner Completable fails the sequence and unsubscribes both sides. */
    @Test
    public void innerError() {
        PublishSubject<Integer> ps = PublishSubject.create();
        CompletableSubject cs = CompletableSubject.create();
        TestObserver<Void> to = ps.switchMapCompletable(Functions.justFunction(cs))
        .test();
        // The inner source is only subscribed to after the main source emits.
        assertTrue(ps.hasObservers());
        assertFalse(cs.hasObservers());
        ps.onNext(1);
        assertTrue(cs.hasObservers());
        to.assertEmpty();
        cs.onError(new TestException());
        to.assertFailure(TestException.class);
        assertFalse(ps.hasObservers());
        assertFalse(cs.hasObservers());
    }

    /** A new main value switches to the new inner source, dropping the previous one. */
    @Test
    public void switchOver() {
        final CompletableSubject[] css = {
            CompletableSubject.create(),
            CompletableSubject.create()
        };
        PublishSubject<Integer> ps = PublishSubject.create();
        TestObserver<Void> to = ps.switchMapCompletable(new Function<Integer, CompletableSource>() {
            @Override
            public CompletableSource apply(Integer v) throws Exception {
                return css[v];
            }
        })
        .test();
        to.assertEmpty();
        ps.onNext(0);
        assertTrue(css[0].hasObservers());
        ps.onNext(1);
        // Switching unsubscribes from the previous inner source.
        assertFalse(css[0].hasObservers());
        assertTrue(css[1].hasObservers());
        ps.onComplete();
        to.assertEmpty();
        // Main completion is held back until the last inner source terminates.
        assertTrue(css[1].hasObservers());
        css[1].onComplete();
        to.assertResult();
    }

    /** Disposing the downstream observer unsubscribes from both the main and inner sources. */
    @Test
    public void dispose() {
        PublishSubject<Integer> ps = PublishSubject.create();
        CompletableSubject cs = CompletableSubject.create();
        TestObserver<Void> to = ps.switchMapCompletable(Functions.justFunction(cs))
        .test();
        ps.onNext(1);
        assertTrue(ps.hasObservers());
        assertTrue(cs.hasObservers());
        to.dispose();
        assertFalse(ps.hasObservers());
        assertFalse(cs.hasObservers());
    }

    /** Standard TestHelper disposal check on the operator. */
    @Test
    public void checkDisposed() {
        PublishSubject<Integer> ps = PublishSubject.create();
        CompletableSubject cs = CompletableSubject.create();
        TestHelper.checkDisposed(ps.switchMapCompletable(Functions.justFunction(cs)));
    }

    /** Verifies the operator handles a source that calls onSubscribe twice. */
    @Test
    public void checkBadSource() {
        TestHelper.checkDoubleOnSubscribeObservableToCompletable(new Function<Observable<Object>, Completable>() {
            @Override
            public Completable apply(Observable<Object> f) throws Exception {
                return f.switchMapCompletable(Functions.justFunction(Completable.never()));
            }
        });
    }

    /** An exception thrown by the mapper fails the sequence. */
    @Test
    public void mapperCrash() {
        Observable.range(1, 5).switchMapCompletable(new Function<Integer, CompletableSource>() {
            @Override
            public CompletableSource apply(Integer f) throws Exception {
                throw new TestException();
            }
        })
        .test()
        .assertFailure(TestException.class);
    }

    /** Cancelling from within the mapper stops the sequence without any terminal event. */
    @Test
    public void mapperCancels() {
        final TestObserver<Void> to = new TestObserver<Void>();
        Observable.range(1, 5).switchMapCompletable(new Function<Integer, CompletableSource>() {
            @Override
            public CompletableSource apply(Integer f) throws Exception {
                to.cancel();
                return Completable.complete();
            }
        })
        .subscribe(to);
        to.assertEmpty();
    }

    /** Races a new main value against completion of the current inner source. */
    @Test
    public void onNextInnerCompleteRace() {
        for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();
            final CompletableSubject cs = CompletableSubject.create();
            TestObserver<Void> to = ps.switchMapCompletable(Functions.justFunction(cs)).test();
            ps.onNext(1);
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    ps.onNext(2);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    cs.onComplete();
                }
            };
            TestHelper.race(r1, r2);
            // Neither outcome of the race may terminate the downstream.
            to.assertEmpty();
        }
    }

    /** Races a new main value against an error from the current inner source. */
    @Test
    public void onNextInnerErrorRace() {
        final TestException ex = new TestException();
        for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final PublishSubject<Integer> ps = PublishSubject.create();
                final CompletableSubject cs = CompletableSubject.create();
                TestObserver<Void> to = ps.switchMapCompletable(Functions.justFunction(cs)).test();
                ps.onNext(1);
                Runnable r1 = new Runnable() {
                    @Override
                    public void run() {
                        ps.onNext(2);
                    }
                };
                Runnable r2 = new Runnable() {
                    @Override
                    public void run() {
                        cs.onError(ex);
                    }
                };
                TestHelper.race(r1, r2);
                // The downstream must receive the error, possibly composed.
                to.assertError(new Predicate<Throwable>() {
                    @Override
                    public boolean test(Throwable e) throws Exception {
                        return e instanceof TestException || e instanceof CompositeException;
                    }
                });
                // Depending on the race outcome the error may instead go to the plugin handler.
                if (!errors.isEmpty()) {
                    TestHelper.assertUndeliverable(errors, 0, TestException.class);
                }
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }

    /** Races a main-source error against an inner-source error. */
    @Test
    public void onErrorInnerErrorRace() {
        final TestException ex0 = new TestException();
        final TestException ex = new TestException();
        for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final PublishSubject<Integer> ps = PublishSubject.create();
                final CompletableSubject cs = CompletableSubject.create();
                TestObserver<Void> to = ps.switchMapCompletable(Functions.justFunction(cs)).test();
                ps.onNext(1);
                Runnable r1 = new Runnable() {
                    @Override
                    public void run() {
                        ps.onError(ex0);
                    }
                };
                Runnable r2 = new Runnable() {
                    @Override
                    public void run() {
                        cs.onError(ex);
                    }
                };
                TestHelper.race(r1, r2);
                to.assertError(new Predicate<Throwable>() {
                    @Override
                    public boolean test(Throwable e) throws Exception {
                        return e instanceof TestException || e instanceof CompositeException;
                    }
                });
                // The losing error may be routed to the undeliverable handler instead.
                if (!errors.isEmpty()) {
                    TestHelper.assertUndeliverable(errors, 0, TestException.class);
                }
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }

    /** A main error arriving after an inner error becomes an undeliverable. */
    @Test
    public void innerErrorThenMainError() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Observable<Integer>() {
                @Override
                protected void subscribeActual(Observer<? super Integer> observer) {
                    observer.onSubscribe(Disposables.empty());
                    observer.onNext(1);
                    observer.onError(new TestException("main"));
                }
            }
            .switchMapCompletable(Functions.justFunction(Completable.error(new TestException("inner"))))
            .test()
            .assertFailureAndMessage(TestException.class, "inner");
            TestHelper.assertUndeliverable(errors, 0, TestException.class, "main");
        } finally {
            RxJavaPlugins.reset();
        }
    }

    /** With delayError, an inner error is held until the main source completes. */
    @Test
    public void innerErrorDelayed() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        final CompletableSubject cs = CompletableSubject.create();
        TestObserver<Void> to = ps.switchMapCompletableDelayError(Functions.justFunction(cs)).test();
        ps.onNext(1);
        cs.onError(new TestException());
        // The error must not be emitted yet; the main source is still active.
        to.assertEmpty();
        assertTrue(ps.hasObservers());
        ps.onComplete();
        to.assertFailure(TestException.class);
    }

    /** With delayError, main completion still waits for the inner source's terminal event. */
    @Test
    public void mainCompletesinnerErrorDelayed() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        final CompletableSubject cs = CompletableSubject.create();
        TestObserver<Void> to = ps.switchMapCompletableDelayError(Functions.justFunction(cs)).test();
        ps.onNext(1);
        ps.onComplete();
        to.assertEmpty();
        cs.onError(new TestException());
        to.assertFailure(TestException.class);
    }

    /** With delayError, a main error is held until the inner source terminates. */
    @Test
    public void mainErrorDelayed() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        final CompletableSubject cs = CompletableSubject.create();
        TestObserver<Void> to = ps.switchMapCompletableDelayError(Functions.justFunction(cs)).test();
        ps.onNext(1);
        ps.onError(new TestException());
        to.assertEmpty();
        assertTrue(cs.hasObservers());
        cs.onComplete();
        to.assertFailure(TestException.class);
    }

    /** A mapper crash on a scalar (just) source fails the sequence. */
    @Test
    public void scalarMapperCrash() {
        TestObserver<Void> to = Observable.just(1)
        .switchMapCompletable(new Function<Integer, CompletableSource>() {
            @Override
            public CompletableSource apply(Integer v)
            throws Exception {
                throw new TestException();
            }
        })
        .test();
        to.assertFailure(TestException.class);
    }

    /** An empty scalar source completes without ever subscribing to the inner source. */
    @Test
    public void scalarEmptySource() {
        CompletableSubject cs = CompletableSubject.create();
        Observable.empty()
        .switchMapCompletable(Functions.justFunction(cs))
        .test()
        .assertResult();
        assertFalse(cs.hasObservers());
    }

    /** A scalar source subscribes to its single inner source and mirrors its completion. */
    @Test
    public void scalarSource() {
        CompletableSubject cs = CompletableSubject.create();
        TestObserver<Void> to = Observable.just(1)
        .switchMapCompletable(Functions.justFunction(cs))
        .test();
        assertTrue(cs.hasObservers());
        to.assertEmpty();
        cs.onComplete();
        to.assertResult();
    }
}
| |
package gov.cdc.sdp.cbr.queue;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.camel.CamelContext;
import org.apache.camel.EndpointInject;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Produce;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.impl.DefaultMessage;
import org.apache.camel.test.spring.CamelSpringJUnit4ClassRunner;
import org.apache.camel.test.spring.CamelTestContextBootstrapper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.PropertySource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.BootstrapWith;
import org.springframework.test.context.ContextConfiguration;
@RunWith(CamelSpringJUnit4ClassRunner.class)
@BootstrapWith(CamelTestContextBootstrapper.class)
@ContextConfiguration(locations = { "classpath:DatabaseQueueComponentTest-context.xml" })
@PropertySource("classpath:application.properties")
/**
 * Integration tests for the database-queue Camel component: consumers with
 * limits, delays and max-attempt handling, plus the queue producer.
 */
public class DatabaseQueueComponentTest {

    private static final String DELETE_FROM = "DELETE FROM ";
    private static final String SELECT_FROM = "SELECT * FROM ";
    // NOTE(review): "cbr_recevied_time" is misspelled but must match the column name in the
    // test schema -- confirm the schema before renaming it here.
    private static final String HEADERS = "(id, cbr_id, source, source_id, payload, cbr_recevied_time)";
    private static final String INSERT_INTO = "INSERT into ";

    @Autowired
    protected CamelContext camelContext;

    @EndpointInject(uri = "mock:mock_endpoint")
    protected MockEndpoint mock_endpoint;
    @EndpointInject(uri = "mock:mock_endpoint2")
    protected MockEndpoint mock_endpoint2;
    @EndpointInject(uri = "mock:mock_endpoint3")
    protected MockEndpoint mock_endpoint3;
    @EndpointInject(uri = "mock:mock_endpoint4")
    protected MockEndpoint mock_endpoint4;
    @EndpointInject(uri = "mock:mock_endpoint5")
    protected MockEndpoint mock_endpoint5;

    @Produce(uri = "direct:start")
    protected ProducerTemplate template;

    /** Builds a JdbcTemplate backed by the shared "sdpqDataSource" registry bean. */
    private JdbcTemplate jdbcTemplate() {
        DataSource ds = (DataSource) camelContext.getRegistry().lookupByName("sdpqDataSource");
        return new JdbcTemplate(ds);
    }

    /** Returns the INSERT statement that creates the single dummy row (id=1337). */
    private static String singleRowInsert(String tableName) {
        return INSERT_INTO + tableName + HEADERS
            + " values (1337, 'cbr_1337', 'mockland', 'mockland_1', 'the payload', '"
            + new Date(System.currentTimeMillis()) + "')";
    }

    /**
     * Shared body of the single-row consumer tests: inserts one dummy row into
     * {@code tableName}, waits on {@code endpoint} and cleans the row up again.
     *
     * @param tableName         queue table backing the consumer under test
     * @param endpoint          mock endpoint the consumer routes to
     * @param resultWaitTimeMs  if &gt; 0, maximum time the mock waits for the message
     * @param minimumWaitTimeMs if &gt; 0, minimum time the mock must wait before satisfaction
     * @param expectSatisfied   true if the mock is expected to receive the message,
     *                          false if it is expected NOT to within the wait time
     */
    private void runSingleRowConsumerTest(
            String tableName,
            MockEndpoint endpoint,
            long resultWaitTimeMs,
            long minimumWaitTimeMs,
            boolean expectSatisfied
    ) throws Exception {
        JdbcTemplate jdbcTemplate = jdbcTemplate();
        String checkSent = SELECT_FROM + tableName + " WHERE id=1337";
        String clearDummyData = DELETE_FROM + tableName + " WHERE id=1337";
        String getCount = SELECT_FROM + tableName;
        int initialCount = 0;
        try {
            initialCount = jdbcTemplate.queryForList(getCount).size();
            int rowsAffected = jdbcTemplate.update(singleRowInsert(tableName));
            assertEquals(1, rowsAffected);
            endpoint.expectedMessageCount(rowsAffected);
            if (resultWaitTimeMs > 0) {
                endpoint.setResultWaitTime(resultWaitTimeMs);
            }
            if (minimumWaitTimeMs > 0) {
                endpoint.setResultMinimumWaitTime(minimumWaitTimeMs);
            }
            if (expectSatisfied) {
                endpoint.assertIsSatisfied();
            } else {
                endpoint.assertIsNotSatisfied();
            }
            // The dummy row is still present in the table either way.
            assertEquals(1, jdbcTemplate.queryForList(checkSent).size());
        } finally {
            // Always remove the dummy row and verify the table is back to its initial size.
            jdbcTemplate.update(clearDummyData);
            assertEquals(initialCount, jdbcTemplate.queryForList(getCount).size());
        }
    }

    /** The limited consumer still delivers a single row; endpoint limits match the context XML. */
    @Test
    @DirtiesContext
    public void testLimitedQueueConsumer_Pass() throws Exception {
        DatabaseQueueEndpoint endpoint100 =
            (DatabaseQueueEndpoint) camelContext.getRegistry().lookupByName("consumer_from");
        DatabaseQueueEndpoint endpoint5 =
            (DatabaseQueueEndpoint) camelContext.getRegistry().lookupByName("limit_from");
        // JUnit's assertEquals takes (expected, actual); the original calls had them reversed.
        assertEquals(100, endpoint100.getLimit());
        assertEquals(5, endpoint5.getLimit());
        runSingleRowConsumerTest("message_queue_four", mock_endpoint4, 0, 0, true);
    }

    /** Rows past the max-attempt threshold stay 'queued' and are never delivered. */
    @Test
    @DirtiesContext
    public void testMaxAttemptsQueueConsumer_fail() throws Exception {
        JdbcTemplate jdbcTemplate = jdbcTemplate();
        String tableName = "message_queue_five";
        String[] allEntries = new String[20];
        for (int i = 0; i < 20; i++) {
            allEntries[i] = INSERT_INTO + tableName + HEADERS
                + " values (" + i + ", 'cbr_" + i + "', 'mockland', 'mockland_" + i
                + "', 'the payload', '" + new Date(System.currentTimeMillis()) + "')";
        }
        String checkSent = SELECT_FROM + tableName + " WHERE status='sent'";
        String checkUnsent = SELECT_FROM + tableName + " WHERE status='queued'";
        String clearDummyData = DELETE_FROM + tableName + " WHERE source='mockland'";
        String getCount = SELECT_FROM + tableName;
        int initialCount = 0;
        try {
            initialCount = jdbcTemplate.queryForList(getCount).size();
            int rowsAffected = 0;
            for (int updated : jdbcTemplate.batchUpdate(allEntries)) {
                rowsAffected += updated;
            }
            assertEquals(20, rowsAffected);
            // No message may reach the endpoint.
            mock_endpoint5.expectedMessageCount(0);
            mock_endpoint5.assertIsSatisfied();
            assertEquals(0, jdbcTemplate.queryForList(checkSent).size());
            assertEquals(20, jdbcTemplate.queryForList(checkUnsent).size());
        } finally {
            jdbcTemplate.update(clearDummyData);
            assertEquals(initialCount, jdbcTemplate.queryForList(getCount).size());
        }
    }

    /** The initial-delay consumer must NOT deliver within the first 5 seconds. */
    @Test
    @DirtiesContext
    public void testInitialDelayQueueConsumer_Fail() throws Exception {
        runSingleRowConsumerTest("message_queue_two", mock_endpoint2, 5 * 1000, 0, false);
    }

    /** The initial-delay consumer delivers once the configured delay has elapsed. */
    @Test
    @DirtiesContext
    public void testInitialDelayQueueConsumer_Pass() throws Exception {
        // using 6.75 seconds because the wait time isn't exactly 7 seconds
        runSingleRowConsumerTest("message_queue_two", mock_endpoint2, 0, 6750, true);
    }

    /** The delayed consumer must NOT deliver within the first second. */
    @Test
    @DirtiesContext
    public void testDelayedQueueConsumer_Fail() throws Exception {
        runSingleRowConsumerTest("message_queue_three", mock_endpoint3, 1 * 1000, 0, false);
    }

    /** The delayed consumer eventually delivers the row. */
    @Test
    @DirtiesContext
    public void testDelayedQueueConsumer_Pass() throws Exception {
        runSingleRowConsumerTest("message_queue_three", mock_endpoint3, 0, 0, true);
    }

    /** The plain consumer delivers a freshly inserted row. */
    @Test
    @DirtiesContext
    public void testQueueConsumer() throws Exception {
        runSingleRowConsumerTest("message_queue", mock_endpoint, 0, 0, true);
    }

    /** The producer splits the batch payload into queue rows, one per message. */
    @Test
    @DirtiesContext
    public void testQueueProducer() throws Exception {
        JdbcTemplate jdbcTemplate = jdbcTemplate();
        String deleteTestEntries = "delete from message_queue where SOURCE_ID='testQueueProducer_rec'";
        String queryQueue = "select * from message_queue where SOURCE_ID='testQueueProducer_rec'";
        String queryQueueForBatchZero =
            "select * from message_queue where SOURCE_ID='testQueueProducer_rec' AND BATCH_INDEX=0";
        String sourceFile = "src/test/resources/BatchTest_GenV2_2msgs.txt";
        try {
            Exchange exchange = new DefaultExchange(camelContext);
            Message msg = new DefaultMessage();
            Map<String, String> map = new HashMap<>();
            map.put("recordId", "testQueueProducer_rec");
            map.put("messageId", "testQueueProducer_msg");
            map.put("payloadName", "Name");
            map.put("payloadBinaryContent", readFile(sourceFile));
            map.put("payloadTextContent", readFile(sourceFile));
            map.put("localFileName", "file??");
            map.put("service", "service");
            map.put("action", "action");
            map.put("arguments", "arge");
            map.put("fromPartyId", "testQueueProducer");
            map.put("messageRecipient", "recipient");
            map.put("receivedTime", new Date().toString());
            msg.setBody(map);
            exchange.setIn(msg);
            // Three rows are expected for the 2-message batch file -- presumably
            // two messages plus a batch record; confirm against the route definition.
            mock_endpoint.expectedMessageCount(3);
            template.send(exchange);
            MockEndpoint.assertIsSatisfied(camelContext);
            List<Map<String, Object>> lst = jdbcTemplate.queryForList(queryQueue);
            assertEquals(3, lst.size());
            // Exactly one of the rows carries BATCH_INDEX=0.
            lst = jdbcTemplate.queryForList(queryQueueForBatchZero);
            assertEquals(1, lst.size());
        } finally {
            jdbcTemplate.update(deleteTestEntries);
            assertEquals(0, jdbcTemplate.queryForList(queryQueue).size());
        }
    }

    /**
     * Reads {@code file} fully as UTF-8 text.
     * The charset is explicit; the original relied on the platform default encoding.
     */
    private String readFile(String file) throws IOException {
        return new String(
            java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(file)),
            java.nio.charset.StandardCharsets.UTF_8);
    }
}
| |
package de.kardroids.books.fragments;
import android.support.v7.app.ActionBarActivity;
import android.app.Activity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
import de.kardroids.books.R;
/**
* Fragment used for managing interactions for and presentation of a navigation drawer.
* See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
* design guidelines</a> for a complete explanation of the behaviors implemented here.
*/
public class NavigationDrawerFragment extends Fragment {
/**
 * Remember the position of the selected item.
 */
private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";
/**
 * Per the design guidelines, you should show the drawer on launch until the user manually
 * expands it. This shared preference tracks this.
 */
private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
/**
 * A pointer to the current callbacks instance (the Activity).
 */
private NavigationDrawerCallbacks mCallbacks;
/**
 * Helper component that ties the action bar to the navigation drawer.
 */
private ActionBarDrawerToggle mDrawerToggle;
// DrawerLayout hosting this fragment; assigned in setUp(), null before that.
private DrawerLayout mDrawerLayout;
// The drawer's list of navigation entries, created in onCreateView().
private ListView mDrawerListView;
// The activity-layout view this fragment occupies, looked up by id in setUp().
private View mFragmentContainerView;
// Index of the currently selected drawer item; persisted in onSaveInstanceState().
private int mCurrentSelectedPosition = 0;
// True when the selection was restored from saved instance state in onCreate().
private boolean mFromSavedInstanceState;
// Mirrors the PREF_USER_LEARNED_DRAWER shared preference.
private boolean mUserLearnedDrawer;

// Required empty public constructor so the framework can re-instantiate the fragment.
public NavigationDrawerFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Has the user already discovered the drawer on a previous run?
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getActivity());
    mUserLearnedDrawer = prefs.getBoolean(PREF_USER_LEARNED_DRAWER, false);
    // Restore the previously selected item, if any.
    if (savedInstanceState != null) {
        mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
        mFromSavedInstanceState = true;
    }
    // Apply the selection: either the default item (0) or the restored one.
    selectItem(mCurrentSelectedPosition);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    // Indicate that this fragment would like to influence the set of actions in the action bar
    // (see onCreateOptionsMenu / onOptionsItemSelected below).
    setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
        Bundle savedInstanceState) {
    // The fragment's whole view IS the drawer's ListView.
    mDrawerListView = (ListView) inflater.inflate(
            R.layout.fragment_navigation_drawer, container, false);
    mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            selectItem(position);
        }
    });
    // Static section titles backing the drawer entries.
    String[] sectionTitles = new String[]{
            getString(R.string.title_section1),
            getString(R.string.title_section2),
            getString(R.string.title_section3),
    };
    ArrayAdapter<String> adapter = new ArrayAdapter<String>(
            getActionBar().getThemedContext(),
            android.R.layout.simple_list_item_activated_1,
            android.R.id.text1,
            sectionTitles);
    mDrawerListView.setAdapter(adapter);
    mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
    return mDrawerListView;
}
/** Returns true if the navigation drawer is currently open (and setUp() has run). */
public boolean isDrawerOpen() {
    if (mDrawerLayout == null) {
        return false;
    }
    return mDrawerLayout.isDrawerOpen(mFragmentContainerView);
}
/**
 * Users of this fragment must call this method to set up the navigation drawer interactions.
 *
 * <p>Wires the fragment to its container view, configures the action bar's home button,
 * installs the {@link ActionBarDrawerToggle} and, for first-time users, opens the drawer
 * once to introduce it (per the navigation drawer design guidelines).
 *
 * @param fragmentId   The android:id of this fragment in its activity's layout.
 * @param drawerLayout The DrawerLayout containing this fragment's UI.
 */
public void setUp(int fragmentId, DrawerLayout drawerLayout) {
    mFragmentContainerView = getActivity().findViewById(fragmentId);
    mDrawerLayout = drawerLayout;
    // set a custom shadow that overlays the main content when the drawer opens
    mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
    // Enable the action bar's home/up affordance so it can host the drawer indicator.
    // (NOTE(review): the original comment here said "set up the drawer's list view",
    // which was misplaced -- the list view is built in onCreateView().)
    ActionBar actionBar = getActionBar();
    actionBar.setDisplayHomeAsUpEnabled(true);
    actionBar.setHomeButtonEnabled(true);
    // ActionBarDrawerToggle ties together the proper interactions
    // between the navigation drawer and the action bar app icon.
    mDrawerToggle = new ActionBarDrawerToggle(
            getActivity(), /* host Activity */
            mDrawerLayout, /* DrawerLayout object */
            R.drawable.ic_drawer, /* nav drawer image to replace 'Up' caret */
            R.string.navigation_drawer_open, /* "open drawer" description for accessibility */
            R.string.navigation_drawer_close /* "close drawer" description for accessibility */
    ) {
        @Override
        public void onDrawerClosed(View drawerView) {
            super.onDrawerClosed(drawerView);
            // The fragment may already be detached when the callback fires.
            if (!isAdded()) {
                return;
            }
            getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
        }
        @Override
        public void onDrawerOpened(View drawerView) {
            super.onDrawerOpened(drawerView);
            if (!isAdded()) {
                return;
            }
            if (!mUserLearnedDrawer) {
                // The user manually opened the drawer; store this flag to prevent auto-showing
                // the navigation drawer automatically in the future.
                mUserLearnedDrawer = true;
                SharedPreferences sp = PreferenceManager
                        .getDefaultSharedPreferences(getActivity());
                sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
            }
            getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
        }
    };
    // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
    // per the navigation drawer design guidelines.
    if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
        mDrawerLayout.openDrawer(mFragmentContainerView);
    }
    // Defer code dependent on restoration of previous instance state.
    mDrawerLayout.post(new Runnable() {
        @Override
        public void run() {
            mDrawerToggle.syncState();
        }
    });
    mDrawerLayout.setDrawerListener(mDrawerToggle);
}
private void selectItem(int position) {
mCurrentSelectedPosition = position;
if (mDrawerListView != null) {
mDrawerListView.setItemChecked(position, true);
}
if (mDrawerLayout != null) {
mDrawerLayout.closeDrawer(mFragmentContainerView);
}
if (mCallbacks != null) {
mCallbacks.onNavigationDrawerItemSelected(position);
}
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mCallbacks = (NavigationDrawerCallbacks) activity;
} catch (ClassCastException e) {
throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
}
}
@Override
public void onDetach() {
super.onDetach();
mCallbacks = null;
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
// Forward the new configuration the drawer toggle component.
mDrawerToggle.onConfigurationChanged(newConfig);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
// If the drawer is open, show the global app actions in the action bar. See also
// showGlobalContextActionBar, which controls the top-left area of the action bar.
if (mDrawerLayout != null && isDrawerOpen()) {
inflater.inflate(R.menu.global, menu);
showGlobalContextActionBar();
}
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (mDrawerToggle.onOptionsItemSelected(item)) {
return true;
}
if (item.getItemId() == R.id.action_example) {
Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Per the navigation drawer design guidelines, updates the action bar to show the global app
* 'context', rather than just what's in the current screen.
*/
private void showGlobalContextActionBar() {
ActionBar actionBar = getActionBar();
actionBar.setDisplayShowTitleEnabled(true);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
actionBar.setTitle(R.string.app_name);
}
private ActionBar getActionBar() {
return ((ActionBarActivity) getActivity()).getSupportActionBar();
}
/**
* Callbacks interface that all activities using this fragment must implement.
*/
public static interface NavigationDrawerCallbacks {
/**
* Called when an item in the navigation drawer is selected.
*/
void onNavigationDrawerItemSelected(int position);
}
}
| |
package com.jukusoft.libgdx.rpg.engine.entity.impl.component.shadow;
import com.badlogic.gdx.graphics.*;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Affine2;
import com.badlogic.gdx.math.Vector2;
import com.jukusoft.libgdx.rpg.engine.camera.CameraWrapper;
import com.jukusoft.libgdx.rpg.engine.entity.BaseComponent;
import com.jukusoft.libgdx.rpg.engine.entity.Entity;
import com.jukusoft.libgdx.rpg.engine.entity.IDrawComponent;
import com.jukusoft.libgdx.rpg.engine.entity.impl.component.draw.DrawTextureComponent;
import com.jukusoft.libgdx.rpg.engine.entity.impl.component.draw.DrawTextureRegionComponent;
import com.jukusoft.libgdx.rpg.engine.entity.impl.component.PositionComponent;
import com.jukusoft.libgdx.rpg.engine.entity.priority.ECSPriority;
import com.jukusoft.libgdx.rpg.engine.game.BaseGame;
import com.jukusoft.libgdx.rpg.engine.time.GameTime;
import com.jukusoft.libgdx.rpg.engine.utils.FastMath;
/**
 * Entity component that generates and draws a simple projected shadow for an entity that has
 * either a {@link DrawTextureComponent} or a {@link DrawTextureRegionComponent}.
 * <p>
 * The shadow is created as a half-height, tinted copy of the entity's texture and sheared at
 * draw time according to {@code shadowAngleDegree}.
 */
public class ShadowComponent extends BaseComponent implements IDrawComponent {
    //components resolved from the owning entity in init()
    protected PositionComponent positionComponent = null;
    protected DrawTextureComponent textureComponent = null;
    protected DrawTextureRegionComponent textureRegionComponent = null;
    //generated shadow texture and the region used for drawing it
    protected Texture shadowTexture = null;
    protected TextureRegion shadowTextureRegion = null;
    //tint applied to shadow pixels
    protected Color shadowColor = Color.GRAY;
    //light direction in degrees; drives the shear applied at draw time
    protected float shadowAngleDegree = 90;
    //shadow width & height in pixels (recomputed in generateShadowTexture())
    protected int shadowWidth = 100;
    protected int shadowHeight = 100;
    //reused transform and vector to avoid per-frame allocations
    protected Affine2 transform = new Affine2();
    protected Vector2 cachedVector = new Vector2(0, 0);
    //http://stackoverflow.com/questions/32146442/libgdx-overlapping-2d-shadows
    //https://gamedevelopment.tutsplus.com/tutorials/how-to-generate-shockingly-good-2d-lightning-effects--gamedev-2681
    //https://github.com/libgdx/libgdx/wiki/Spritebatch%2C-Textureregions%2C-and-Sprites
    //https://github.com/libgdx/libgdx/wiki/2D-Animation
    public ShadowComponent () {
        //no initialization here; components are resolved in init()
    }
    /**
     * Resolves the required sibling components and generates the initial shadow texture.
     * Re-generates the shadow whenever the entity's texture or texture region changes.
     *
     * @throws IllegalStateException if the entity lacks a PositionComponent, or has neither a
     *         DrawTextureComponent nor a DrawTextureRegionComponent
     */
    @Override
    public void init (BaseGame game, Entity entity) {
        super.init(game, entity);
        this.positionComponent = entity.getComponent(PositionComponent.class);
        this.textureComponent = entity.getComponent(DrawTextureComponent.class);
        this.textureRegionComponent = entity.getComponent(DrawTextureRegionComponent.class);
        if (this.positionComponent == null) {
            throw new IllegalStateException("entity doesnt have an PositionComponent.");
        }
        if (textureComponent == null && textureRegionComponent == null) {
            throw new IllegalStateException("You have to set an TextureComponent or an TextureRegionComponent to entity to use ShadowComponent.");
        }
        if (this.textureComponent != null) {
            this.textureComponent.addTextureChangedListener((Texture oldTexture, Texture newTexture) -> {
                //regenerate the shadow whenever the entity texture changes
                generateShadowTexture();
            });
        }
        if (this.textureRegionComponent != null) {
            this.textureRegionComponent.addTextureRegionChangedListener((TextureRegion oldTextureRegion, TextureRegion textureRegion) -> {
                //regenerate the shadow whenever the entity texture region changes
                generateShadowTexture();
            });
        }
        //generate shadow texture
        this.generateShadowTexture();
    }
    /**
     * Draws the sheared shadow texture at the entity's position.
     * Does nothing when no shadow texture has been generated yet.
     */
    @Override public void draw(GameTime time, CameraWrapper camera, SpriteBatch batch) {
        /*Color c = new Color(batch.getColor());
        batch.setColor(a,b,c,d);
        batch.draw(yourSprite, x, y);
        batch.setColor(c);*/
        // NOTE(review): this local is never used; 'transform' (the field) is used below instead.
        Affine2 affine = new Affine2();
        if (shadowTexture != null) {
            //http://stackoverflow.com/questions/24034352/libgdx-change-color-of-texture-at-runtime
            //batch.draw(this.shadowTexture, positionComponent.getX(), positionComponent.getY(), shadowWidth, shadowHeight);
            float angle = FastMath.toRadians(this.shadowAngleDegree/* - 45 - 90*/ % 360);
            float sin = (float) Math.sin(angle);
            float cos = (float) Math.cos(angle);
            // NOTE(review): cos and tan are computed but not used below.
            float tan = (float) Math.tan(angle);
            //System.out.println("angle: " + (this.shadowAngleDegree % 360));
            //TODO: debug
            if (shadowAngleDegree > 90 && shadowAngleDegree <= 270) {
                //angles on the far side draw the (vertically flipped) texture shifted down by its height
                transform.setToTranslation(positionComponent.getX(), positionComponent.getY() - shadowHeight);
                this.cachedVector.set(positionComponent.getX(), positionComponent.getY() - shadowHeight);
            } else {
                transform.setToTranslation(positionComponent.getX(), positionComponent.getY());
                this.cachedVector.set(positionComponent.getX(), positionComponent.getY());
            }
            //System.out.println("sin: " + sin + ", cos: " + cos + ", tan: " + tan);
            //System.out.println("transformated vector: " + transform.getTranslation(this.cachedVector));
            // presumably doubling negative sine strengthens the skew for downward light — TODO confirm intent
            if (sin < 0) {
                sin = sin * 2;
            }
            transform.shear(/*0.5f*//*-1f*/sin, 0); // <- modify skew here
            //TextureRegion tex = new TextureRegion(this.shadowTexture, 0, 0, shadowWidth, shadowHeight);
            batch.draw(this.shadowTextureRegion, shadowWidth, shadowHeight, transform);
            //batch.draw(this.shadowTexture, positionComponent.getX(), positionComponent.getY(), shadowWidth, shadowHeight);
        } else {
            //no shadow texture generated yet; silently skip drawing
            //throw new IllegalStateException("no shadow texture is set.");
        }
    }
    /** Shadows are drawn at the dedicated shadow layer priority. */
    @Override public ECSPriority getDrawOrder() {
        return ECSPriority.DRAW_SHADOW;
    }
    /**
     * (Re-)builds the shadow texture from the entity's current texture or texture region.
     * <p>
     * The entity image is first copied at half height, then every visible pixel is replaced by
     * the shadow color (alpha scaled by the source pixel's alpha). For angles in (90, 270] the
     * resulting region is flipped vertically.
     */
    public void generateShadowTexture () {
        Pixmap halfPixmap = null;
        TextureData textureData = null;
        float lightIntensity = 1;
        Color shadowColor = this.shadowColor;
        // NOTE(review): angle is computed but not used in this method.
        float angle = FastMath.toRadians(shadowAngleDegree);
        Color tmpColor = new Color();
        if (this.textureRegionComponent != null) {
            //get texture region of entity
            TextureRegion texture = this.textureRegionComponent.getTextureRegion();
            if (texture == null) {
                //we cannot generate a shadow if no region is set yet
                return;
            }
            //shadow is twice as wide and half as tall as the source region
            this.shadowWidth = texture.getRegionWidth() * 2;
            this.shadowHeight = texture.getRegionHeight() / 2;
            int regionX = texture.getRegionX();
            int regionY = texture.getRegionY();
            int regionWidth = texture.getRegionWidth();
            int regionHeight = texture.getRegionHeight();
            //http://stackoverflow.com/questions/24034352/libgdx-change-color-of-texture-at-runtime
            //get and prepare texture data
            textureData = texture.getTexture().getTextureData();
            textureData.prepare();
            halfPixmap = new Pixmap(regionWidth, regionHeight / 2, Pixmap.Format.RGBA8888);
            //draw entity texture into temporary pixmap, reducing the height by 50%
            halfPixmap.drawPixmap(textureData.consumePixmap(), regionX, regionY, regionWidth, regionHeight, 0, 0, halfPixmap.getWidth(), halfPixmap.getHeight());
        } else if (this.textureComponent != null) {
            //get texture of entity
            Texture texture = this.textureComponent.getTexture();
            //shadow is twice as wide and half as tall as the source texture
            this.shadowWidth = texture.getWidth() * 2;
            this.shadowHeight = texture.getHeight() / 2;
            //float cos = (float) Math.cos(angle);
            //this.shadowHeight = Math.round(texture.getHeight() * Math.abs(cos) / 2);
            //http://stackoverflow.com/questions/24034352/libgdx-change-color-of-texture-at-runtime
            //get and prepare texture data
            textureData = texture.getTextureData();
            textureData.prepare();
            halfPixmap = new Pixmap(texture.getWidth(), /*texture.getHeight() / 2*/this.shadowHeight, Pixmap.Format.RGBA8888);
            //draw entity texture into temporary pixmap, reducing the height by 50%
            halfPixmap.drawPixmap(textureData.consumePixmap(), 0, 0, texture.getWidth(), texture.getHeight(), 0, 0, halfPixmap.getWidth(), halfPixmap.getHeight());
        } else {
            throw new IllegalStateException("No texture component or texture region component is set to entity.");
        }
        Vector2 vector = new Vector2(0, 0);
        Color color = new Color();
        //create the target pixmap for the shadow
        Pixmap shadowPixmap = new Pixmap(shadowWidth, shadowHeight, Pixmap.Format.RGBA8888);
        //shadowPixmap.drawPixmap();
        //set shadow color
        shadowPixmap.setColor(shadowColor);
        //replace every pixel of the half-height copy by a shadow-colored pixel,
        //carrying the source alpha over into the shadow
        for (int x = 0; x < halfPixmap.getWidth(); x++) {
            for (int y = 0; y < halfPixmap.getHeight(); y++) {
                int colorInt = halfPixmap.getPixel(x, y);
                color.set(colorInt);
                //get color alpha value
                float alpha = color.a;
                if (alpha == 1) {
                    //fully opaque source pixel -> plain shadow color
                    shadowPixmap.setColor(this.shadowColor);
                    shadowPixmap.fillRectangle(x, y, 1, 1);
                } else if (alpha > 0f) {
                    //semi-transparent source pixel -> shadow color with scaled alpha
                    float newAlpha = alpha * lightIntensity * shadowColor.a;
                    //set new shadow color for this pixel
                    tmpColor.set(shadowColor.r, shadowColor.g, shadowColor.b, newAlpha);
                    shadowPixmap.setColor(tmpColor.r, tmpColor.g, tmpColor.b, newAlpha);
                    //get current position with point (0, 0)
                    int x2 = x;
                    int y2 = y;
                    //draw pixel in shadow color
                    shadowPixmap.fillRectangle(x2, y2, 1, 1);
                } else {
                    //fully transparent source pixel -> transparent shadow pixel
                    shadowPixmap.setColor(new Color(0, 0, 0, 0));
                    shadowPixmap.fillRectangle(x, y, 1, 1);
                }
            }
        }
        //PixmapIO.writePNG(Gdx.files.absolute("./shadowMap.png"), shadowPixmap);
        //get pixmap from entity texture
        /*Pixmap textureMap = texture.getTextureData().consumePixmap();
        //use soft rendering to create map for shadow
        for (int x = 0; x < textureMap.getWidth(); x++) {
            for (int y = 0; y < textureMap.getHeight(); y++) {
                int colorInt = textureMap.getPixel(x, y);
                color.set(colorInt);
                //get alpha color value
                float alpha = color.a;
                if (alpha > 0f) {
                    int x2 = x * 2;
                    int y2 = y;
                    //shadow
                    //draw pixel in shadow color
                    shadowPixmap.fillRectangle(x2, y2, 1, 1);
                }
                System.out.println("alpha value of (" + x + ", " + y + "): " + color.a);
            }
        }
        //generate texture
        this.shadowTexture = new Texture(shadowPixmap);*/
        //upload the pixmap as a GPU texture and wrap it in a region for drawing
        this.shadowTexture = new Texture(shadowPixmap);
        this.shadowTextureRegion = new TextureRegion(this.shadowTexture, 0, 0, this.shadowTexture.getWidth(), this.shadowTexture.getHeight());
        if (shadowAngleDegree > 90 && shadowAngleDegree <= 270) {
            //mirror texture vertically for light coming from the far side
            this.shadowTextureRegion.flip(false, true);
        }
        //dispose texture data pixmap
        textureData.disposePixmap();
        //dispose temporary pixmaps (the data now lives in the GPU texture)
        shadowPixmap.dispose();
        halfPixmap.dispose();
    }
    /** @return the current light angle in degrees */
    public float getShadowAngle () {
        return this.shadowAngleDegree;
    }
    /**
     * Sets the light angle (normalized to [0, 360)) and regenerates the shadow texture.
     *
     * @param angle light angle in degrees
     */
    public void setShadowAngle (float angle) {
        angle = angle % 360;
        this.shadowAngleDegree = angle;
        this.generateShadowTexture();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.util;
import java.io.IOException;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.DTDHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.ext.LexicalHandler;
/**
 * SAX 2 Event Handler which simply delegates all calls to another ContentHandler. Subclasses can
 * do additional processing. This class is the passive counterpart to XMLFilterImpl.
 * <p>
 * The ContentHandler is the only instance that is required. All others (DTDHandler,
 * EntityResolver, LexicalHandler and ErrorHandler) may be ignored.
 */
public class DelegatingContentHandler
        implements EntityResolver, DTDHandler, ContentHandler, LexicalHandler, ErrorHandler {

    private ContentHandler delegate;
    private EntityResolver entityResolver;
    private DTDHandler dtdHandler;
    private LexicalHandler lexicalHandler;
    private ErrorHandler errorHandler;

    /**
     * Main constructor.
     */
    public DelegatingContentHandler() {
        //nop
    }

    /**
     * Convenience constructor. If the given handler also implements any of the EntityResolver,
     * DTDHandler, LexicalHandler or ErrorHandler interfaces, these are set automatically.
     * @param handler the content handler to delegate to
     */
    public DelegatingContentHandler(ContentHandler handler) {
        setDelegateContentHandler(handler);
        if (handler instanceof EntityResolver) {
            setDelegateEntityResolver((EntityResolver)handler);
        }
        if (handler instanceof DTDHandler) {
            setDelegateDTDHandler((DTDHandler)handler);
        }
        if (handler instanceof LexicalHandler) {
            setDelegateLexicalHandler((LexicalHandler)handler);
        }
        if (handler instanceof ErrorHandler) {
            setDelegateErrorHandler((ErrorHandler)handler);
        }
    }

    /**
     * @return the delegate that all ContentHandler events are forwarded to
     */
    public ContentHandler getDelegateContentHandler() {
        return this.delegate;
    }

    /**
     * Sets the delegate ContentHandler that all events are forwarded to.
     * @param handler the delegate instance
     */
    public void setDelegateContentHandler(ContentHandler handler) {
        this.delegate = handler;
    }

    /**
     * Sets the delegate EntityResolver.
     * @param resolver the delegate instance
     */
    public void setDelegateEntityResolver(EntityResolver resolver) {
        this.entityResolver = resolver;
    }

    /**
     * Sets the delegate DTDHandler.
     * @param handler the delegate instance
     */
    public void setDelegateDTDHandler(DTDHandler handler) {
        this.dtdHandler = handler;
    }

    /**
     * Sets the delegate LexicalHandler.
     * @param handler the delegate instance
     */
    public void setDelegateLexicalHandler(LexicalHandler handler) {
        this.lexicalHandler = handler;
    }

    /**
     * Sets the delegate ErrorHandler.
     * @param handler the delegate instance
     */
    public void setDelegateErrorHandler(ErrorHandler handler) {
        this.errorHandler = handler;
    }

    // ==== EntityResolver

    /** {@inheritDoc} */
    @Override
    public InputSource resolveEntity(String publicId, String systemId)
            throws SAXException, IOException {
        if (entityResolver != null) {
            return entityResolver.resolveEntity(publicId, systemId);
        } else {
            //no delegate resolver configured: fall back to default resolution
            return null;
        }
    }

    // ==== DTDHandler

    /** {@inheritDoc} */
    @Override
    public void notationDecl(String name, String publicId, String systemId) throws SAXException {
        if (dtdHandler != null) {
            dtdHandler.notationDecl(name, publicId, systemId);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void unparsedEntityDecl(String name, String publicId, String systemId,
            String notationName) throws SAXException {
        if (dtdHandler != null) {
            dtdHandler.unparsedEntityDecl(name, publicId, systemId, notationName);
        }
    }

    // ==== ContentHandler

    /** {@inheritDoc} */
    @Override
    public void setDocumentLocator(Locator locator) {
        delegate.setDocumentLocator(locator);
    }

    /** {@inheritDoc} */
    @Override
    public void startDocument() throws SAXException {
        delegate.startDocument();
    }

    /** {@inheritDoc} */
    @Override
    public void endDocument() throws SAXException {
        delegate.endDocument();
    }

    /** {@inheritDoc} */
    @Override
    public void startPrefixMapping(String prefix, String uri) throws SAXException {
        delegate.startPrefixMapping(prefix, uri);
    }

    /** {@inheritDoc} */
    @Override
    public void endPrefixMapping(String prefix) throws SAXException {
        delegate.endPrefixMapping(prefix);
    }

    /** {@inheritDoc} */
    @Override
    public void startElement(String uri, String localName, String qName,
            Attributes atts) throws SAXException {
        delegate.startElement(uri, localName, qName, atts);
    }

    /** {@inheritDoc} */
    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        delegate.endElement(uri, localName, qName);
    }

    /** {@inheritDoc} */
    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        delegate.characters(ch, start, length);
    }

    /** {@inheritDoc} */
    @Override
    public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
        delegate.ignorableWhitespace(ch, start, length);
    }

    /** {@inheritDoc} */
    @Override
    public void processingInstruction(String target, String data) throws SAXException {
        delegate.processingInstruction(target, data);
    }

    /** {@inheritDoc} */
    @Override
    public void skippedEntity(String name) throws SAXException {
        delegate.skippedEntity(name);
    }

    // ==== LexicalHandler

    /** {@inheritDoc} */
    @Override
    public void startDTD(String name, String publicId, String systemId) throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.startDTD(name, publicId, systemId);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void endDTD() throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.endDTD();
        }
    }

    /** {@inheritDoc} */
    @Override
    public void startEntity(String name) throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.startEntity(name);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void endEntity(String name) throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.endEntity(name);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void startCDATA() throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.startCDATA();
        }
    }

    /** {@inheritDoc} */
    @Override
    public void endCDATA() throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.endCDATA();
        }
    }

    /** {@inheritDoc} */
    @Override
    public void comment(char[] ch, int start, int length) throws SAXException {
        if (lexicalHandler != null) {
            lexicalHandler.comment(ch, start, length);
        }
    }

    // ==== ErrorHandler

    /** {@inheritDoc} */
    @Override
    public void warning(SAXParseException exception) throws SAXException {
        if (errorHandler != null) {
            errorHandler.warning(exception);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void error(SAXParseException exception) throws SAXException {
        if (errorHandler != null) {
            errorHandler.error(exception);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void fatalError(SAXParseException exception) throws SAXException {
        if (errorHandler != null) {
            errorHandler.fatalError(exception);
        }
    }
}
| |
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.award.home;
import org.kuali.coeus.common.framework.sponsor.Sponsor;
import org.kuali.coeus.sys.framework.model.KcPersistableBusinessObjectBase;
import org.kuali.kra.award.paymentreports.awardreports.AwardReportTermRecipient;
import org.springframework.util.AutoPopulatingList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Business object representing an award template: a reusable bundle of status, payment,
 * sponsor, report-term, comment, contact and sponsor-term data that can be applied to awards.
 */
public class AwardTemplate extends KcPersistableBusinessObjectBase {

    private static final long serialVersionUID = -3038831932003349194L;

    private String statusCode;
    private String description;
    private AwardStatus awardTemplateStatus;
    //sync to templateCode
    private Integer templateCode;
    //sync to primeSponsorCode
    private String primeSponsorCode;
    private String nonCompetingContPrpslDueCode;
    private String competingRenewalPrpslDueCode;
    private String basisOfPaymentCode;
    private String methodOfPaymentCode;
    private Sponsor primeSponsor;
    private AwardBasisOfPayment awardBasisOfPayment;
    private AwardMethodOfPayment awardMethodOfPayment;
    private List<AwardTemplateReportTerm> templateReportTerms;
    private List<AwardTemplateComment> templateComments;
    private List<AwardTemplateTerm> templateTerms;
    private List<AwardTemplateContact> templateContacts;

    /**
     * Default constructor; initializes the collections as auto-populating lists so that
     * form binding can grow them on demand.
     */
    public AwardTemplate() {
        templateContacts = new AutoPopulatingList<AwardTemplateContact>(AwardTemplateContact.class);
        templateComments = new AutoPopulatingList<AwardTemplateComment>(AwardTemplateComment.class);
        templateTerms = new AutoPopulatingList<AwardTemplateTerm>(AwardTemplateTerm.class);
        templateReportTerms = new AutoPopulatingList<AwardTemplateReportTerm>(AwardTemplateReportTerm.class);
    }

    public String getStatusCode() {
        return statusCode;
    }

    public void setStatusCode(String statusCode) {
        this.statusCode = statusCode;
    }

    public AwardStatus getAwardTemplateStatus() {
        return awardTemplateStatus;
    }

    public void setAwardTemplateStatus(AwardStatus awardTemplateStatus) {
        this.awardTemplateStatus = awardTemplateStatus;
    }

    public List<AwardTemplateReportTerm> getTemplateReportTerms() {
        return templateReportTerms;
    }

    public void setTemplateReportTerms(List<AwardTemplateReportTerm> templateReportTerms) {
        this.templateReportTerms = templateReportTerms;
    }

    public List<AwardTemplateContact> getTemplateContacts() {
        return templateContacts;
    }

    public void setTemplateContacts(List<AwardTemplateContact> templateContacts) {
        this.templateContacts = templateContacts;
    }

    /**
     * Gets the templateComments attribute.
     * @return Returns the templateComments.
     */
    public List<AwardTemplateComment> getTemplateComments() {
        return templateComments;
    }

    /**
     * Sets the templateComments attribute value.
     * @param templateComments The templateComments to set.
     */
    public void setTemplateComments(List<AwardTemplateComment> templateComments) {
        this.templateComments = templateComments;
    }

    /**
     * Gets the templateTerms attribute, sorted by sponsor term type code.
     * <p>
     * Note: sorting happens in place on every call; entries that are null, new collection
     * records, or missing their sponsor term are treated as equal and keep their relative order.
     * @return Returns the templateTerms.
     */
    public List<AwardTemplateTerm> getTemplateTerms() {
        if (templateTerms != null) {
            // Typed comparator replaces the former raw Comparator (removes unchecked warnings);
            // the redundant post-return null checks of the original were collapsed into the guards.
            Collections.sort(this.templateTerms, new Comparator<AwardTemplateTerm>() {
                public int compare(AwardTemplateTerm first, AwardTemplateTerm second) {
                    if (first == null || second == null) {
                        return 0;
                    }
                    if (first.isNewCollectionRecord() || second.isNewCollectionRecord()) {
                        return 0;
                    }
                    if (first.getSponsorTerm() == null || second.getSponsorTerm() == null) {
                        return 0;
                    }
                    return first.getSponsorTerm().getSponsorTermTypeCode()
                            .compareTo(second.getSponsorTerm().getSponsorTermTypeCode());
                }
            });
        }
        return templateTerms;
    }

    /**
     * Sets the templateTerms attribute value.
     * @param templateTerms The templateTerms to set.
     */
    public void setTemplateTerms(List<AwardTemplateTerm> templateTerms) {
        this.templateTerms = templateTerms;
    }

    /**
     * Gets the description attribute.
     * @return Returns the description.
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the description attribute value.
     * @param description The description to set.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /** Alias for {@link #getTemplateComments()} used by award-facing code. */
    public List<AwardTemplateComment> getAwardComments() {
        return getTemplateComments();
    }

    /** Alias for {@link #getTemplateReportTerms()} used by award-facing code. */
    public List<AwardTemplateReportTerm> getAwardReportTermItems() {
        return getTemplateReportTerms();
    }

    /** Alias for {@link #getTemplateTerms()} used by award-facing code. */
    public List<AwardTemplateTerm> getAwardSponsorTerms() {
        return getTemplateTerms();
    }

    /** Alias for {@link #getTemplateContacts()} used by award-facing code. */
    public List<AwardTemplateContact> getSponsorContacts() {
        return getTemplateContacts();
    }

    /**
     * Gets the templateCode attribute.
     * @return Returns the templateCode.
     */
    public Integer getTemplateCode() {
        return templateCode;
    }

    /**
     * Sets the templateCode attribute value.
     * @param templateCode The templateCode to set.
     */
    public void setTemplateCode(Integer templateCode) {
        this.templateCode = templateCode;
    }

    /**
     * Gets the primeSponsorCode attribute.
     * @return Returns the primeSponsorCode.
     */
    public String getPrimeSponsorCode() {
        return primeSponsorCode;
    }

    /**
     * Sets the primeSponsorCode attribute value.
     * @param primeSponsorCode The primeSponsorCode to set.
     */
    public void setPrimeSponsorCode(String primeSponsorCode) {
        this.primeSponsorCode = primeSponsorCode;
    }

    /**
     * Gets the nonCompetingContPrpslDueCode attribute.
     * @return Returns the nonCompetingContPrpslDueCode.
     */
    public String getNonCompetingContPrpslDueCode() {
        return nonCompetingContPrpslDueCode;
    }

    /**
     * Sets the nonCompetingContPrpslDueCode attribute value.
     * @param nonCompetingContPrpslDueCode The nonCompetingContPrpslDueCode to set.
     */
    public void setNonCompetingContPrpslDueCode(String nonCompetingContPrpslDueCode) {
        this.nonCompetingContPrpslDueCode = nonCompetingContPrpslDueCode;
    }

    /**
     * Gets the competingRenewalPrpslDueCode attribute.
     * @return Returns the competingRenewalPrpslDueCode.
     */
    public String getCompetingRenewalPrpslDueCode() {
        return competingRenewalPrpslDueCode;
    }

    /**
     * Sets the competingRenewalPrpslDueCode attribute value.
     * @param competingRenewalPrpslDueCode The competingRenewalPrpslDueCode to set.
     */
    public void setCompetingRenewalPrpslDueCode(String competingRenewalPrpslDueCode) {
        this.competingRenewalPrpslDueCode = competingRenewalPrpslDueCode;
    }

    /**
     * Gets the basisOfPaymentCode attribute.
     * @return Returns the basisOfPaymentCode.
     */
    public String getBasisOfPaymentCode() {
        return basisOfPaymentCode;
    }

    /**
     * Sets the basisOfPaymentCode attribute value.
     * @param basisOfPaymentCode The basisOfPaymentCode to set.
     */
    public void setBasisOfPaymentCode(String basisOfPaymentCode) {
        this.basisOfPaymentCode = basisOfPaymentCode;
    }

    /**
     * Gets the methodOfPaymentCode attribute.
     * @return Returns the methodOfPaymentCode.
     */
    public String getMethodOfPaymentCode() {
        return methodOfPaymentCode;
    }

    /**
     * Sets the methodOfPaymentCode attribute value.
     * @param methodOfPaymentCode The methodOfPaymentCode to set.
     */
    public void setMethodOfPaymentCode(String methodOfPaymentCode) {
        this.methodOfPaymentCode = methodOfPaymentCode;
    }

    /**
     * Gets the awardBasisOfPayment attribute.
     * @return Returns the awardBasisOfPayment.
     */
    public AwardBasisOfPayment getAwardBasisOfPayment() {
        return awardBasisOfPayment;
    }

    /**
     * Sets the awardBasisOfPayment attribute value.
     * @param awardBasisOfPayment The awardBasisOfPayment to set.
     */
    public void setAwardBasisOfPayment(AwardBasisOfPayment awardBasisOfPayment) {
        this.awardBasisOfPayment = awardBasisOfPayment;
    }

    /**
     * Gets the awardMethodOfPayment attribute.
     * @return Returns the awardMethodOfPayment.
     */
    public AwardMethodOfPayment getAwardMethodOfPayment() {
        return awardMethodOfPayment;
    }

    /**
     * Sets the awardMethodOfPayment attribute value.
     * @param awardMethodOfPayment The awardMethodOfPayment to set.
     */
    public void setAwardMethodOfPayment(AwardMethodOfPayment awardMethodOfPayment) {
        this.awardMethodOfPayment = awardMethodOfPayment;
    }

    /**
     * Gets the primeSponsor attribute.
     * @return Returns the primeSponsor.
     */
    public Sponsor getPrimeSponsor() {
        return primeSponsor;
    }

    /**
     * Sets the primeSponsor attribute value.
     * @param primeSponsor The primeSponsor to set.
     */
    public void setPrimeSponsor(Sponsor primeSponsor) {
        this.primeSponsor = primeSponsor;
    }

    /**
     * Collects all child collections (including report-term recipients) so the persistence
     * layer can detect and delete removed elements.
     */
    @SuppressWarnings("unchecked")
    @Override
    public List buildListOfDeletionAwareLists() {
        List managedLists = super.buildListOfDeletionAwareLists();
        managedLists.add(getTemplateTerms());
        managedLists.add(getAwardComments());
        managedLists.add(getTemplateReportTerms());
        managedLists.add(getTemplateContacts());
        //recipients are nested under report terms, so flatten them into one list
        ArrayList<AwardReportTermRecipient> rcpts = new ArrayList<AwardReportTermRecipient>();
        for (AwardTemplateReportTerm rt : getTemplateReportTerms()) {
            rcpts.addAll(rt.getAwardReportTermRecipients());
        }
        managedLists.add(rcpts);
        return managedLists;
    }

    /**
     * Clears the primary keys of this template and all of its children after a copy,
     * so the copy is persisted as a brand-new template.
     */
    public void processAfterCopy() {
        this.setTemplateCode(null);
        for (AwardTemplateReportTerm tempReportTerm : getTemplateReportTerms()) {
            tempReportTerm.setTemplateReportTermId(null);
        }
        for (AwardTemplateComment tempComment : getTemplateComments()) {
            tempComment.setTemplateCommentsId(null);
        }
        for (AwardTemplateContact tempContact : getTemplateContacts()) {
            tempContact.setTemplateContactId(null);
        }
        for (AwardTemplateTerm tempTerm : getTemplateTerms()) {
            //stray empty statement removed here
            tempTerm.setAwardTemplateTermId(null);
        }
    }
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package dunit;
import java.io.File;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
import java.net.UnknownHostException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import junit.framework.TestCase;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.admin.internal.AdminDistributedSystemImpl;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.DiskStoreFactory;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
import com.gemstone.gemfire.cache.hdfs.internal.hoplog.HoplogConfig;
import com.gemstone.gemfire.cache.query.QueryTestUtils;
import com.gemstone.gemfire.cache.query.internal.QueryObserverHolder;
import com.gemstone.gemfire.cache30.GlobalLockingDUnitTest;
import com.gemstone.gemfire.cache30.MultiVMRegionTestCase;
import com.gemstone.gemfire.cache30.RegionTestCase;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.distributed.internal.DistributionConfigImpl;
import com.gemstone.gemfire.distributed.internal.DistributionMessageObserver;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem.CreationStackGenerator;
import com.gemstone.gemfire.distributed.internal.membership.jgroup.JGroupMembershipManager;
import com.gemstone.gemfire.distributed.internal.membership.jgroup.MembershipManagerHelper;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.InternalDataSerializer;
import com.gemstone.gemfire.internal.InternalInstantiator;
import com.gemstone.gemfire.internal.OSProcess;
import com.gemstone.gemfire.internal.SocketCreator;
import com.gemstone.gemfire.internal.admin.ClientStatsManager;
import com.gemstone.gemfire.internal.cache.DiskStoreObserver;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.InitialImageOperation;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.tier.InternalBridgeMembership;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID;
import com.gemstone.gemfire.internal.cache.tier.sockets.DataSerializerPropogationDUnitTest;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.internal.logging.LocalLogWriter;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.LogWriterFactory;
import com.gemstone.gemfire.internal.logging.LogWriterImpl;
import com.gemstone.gemfire.internal.logging.ManagerLogWriter;
import com.gemstone.gemfire.internal.logging.log4j.LogWriterLogger;
import com.gemstone.gemfire.management.internal.cli.LogWrapper;
import com.gemstone.org.jgroups.Event;
import com.gemstone.org.jgroups.JChannel;
import com.gemstone.org.jgroups.stack.IpAddress;
import com.gemstone.org.jgroups.stack.Protocol;
import com.gemstone.org.jgroups.util.GemFireTracer;
import dunit.standalone.DUnitLauncher;
/**
* This class is the superclass of all distributed unit tests.
*
* tests/hydra/JUnitTestTask is the main DUnit driver. It supports two
* additional public static methods if they are defined in the test case:
*
* public static void caseSetUp() -- comparable to JUnit's BeforeClass annotation
*
* public static void caseTearDown() -- comparable to JUnit's AfterClass annotation
*
* @author David Whitlock
*/
@SuppressWarnings("serial")
public abstract class DistributedTestCase extends TestCase implements java.io.Serializable {
private static final Logger logger = LogService.getLogger();
private static final LogWriterLogger oldLogger = LogWriterLogger.create(logger);
/**
 * Installs a test-only creation-stack generator that records the full
 * distributed-system configuration in a Throwable, so leaked connections can
 * be traced back to the test that created them.
 * <p>
 * Must be paired with {@link #tearDownCreationStackGenerator()} (called from
 * {@link #tearDown()}).
 */
private static void setUpCreationStackGenerator() {
  // the following is moved from InternalDistributedSystem to fix #51058
  InternalDistributedSystem.TEST_CREATION_STACK_GENERATOR.set(
      new CreationStackGenerator() {
        @Override
        public Throwable generateCreationStack(final DistributionConfig config) {
          final StringBuilder sb = new StringBuilder();
          final String[] validAttributeNames = config.getAttributeNames();
          for (int i = 0; i < validAttributeNames.length; i++) {
            final String attName = validAttributeNames[i];
            final Object actualAtt = config.getAttributeObject(attName);
            // FIX: the original called actualAtt.toString() unconditionally and
            // would NPE on an unset (null) attribute; render null explicitly.
            final String actualAttStr;
            if (actualAtt == null) {
              actualAttStr = "null";
            } else if (actualAtt.getClass().isArray()) {
              actualAttStr = InternalDistributedSystem.arrayToString(actualAtt);
            } else {
              actualAttStr = actualAtt.toString();
            }
            sb.append(" ");
            sb.append(attName);
            sb.append("=\"");
            sb.append(actualAttStr);
            sb.append("\"");
            sb.append("\n");
          }
          return new Throwable("Creating distributed system with the following configuration:\n" + sb.toString());
        }
      });
}
/**
 * Restores the product's default creation-stack generator installed over by
 * {@link #setUpCreationStackGenerator()}; invoked from {@link #tearDown()}.
 */
private static void tearDownCreationStackGenerator() {
  InternalDistributedSystem.TEST_CREATION_STACK_GENERATOR.set(InternalDistributedSystem.DEFAULT_CREATION_STACK_GENERATOR);
}
/** This VM's connection to the distributed system */
public static InternalDistributedSystem system;
// Test class that created the current {@code system}; getSystem(Properties)
// uses it to decide whether a new test class requires a fresh connection.
private static Class lastSystemCreatedInTest;
// Properties the current {@code system} was connected with, kept for
// change detection in getSystem(Properties).
private static Properties lastSystemProperties;
// Name of the currently running test method; set in setUp()/perVMSetUp(),
// cleared in disconnectFromDS(). Volatile: read from multiple test threads.
public static volatile String testName;
// ExpectedException markers registered by tests; drained in cleanupThisVM().
private static ConcurrentLinkedQueue<ExpectedException> expectedExceptions = new ConcurrentLinkedQueue<ExpectedException>();
/** For formatting timing info */
private static final DecimalFormat format = new DecimalFormat("###.###");
// NOTE(review): not referenced in this class — presumably toggled by
// subclasses/tests; confirm before removing.
public static boolean reconnect = false;
/** If true, each test method gets its own log/stat files (-DdunitLogPerTest). */
public static final boolean logPerTest = Boolean.getBoolean("dunitLogPerTest");
/////////////////////// Utility Methods ///////////////////////
/**
 * Blocks the given dunit VM until a Java debugger attaches to it, logging
 * {@code msg} so the operator can identify which VM is waiting.
 */
public void attachDebugger(VM vm, final String msg) {
  vm.invoke(new SerializableRunnable("Attach Debugger") {
    public void run() {
      com.gemstone.gemfire.internal.util.DebuggerSupport.
      waitForJavaDebugger(getSystem().getLogWriter().convertToLogWriterI18n(), msg);
    }
  });
}
/**
 * Runs the given <code>SerializableRunnable</code> in every VM that DUnit
 * knows about, host by host.
 *
 * @see VM#invoke(Runnable)
 */
public static void invokeInEveryVM(SerializableRunnable work) {
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    final Host currentHost = Host.getHost(hostIdx);
    final int vmCount = currentHost.getVMCount();
    for (int vmIdx = 0; vmIdx < vmCount; vmIdx++) {
      currentHost.getVM(vmIdx).invoke(work);
    }
  }
}
/** Runs the given task in the dunit locator VM. */
public static void invokeInLocator(SerializableRunnable work) {
  Host.getLocator().invoke(work);
}
/**
 * Invokes a <code>SerializableCallable</code> in every VM that DUnit knows
 * about and collects each VM's result.
 *
 * @return a Map of results, where the key is the VM and the value is the result
 * @see VM#invoke(Callable)
 */
protected static Map invokeInEveryVM(SerializableCallable work) {
  final HashMap results = new HashMap();
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    final Host currentHost = Host.getHost(hostIdx);
    final int vmCount = currentHost.getVMCount();
    for (int vmIdx = 0; vmIdx < vmCount; vmIdx++) {
      final VM currentVm = currentHost.getVM(vmIdx);
      results.put(currentVm, currentVm.invoke(work));
    }
  }
  return results;
}
/**
 * Invokes a zero-argument static method in every remote VM that DUnit knows
 * about.
 *
 * @see VM#invoke(Class, String)
 */
protected static void invokeInEveryVM(Class c, String method) {
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    final Host currentHost = Host.getHost(hostIdx);
    final int vmCount = currentHost.getVMCount();
    for (int vmIdx = 0; vmIdx < vmCount; vmIdx++) {
      currentHost.getVM(vmIdx).invoke(c, method);
    }
  }
}
/**
 * Invokes a static method with the given arguments in every remote VM that
 * DUnit knows about.
 *
 * @see VM#invoke(Class, String)
 */
protected static void invokeInEveryVM(Class c, String method, Object[] methodArgs) {
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    final Host currentHost = Host.getHost(hostIdx);
    final int vmCount = currentHost.getVMCount();
    for (int vmIdx = 0; vmIdx < vmCount; vmIdx++) {
      currentHost.getVM(vmIdx).invoke(c, method, methodArgs);
    }
  }
}
/**
* The number of milliseconds to try repeating validation code in the
* event that AssertionFailedError is thrown. For ACK scopes, no
* repeat should be necessary.
*/
// Default: no retries (ACK scopes should not need them). Subclasses testing
// no-ack/async scopes override this with a positive timeout.
protected long getRepeatTimeoutMs() {
  return 0;
}
/**
 * Runs {@code task} in {@code vm}, retrying on assertion failure for up to
 * {@link #getRepeatTimeoutMs()} milliseconds.
 */
protected void invokeRepeatingIfNecessary(VM vm, RepeatableRunnable task) {
  vm.invokeRepeatingIfNecessary(task, getRepeatTimeoutMs());
}
/**
* Invokes a <code>SerializableRunnable</code> in every VM that
* DUnit knows about. If work.run() throws an assertion failure,
* its execution is repeated, until no assertion failure occurs or
* repeatTimeout milliseconds have passed.
*
* @see VM#invoke(Runnable)
*/
protected void invokeInEveryVMRepeatingIfNecessary(RepeatableRunnable work) {
  // hoist the timeout: it is the same for every VM
  final long repeatTimeoutMs = getRepeatTimeoutMs();
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    final Host currentHost = Host.getHost(hostIdx);
    final int vmCount = currentHost.getVMCount();
    for (int vmIdx = 0; vmIdx < vmCount; vmIdx++) {
      currentHost.getVM(vmIdx).invokeRepeatingIfNecessary(work, repeatTimeoutMs);
    }
  }
}
/** Return the total number of VMs on all hosts */
protected static int getVMCount() {
  int total = 0;
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    total += Host.getHost(hostIdx).getVMCount();
  }
  return total;
}
/** print a stack dump for this vm
@author bruce
@since 5.0
*/
public static void dumpStack() {
  // pid 0 == this process; false == print to stdout rather than a file
  com.gemstone.gemfire.internal.OSProcess.printStacks(0, false);
}
/** print a stack dump for the given vm
@author bruce
@since 5.0
*/
public static void dumpStack(VM vm) {
  // delegate to the static dumpStack() inside the target VM
  vm.invoke(dunit.DistributedTestCase.class, "dumpStack");
}
/** print stack dumps for all vms on the given host
@author bruce
@since 5.0
*/
public static void dumpStack(Host host) {
  final int vmCount = host.getVMCount();
  for (int vmIdx = 0; vmIdx < vmCount; vmIdx++) {
    host.getVM(vmIdx).invoke(dunit.DistributedTestCase.class, "dumpStack");
  }
}
/** print stack dumps for all vms
@author bruce
@since 5.0
*/
public static void dumpAllStacks() {
  final int hostCount = Host.getHostCount();
  for (int hostIdx = 0; hostIdx < hostCount; hostIdx++) {
    dumpStack(Host.getHost(hostIdx));
  }
}
/**
 * Builds a human-readable throughput summary: the raw totals, the rate of
 * operations per time unit, and the inverse (time units per operation).
 *
 * @param operations number of operations performed
 * @param operationUnit label for the operations (e.g. "puts")
 * @param beginTime start timestamp
 * @param endTime end timestamp (same clock and unit as {@code beginTime})
 * @param timeUnit label for the time values (e.g. "ms")
 * @return the formatted multi-line summary
 */
public static String noteTiming(long operations, String operationUnit,
                                long beginTime, long endTime,
                                String timeUnit)
{
  long delta = endTime - beginTime;
  // Local, single-threaded use: StringBuilder replaces the needlessly
  // synchronized StringBuffer.
  StringBuilder sb = new StringBuilder();
  sb.append(" Performed ");
  sb.append(operations);
  sb.append(" ");
  sb.append(operationUnit);
  sb.append(" in ");
  sb.append(delta);
  sb.append(" ");
  sb.append(timeUnit);
  sb.append("\n");
  // NOTE(review): delta == 0 yields Infinity here — presumably acceptable for
  // a test-reporting helper; confirm if exact output matters.
  double ratio = ((double) operations) / ((double) delta);
  sb.append(" ");
  sb.append(format.format(ratio));
  sb.append(" ");
  sb.append(operationUnit);
  sb.append(" per ");
  sb.append(timeUnit);
  sb.append("\n");
  ratio = ((double) delta) / ((double) operations);
  sb.append(" ");
  sb.append(format.format(ratio));
  sb.append(" ");
  sb.append(timeUnit);
  sb.append(" per ");
  sb.append(operationUnit);
  sb.append("\n");
  return sb.toString();
}
/**
* Creates a new LogWriter and adds it to the config properties. The config
* can then be used to connect to DistributedSystem, thus providing early
* access to the LogWriter before connecting. This call does not connect
* to the DistributedSystem. It simply creates and returns the LogWriter
* that will eventually be used by the DistributedSystem that connects using
* config.
*
* @param config the DistributedSystem config properties to add LogWriter to
* @return early access to the DistributedSystem LogWriter
*/
protected static LogWriter createLogWriter(Properties config) { // TODO:LOG:CONVERT: this is being used for ExpectedExceptions
  // renamed local: "writer" avoids shadowing the class's static "logger" field
  Properties nonDefault = config;
  if (nonDefault == null) {
    nonDefault = new Properties();
  }
  addHydraProperties(nonDefault);
  DistributionConfig dc = new DistributionConfigImpl(nonDefault);
  LogWriter writer = LogWriterFactory.createLogWriterLogger(
      false/*isLoner*/, false/*isSecurityLog*/, dc,
      false);
  // if config was non-null, then these will be added to it...
  nonDefault.put(DistributionConfig.LOG_WRITER_NAME, writer);
  return writer;
}
/**
* Fetches the GemFireDescription for this test and adds its
* DistributedSystem properties to the provided props parameter.
*
* @param config the properties to add hydra's test properties to
*/
protected static void addHydraProperties(Properties config) {
  // copy hydra's DS properties into config without overwriting entries the
  // caller already set
  final Properties hydraProps = DUnitEnv.get().getDistributedSystemProperties();
  for (Map.Entry entry : hydraProps.entrySet()) {
    final String key = (String) entry.getKey();
    if (config.getProperty(key) == null) {
      config.setProperty(key, (String) entry.getValue());
    }
  }
}
//////////////////////// Constructors ////////////////////////
/**
* Creates a new <code>DistributedTestCase</code> test with the
* given name.
*/
public DistributedTestCase(String name) {
  super(name);
  // make sure the standalone dunit environment (locator + worker VMs) is up
  DUnitLauncher.launchIfNeeded();
}
/////////////////////// Instance Methods ///////////////////////
/**
 * Returns the outermost declaring class of this test instance (walks out of
 * any nested/inner test classes).
 */
protected Class getTestClass() {
  Class outermost = getClass();
  for (Class enclosing = outermost.getDeclaringClass();
       enclosing != null;
       enclosing = enclosing.getDeclaringClass()) {
    outermost = enclosing;
  }
  return outermost;
}
/**
* This finds the log level configured for the test run. It should be used
* when creating a new distributed system if you want to specify a log level.
* @return the dunit log-level setting
*/
public static String getDUnitLogLevel() {
  Properties p = DUnitEnv.get().getDistributedSystemProperties();
  String result = p.getProperty(DistributionConfig.LOG_LEVEL_NAME);
  if (result == null) {
    // hydra didn't configure a level: fall back to the product default
    result = ManagerLogWriter.levelToString(DistributionConfig.DEFAULT_LOG_LEVEL);
  }
  return result;
}
/**
 * Merges hydra's base DistributedSystem properties with the test-supplied
 * overrides in {@code props} (test values win) and returns the combined set.
 * Auto-reconnect is disabled unless the base properties already configure it,
 * because dunit tests do not expect it to be on by default.
 *
 * @param props test-specific properties layered on top of the hydra defaults
 * @return the combined connection properties
 */
public final static Properties getAllDistributedSystemProperties(Properties props) {
  Properties p = DUnitEnv.get().getDistributedSystemProperties();
  // our tests do not expect auto-reconnect to be on by default.
  // BUG FIX: Properties.contains() (inherited from Hashtable) tests VALUES,
  // not keys, so the original check could never detect an explicit setting;
  // containsKey() is the correct membership test for the property name.
  if (!p.containsKey(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME)) {
    p.put(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME, "true");
  }
  for (Iterator iter = props.entrySet().iterator();
      iter.hasNext(); ) {
    Map.Entry entry = (Map.Entry) iter.next();
    String key = (String) entry.getKey();
    Object value = entry.getValue();
    p.put(key, value);
  }
  return p;
}
/**
 * Records {@code ds} as this VM's current distributed system, together with
 * the properties and test class that created it, so getSystem(Properties)
 * can later decide whether a reconnect is required.
 */
public void setSystem(Properties props, DistributedSystem ds) {
  system = (InternalDistributedSystem)ds;
  lastSystemProperties = props;
  lastSystemCreatedInTest = getTestClass();
}
/**
* Returns this VM's connection to the distributed system. If
* necessary, the connection will be lazily created using the given
* <code>Properties</code>. Note that this method uses hydra's
* configuration to determine the location of log files, etc.
* Note: "final" was removed so that WANTestBase can override this method.
* This was part of the xd offheap merge.
*
* @see hydra.DistributedConnectionMgr#connect
* @since 3.0
*/
public /*final*/ InternalDistributedSystem getSystem(Properties props) {
  // Setting the default disk store name is now done in setUp
  if (system == null) {
    // adopt any DS another component already created in this VM
    system = InternalDistributedSystem.getAnyInstance();
  }
  if (system == null || !system.isConnected()) {
    // Figure out our distributed system properties
    Properties p = getAllDistributedSystemProperties(props);
    lastSystemCreatedInTest = getTestClass();
    if (logPerTest) {
      // redirect log and stat-archive files to per-test-method names
      String testMethod = getTestName();
      String testName = lastSystemCreatedInTest.getName() + '-' + testMethod;
      String oldLogFile = p.getProperty(DistributionConfig.LOG_FILE_NAME);
      p.put(DistributionConfig.LOG_FILE_NAME,
          oldLogFile.replace("system.log", testName+".log"));
      String oldStatFile = p.getProperty(DistributionConfig.STATISTIC_ARCHIVE_FILE_NAME);
      p.put(DistributionConfig.STATISTIC_ARCHIVE_FILE_NAME,
          oldStatFile.replace("statArchive.gfs", testName+".gfs"));
    }
    system = (InternalDistributedSystem)DistributedSystem.connect(p);
    lastSystemProperties = p;
  } else {
    // already connected: decide whether the existing DS still satisfies props
    boolean needNewSystem = false;
    if(!getTestClass().equals(lastSystemCreatedInTest)) {
      // a different test class now owns the DS: require an exact property match
      Properties newProps = getAllDistributedSystemProperties(props);
      needNewSystem = !newProps.equals(lastSystemProperties);
      if(needNewSystem) {
        getLogWriter().info(
            "Test class has changed and the new DS properties are not an exact match. "
                + "Forcing DS disconnect. Old props = "
                + lastSystemProperties + "new props=" + newProps);
      }
    } else {
      // same test class: reconnect only if a requested property differs from
      // the live system's value
      Properties activeProps = system.getProperties();
      for (Iterator iter = props.entrySet().iterator();
          iter.hasNext(); ) {
        Map.Entry entry = (Map.Entry) iter.next();
        String key = (String) entry.getKey();
        String value = (String) entry.getValue();
        if (!value.equals(activeProps.getProperty(key))) {
          needNewSystem = true;
          getLogWriter().info("Forcing DS disconnect. For property " + key
              + " old value = " + activeProps.getProperty(key)
              + " new value = " + value);
          break;
        }
      }
    }
    if(needNewSystem) {
      // the current system does not meet our needs to disconnect and
      // call recursively to get a new system.
      getLogWriter().info("Disconnecting from current DS in order to make a new one");
      disconnectFromDS();
      getSystem(props);
    }
  }
  return system;
}
/**
* Crash the cache in the given VM in such a way that it immediately stops communicating with
* peers. This forces the VM's membership manager to throw a ForcedDisconnectException by
* forcibly terminating the JGroups protocol stack with a fake EXIT event.<p>
*
* NOTE: if you use this method be sure that you clean up the VM before the end of your
* test with disconnectFromDS() or disconnectAllFromDS().
*/
public boolean crashDistributedSystem(VM vm) {
  // run the single-argument crashDistributedSystem(...) inside the target VM
  return (Boolean)vm.invoke(new SerializableCallable("crash distributed system") {
    public Object call() throws Exception {
      DistributedSystem msys = InternalDistributedSystem.getAnyInstance();
      crashDistributedSystem(msys);
      return true;
    }
  });
}
/**
* Crash the cache in the given VM in such a way that it immediately stops communicating with
* peers. This forces the VM's membership manager to throw a ForcedDisconnectException by
* forcibly terminating the JGroups protocol stack with a fake EXIT event.<p>
*
* NOTE: if you use this method be sure that you clean up the VM before the end of your
* test with disconnectFromDS() or disconnectAllFromDS().
*/
public void crashDistributedSystem(final DistributedSystem msys) {
  // suppress the noisy forced-disconnect log output while we crash the system
  MembershipManagerHelper.inhibitForcedDisconnectLogging(true);
  // stop responding to peers so they see this member as dead
  MembershipManagerHelper.playDead(msys);
  JChannel c = MembershipManagerHelper.getJChannel(msys);
  // kill the UDP transport and inject a fake EXIT event so the membership
  // manager throws a ForcedDisconnectException
  Protocol udp = c.getProtocolStack().findProtocol("UDP");
  udp.stop();
  udp.passUp(new Event(Event.EXIT, new RuntimeException("killing member's ds")));
  try {
    MembershipManagerHelper.getJChannel(msys).waitForClose();
  }
  catch (InterruptedException ie) {
    Thread.currentThread().interrupt();
    // attempt rest of work with interrupt bit set
  }
  MembershipManagerHelper.inhibitForcedDisconnectLogging(false);
  // wait (up to 10s) for the DS to finish disconnecting before returning
  WaitCriterion wc = new WaitCriterion() {
    public boolean done() {
      return !msys.isConnected();
    }
    public String description() {
      return "waiting for distributed system to finish disconnecting: " + msys;
    }
  };
  //    try {
  waitForCriterion(wc, 10000, 1000, true);
  //    } finally {
  //      dumpMyThreads(getLogWriter());
  //    }
}
/** Builds this VM's unique default disk-store name from the "vmid" system property, test class and test method. */
private String getDefaultDiskStoreName() {
  final String vmid = System.getProperty("vmid");
  final StringBuilder name = new StringBuilder("DiskStore-");
  name.append(vmid).append("-");
  name.append(getTestClass().getCanonicalName()).append(".").append(getTestName());
  return name.toString();
}
/**
* Returns this VM's connection to the distributed system. If
* necessary, the connection will be lazily created using the
* <code>Properties</code> returned by {@link
* #getDistributedSystemProperties}.
*
* @see #getSystem(Properties)
*
* @since 3.0
*/
public final InternalDistributedSystem getSystem() {
  // connect (or reuse the existing connection) with this test's default props
  return getSystem(this.getDistributedSystemProperties());
}
/**
* Returns a loner distributed system that isn't connected to other
* vms
*
* @since 6.5
*/
public final InternalDistributedSystem getLonerSystem() {
  Properties props = this.getDistributedSystemProperties();
  // mcast-port=0 plus empty locators makes the DS a standalone "loner"
  props.put(DistributionConfig.MCAST_PORT_NAME, "0");
  props.put(DistributionConfig.LOCATORS_NAME, "");
  return getSystem(props);
}
/**
* Returns a loner distributed system in combination with enforceUniqueHost
* and redundancyZone properties.
* Added specifically to test scenario of defect #47181.
*/
public final InternalDistributedSystem getLonerSystemWithEnforceUniqueHost() {
  Properties props = this.getDistributedSystemProperties();
  // loner config (no mcast, no locators) ...
  props.put(DistributionConfig.MCAST_PORT_NAME, "0");
  props.put(DistributionConfig.LOCATORS_NAME, "");
  // ... plus unique-host/redundancy-zone settings for the #47181 scenario
  props.put(DistributionConfig.ENFORCE_UNIQUE_HOST_NAME, "true");
  props.put(DistributionConfig.REDUNDANCY_ZONE_NAME, "zone1");
  return getSystem(props);
}
/**
* Returns an mcast distributed system that is connected to other
* vms using a random mcast port.
*/
public final InternalDistributedSystem getMcastSystem() {
  Properties props = this.getDistributedSystemProperties();
  // pick a free multicast port so concurrent test runs don't collide
  int port = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
  props.put(DistributionConfig.MCAST_PORT_NAME, ""+port);
  // TTL 0 keeps multicast traffic on the local machine
  props.put(DistributionConfig.MCAST_TTL_NAME, "0");
  props.put(DistributionConfig.LOCATORS_NAME, "");
  return getSystem(props);
}
/**
* Returns an mcast distributed system that is connected to other
* vms using the given mcast port.
*/
public final InternalDistributedSystem getMcastSystem(int jgroupsPort) {
  Properties props = this.getDistributedSystemProperties();
  props.put(DistributionConfig.MCAST_PORT_NAME, ""+jgroupsPort);
  // TTL 0 keeps multicast traffic on the local machine
  props.put(DistributionConfig.MCAST_TTL_NAME, "0");
  props.put(DistributionConfig.LOCATORS_NAME, "");
  return getSystem(props);
}
/**
* Returns whether or this VM is connected to a {@link
* DistributedSystem}.
*/
public final boolean isConnectedToDS() {
  // true only if a DS was created AND it is still connected
  return system != null && system.isConnected();
}
/**
* Returns a <code>Properties</code> object used to configure a
* connection to a {@link
* com.gemstone.gemfire.distributed.DistributedSystem}.
* Unless overridden, this method will return an empty
* <code>Properties</code> object.
*
* @since 3.0
*/
public Properties getDistributedSystemProperties() {
  // default: no extra properties; subclasses override to customize the DS
  return new Properties();
}
/**
* Sets up the test (noop).
*/
@Override
public void setUp() throws Exception {
  setUpCreationStackGenerator();
  testName = getName();
  System.setProperty(HoplogConfig.ALLOW_LOCAL_HDFS_PROP, "true");
  if (testName != null) {
    // give this VM and every dunit worker VM a unique default disk store
    // name so concurrently running tests do not collide on disk files
    GemFireCacheImpl.setDefaultDiskStoreName(getDefaultDiskStoreName());
    String baseDefaultDiskStoreName = getTestClass().getCanonicalName() + "." + getTestName();
    for (int h = 0; h < Host.getHostCount(); h++) {
      Host host = Host.getHost(h);
      for (int v = 0; v < host.getVMCount(); v++) {
        VM vm = host.getVM(v);
        String vmDefaultDiskStoreName = "DiskStore-" + h + "-" + v + "-" + baseDefaultDiskStoreName;
        vm.invoke(DistributedTestCase.class, "perVMSetUp", new Object[] {testName, vmDefaultDiskStoreName});
      }
    }
  }
  System.out.println("\n\n[setup] START TEST " + getClass().getSimpleName()+"."+testName+"\n\n");
}
/** Per-worker-VM half of {@link #setUp()}: records the test name and disk-store name in that VM. */
public static void perVMSetUp(String name, String defaultDiskStoreName) {
  setTestName(name);
  GemFireCacheImpl.setDefaultDiskStoreName(defaultDiskStoreName);
  System.setProperty(HoplogConfig.ALLOW_LOCAL_HDFS_PROP, "true");
}
/** Records the name of the currently running test method in this VM. */
public static void setTestName(String name) {
  testName = name;
}
/** @return the currently running test method's name, or null outside a test */
public static String getTestName() {
  return testName;
}
/**
* For logPerTest to work, we have to disconnect from the DS, but all
* subclasses do not call super.tearDown(). To prevent this scenario
* this method has been declared final. Subclasses must now override
* {@link #tearDown2()} instead.
* @throws Exception
*/
@Override
public final void tearDown() throws Exception {
  tearDownCreationStackGenerator();
  // subclass pre-cleanup hook, then this class's cleanup, then post hook
  tearDown2();
  realTearDown();
  tearDownAfter();
}
/**
* Tears down the test. This method is called by the final {@link #tearDown()} method and should be overridden to
* perform actual test cleanup and release resources used by the test. The tasks executed by this method are
* performed before the DUnit test framework using Hydra cleans up the client VMs.
* <p/>
* @throws Exception if the tear down process and test cleanup fails.
* @see #tearDown
* @see #tearDownAfter()
*/
// TODO rename this method to tearDownBefore and change the access modifier to protected!
public void tearDown2() throws Exception {
  // default: no-op; subclasses override for test-specific cleanup
}
// Core cleanup performed for every test: disconnect (when logPerTest forces
// per-method log files) and reset shared state in all VMs.
protected void realTearDown() throws Exception {
  if (logPerTest) {
    disconnectFromDS();
    invokeInEveryVM(DistributedTestCase.class, "disconnectFromDS");
  }
  cleanupAllVms();
}
/**
* Tears down the test. Performs additional tear down tasks after the DUnit tests framework using Hydra cleans up
* the client VMs. This method is called by the final {@link #tearDown()} method and should be overridden to perform
* post tear down activities.
* <p/>
* @throws Exception if the test tear down process fails.
* @see #tearDown()
* @see #tearDown2()
*/
protected void tearDownAfter() throws Exception {
  // default: no-op; subclasses override for post-cleanup work
}
/** Resets shared static test state in this VM, every worker VM, and the locator. */
public static void cleanupAllVms()
{
  cleanupThisVM();
  invokeInEveryVM(DistributedTestCase.class, "cleanupThisVM");
  invokeInLocator(new SerializableRunnable() {
    public void run() {
      DistributionMessageObserver.setInstance(null);
      unregisterInstantiatorsInThisVM();
    }
  });
  // fail the test if any VM logged severe/suspect strings
  DUnitLauncher.closeAndCheckForSuspects();
}
// Resets the many static hooks and observers tests may have installed in this
// VM so the next test starts from a clean slate.
private static void cleanupThisVM() {
  // restore DNS resolution flags that tests may have disabled
  IpAddress.resolve_dns = true;
  SocketCreator.resolve_dns = true;
  InitialImageOperation.slowImageProcessing = 0;
  DistributionMessageObserver.setInstance(null);
  QueryTestUtils.setCache(null);
  CacheServerTestUtil.clearCacheReference();
  // clear statics published by specific test classes
  RegionTestCase.preSnapshotRegion = null;
  GlobalLockingDUnitTest.region_testBug32356 = null;
  LogWrapper.close();
  ClientProxyMembershipID.system = null;
  MultiVMRegionTestCase.CCRegion = null;
  InternalBridgeMembership.unregisterAllListeners();
  ClientStatsManager.cleanupForTests();
  unregisterInstantiatorsInThisVM();
  // reset jgroups tracing to the system-property-controlled default
  GemFireTracer.DEBUG = Boolean.getBoolean("DistributionManager.DEBUG_JAVAGROUPS");
  Protocol.trace = GemFireTracer.DEBUG;
  DistributionMessageObserver.setInstance(null);
  QueryObserverHolder.reset();
  DiskStoreObserver.setInstance(null);
  // abort any in-flight auto-reconnect attempt
  if (InternalDistributedSystem.systemAttemptingReconnect != null) {
    InternalDistributedSystem.systemAttemptingReconnect.stopReconnecting();
  }
  // drain and remove any expected-exception markers registered by the test
  ExpectedException ex;
  while((ex = expectedExceptions.poll()) != null) {
    ex.remove();
  }
}
/** Unregisters all DataSerializers in this VM, every worker VM, and the locator. */
public static void unregisterAllDataSerializersFromAllVms()
{
  // one shared task instead of two identical anonymous runnables
  final SerializableRunnable unregisterTask = new SerializableRunnable() {
    public void run() {
      unregisterDataSerializerInThisVM();
    }
  };
  unregisterDataSerializerInThisVM();
  invokeInEveryVM(unregisterTask);
  invokeInLocator(unregisterTask);
}
public static void unregisterInstantiatorsInThisVM() {
  // unregister all the instantiators
  InternalInstantiator.reinitialize();
  // sanity-check that the registry really is empty afterwards
  assertEquals(0, InternalInstantiator.getInstantiators().length);
}
public static void unregisterDataSerializerInThisVM()
{
  // reset the flag consumed by DataSerializerPropogationDUnitTest
  DataSerializerPropogationDUnitTest.successfullyLoadedTestDataSerializer = false;
  // unregister all the Dataserializers
  InternalDataSerializer.reinitialize();
  // ensure that all are unregistered
  assertEquals(0, InternalDataSerializer.getSerializers().length);
}
/** Disconnects this VM and every dunit worker VM from the distributed system. */
protected static void disconnectAllFromDS() {
  disconnectFromDS();
  invokeInEveryVM(DistributedTestCase.class,
      "disconnectFromDS");
}
/**
* Disconnects this VM from the distributed system
*/
public static void disconnectFromDS() {
  testName = null;
  GemFireCacheImpl.testCacheXml = null;
  if (system != null) {
    system.disconnect();
    system = null;
  }
  // a test may have created additional systems outside this class; keep
  // disconnecting until none remain connected
  for (;;) {
    DistributedSystem ds = InternalDistributedSystem.getConnectedInstance();
    if (ds == null) {
      break;
    }
    try {
      ds.disconnect();
    }
    catch (Exception e) {
      // best-effort teardown: keep looping even if one disconnect fails
      // ignore
    }
  }
  {
    // also drop any admin-API connection
    AdminDistributedSystemImpl ads =
        AdminDistributedSystemImpl.getConnectedInstance();
    if (ads != null) {// && ads.isConnected()) {
      ads.disconnect();
    }
  }
}
/**
* Strip the package off and gives just the class name.
* Needed because of Windows file name limits.
*/
private String getShortClassName() {
  final String fqcn = this.getClass().getName();
  final int lastDot = fqcn.lastIndexOf('.');
  // no '.' means a default-package class: return the name unchanged
  return (lastDot == -1) ? fqcn : fqcn.substring(lastDot + 1);
}
/** get the host name to use for a server cache in client/server dunit
* testing
* @param host
* @return the host name
*/
public static String getServerHostName(Host host) {
  // Read the override once: the original queried the system property twice,
  // creating a check-then-read race if another thread changed it in between.
  final String bindAddress = System.getProperty("gemfire.server-bind-address");
  return bindAddress != null ? bindAddress : host.getHostName();
}
/** get the IP literal name for the current host, use this instead of
* "localhost" to avoid IPv6 name resolution bugs in the JDK/machine config.
* @return an ip literal, this method honors java.net.preferIPvAddresses
*/
public static String getIPLiteral() {
  try {
    return SocketCreator.getLocalHost().getHostAddress();
  } catch (UnknownHostException e) {
    // fail fast: without a resolvable local host no dunit networking works
    throw new Error("problem determining host IP address", e);
  }
}
/**
* Get the port that the standard dunit locator is listening on.
* @return
*/
public static int getDUnitLocatorPort() {
  // delegated to the environment so standalone and hydra runs both work
  return DUnitEnv.get().getLocatorPort();
}
/**
* Returns a unique name for this test method. It is based on the
* name of the class as well as the name of the method.
*/
public String getUniqueName() {
  // short class name keeps the result usable in Windows file names
  return getShortClassName() + "_" + this.getName();
}
/**
* Returns a <code>LogWriter</code> for logging information
* @deprecated Use a static logger from the log4j2 LogService.getLogger instead.
*/
@Deprecated
@Deprecated
public static InternalLogWriter getLogWriter() {
  // legacy LogWriter facade wrapping the log4j2 logger
  return oldLogger;
}
/**
* Helper method that causes this test to fail because of the given
* exception.
*/
/** Fails the test with {@code message} plus the full stack trace of {@code ex}. */
public static void fail(String message, Throwable ex) {
  final StringWriter stackHolder = new StringWriter();
  final PrintWriter writer = new PrintWriter(stackHolder, true);
  writer.print(message);
  writer.print(": ");
  ex.printStackTrace(writer);
  fail(stackHolder.toString());
}
// utility methods
/** pause for a default interval */
protected void pause() {
  // 250 ms default pause
  pause(250);
}
/**
* Use of this function indicates a place in the tests tree where t
* he use of Thread.sleep() is
* highly questionable.
* <p>
* Some places in the system, especially those that test expirations and other
* timeouts, have a very good reason to call {@link Thread#sleep(long)}. The
* <em>other</em> places are marked by the use of this method.
*
* @param ms
*/
static public final void staticPause(int ms) {
  // sleep in a loop so spurious early wakeups still honor the full duration
  final long target = System.currentTimeMillis() + ms;
  try {
    for (;;) {
      long msLeft = target - System.currentTimeMillis();
      if (msLeft <= 0) {
        break;
      }
      Thread.sleep(msLeft);
    }
  }
  catch (InterruptedException e) {
    // FIX: restore the interrupt flag before failing so code further up the
    // stack can still observe the interruption
    Thread.currentThread().interrupt();
    fail("interrupted", e);
  }
}
/**
* Blocks until the clock used for expiration moves forward.
* @return the last time stamp observed
*/
public static final long waitForExpiryClockToChange(LocalRegion lr) {
  // use the region's current cache time as the baseline
  return waitForExpiryClockToChange(lr, lr.cacheTimeMillis());
}
/**
* Blocks until the clock used for expiration moves forward.
* @param baseTime the timestamp that the clock must exceed
* @return the last time stamp observed
*/
public static final long waitForExpiryClockToChange(LocalRegion lr, final long baseTime) {
  long nowTime;
  do {
    // busy-wait with yield: the cache clock normally advances within one tick
    Thread.yield();
    nowTime = lr.cacheTimeMillis();
  } while ((nowTime - baseTime) <= 0L); // subtraction is wrap-around safe
  return nowTime;
}
/** pause for specified ms interval
* Make sure system clock has advanced by the specified number of millis before
* returning.
*/
public static final void pause(int ms) {
  if (ms > 50) {
    getLogWriter().info("Pausing for " + ms + " ms..."/*, new Exception()*/);
  }
  // sleep in a loop so spurious early wakeups still honor the full duration
  final long target = System.currentTimeMillis() + ms;
  try {
    for (;;) {
      long msLeft = target - System.currentTimeMillis();
      if (msLeft <= 0) {
        break;
      }
      Thread.sleep(msLeft);
    }
  }
  catch (InterruptedException e) {
    // FIX: restore the interrupt flag before failing so code further up the
    // stack can still observe the interruption
    Thread.currentThread().interrupt();
    fail("interrupted", e);
  }
}
/** Polled condition used with {@link #waitForCriterion} and {@link #waitMutex}. */
public interface WaitCriterion {
  /** @return true once the awaited condition holds */
  public boolean done();
  /** @return description included in timeout failure messages */
  public String description();
}
/** A {@link WaitCriterion} that can additionally abort the wait early. */
public interface WaitCriterion2 extends WaitCriterion {
  /**
   * If this method returns true then quit waiting even if we are not done.
   * This allows a wait to fail early.
   */
  public boolean stopWaiting();
}
/**
* If true, we randomize the amount of time we wait before polling a
* {@link WaitCriterion}.
*/
static private final boolean USE_JITTER = true;
// shared PRNG for jittered poll intervals (test-only; SecureRandom not needed)
static private final Random jitter = new Random();
/**
* Return a jittered interval up to a maximum of <code>ms</code>
* milliseconds, inclusive.
*
* The result is bounded by 50 ms as a minimum and 5000 ms as a maximum.
*
* @param ms total amount of time to wait
* @return randomized interval we should wait
*/
private static int jitterInterval(long ms) {
  final int minLegal = 50;   // never jitter below this floor
  final int maxLegal = 5000; // cap on any single wait
  if (ms <= minLegal) {
    // too small to be worth jittering
    return (int) ms;
  }
  // uniform pick in [minLegal, min(ms, maxLegal)]
  final int upperBound = (int) Math.min(ms, (long) maxLegal);
  return minLegal + jitter.nextInt(upperBound - minLegal + 1);
}
/**
* Wait until given criterion is met
* @param ev criterion to wait on
* @param ms total time to wait, in milliseconds
* @param interval pause interval between waits
* @param throwOnTimeout if false, don't generate an error
*/
static public void waitForCriterion(WaitCriterion ev, long ms,
    long interval, boolean throwOnTimeout) {
  // optionally randomize the poll interval to avoid lock-step polling
  long waitThisTime;
  if (USE_JITTER) {
    waitThisTime = jitterInterval(interval);
  }
  else {
    waitThisTime = interval;
  }
  final long tilt = System.currentTimeMillis() + ms;
  for (;;) {
    //      getLogWriter().info("Testing to see if event has occurred: " + ev.description());
    if (ev.done()) {
      return; // success
    }
    // allow a WaitCriterion2 to abort the wait early
    if (ev instanceof WaitCriterion2) {
      WaitCriterion2 ev2 = (WaitCriterion2)ev;
      if (ev2.stopWaiting()) {
        if (throwOnTimeout) {
          fail("stopWaiting returned true: " + ev.description());
        }
        return;
      }
    }
    // Calculate time left
    long timeLeft = tilt - System.currentTimeMillis();
    if (timeLeft <= 0) {
      if (!throwOnTimeout) {
        return; // not an error, but we're done
      }
      fail("Event never occurred after " + ms + " ms: " + ev.description());
    }
    // never sleep past the deadline
    if (waitThisTime > timeLeft) {
      waitThisTime = timeLeft;
    }
    // Wait a little bit
    Thread.yield();
    try {
      //        getLogWriter().info("waiting " + waitThisTime + "ms for " + ev.description());
      Thread.sleep(waitThisTime);
    } catch (InterruptedException e) {
      fail("interrupted");
    }
  }
}
/**
 * Wait on a mutex. This is done in a loop in order to address the
 * "spurious wakeup" "feature" in Java.
 * @param ev condition to test
 * @param mutex object to lock and wait on
 * @param ms total amount of time to wait
 * @param interval interval to pause for the wait
 * @param throwOnTimeout if false, no error is thrown on timeout.
 */
static public void waitMutex(WaitCriterion ev, Object mutex, long ms,
    long interval, boolean throwOnTimeout) {
  // Absolute deadline for the whole wait.
  final long tilt = System.currentTimeMillis() + ms;
  long waitThisTime;
  // Optionally randomize the wait slice to avoid lock-step polling.
  if (USE_JITTER) {
    waitThisTime = jitterInterval(interval);
  }
  else {
    waitThisTime = interval;
  }
  synchronized (mutex) {
    for (;;) {
      // Re-test the condition on every wakeup (spurious wakeups are legal).
      if (ev.done()) {
        break;
      }
      long timeLeft = tilt - System.currentTimeMillis();
      if (timeLeft <= 0) {
        if (!throwOnTimeout) {
          return; // not an error, but we're done
        }
        fail("Event never occurred after " + ms + " ms: " + ev.description());
      }
      // Never wait past the deadline.
      if (waitThisTime > timeLeft) {
        waitThisTime = timeLeft;
      }
      try {
        mutex.wait(waitThisTime);
      } catch (InterruptedException e) {
        // NOTE(review): interrupt status is not restored before fail() — confirm intended.
        fail("interrupted");
      }
    } // for
  } // synchronized
}
/**
 * Wait for a thread to join; on timeout, dumps stack traces, interrupts the
 * thread and fails the test.
 * @param t thread to wait on
 * @param ms maximum time to wait
 * @param logger where to report a hung thread; a local INFO writer is created when null
 * @throws AssertionFailure if the thread does not terminate
 */
static public void join(Thread t, long ms, LogWriter logger) {
  final long tilt = System.currentTimeMillis() + ms;
  final long incrementalWait;
  if (USE_JITTER) {
    incrementalWait = jitterInterval(ms);
  }
  else {
    incrementalWait = ms; // wait entire time, no looping.
  }
  // NOTE(review): if ms == 0, incrementalWait is 0 and t.join(0) blocks until
  // the thread dies, ignoring the deadline entirely — confirm callers never pass 0.
  final long start = System.currentTimeMillis();
  for (;;) {
    // I really do *not* understand why this check is necessary
    // but it is, at least with JDK 1.6. According to the source code
    // and the javadocs, one would think that join() would exit immediately
    // if the thread is dead. However, I can tell you from experimentation
    // that this is not the case. :-( djp 2008-12-08
    if (!t.isAlive()) {
      break;
    }
    try {
      t.join(incrementalWait);
    } catch (InterruptedException e) {
      fail("interrupted");
    }
    if (System.currentTimeMillis() >= tilt) {
      break;
    }
  } // for
  if (logger == null) {
    logger = new LocalLogWriter(LogWriterImpl.INFO_LEVEL, System.out);
  }
  if (t.isAlive()) {
    // The thread is hung: dump its stack (and all threads) before failing.
    logger.info("HUNG THREAD");
    dumpStackTrace(t, t.getStackTrace(), logger);
    dumpMyThreads(logger);
    t.interrupt(); // We're in trouble!
    fail("Thread did not terminate after " + ms + " ms: " + t);
    // getLogWriter().warning("Thread did not terminate"
    // /* , new Exception()*/
    // );
  }
  // Report how long the join took (elapsed is effectively always positive).
  long elapsedMs = (System.currentTimeMillis() - start);
  if (elapsedMs > 0) {
    String msg = "Thread " + t + " took "
        + elapsedMs
        + " ms to exit.";
    logger.info(msg);
  }
}
/**
 * Logs the given thread's stack trace as a single INFO message, one
 * tab-indented line per frame.
 */
public static void dumpStackTrace(Thread t, StackTraceElement[] stack, LogWriter logger) {
  // Render the whole trace into one message so it stays contiguous in the log.
  StringBuilder dump = new StringBuilder();
  dump.append("Thread=<").append(t).append("> stackDump:\n");
  for (StackTraceElement frame : stack) {
    dump.append("\t").append(frame).append("\n");
  }
  logger.info(dump.toString());
}
/**
 * Dump all thread stacks of the current process.
 * Delegates to OSProcess.printStacks; the LogWriter parameter is not passed
 * to the delegate call.
 */
public static void dumpMyThreads(LogWriter logger) {
  OSProcess.printStacks(0, false);
}
/**
 * A class that represents a currently logged expected exception, which
 * should be removed
 *
 * @author Mitch Thomas
 * @since 5.7bugfix
 */
public static class ExpectedException implements Serializable {
  private static final long serialVersionUID = 1L;
  // Expected-exception text (may contain regex, as consumed by GrepLogs).
  final String ex;
  // VM the expectation was registered on, or null for all VMs.
  // transient: the VM handle is not meaningful after serialization.
  final transient VM v;
  public ExpectedException(String exception) {
    this.ex = exception;
    this.v = null;
  }
  ExpectedException(String exception, VM vm) {
    this.ex = exception;
    this.v = vm;
  }
  /** Marker line telling GrepLogs to stop expecting this exception. */
  public String getRemoveString() {
    return "<ExpectedException action=remove>" + ex + "</ExpectedException>";
  }
  /** Marker line telling GrepLogs to start expecting this exception. */
  public String getAddString() {
    return "<ExpectedException action=add>" + ex + "</ExpectedException>";
  }
  /**
   * Logs the remove marker in the target VM (or every VM when none was
   * given) and then locally, in every available logger.
   */
  public void remove() {
    SerializableRunnable removeRunnable = new SerializableRunnable(
        "removeExpectedExceptions") {
      public void run() {
        final String remove = getRemoveString();
        final InternalDistributedSystem sys = InternalDistributedSystem
            .getConnectedInstance();
        if (sys != null) {
          sys.getLogWriter().info(remove);
        }
        try {
          getLogWriter().info(remove);
        } catch (Exception noHydraLogger) {
          // best effort: the hydra logger may not exist in this VM
        }
        logger.info(remove);
      }
    };
    if (this.v != null) {
      v.invoke(removeRunnable);
    }
    else {
      invokeInEveryVM(removeRunnable);
    }
    String s = getRemoveString();
    LogManager.getLogger(LogService.BASE_LOGGER_NAME).info(s);
    // log it locally
    final InternalDistributedSystem sys = InternalDistributedSystem
        .getConnectedInstance();
    if (sys != null) { // avoid creating a system
      sys.getLogWriter().info(s);
    }
    getLogWriter().info(s);
  }
}
/**
 * Log in all VMs, in both the test logger and the GemFire logger the
 * expected exception string to prevent grep logs from complaining. The
 * expected string is used by the GrepLogs utility and so can contain
 * regular expression characters.
 *
 * If you do not remove the expected exception, it will be removed at the
 * end of your test case automatically.
 *
 * @since 5.7bugfix
 * @param exception
 *          the exception string to expect
 * @return an ExpectedException instance for removal
 */
public static ExpectedException addExpectedException(final String exception) {
  // Convenience overload: a null VM means the expectation applies to every VM.
  return addExpectedException(exception, null);
}
/**
 * Log in all VMs, in both the test logger and the GemFire logger the
 * expected exception string to prevent grep logs from complaining. The
 * expected string is used by the GrepLogs utility and so can contain
 * regular expression characters.
 *
 * @since 5.7bugfix
 * @param exception
 *          the exception string to expect
 * @param v
 *          the VM on which to log the expected exception or null for all VMs
 * @return an ExpectedException instance for removal purposes
 */
public static ExpectedException addExpectedException(final String exception,
    VM v) {
  final ExpectedException ret;
  if (v != null) {
    ret = new ExpectedException(exception, v);
  }
  else {
    ret = new ExpectedException(exception);
  }
  // define the add and remove expected exceptions
  final String add = ret.getAddString();
  SerializableRunnable addRunnable = new SerializableRunnable(
      "addExpectedExceptions") {
    public void run() {
      final InternalDistributedSystem sys = InternalDistributedSystem
          .getConnectedInstance();
      if (sys != null) {
        sys.getLogWriter().info(add);
      }
      try {
        getLogWriter().info(add);
      } catch (Exception noHydraLogger) {
        // best effort: the hydra logger may not exist in this VM
      }
      logger.info(add);
    }
  };
  // Broadcast to the chosen VM, or to every VM when none was specified.
  if (v != null) {
    v.invoke(addRunnable);
  }
  else {
    invokeInEveryVM(addRunnable);
  }
  LogManager.getLogger(LogService.BASE_LOGGER_NAME).info(add);
  // Log it locally too
  final InternalDistributedSystem sys = InternalDistributedSystem
      .getConnectedInstance();
  if (sys != null) { // avoid creating a cache
    sys.getLogWriter().info(add);
  }
  getLogWriter().info(add);
  // Track for automatic removal at the end of the test case.
  expectedExceptions.add(ret);
  return ret;
}
/**
 * delete locator state files. Use this after getting a random port
 * to ensure that an old locator state file isn't picked up by the
 * new locator you're starting.
 * @param ports ports whose "locator&lt;port&gt;state.dat" files should be removed
 */
public void deleteLocatorStateFile(int... ports) {
  for (int port : ports) {
    // Stale state files make a new locator resurrect old membership data.
    File stateFile = new File("locator" + port + "state.dat");
    if (stateFile.exists()) {
      stateFile.delete();
    }
  }
}
}
| |
package nl.uva.larissa.service;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.validation.ConstraintViolation;
import javax.validation.ValidationException;
import javax.validation.Validator;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.OPTIONS;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
import nl.uva.larissa.json.ISO8601VerboseDateFormat;
import nl.uva.larissa.json.ParseException;
import nl.uva.larissa.json.StatementParser;
import nl.uva.larissa.json.StatementPrinter;
import nl.uva.larissa.json.model.Account;
import nl.uva.larissa.json.model.Agent;
import nl.uva.larissa.json.model.IFI;
import nl.uva.larissa.json.model.Statement;
import nl.uva.larissa.json.model.StatementResult;
import nl.uva.larissa.repository.DuplicateIdException;
import nl.uva.larissa.repository.StatementFilter;
import nl.uva.larissa.repository.StatementRepository;
import nl.uva.larissa.repository.UnknownStatementException;
import nl.uva.larissa.repository.VoidingTargetException;
import org.apache.abdera.i18n.iri.IRI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
/**
 * JAX-RS resource implementing the xAPI /statements endpoint: storing a
 * single statement (PUT), storing one or more statements (POST), and
 * querying statements by id or by filter (GET, or POST with form fields).
 */
@Path(value = "/xAPI/statements")
public class StatementsResource {
    @Inject
    StatementParser parser;
    @Inject
    StatementRepository repository;
    @Inject
    StatementPrinter printer;
    @Inject
    Validator validator;
    @Context
    SecurityContext securityContext;
    @Context
    UriInfo uriInfo;
    // made final: the logger is a per-class constant and is never reassigned
    private static final Logger LOGGER = LoggerFactory
            .getLogger(StatementsResource.class);

    /**
     * Stores a single statement under the id given by the required
     * 'statementId' query parameter (xAPI PUT semantics).
     *
     * @return 204 on success, 400 on parse/validation errors or id mismatch,
     *         409 when a statement with that id already exists
     */
    @PUT
    @Consumes(MediaType.APPLICATION_JSON)
    public Response storeStatement(@QueryParam("statementId") String id,
            String json) {
        // parameterized logging avoids string concatenation when trace is off
        LOGGER.trace("storeStatement {}", id);
        if (id == null) {
            return badRequest("missing required parameter 'statementId'");
        }
        try {
            Agent authority = getAuthority();
            Statement statement = parser.parseStatement(json);
            statement.setAuthority(authority);
            validate(statement);
            final String idField = statement.getId();
            if (idField != null && !idField.equals(id)) {
                return badRequest(String
                        .format("The field statement.id exists and has a different value than parameter 'statementId' (%s != %s)",
                                idField, id));
            }
            statement.setId(id);
            repository.storeStatement(statement);
        } catch (ParseException | ValidationException | VoidingTargetException
                | UnknownStatementException e) {
            return badRequest(e);
        } catch (DuplicateIdException e) {
            // TODO spec mentions checking if statement is the same (although
            // ADL throws 409 regardless!)
            return dupeResponse(e);
        }
        return Response.noContent().build();
    }

    /**
     * Builds the authority Agent from the authenticated principal, using an
     * account whose homePage is this service's base URI.
     */
    private Agent getAuthority() {
        Agent result = new Agent();
        IFI ifi = new IFI();
        Account account = new Account();
        account.setHomePage(new IRI(uriInfo.getBaseUri()));
        account.setName(securityContext.getUserPrincipal().getName());
        ifi.setAccount(account);
        result.setIdentifier(ifi);
        return result;
    }

    private Response badRequest(Exception e) {
        return badRequest(e.getMessage());
    }

    /** 400 response with an optional plain-text message body. */
    private Response badRequest(String message) {
        ResponseBuilder response = Response.status(Status.BAD_REQUEST);
        if (message != null) {
            response.type(MediaType.TEXT_PLAIN).entity(message);
        }
        return response.build();
    }

    /**
     * Stores one statement, or — when the body does not parse as a single
     * statement — a list of statements (xAPI POST semantics).
     *
     * @return 200 with a JSON array of stored ids, 400 on errors, 409 on
     *         duplicate ids
     */
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    public Response storeStatements(String json) {
        LOGGER.trace("storeStatements");
        String id;
        Statement statement;
        try {
            try {
                Agent authority = getAuthority();
                statement = parser.parseStatement(json);
                statement.setAuthority(authority);
                validate(statement);
                id = repository.storeStatement(statement);
            } catch (ParseException e) {
                // not a single statement: retry the body as a statement list
                List<Statement> statements = parser.parseStatementList(json);
                return validateAndStore(statements);
            }
        } catch (ParseException | ValidationException | VoidingTargetException
                | UnknownStatementException e) {
            return badRequest(e);
        } catch (DuplicateIdException e) {
            return dupeResponse(e);
        }
        return Response.ok(String.format("[\"%s\"]", id),
                MediaType.APPLICATION_JSON).build();
    }

    /** 409 response for an attempt to store a statement whose id exists. */
    private Response dupeResponse(DuplicateIdException e) {
        return Response
                .status(Status.CONFLICT)
                .entity(String.format("a statement with id %s already exists",
                        e.getId())).type(MediaType.TEXT_PLAIN).build();
    }

    /**
     * Validates every statement, then stores them in one repository call and
     * returns the stored ids as a JSON array.
     */
    private Response validateAndStore(List<Statement> statements)
            throws DuplicateIdException, ValidationException,
            VoidingTargetException {
        Agent authority = getAuthority();
        for (Statement statement : statements) {
            statement.setAuthority(authority);
            validate(statement);
        }
        List<String> ids = repository.storeStatements(statements);
        ArrayNode node = JsonNodeFactory.instance.arrayNode();
        for (String id : ids) {
            node.add(id);
        }
        return Response.ok(node.toString(), MediaType.APPLICATION_JSON).build();
    }

    /** Queries statements by id or by filter, from URL query parameters. */
    @GET
    public Response getStatements(@Context UriInfo uriInfo) {
        MultivaluedMap<String, String> parameters = uriInfo
                .getQueryParameters();
        LOGGER.trace("getStatements {}", parameters.keySet());
        return getStatementsUsingParameters(parameters);
    }

    /**
     * Runs a filtered query and adds the X-Experience-API-Consistent-Through
     * header; printing failures yield a 500.
     */
    private Response getFilteredStatements(StatementFilter statementFilter) {
        StatementResult result = repository.getStatements(statementFilter);
        Response response;
        try {
            // NOTE(review): printFormatted may throw IllegalArgumentException for
            // a bad 'format' value, which is not caught here — confirm that the
            // request parser rejects invalid formats before this point.
            response = Response
                    .ok(printFormatted(result, statementFilter.getFormat()),
                            MediaType.APPLICATION_JSON)
                    .header(XapiHeader.CONSISTENT_THROUGH.key(),
                            new ISO8601VerboseDateFormat().format(result
                                    .getConsistentThrough())).build();
        } catch (IOException e) {
            LOGGER.error(e.getMessage(), e);
            response = Response.serverError().build();
        }
        return response;
    }

    /**
     * Prints the result in the requested format: 'exact' (or absent) prints
     * full statements, 'ids' prints statement ids only.
     *
     * @throws IllegalArgumentException for any other format value
     */
    private String printFormatted(StatementResult result, String format)
            throws IOException {
        if (format == null || "exact".equals(format)) {
            return printer.print(result);
        }
        if ("ids".equals(format)) {
            return printer.printIds(result);
        }
        throw new IllegalArgumentException(
                "allowed values for parameter 'format' are {ids,exact}");
    }

    // xAPI 1.0.1 - 7.2.2
    // GET Statements MAY be called using POST and form fields if necessary as
    // query strings have limits
    @POST
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    public Response getStatementsWithPost(
            MultivaluedMap<String, String> parameters) {
        LOGGER.trace("getStatementsWithPost {}", parameters.keySet());
        return getStatementsUsingParameters(parameters);
    }

    @OPTIONS
    public Response options() {
        return Response.ok().build();
    }

    private Response getStatement(String id) {
        StatementResult result = repository.getStatement(id);
        return getQueryByIdResponse(id, result);
    }

    private Response getVoidedStatement(String id) {
        StatementResult result = repository.getVoidedStatement(id);
        return getQueryByIdResponse(id, result);
    }

    /** Dispatches a query to single-id, voided-id or filtered handling. */
    private Response getStatementsUsingParameters(
            MultivaluedMap<String, String> parameters) {
        // renamed from 'parser' to stop shadowing the injected StatementParser field
        RequestTypeParser requestTypeParser = new RequestTypeParser();
        RequestTypeParser.Result result = requestTypeParser.parse(parameters);
        Response response;
        switch (result.getType()) {
        case SINGLE:
            response = getStatement(result.getStatementId());
            break;
        case VOIDED:
            response = getVoidedStatement(result.getStatementId());
            break;
        case FILTER:
            response = getFilteredStatements(result.getStatementFilter());
            break;
        case INVALID:
            response = badRequest(result.getMessage());
            break;
        default:
            response = Response.serverError().build();
        }
        return response;
    }

    /**
     * Builds the response for a query-by-id: 404 when absent, the printed
     * statement when found, 500 when the repository returns more than one.
     */
    private Response getQueryByIdResponse(String id, StatementResult result) {
        ResponseBuilder builder;
        List<Statement> statements = result.getStatements();
        switch (statements.size()) {
        case 0:
            builder = Response.status(Status.NOT_FOUND);
            break;
        case 1:
            try {
                builder = Response.ok(
                        printer.printStatement(statements.get(0)),
                        MediaType.APPLICATION_JSON);
            } catch (IOException e) {
                LOGGER.error(e.getMessage(), e);
                return Response.serverError().build();
            }
            break;
        default:
            // fixed: SLF4J uses {} placeholders; with the previous %s the
            // arguments were never substituted into the logged message
            LOGGER.error(
                    "received more than one statement (n={}) when querying by id '{}'",
                    statements.size(), id);
            return Response.serverError().build();
        }
        builder.header(XapiHeader.CONSISTENT_THROUGH.key(),
                new ISO8601VerboseDateFormat().format(result
                        .getConsistentThrough()));
        return builder.build();
    }

    /**
     * Throws a ValidationException describing the first constraint
     * violation, if any.
     */
    private void validate(Statement statement) throws ValidationException {
        Set<ConstraintViolation<Statement>> violations = validator
                .validate(statement);
        Iterator<ConstraintViolation<Statement>> itt = violations.iterator();
        if (itt.hasNext()) {
            ConstraintViolation<Statement> violation = itt.next();
            throw new ValidationException(violation.getMessage() + ": "
                    + violation.getPropertyPath());
        }
    }
}
| |
/**
* Copyright 2015, Xiaomi.
* All rights reserved.
* Author: yongxing@xiaomi.com
*/
package com.xiaomi.infra.galaxy.talos.consumer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.base.Preconditions;
import com.google.gson.JsonArray;
import libthrift091.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.xiaomi.infra.galaxy.lcs.common.logger.Slf4jLogger;
import com.xiaomi.infra.galaxy.lcs.metric.lib.utils.FalconWriter;
import com.xiaomi.infra.galaxy.rpc.thrift.Credential;
import com.xiaomi.infra.galaxy.talos.admin.TalosAdmin;
import com.xiaomi.infra.galaxy.talos.client.NamedThreadFactory;
import com.xiaomi.infra.galaxy.talos.client.ScheduleInfoCache;
import com.xiaomi.infra.galaxy.talos.client.TalosClientFactory;
import com.xiaomi.infra.galaxy.talos.client.TopicAbnormalCallback;
import com.xiaomi.infra.galaxy.talos.client.Utils;
import com.xiaomi.infra.galaxy.talos.thrift.ConsumeUnit;
import com.xiaomi.infra.galaxy.talos.thrift.ConsumerService;
import com.xiaomi.infra.galaxy.talos.thrift.GetDescribeInfoRequest;
import com.xiaomi.infra.galaxy.talos.thrift.GetDescribeInfoResponse;
import com.xiaomi.infra.galaxy.talos.thrift.LockWorkerRequest;
import com.xiaomi.infra.galaxy.talos.thrift.LockWorkerResponse;
import com.xiaomi.infra.galaxy.talos.thrift.QueryWorkerRequest;
import com.xiaomi.infra.galaxy.talos.thrift.QueryWorkerResponse;
import com.xiaomi.infra.galaxy.talos.thrift.RenewRequest;
import com.xiaomi.infra.galaxy.talos.thrift.RenewResponse;
import com.xiaomi.infra.galaxy.talos.thrift.TopicTalosResourceName;
public class TalosConsumer {
  /**
   * Check Partition Task
   *
   * if partition number change, invoke ReBalanceTask.
   * Also detects topic deletion/recreation via the resource-name check.
   */
  private class CheckPartitionTask implements Runnable {
    @Override
    public void run() {
      GetDescribeInfoResponse response;
      try {
        response = talosAdmin.getDescribeInfo(new GetDescribeInfoRequest(topicName));
      } catch (Throwable throwable) {
        LOG.error("Exception in CheckPartitionTask: ", throwable);
        // if throwable instance of HBaseOperationFailed, just return
        // if throwable instance of TopicNotExist, cancel all reading task
        if (Utils.isTopicNotExist(throwable)) {
          cancelAllConsumingTask();
          topicAbnormalCallback.abnormalHandler(topicTalosResourceName, throwable);
        }
        return;
      }
      // A changed resource name means the topic was deleted (and possibly
      // re-created); stop consuming and notify the abnormal callback.
      if (!topicTalosResourceName.equals(
          response.getTopicTalosResourceName())) {
        String errMsg = "The topic: " +
            topicTalosResourceName.getTopicTalosResourceName() +
            " not exist. It might have been deleted. " +
            "The getMessage threads will be cancel.";
        LOG.error(errMsg);
        cancelAllConsumingTask();
        topicAbnormalCallback.abnormalHandler(topicTalosResourceName,
            new Throwable(errMsg));
        return;
      }
      // Only growth is handled here; a smaller count triggers nothing.
      int topicPartitionNum = response.getPartitionNumber();
      if (partitionNumber < topicPartitionNum) {
        LOG.info("partitionNumber changed from " + partitionNumber + " to " +
            topicPartitionNum + ", execute a re-balance task.");
        // update partition number and call the re-balance
        setPartitionNumber(topicPartitionNum);
        // call the re-balance task
        reBalanceExecutor.execute(new ReBalanceTask());
      }
    }
  } // checkPartitionTask
  /**
   * Check Worker Info Task
   *
   * check alive worker number and get the worker serving map
   * 1) get the latest worker info and synchronized update the local workInfoMap
   * 2) invoke the ReBalanceTask every time
   *
   * Note:
   * a) current alive workers refer to scan 'consumerGroup+Topic+Worker'
   * b) all serving partitions got by the a)'s alive workers
   *
   * G+T+W G+T+P
   * yes no -- normal, exist idle workers
   * no yes -- abnormal, but ttl will fix it
   */
  private class CheckWorkerInfoTask implements Runnable {
    @Override
    public void run() {
      // A failed refresh is logged only; the re-balance below still runs
      // against the last successfully fetched worker info.
      try {
        getWorkerInfo();
      } catch (Throwable e) {
        LOG.error("Get worker info error: ", e);
      }
      // invoke the re-balance task every time
      reBalanceExecutor.execute(new ReBalanceTask());
    }
  }
  /**
   * Re-Balance Task
   *
   * This task just re-calculate the 'has'/'max'/'min' and try to steal/release
   * 'CheckPartitionTask' takes charge of updating partitionNumber
   * 'CheckWorkerInfoTask' takes charge of updating workerInfoMap
   */
  private class ReBalanceTask implements Runnable {
    @Override
    public void run() {
      // All balancing logic lives in the outer class; this runnable only
      // moves the work onto the single-threaded reBalanceExecutor.
      makeBalance();
    }
  } // ReBalanceTask
/**
* ReNew Task (contains two sections per renew)
*
* Note: we make renew process outside rather than inner PartitionFetcher class
* because:
* 1) make the partitionFetcher heartbeat and worker heartbeat together
* 2) renew all the serving partitions lock within one rpc process,
* which prevent massive rpc request to server
*
* when get what to renew, we take 'partitionFetcherMap' as guideline
*/
private class ReNewTask implements Runnable {
private List<Integer> getRenewPartitionList() {
List<Integer> toRenewList = new ArrayList<Integer>();
readWriteLock.readLock().lock();
for (Map.Entry<Integer, PartitionFetcher> entry :
partitionFetcherMap.entrySet()) {
if (entry.getValue().isHoldingLock()) {
toRenewList.add(entry.getKey());
}
}
readWriteLock.readLock().unlock();
return toRenewList;
}
@Override
public void run() {
List<Integer> toRenewPartitionList = getRenewPartitionList();
ConsumeUnit consumeUnit = new ConsumeUnit(consumerGroup,
topicTalosResourceName, toRenewPartitionList, workerId);
RenewRequest renewRequest = new RenewRequest(consumeUnit);
RenewResponse renewResponse = null;
// plus 1 to include the first renew operation
int maxRetry = talosConsumerConfig.getReNewMaxRetry() + 1;
while (maxRetry-- > 0) {
try {
renewResponse = consumerClient.renew(renewRequest);
} catch (Throwable e) {
LOG.error("Worker: " + workerId + " renew error: ", e);
continue;
}
// 1) make heartbeat success and renew partitions success
if (renewResponse.isHeartbeatSuccess() &&
renewResponse.getFailedPartitionListSize() == 0) {
if (LOG.isDebugEnabled()) {
LOG.debug("The worker: " + workerId +
" success heartbeat and renew partitions: " + toRenewPartitionList);
}
return;
}
}
// 2) make heart beat failed, cancel all partitions
// no need to renew anything, so block the renew thread and cancel all task
if (renewResponse != null && !renewResponse.isHeartbeatSuccess()) {
LOG.error("The worker: " + workerId +
" failed to make heartbeat, cancel all consumer task");
cancelAllConsumingTask();
}
// 3) make heartbeat success but renew some partitions failed
// stop read, commit offset, unlock for renew failed partitions
// the release process is graceful, so may be a long time,
// do not block the renew thread and switch thread to re-balance thread
if (renewResponse != null && renewResponse.getFailedPartitionListSize() > 0) {
List<Integer> failedRenewList = renewResponse.getFailedPartitionList();
LOG.error("The worker: " + workerId +
" failed to renew partitions: " + failedRenewList);
releasePartitionLock(failedRenewList);
}
}
}
  /** Periodically pushes consumer metrics to Falcon; failures are logged only. */
  private class ConsumerMonitorTask implements Runnable {
    @Override
    public void run() {
      try {
        pushMetricData();
      } catch (Exception e) {
        LOG.error("push metric data to falcon failed.", e);
      }
    }
  }
private class WorkerPair implements Comparable<WorkerPair> {
private String workerId;
private int hasPartitionNum;
private WorkerPair(String workerId, int hasPartitionNum) {
this.workerId = workerId;
this.hasPartitionNum = hasPartitionNum;
}
@Override
public int compareTo(WorkerPair o) {
int temp = o.hasPartitionNum - hasPartitionNum; // descending sort
if (0 == temp) {
return o.workerId.compareTo(workerId);
}
return temp;
}
@Override
public String toString() {
return "{'" + workerId + '\'' + ", " + hasPartitionNum + '}';
}
}
  private static final Logger LOG = LoggerFactory.getLogger(TalosConsumer.class);
  // unique id of this consumer worker instance (built from client IP + prefix)
  private String workerId;
  private Random random;
  private String consumerGroup;
  private MessageProcessorFactory messageProcessorFactory;
  private MessageReaderFactory messageReaderFactory;
  // partitionId -> fetcher created for that partition (serving or idle)
  private Map<Integer, PartitionFetcher> partitionFetcherMap;
  private TalosConsumerConfig talosConsumerConfig;
  private TalosClientFactory talosClientFactory;
  private ScheduleInfoCache scheduleInfoCache;
  private TalosAdmin talosAdmin;
  private ConsumerService.Iface consumerClient;
  private TopicAbnormalCallback topicAbnormalCallback;
  // guards reads of partitionFetcherMap against concurrent re-balance updates
  private ReadWriteLock readWriteLock;
  // 3 single scheduledExecutor respectively used for
  // a) checking partition number periodically
  // b) checking alive worker info periodically
  // c) renew worker heartbeat and serving partition locks periodically
  private ScheduledExecutorService partitionScheduledExecutor;
  private ScheduledExecutorService workerScheduleExecutor;
  private ScheduledExecutorService renewScheduleExecutor;
  // reBalanceExecutor is a single thread pool to execute re-balance task
  private ExecutorService reBalanceExecutor;
  // init by getting from rpc call as follows
  private String topicName;
  private int partitionNumber;
  private TopicTalosResourceName topicTalosResourceName;
  // workerId -> partitions currently served by that worker
  private Map<String, List<Integer>> workerInfoMap;
  // partitionId -> committed offset to start consuming from
  private Map<Integer, Long> partitionCheckPoint;
  private FalconWriter falconWriter;
  private ScheduledExecutorService consumerMonitorThread;
  /**
   * Full private constructor: resolves the topic by name, wires all
   * collaborators, fetches initial topic/worker state and starts the four
   * background tasks (partition check, worker check, renew, monitor).
   *
   * @param partitionCheckPoint partitionId -> start offset; null means empty
   * @throws TException on any RPC failure during initialization
   */
  private TalosConsumer(String consumerGroupName, TalosConsumerConfig consumerConfig,
      Credential credential, String topicName,
      MessageReaderFactory messageReaderFactory,
      MessageProcessorFactory messageProcessorFactory, String clientIdPrefix,
      TopicAbnormalCallback abnormalCallback, Map<Integer, Long> partitionCheckPoint)
      throws TException {
    workerId = Utils.generateClientId(consumerConfig.getClientIp(), clientIdPrefix);
    random = new Random();
    Utils.checkNameValidity(consumerGroupName);
    consumerGroup = consumerGroupName;
    this.messageProcessorFactory = messageProcessorFactory;
    this.messageReaderFactory = messageReaderFactory;
    partitionFetcherMap = new ConcurrentHashMap<Integer, PartitionFetcher>();
    talosConsumerConfig = consumerConfig;
    talosClientFactory = new TalosClientFactory(talosConsumerConfig, credential);
    talosAdmin = new TalosAdmin(talosClientFactory);
    // Resolve the immutable resource name from the (reusable) topic name.
    this.topicTalosResourceName = talosAdmin.getDescribeInfo(new GetDescribeInfoRequest(
        topicName)).getTopicTalosResourceName();
    consumerClient = talosClientFactory.newConsumerClient();
    topicAbnormalCallback = abnormalCallback;
    readWriteLock = new ReentrantReadWriteLock();
    this.partitionCheckPoint = partitionCheckPoint == null ?
        new HashMap<Integer, Long>() : partitionCheckPoint;
    // get scheduleInfo
    this.scheduleInfoCache = ScheduleInfoCache.getScheduleInfoCache(this.topicTalosResourceName,
        consumerConfig, talosClientFactory.newMessageClient(), talosClientFactory);
    this.falconWriter = FalconWriter.getFalconWriter(
        consumerConfig.getFalconUrl(), new Slf4jLogger(LOG));
    // Thread names carry the topic so thread dumps are attributable per topic.
    partitionScheduledExecutor = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-partitionCheck-" + topicName));
    workerScheduleExecutor = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-workerCheck-" + topicName));
    renewScheduleExecutor = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-renew-" + topicName));
    reBalanceExecutor = Executors.newSingleThreadExecutor(
        new NamedThreadFactory("talos-consumer-reBalance-" + topicName));
    consumerMonitorThread = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-monitor-" + topicName));
    LOG.info("The worker: " + workerId + " is initializing...");
    // check and get topic info such as partitionNumber
    checkAndGetTopicInfo(this.topicTalosResourceName);
    // register self workerId
    registerSelf();
    // get worker info
    getWorkerInfo();
    // do balance and init simple consumer
    makeBalance();
    // start CheckPartitionTask/CheckWorkerInfoTask/RenewTask
    initCheckPartitionTask();
    initCheckWorkerInfoTask();
    initRenewTask();
    initConsumerMonitorTask();
  }
  /**
   * Full private constructor keyed by resource name (no Falcon monitoring,
   * no monitor task).
   * @deprecated construct by topic name instead; resource names change when
   *             a topic is deleted and re-created.
   */
  @Deprecated
  private TalosConsumer(String consumerGroupName, TalosConsumerConfig consumerConfig,
      Credential credential, TopicTalosResourceName topicTalosResourceName,
      MessageReaderFactory messageReaderFactory,
      MessageProcessorFactory messageProcessorFactory, String clientIdPrefix,
      TopicAbnormalCallback abnormalCallback, Map<Integer, Long> partitionCheckPoint)
      throws TException {
    workerId = Utils.generateClientId(consumerConfig.getClientIp(), clientIdPrefix);
    random = new Random();
    Utils.checkNameValidity(consumerGroupName);
    consumerGroup = consumerGroupName;
    this.messageProcessorFactory = messageProcessorFactory;
    this.messageReaderFactory = messageReaderFactory;
    partitionFetcherMap = new ConcurrentHashMap<Integer, PartitionFetcher>();
    talosConsumerConfig = consumerConfig;
    talosClientFactory = new TalosClientFactory(talosConsumerConfig, credential);
    talosAdmin = new TalosAdmin(talosClientFactory);
    consumerClient = talosClientFactory.newConsumerClient();
    topicAbnormalCallback = abnormalCallback;
    readWriteLock = new ReentrantReadWriteLock();
    this.partitionCheckPoint = partitionCheckPoint == null ?
        new HashMap<Integer, Long>() : partitionCheckPoint;
    // get scheduleInfo
    this.scheduleInfoCache = ScheduleInfoCache.getScheduleInfoCache(topicTalosResourceName,
        consumerConfig, talosClientFactory.newMessageClient(), talosClientFactory);
    // NOTE(review): the topicName field is still null at this point, so these
    // thread-pool names end in "null"; presumably checkAndGetTopicInfo() below
    // populates the field — confirm, and build names afterwards if so.
    partitionScheduledExecutor = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-partitionCheck-" + topicName));
    workerScheduleExecutor = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-workerCheck-" + topicName));
    renewScheduleExecutor = Executors.newSingleThreadScheduledExecutor(
        new NamedThreadFactory("talos-consumer-renew-" + topicName));
    reBalanceExecutor = Executors.newSingleThreadExecutor(
        new NamedThreadFactory("talos-consumer-reBalance-" + topicName));
    LOG.info("The worker: " + workerId + " is initializing...");
    // check and get topic info such as partitionNumber
    checkAndGetTopicInfo(topicTalosResourceName);
    // register self workerId
    registerSelf();
    // get worker info
    getWorkerInfo();
    // do balance and init simple consumer
    makeBalance();
    // start CheckPartitionTask/CheckWorkerInfoTask/RenewTask
    initCheckPartitionTask();
    initCheckWorkerInfoTask();
    initRenewTask();
  }
  // general construct by topicName
  /**
   * Creates a consumer for the given topic name with the default message
   * reader factory and an empty checkpoint map.
   */
  public TalosConsumer(String consumerGroupName, TalosConsumerConfig consumerConfig,
      Credential credential, String topicName,
      MessageProcessorFactory messageProcessorFactory, String clientIdPrefix,
      TopicAbnormalCallback abnormalCallback)
      throws TException {
    this(consumerGroupName, consumerConfig, credential, topicName,
        new TalosMessageReaderFactory(), messageProcessorFactory, clientIdPrefix,
        abnormalCallback, new HashMap<Integer, Long>());
  }
  // general construct by topicTalosResourceName
  /**
   * Creates a consumer by resource name with the default message reader
   * factory and an empty checkpoint map.
   * @deprecated construct by topic name instead.
   */
  @Deprecated
  public TalosConsumer(String consumerGroupName, TalosConsumerConfig consumerConfig,
      Credential credential, TopicTalosResourceName topicTalosResourceName,
      MessageProcessorFactory messageProcessorFactory, String clientIdPrefix,
      TopicAbnormalCallback abnormalCallback)
      throws TException {
    this(consumerGroupName, consumerConfig, credential, topicTalosResourceName,
        new TalosMessageReaderFactory(), messageProcessorFactory, clientIdPrefix,
        abnormalCallback, new HashMap<Integer, Long>());
  }
  // construct with CheckPoint
  /**
   * Creates a consumer by resource name that starts each partition from the
   * offsets in the given checkpoint map.
   */
  public TalosConsumer(String consumerGroupName, TalosConsumerConfig consumerConfig,
      Credential credential, TopicTalosResourceName topicTalosResourceName,
      MessageProcessorFactory messageProcessorFactory, String clientIdPrefix,
      TopicAbnormalCallback abnormalCallback, Map<Integer, Long> checkPointMap)
      throws TException {
    this(consumerGroupName, consumerConfig, credential, topicTalosResourceName,
        new TalosMessageReaderFactory(), messageProcessorFactory, clientIdPrefix,
        abnormalCallback, checkPointMap);
  }
  // null credential
  /** Convenience overload using an empty Credential. */
  public TalosConsumer(String consumerGroup, TalosConsumerConfig consumerConfig,
      TopicTalosResourceName topicTalosResourceName,
      MessageProcessorFactory messageProcessorFactory,
      TopicAbnormalCallback topicAbnormalCallback) throws TException {
    this(consumerGroup, consumerConfig, new Credential(),
        topicTalosResourceName, messageProcessorFactory, topicAbnormalCallback);
  }
  // null clientIdPrefix
  /** Convenience overload using an empty client-id prefix. */
  public TalosConsumer(String consumerGroup, TalosConsumerConfig consumerConfig,
      Credential credential, TopicTalosResourceName topicTalosResourceName,
      MessageProcessorFactory messageProcessorFactory,
      TopicAbnormalCallback topicAbnormalCallback) throws TException {
    this(consumerGroup, consumerConfig, credential, topicTalosResourceName,
        messageProcessorFactory, "", topicAbnormalCallback);
  }
// for test
/**
 * Test-only constructor: wires in mock client/admin and a pre-built fetcher
 * map, then runs the normal startup sequence (topic check, registration,
 * worker query, balance, background tasks).
 *
 * Fix: the scheduled executors are now created AFTER
 * {@code checkAndGetTopicInfo(...)}, because their thread-factory names
 * embed {@code topicName}, which is only initialized by that call.
 * Previously the threads were named e.g. "talos-consumer-renew-null".
 *
 * @throws Exception if any startup step (topic check, self registration,
 *                   worker query, balance) fails
 */
public TalosConsumer(String consumerGroupName, TalosConsumerConfig consumerConfig,
    TopicTalosResourceName topicTalosResourceName, String workerId,
    TopicAbnormalCallback abnormalCallback,
    ConsumerService.Iface consumerClientMock, TalosAdmin talosAdminMock,
    Map<Integer, PartitionFetcher> fetcherMap) throws Exception {
  this.workerId = workerId;
  random = new Random();
  consumerGroup = consumerGroupName;
  partitionFetcherMap = fetcherMap;
  talosConsumerConfig = consumerConfig;
  talosAdmin = talosAdminMock;
  consumerClient = consumerClientMock;
  topicAbnormalCallback = abnormalCallback;
  readWriteLock = new ReentrantReadWriteLock();
  this.scheduleInfoCache = ScheduleInfoCache.getScheduleInfoCache(topicTalosResourceName,
      consumerConfig, null, null);
  LOG.info("The worker: " + workerId + " is initializing...");
  // check and get topic info such as partitionNumber; this also sets
  // 'topicName', which the thread factories below rely on for naming
  checkAndGetTopicInfo(topicTalosResourceName);
  partitionScheduledExecutor = Executors.newSingleThreadScheduledExecutor(
      new NamedThreadFactory("talos-consumer-partitionCheck-" + topicName));
  workerScheduleExecutor = Executors.newSingleThreadScheduledExecutor(
      new NamedThreadFactory("talos-consumer-workerCheck-" + topicName));
  renewScheduleExecutor = Executors.newSingleThreadScheduledExecutor(
      new NamedThreadFactory("talos-consumer-renew-" + topicName));
  reBalanceExecutor = Executors.newSingleThreadExecutor(
      new NamedThreadFactory("talos-consumer-reBalance-" + topicName));
  // register self workerId
  registerSelf();
  // get worker info
  getWorkerInfo();
  // do balance and init simple consumer
  makeBalance();
  // start CheckPartitionTask/CheckWorkerInfoTask/RenewTask
  initCheckPartitionTask();
  initCheckWorkerInfoTask();
  initRenewTask();
}
// get current committed offset of every serving partition
/**
 * Snapshot the committed offset of every partition this worker currently
 * holds the lock for.
 *
 * Fix: the read lock is now released in a finally block so it cannot leak
 * if {@code getCurCheckPoint()} on a fetcher throws.
 *
 * @return map of partitionId -> committed offset for serving partitions
 */
public Map<Integer, Long> getCurCheckPoint() {
  Map<Integer, Long> curCheckPoint = new HashMap<Integer, Long>();
  readWriteLock.readLock().lock();
  try {
    for (Map.Entry<Integer, PartitionFetcher> entry :
        partitionFetcherMap.entrySet()) {
      // only partitions whose lock we still hold have a meaningful offset
      if (entry.getValue().isHoldingLock()) {
        curCheckPoint.put(entry.getKey(), entry.getValue().getCurCheckPoint());
      }
    }
  } finally {
    readWriteLock.readLock().unlock();
  }
  return curCheckPoint;
}
/**
 * Derive the topic name from the resource name, ask the server for the
 * topic's describe info, and verify the server-side resource name still
 * matches ours (a mismatch means the topic was deleted or re-created).
 * On success, records the partition number and the resource name.
 *
 * @throws IllegalArgumentException if the resource name no longer matches
 * @throws TException on RPC failure
 */
private void checkAndGetTopicInfo(TopicTalosResourceName topicTalosResourceName)
throws TException {
topicName = Utils.getTopicNameByResourceName(
topicTalosResourceName.getTopicTalosResourceName());
GetDescribeInfoResponse response = talosAdmin.getDescribeInfo(
new GetDescribeInfoRequest(topicName));
// resource name mismatch => the topic we were given no longer exists
if (!topicTalosResourceName.equals(
response.getTopicTalosResourceName())) {
LOG.info("The consumer initialize failed by topic not found");
throw new IllegalArgumentException("The topic: " +
topicTalosResourceName.getTopicTalosResourceName() + " not found");
}
setPartitionNumber(response.getPartitionNumber());
this.topicTalosResourceName = topicTalosResourceName;
LOG.info("The worker: " + workerId + " check and get topic info done");
}
/**
 * Register this worker with the consumer service, retrying up to
 * {@code selfRegisterMaxRetry} additional times on RPC error or a
 * rejected registration.
 *
 * Fix: the error log message was missing a leading space, producing
 * "The worker: Xregister self got error".
 *
 * @throws RuntimeException if every attempt fails
 * @throws TException declared for API compatibility
 */
private void registerSelf() throws TException {
  ConsumeUnit consumeUnit = new ConsumeUnit(consumerGroup,
      topicTalosResourceName, new ArrayList<Integer>(), workerId);
  LockWorkerRequest request = new LockWorkerRequest(consumeUnit);
  LockWorkerResponse lockWorkerResponse = null;
  // +1: the first attempt plus getSelfRegisterMaxRetry() retries
  int tryCount = talosConsumerConfig.getSelfRegisterMaxRetry() + 1;
  while (tryCount-- > 0) {
    try {
      lockWorkerResponse = consumerClient.lockWorker(request);
    } catch (Throwable e) {
      LOG.error("The worker: " + workerId + " register self got error: ", e);
      continue;
    }
    if (lockWorkerResponse.isRegisterSuccess()) {
      LOG.info("The worker: " + workerId + " register self success");
      return;
    }
    LOG.warn("The worker: " + workerId +
        " register self failed, make " + tryCount + " retry");
  }
  LOG.error("The worker: " + workerId + " register self failed");
  throw new RuntimeException(workerId + " register self failed");
}
/**
 * Query the server for the current workerId -> partition-list map of this
 * consumer group and, if the response is trustworthy, replace the local
 * {@code workerInfoMap} under the write lock.
 *
 * @throws TException on RPC failure
 */
private void getWorkerInfo() throws TException {
QueryWorkerRequest queryWorkerRequest = new QueryWorkerRequest(
consumerGroup, topicTalosResourceName);
QueryWorkerResponse queryWorkerResponse = consumerClient.queryWorker(
queryWorkerRequest);
// if queryWorkerInfoMap size equals 0,
// it represents hbase failed error, do not update local map
// because registration, the queryWorkerInfoMap size >= 1 at least
// if queryWorkerInfoMap not contains self, it indicates renew failed,
// do not update local map to prevent a bad re-balance
if (queryWorkerResponse.getWorkerMapSize() == 0 ||
(!queryWorkerResponse.getWorkerMap().containsKey(workerId))) {
return;
}
readWriteLock.writeLock().lock();
workerInfoMap = queryWorkerResponse.getWorkerMap();
readWriteLock.writeLock().unlock();
}
/**
 * Fill {@code targetList} with the desired partition count per worker,
 * sorted descending, so that the counts sum to {@code copyPartitionNum}
 * and differ by at most one across workers.
 *
 * @param copyPartitionNum snapshot of the topic's partition count
 * @param workerNumber     number of live workers in the group
 * @param targetList       output list; one entry per worker that should
 *                         serve at least one partition
 */
private void calculateTargetList(int copyPartitionNum, int workerNumber,
List<Integer> targetList) {
if (workerNumber == 1) {
// one worker serving all partitions
targetList.add(copyPartitionNum);
} else if (copyPartitionNum < workerNumber) {
// per worker per partition, the extra worker must be idle
for (int i = 0; i < copyPartitionNum; ++i) {
targetList.add(1);
}
} else {
// calculate the target sequence
int min, remainder, sum = 0;
min = copyPartitionNum / workerNumber;
remainder = copyPartitionNum % workerNumber;
// add max by remainder
for (int i = 0; i < remainder; ++i) {
targetList.add(min + 1);
sum += (min + 1);
}
// add min by (workerNumber - remainder)
for (int i = 0; i < (workerNumber - remainder); ++i) {
targetList.add(min);
sum += min;
}
// sanity check: distribution must cover every partition exactly once
Preconditions.checkArgument(sum == copyPartitionNum);
}
// sort target by descending
Collections.sort(targetList, Collections.reverseOrder());
LOG.info("worker: " + workerId + " calculate target partitions done: " +
targetList);
}
/**
 * Build a list of (workerId, servingPartitionCount) pairs from the given
 * worker map and sort it so the worker with the most partitions comes
 * first; pairing with the descending target list drives the balance step.
 */
private void calculateWorkerPairs(Map<String, List<Integer>> copyWorkerMap,
List<WorkerPair> sortedWorkerPairs) {
for (Map.Entry<String, List<Integer>> entry : copyWorkerMap.entrySet()) {
sortedWorkerPairs.add(new WorkerPair(entry.getKey(), entry.getValue().size()));
}
Collections.sort(sortedWorkerPairs); // descending
LOG.info("worker: " + workerId + " calculate sorted worker pairs: " +
sortedWorkerPairs);
}
/**
 * Re-balance this worker's share of partitions: compare the number of
 * partitions it is serving ("has") with its slot in the ideal target
 * distribution and either steal idle partitions, release surplus ones,
 * or do nothing when already balanced. Steal and release are mutually
 * exclusive in one pass.
 */
private void makeBalance() {
/**
* When start make balance, we deep copy 'partitionNumber' and 'workerInfoMap'
* to prevent both value appear inconsistent during the process makeBalance
*/
int copyPartitionNum = partitionNumber;
Map<String, List<Integer>> copyWorkerInfoMap = deepCopyWorkerInfoMap();
/**
* if workerInfoMap not contains workerId, there must be error in renew task.
* the renew task will cancel the consuming task and stop to read data,
* so just return and do not care balance.
*/
if (!copyWorkerInfoMap.containsKey(workerId)) {
LOG.error("WorkerInfoMap not contains worker: " + workerId +
". There may be some error for renew task.");
return;
}
// calculate target and sorted worker pairs
List<Integer> targetList = new ArrayList<Integer>();
List<WorkerPair> sortedWorkerPairs = new ArrayList<WorkerPair>();
calculateTargetList(copyPartitionNum, copyWorkerInfoMap.size(), targetList);
calculateWorkerPairs(copyWorkerInfoMap, sortedWorkerPairs);
// judge stealing or release
List<Integer> toStealList = new ArrayList<Integer>();
List<Integer> toReleaseList = new ArrayList<Integer>();
// both lists are sorted descending, so this worker's index in
// sortedWorkerPairs selects its personal quota in targetList
for (int i = 0; i < sortedWorkerPairs.size(); ++i) {
if (sortedWorkerPairs.get(i).workerId.equals(workerId)) {
List<Integer> hasList = getHasList();
int has = hasList.size();
// workerNum > partitionNum, idle workers have no match target, do nothing
if (i >= targetList.size()) {
break;
}
int target = targetList.get(i);
if (LOG.isDebugEnabled()) {
LOG.debug("Worker: " + workerId + " has: " + has + " target: " + target);
}
// a balanced state, do nothing
if (has == target) {
break;
} else if (has > target) {
// release partitions
int toReleaseNum = has - target;
while (toReleaseNum-- > 0 && hasList.size() > 0) {
toReleaseList.add(hasList.remove(0));
}
} else {
// stealing partitions; pick victims randomly among idle ones
List<Integer> idlePartitions = getIdlePartitions();
if (idlePartitions.size() > 0) {
int toStealnum = target - has;
while (toStealnum-- > 0 && idlePartitions.size() > 0) {
int randomIndex = random.nextInt(idlePartitions.size());
toStealList.add(idlePartitions.remove(randomIndex));
}
}
} // else
break;
} // if
} // for
// steal or release partition lock or reached a balance state
Preconditions.checkArgument(!(toStealList.size() > 0 &&
toReleaseList.size() > 0));
if (toStealList.size() > 0) {
stealPartitionLock(toStealList);
} else if (toReleaseList.size() > 0) {
releasePartitionLock(toReleaseList);
} else {
// do nothing when reach balance state
LOG.info("The worker: " + workerId + " have reached a balance state.");
}
}
/**
 * Try to acquire the lock for each partition in {@code toStealList},
 * creating a {@link PartitionFetcher} for partitions this worker has
 * never served before, then driving each fetcher to the LOCKED state.
 *
 * Fix: the write lock is now released in a finally block so it cannot
 * leak if fetcher construction or {@code lock()} throws.
 */
private void stealPartitionLock(List<Integer> toStealList) {
  LOG.info("Worker: " + workerId + " try to steal " + toStealList.size() +
      " partition: " + toStealList);
  // try to lock and invoke serving partition PartitionFetcher to 'LOCKED' state
  readWriteLock.writeLock().lock();
  try {
    for (Integer partitionId : toStealList) {
      if (!partitionFetcherMap.containsKey(partitionId)) {
        // Note 'partitionCheckPoint.get(partitionId)' may be null, it's ok
        PartitionFetcher partitionFetcher = new PartitionFetcher(consumerGroup,
            topicName, topicTalosResourceName, partitionId, talosConsumerConfig,
            workerId, consumerClient, talosClientFactory.newMessageClient(),
            messageProcessorFactory.createProcessor(),
            messageReaderFactory.createMessageReader(talosConsumerConfig),
            partitionCheckPoint.get(partitionId));
        partitionFetcherMap.put(partitionId, partitionFetcher);
      }
      partitionFetcherMap.get(partitionId).lock();
    }
  } finally {
    readWriteLock.writeLock().unlock();
  }
}
/**
 * Asynchronously give up the given partitions: each fetcher stops
 * reading, commits its offset, and releases the partition lock.
 * Every partition in the list must already have a fetcher.
 */
private void releasePartitionLock(List<Integer> toReleaseList) {
LOG.info("Worker: " + workerId + " try to release " + toReleaseList.size() +
" partition: " + toReleaseList);
// stop read, commit offset, unlock the partition async
for (Integer partitionId : toReleaseList) {
Preconditions.checkArgument(partitionFetcherMap.containsKey(partitionId));
partitionFetcherMap.get(partitionId).unlock();
}
}
/** Schedule the periodic partition-number check (fixed delay). */
private void initCheckPartitionTask() {
// check and update partition number every 1 minutes delay by default
partitionScheduledExecutor.scheduleWithFixedDelay(new CheckPartitionTask(),
talosConsumerConfig.getPartitionCheckInterval(),
talosConsumerConfig.getPartitionCheckInterval(), TimeUnit.MILLISECONDS);
}
/** Schedule the periodic worker-info refresh (fixed delay). */
private void initCheckWorkerInfoTask() {
workerScheduleExecutor.scheduleWithFixedDelay(new CheckWorkerInfoTask(),
talosConsumerConfig.getWorkerInfoCheckInterval(),
talosConsumerConfig.getWorkerInfoCheckInterval(), TimeUnit.MILLISECONDS);
}
// NOTE: fixed *rate* (not delay) so heartbeat renewal keeps its cadence
/** Schedule the periodic lock/heartbeat renew task (fixed rate). */
private void initRenewTask() {
renewScheduleExecutor.scheduleAtFixedRate(new ReNewTask(),
talosConsumerConfig.getReNewCheckInterval(),
talosConsumerConfig.getReNewCheckInterval(), TimeUnit.MILLISECONDS);
}
/** Update the cached partition count under the write lock. */
private void setPartitionNumber(int partitionNum) {
readWriteLock.writeLock().lock();
partitionNumber = partitionNum;
readWriteLock.writeLock().unlock();
}
/**
 * Compute the partitions currently served by no worker: start from all
 * partition ids [0, partitionNumber) and remove every id appearing in
 * {@code workerInfoMap}.
 *
 * Fixes: replaces the deprecated {@code new Integer(...)} boxing with
 * {@code Integer.valueOf(...)} (needed so remove-by-value is used instead
 * of remove-by-index), and releases the read lock in a finally block so
 * it cannot leak if the precondition check throws.
 *
 * @return ids of unserved partitions
 */
private List<Integer> getIdlePartitions() {
  readWriteLock.readLock().lock();
  try {
    Preconditions.checkArgument(partitionNumber > 0);
    List<Integer> idlePartitions = new ArrayList<Integer>();
    for (int i = 0; i < partitionNumber; ++i) {
      idlePartitions.add(i);
    }
    for (List<Integer> valueList : workerInfoMap.values()) {
      for (int partitionId : valueList) {
        // box explicitly: List.remove(Object) by value, not remove(int) by index
        idlePartitions.remove(Integer.valueOf(partitionId));
      }
    }
    return idlePartitions;
  } finally {
    readWriteLock.readLock().unlock();
  }
}
/**
 * List the partitions this worker is actively serving (fetchers in the
 * serving state).
 *
 * Fix: the read lock is now released in a finally block so it cannot
 * leak if {@code isServing()} throws.
 *
 * @return ids of partitions served by this worker
 */
private List<Integer> getHasList() {
  List<Integer> hasList = new ArrayList<Integer>();
  readWriteLock.readLock().lock();
  try {
    for (Map.Entry<Integer, PartitionFetcher> entry :
        partitionFetcherMap.entrySet()) {
      if (entry.getValue().isServing()) {
        hasList.add(entry.getKey());
      }
    }
  } finally {
    readWriteLock.readLock().unlock();
  }
  return hasList;
}
/** Release every partition this worker currently serves. */
private void cancelAllConsumingTask() {
releasePartitionLock(getHasList());
}
/** Shut down every fetcher ever created, serving or not. */
private void shutDownAllFetcher() {
for (Map.Entry<Integer, PartitionFetcher> entry :
partitionFetcherMap.entrySet()) {
entry.getValue().shutDown();
}
}
/**
 * Stop the consumer: shut down all fetchers, stop every background
 * executor, and release the schedule-info cache for this topic.
 */
public void shutDown() {
LOG.info("Worker: " + workerId + " is shutting down...");
shutDownAllFetcher();
partitionScheduledExecutor.shutdownNow();
workerScheduleExecutor.shutdownNow();
renewScheduleExecutor.shutdownNow();
reBalanceExecutor.shutdownNow();
consumerMonitorThread.shutdownNow();
scheduleInfoCache.shutDown(topicTalosResourceName);
LOG.info("Worker: " + workerId + " shutdown.");
}
/**
 * Snapshot {@code workerInfoMap} under the read lock, copying each
 * partition list so later mutations cannot affect the caller.
 *
 * Fix: the read lock is now released in a finally block so it cannot
 * leak if list copying throws.
 *
 * @return an independent deep copy of workerId -> partition-id list
 */
private Map<String, List<Integer>> deepCopyWorkerInfoMap() {
  readWriteLock.readLock().lock();
  try {
    Map<String, List<Integer>> copyMap = new HashMap<String, List<Integer>>(
        workerInfoMap.size());
    for (Map.Entry<String, List<Integer>> entry : workerInfoMap.entrySet()) {
      copyMap.put(entry.getKey(), new ArrayList<Integer>(entry.getValue()));
    }
    return copyMap;
  } finally {
    readWriteLock.readLock().unlock();
  }
}
/** Start periodic metric reporting when client monitoring is enabled. */
private void initConsumerMonitorTask() {
if (talosConsumerConfig.isOpenClientMonitor()) {
// push metric data to falcon every minutes
consumerMonitorThread.scheduleAtFixedRate(new ConsumerMonitorTask(),
talosConsumerConfig.getReportMetricIntervalMillis(),
talosConsumerConfig.getReportMetricIntervalMillis(), TimeUnit.MILLISECONDS);
}
}
/** Collect metrics from every fetcher and push them to Falcon as JSON. */
private void pushMetricData() {
JsonArray jsonArray = new JsonArray();
for (Map.Entry<Integer, PartitionFetcher> entry : partitionFetcherMap.entrySet()) {
jsonArray.addAll(entry.getValue().getFalconData());
}
falconWriter.pushFaclonData(jsonArray.toString());
}
}
| |
// ========================================================================
// Copyright (c) 2007-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.server;
import javax.servlet.AsyncContext;
import javax.servlet.AsyncEvent;
import javax.servlet.AsyncListener;
import javax.servlet.ServletResponseWrapper;
import javax.servlet.ServletException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.eclipse.jetty.continuation.Continuation;
import org.eclipse.jetty.continuation.ContinuationThrowable;
import org.eclipse.jetty.continuation.ContinuationListener;
import org.eclipse.jetty.io.AsyncEndPoint;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.server.handler.ContextHandler.Context;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.thread.Timeout;
/* ------------------------------------------------------------ */
/** Implementation of Continuation and AsyncContext interfaces
*
*/
public class AsyncContinuation implements AsyncContext, Continuation
{
private static final Logger LOG = Log.getLogger(AsyncContinuation.class);
// default suspend timeout when the application does not set one
private final static long DEFAULT_TIMEOUT=30000L;
// shared pre-built throwable used by undispatch() to avoid the cost of
// filling a stack trace on every throw (debug mode throws a fresh one)
private final static ContinuationThrowable __exception = new ContinuationThrowable();
// STATES:
// handling() suspend() unhandle() resume() complete() doComplete()
// startAsync() dispatch()
// IDLE DISPATCHED
// DISPATCHED ASYNCSTARTED UNCOMPLETED
// ASYNCSTARTED ASYNCWAIT REDISPATCHING COMPLETING
// REDISPATCHING REDISPATCHED
// ASYNCWAIT REDISPATCH COMPLETING
// REDISPATCH REDISPATCHED
// REDISPATCHED ASYNCSTARTED UNCOMPLETED
// COMPLETING UNCOMPLETED UNCOMPLETED
// UNCOMPLETED COMPLETED
// COMPLETED
private static final int __IDLE=0; // Idle request
private static final int __DISPATCHED=1; // Request dispatched to filter/servlet
private static final int __ASYNCSTARTED=2; // Suspend called, but not yet returned to container
private static final int __REDISPATCHING=3;// resumed while dispatched
private static final int __ASYNCWAIT=4; // Suspended and parked
private static final int __REDISPATCH=5; // Has been scheduled
private static final int __REDISPATCHED=6; // Request redispatched to filter/servlet
private static final int __COMPLETING=7; // complete while dispatched
private static final int __UNCOMPLETED=8; // Request is completable
private static final int __COMPLETED=9; // Request is complete
/* ------------------------------------------------------------ */
// the connection this continuation belongs to; set via setConnection()
protected HttpConnection _connection;
// listener lists are swapped between these two fields on suspend() so
// that onStartAsync can re-register without concurrent modification
private List<AsyncListener> _lastAsyncListeners;
private List<AsyncListener> _asyncListeners;
private List<ContinuationListener> _continuationListeners;
/* ------------------------------------------------------------ */
// current state machine value (one of the __* constants above);
// guarded by synchronized(this)
private int _state;
private boolean _initial;      // true until the first unhandle after suspend
private boolean _resumed;      // set by dispatch()/resume()
private boolean _expired;      // set when the suspend timeout fires
private volatile boolean _responseWrapped;
private long _timeoutMs=DEFAULT_TIMEOUT;
private AsyncEventState _event;
// absolute expiry time for blocking-connector timeouts; 0 = cancelled
private volatile long _expireAt;
// true when used via the Continuation API rather than Servlet-3 async
private volatile boolean _continuation;
/* ------------------------------------------------------------ */
/** New continuation starts IDLE and marked initial. */
protected AsyncContinuation()
{
_state=__IDLE;
_initial=true;
}
/* ------------------------------------------------------------ */
/** Bind this continuation to its connection. */
protected void setConnection(final HttpConnection connection)
{
synchronized(this)
{
_connection=connection;
}
}
/* ------------------------------------------------------------ */
/** Register an AsyncListener for the current async cycle (lazy list). */
public void addListener(AsyncListener listener)
{
synchronized(this)
{
if (_asyncListeners==null)
_asyncListeners=new ArrayList<AsyncListener>();
_asyncListeners.add(listener);
}
}
/* ------------------------------------------------------------ */
/**
 * Register an AsyncListener with an associated request/response pair.
 * NOTE(review): the supplied request/response are currently ignored
 * (see TODO) — events are delivered with the suspend-time pair.
 */
public void addListener(AsyncListener listener,ServletRequest request, ServletResponse response)
{
synchronized(this)
{
// TODO handle the request/response ???
if (_asyncListeners==null)
_asyncListeners=new ArrayList<AsyncListener>();
_asyncListeners.add(listener);
}
}
/* ------------------------------------------------------------ */
/** Register a ContinuationListener for timeout/complete callbacks. */
public void addContinuationListener(ContinuationListener listener)
{
synchronized(this)
{
if (_continuationListeners==null)
_continuationListeners=new ArrayList<ContinuationListener>();
_continuationListeners.add(listener);
}
}
/* ------------------------------------------------------------ */
/** Set the suspend timeout in milliseconds (&lt;=0 disables scheduling). */
public void setTimeout(long ms)
{
synchronized(this)
{
_timeoutMs=ms;
}
}
/* ------------------------------------------------------------ */
/** @return the configured suspend timeout in milliseconds */
public long getTimeout()
{
synchronized(this)
{
return _timeoutMs;
}
}
/* ------------------------------------------------------------ */
/** @return the event state of the current async cycle, or null */
public AsyncEventState getAsyncEventState()
{
synchronized(this)
{
return _event;
}
}
/* ------------------------------------------------------------ */
/**
 * @see org.eclipse.jetty.continuation.Continuation#isResponseWrapped()
 */
/* ------------------------------------------------------------ */
/**
 * @return true if suspend(response) was called with a wrapped response,
 *         in which case the wrapper must be used for output
 * @see org.eclipse.jetty.continuation.Continuation#isResponseWrapped()
 */
public boolean isResponseWrapped()
{
return _responseWrapped;
}
/* ------------------------------------------------------------ */
/* (non-Javadoc)
 * @see javax.servlet.ServletRequest#isInitial()
 */
/** @return true while this is the first dispatch of the request */
public boolean isInitial()
{
synchronized(this)
{
return _initial;
}
}
/* ------------------------------------------------------------ */
/* (non-Javadoc)
 * @see javax.servlet.ServletRequest#isSuspended()
 */
/** @return true if the request is suspended and not yet redispatched */
public boolean isSuspended()
{
    synchronized(this)
    {
        // suspended covers: async just started, resumed/completed while
        // still dispatched, or parked awaiting an event
        return _state==__ASYNCSTARTED
            || _state==__REDISPATCHING
            || _state==__COMPLETING
            || _state==__ASYNCWAIT;
    }
}
/* ------------------------------------------------------------ */
/** @return identity string plus the human-readable status */
@Override
public String toString()
{
    synchronized (this)
    {
        return super.toString()+"@"+getStatusString();
    }
}

/* ------------------------------------------------------------ */
/**
 * @return the state name plus ",initial"/",resumed"/",expired" flags
 *         as applicable
 */
public String getStatusString()
{
    synchronized (this)
    {
        final String name;
        switch (_state)
        {
            case __IDLE:          name="IDLE"; break;
            case __DISPATCHED:    name="DISPATCHED"; break;
            case __ASYNCSTARTED:  name="ASYNCSTARTED"; break;
            case __ASYNCWAIT:     name="ASYNCWAIT"; break;
            case __REDISPATCHING: name="REDISPATCHING"; break;
            case __REDISPATCH:    name="REDISPATCH"; break;
            case __REDISPATCHED:  name="REDISPATCHED"; break;
            case __COMPLETING:    name="COMPLETING"; break;
            case __UNCOMPLETED:   name="UNCOMPLETED"; break;
            case __COMPLETED:     name="COMPLETE"; break; // historical spelling
            default:              name="UNKNOWN?"+_state; break;
        }
        final StringBuilder status=new StringBuilder(name);
        if (_initial)
            status.append(",initial");
        if (_resumed)
            status.append(",resumed");
        if (_expired)
            status.append(",expired");
        return status.toString();
    }
}
/* ------------------------------------------------------------ */
/**
 * Called when the connection begins (or re-begins) handling the request.
 * Transitions IDLE->DISPATCHED, REDISPATCH->REDISPATCHED, and absorbs a
 * pending COMPLETING into UNCOMPLETED.
 *
 * @return false if the handling of the request should not proceed
 * @throws IllegalStateException from any other state
 */
protected boolean handling()
{
synchronized (this)
{
_continuation=false;
_responseWrapped=false;
switch(_state)
{
case __IDLE:
_initial=true;
_state=__DISPATCHED;
if (_lastAsyncListeners!=null)
_lastAsyncListeners.clear();
// NOTE(review): the 'else' below binds to this second 'if' only —
// when _asyncListeners is null the cleared _lastAsyncListeners list
// is recycled into _asyncListeners. Looks intentional (list reuse),
// but confirm before restructuring.
if (_asyncListeners!=null)
_asyncListeners.clear();
else
{
_asyncListeners=_lastAsyncListeners;
_lastAsyncListeners=null;
}
return true;
case __COMPLETING:
_state=__UNCOMPLETED;
return false;
case __ASYNCWAIT:
return false;
case __REDISPATCH:
_state=__REDISPATCHED;
return true;
default:
throw new IllegalStateException(this.getStatusString());
}
}
}
/* ------------------------------------------------------------ */
/* (non-Javadoc)
 * @see javax.servlet.ServletRequest#suspend(long)
 */
/**
 * Start an async cycle (DISPATCHED/REDISPATCHED -> ASYNCSTARTED), build
 * or reuse the AsyncEventState for the given context/request/response,
 * swap the listener lists, and fire onStartAsync on last cycle's
 * listeners outside the lock.
 *
 * @throws IllegalStateException if not currently dispatched
 */
protected void suspend(final ServletContext context,
final ServletRequest request,
final ServletResponse response)
{
synchronized (this)
{
switch(_state)
{
case __DISPATCHED:
case __REDISPATCHED:
_resumed=false;
_expired=false;
// reuse the event only if context/request/response are unchanged
if (_event==null || request!=_event.getSuppliedRequest() || response != _event.getSuppliedResponse() || context != _event.getServletContext())
_event=new AsyncEventState(context,request,response);
else
{
_event._dispatchContext=null;
_event._path=null;
}
_state=__ASYNCSTARTED;
// swap listener lists: last cycle's listeners get onStartAsync,
// new registrations accumulate in the (cleared) other list
List<AsyncListener> recycle=_lastAsyncListeners;
_lastAsyncListeners=_asyncListeners;
_asyncListeners=recycle;
if (_asyncListeners!=null)
_asyncListeners.clear();
break;
default:
throw new IllegalStateException(this.getStatusString());
}
}
// fire callbacks outside the lock; a listener failure is logged, not fatal
if (_lastAsyncListeners!=null)
{
for (AsyncListener listener : _lastAsyncListeners)
{
try
{
listener.onStartAsync(_event);
}
catch(Exception e)
{
LOG.warn(e);
}
}
}
}
/* ------------------------------------------------------------ */
/**
 * Signal that the HttpConnection has finished handling the request.
 * For blocking connectors, this call may block if the request has
 * been suspended (startAsync called).
 * @return true if handling is complete, false if the request should
 * be handled again (eg because of a resume that happened before unhandle was called)
 */
protected boolean unhandle()
{
synchronized (this)
{
switch(_state)
{
case __REDISPATCHED:
case __DISPATCHED:
_state=__UNCOMPLETED;
return true;
case __IDLE:
throw new IllegalStateException(this.getStatusString());
case __ASYNCSTARTED:
_initial=false;
_state=__ASYNCWAIT;
// on a blocking connector this waits here for timeout/resume/
// complete, so the state may have changed when it returns
scheduleTimeout(); // could block and change state.
if (_state==__ASYNCWAIT)
return true;
else if (_state==__COMPLETING)
{
_state=__UNCOMPLETED;
return true;
}
// otherwise we were resumed while waiting: handle again
_initial=false;
_state=__REDISPATCHED;
return false;
case __REDISPATCHING:
_initial=false;
_state=__REDISPATCHED;
return false;
case __COMPLETING:
_initial=false;
_state=__UNCOMPLETED;
return true;
default:
throw new IllegalStateException(this.getStatusString());
}
}
}
/* ------------------------------------------------------------ */
/**
 * Resume the suspended request. ASYNCSTARTED -> REDISPATCHING (still in
 * the dispatched thread, no scheduling needed); ASYNCWAIT -> REDISPATCH
 * plus an actual dispatch to the connector; REDISPATCH is a no-op.
 *
 * @throws IllegalStateException from any other state
 */
public void dispatch()
{
boolean dispatch=false;
synchronized (this)
{
switch(_state)
{
case __ASYNCSTARTED:
_state=__REDISPATCHING;
_resumed=true;
return;
case __ASYNCWAIT:
// if already expired, the expiry path will do the dispatch
dispatch=!_expired;
_state=__REDISPATCH;
_resumed=true;
break;
case __REDISPATCH:
return;
default:
throw new IllegalStateException(this.getStatusString());
}
}
// schedule outside the lock
if (dispatch)
{
cancelTimeout();
scheduleDispatch();
}
}
/* ------------------------------------------------------------ */
/**
 * Invoked when the suspend timeout fires. Marks the continuation expired,
 * notifies AsyncListeners (onTimeout) then ContinuationListeners outside
 * the lock, and finally — if still suspended — either redispatches
 * (Continuation usage) or completes (Servlet-3 async usage).
 */
protected void expired()
{
final List<ContinuationListener> cListeners;
final List<AsyncListener> aListeners;
synchronized (this)
{
switch(_state)
{
case __ASYNCSTARTED:
case __ASYNCWAIT:
cListeners=_continuationListeners;
aListeners=_asyncListeners;
break;
default:
// not suspended any more; nothing to expire
cListeners=null;
aListeners=null;
return;
}
_expired=true;
}
// listener callbacks run outside the lock; failures are logged only
if (aListeners!=null)
{
for (AsyncListener listener : aListeners)
{
try
{
listener.onTimeout(_event);
}
catch(Exception e)
{
LOG.warn(e);
}
}
}
if (cListeners!=null)
{
for (ContinuationListener listener : cListeners)
{
try
{
listener.onTimeout(this);
}
catch(Exception e)
{
LOG.warn(e);
}
}
}
// a listener may have resumed/completed already; re-check under the lock
synchronized (this)
{
switch(_state)
{
case __ASYNCSTARTED:
case __ASYNCWAIT:
if (_continuation)
dispatch();
else
// TODO maybe error dispatch?
complete();
}
}
scheduleDispatch();
}
/* ------------------------------------------------------------ */
/* (non-Javadoc)
 * @see javax.servlet.ServletRequest#complete()
 */
/**
 * Complete the async cycle. ASYNCSTARTED -> COMPLETING (the dispatched
 * thread will observe it in unhandle); ASYNCWAIT -> COMPLETING plus a
 * dispatch so the connection can finish the response.
 *
 * @throws IllegalStateException if dispatched or not suspended
 */
public void complete()
{
// just like resume, except don't set _resumed=true;
boolean dispatch=false;
synchronized (this)
{
switch(_state)
{
case __DISPATCHED:
case __REDISPATCHED:
throw new IllegalStateException(this.getStatusString());
case __ASYNCSTARTED:
_state=__COMPLETING;
return;
case __ASYNCWAIT:
_state=__COMPLETING;
// if expired, the expiry path already schedules the dispatch
dispatch=!_expired;
break;
default:
throw new IllegalStateException(this.getStatusString());
}
}
if (dispatch)
{
cancelTimeout();
scheduleDispatch();
}
}
/* ------------------------------------------------------------ */
/**
 * Instantiate an AsyncListener of the given class via its no-argument
 * constructor.
 *
 * Fix: uses {@code getDeclaredConstructor().newInstance()} instead of the
 * problematic {@code Class.newInstance()}, which silently propagates any
 * checked exception thrown by the constructor; with the Constructor form
 * such exceptions are wrapped and reliably converted to ServletException
 * by the catch below.
 *
 * @param clazz listener class with an accessible no-arg constructor
 * @return a new listener instance
 * @throws ServletException if instantiation fails for any reason
 */
@Override
public <T extends AsyncListener> T createListener(Class<T> clazz) throws ServletException
{
    try
    {
        // TODO inject
        return clazz.getDeclaredConstructor().newInstance();
    }
    catch(Exception e)
    {
        throw new ServletException(e);
    }
}
/* ------------------------------------------------------------ */
/* (non-Javadoc)
 * @see javax.servlet.ServletRequest#complete()
 */
/**
 * Final completion step (UNCOMPLETED -> COMPLETED): fire onError (if
 * {@code ex} is non-null, after stashing it as request attributes) or
 * onComplete on AsyncListeners, then onComplete on ContinuationListeners.
 * Callbacks run outside the lock; failures are logged only.
 *
 * @param ex failure that ended the cycle, or null for normal completion
 * @throws IllegalStateException if not in the UNCOMPLETED state
 */
protected void doComplete(Throwable ex)
{
final List<ContinuationListener> cListeners;
final List<AsyncListener> aListeners;
synchronized (this)
{
switch(_state)
{
case __UNCOMPLETED:
_state=__COMPLETED;
cListeners=_continuationListeners;
aListeners=_asyncListeners;
break;
default:
cListeners=null;
aListeners=null;
throw new IllegalStateException(this.getStatusString());
}
}
if (aListeners!=null)
{
for (AsyncListener listener : aListeners)
{
try
{
if (ex!=null)
{
// expose the failure to error-page handling before onError
_event.getSuppliedRequest().setAttribute(Dispatcher.ERROR_EXCEPTION,ex);
_event.getSuppliedRequest().setAttribute(Dispatcher.ERROR_MESSAGE,ex.getMessage());
listener.onError(_event);
}
else
listener.onComplete(_event);
}
catch(Exception e)
{
LOG.warn(e);
}
}
}
if (cListeners!=null)
{
for (ContinuationListener listener : cListeners)
{
try
{
listener.onComplete(this);
}
catch(Exception e)
{
LOG.warn(e);
}
}
}
}
/* ------------------------------------------------------------ */
/**
 * Reset this continuation for reuse on the next request: back to IDLE
 * with all flags, timeout and continuation listeners cleared.
 * AsyncListeners are handled by handling(), not here.
 *
 * @throws IllegalStateException if still dispatched
 */
protected void recycle()
{
synchronized (this)
{
switch(_state)
{
case __DISPATCHED:
case __REDISPATCHED:
throw new IllegalStateException(getStatusString());
default:
_state=__IDLE;
}
_initial = true;
_resumed=false;
_expired=false;
_responseWrapped=false;
cancelTimeout();
_timeoutMs=DEFAULT_TIMEOUT;
_continuationListeners=null;
}
}
/* ------------------------------------------------------------ */
/** Cancel any pending timeout and drop continuation listeners. */
public void cancel()
{
synchronized (this)
{
cancelTimeout();
_continuationListeners=null;
}
}
/* ------------------------------------------------------------ */
/**
 * Ask the connector to redispatch this connection. Only async endpoints
 * need an explicit dispatch; a blocking connector's thread is woken from
 * its wait in scheduleTimeout() instead.
 */
protected void scheduleDispatch()
{
EndPoint endp=_connection.getEndPoint();
if (!endp.isBlocking())
{
((AsyncEndPoint)endp).dispatch();
}
}
/* ------------------------------------------------------------ */
/**
 * Arm the suspend timeout. For a blocking endpoint this BLOCKS the
 * calling thread in wait() until the deadline passes or cancelTimeout()
 * zeroes {@code _expireAt} and notifies; on expiry it calls expired().
 * For an async endpoint it registers {@code _event._timeout} with the
 * connection's timer. A non-positive timeout disables scheduling.
 */
protected void scheduleTimeout()
{
EndPoint endp=_connection.getEndPoint();
if (_timeoutMs>0)
{
if (endp.isBlocking())
{
synchronized(this)
{
_expireAt = System.currentTimeMillis()+_timeoutMs;
long wait=_timeoutMs;
// loop to survive spurious wakeups; _expireAt==0 means cancelled
while (_expireAt>0 && wait>0 && _connection.getServer().isRunning())
{
try
{
this.wait(wait);
}
catch (InterruptedException e)
{
LOG.ignore(e);
}
wait=_expireAt-System.currentTimeMillis();
}
// only fire expiry if not cancelled and the deadline truly passed
if (_expireAt>0 && wait<=0 && _connection.getServer().isRunning())
{
expired();
}
}
}
else
{
_connection.scheduleTimeout(_event._timeout,_timeoutMs);
}
}
}
/* ------------------------------------------------------------ */
/**
 * Disarm the suspend timeout: for a blocking endpoint, zero the deadline
 * and wake the thread waiting in scheduleTimeout(); for an async
 * endpoint, cancel the scheduled timer task.
 */
protected void cancelTimeout()
{
EndPoint endp=_connection.getEndPoint();
if (endp.isBlocking())
{
synchronized(this)
{
// _expireAt==0 is the cancelled marker checked by the wait loop
_expireAt=0;
this.notifyAll();
}
}
else
{
// copy to a local in case _event is concurrently cleared
final AsyncEventState event=_event;
if (event!=null)
_connection.cancelTimeout(event._timeout);
}
}
/* ------------------------------------------------------------ */
/** @return true if complete() was called but doComplete() has not run */
public boolean isCompleting()
{
synchronized (this)
{
return _state==__COMPLETING;
}
}
/* ------------------------------------------------------------ */
/** @return true if handling finished but completion has not run yet */
boolean isUncompleted()
{
synchronized (this)
{
return _state==__UNCOMPLETED;
}
}
/* ------------------------------------------------------------ */
/** @return true once doComplete() has finished the request */
public boolean isComplete()
{
synchronized (this)
{
return _state==__COMPLETED;
}
}
/* ------------------------------------------------------------ */
/** @return true if an async cycle has started and not yet redispatched */
public boolean isAsyncStarted()
{
    synchronized (this)
    {
        return _state==__ASYNCSTARTED
            || _state==__REDISPATCHING
            || _state==__REDISPATCH
            || _state==__ASYNCWAIT;
    }
}

/* ------------------------------------------------------------ */
/** @return true unless the request is in a purely synchronous state */
public boolean isAsync()
{
    synchronized (this)
    {
        // the four synchronous states; everything else is async
        final boolean synchronous =
            _state==__IDLE ||
            _state==__DISPATCHED ||
            _state==__UNCOMPLETED ||
            _state==__COMPLETED;
        return !synchronous;
    }
}
/* ------------------------------------------------------------ */
/** Resume with an explicit target context and path for the redispatch. */
public void dispatch(ServletContext context, String path)
{
_event._dispatchContext=context;
_event._path=path;
dispatch();
}
/* ------------------------------------------------------------ */
/** Resume with an explicit target path within the original context. */
public void dispatch(String path)
{
_event._path=path;
dispatch();
}
/* ------------------------------------------------------------ */
/** @return the unwrapped Jetty request behind this continuation */
public Request getBaseRequest()
{
return _connection.getRequest();
}
/* ------------------------------------------------------------ */
/** @return the request supplied at suspend time, else the connection's */
public ServletRequest getRequest()
{
    final AsyncEventState event=_event;
    return (event!=null) ? event.getSuppliedRequest() : _connection.getRequest();
}

/* ------------------------------------------------------------ */
/** @return the wrapped response if one was supplied, else the connection's */
public ServletResponse getResponse()
{
    final boolean useSupplied =
        _responseWrapped && _event!=null && _event.getSuppliedResponse()!=null;
    return useSupplied ? _event.getSuppliedResponse() : _connection.getResponse();
}
/* ------------------------------------------------------------ */
/**
 * Run the given task on the server's thread pool, within the scope of the
 * suspend-time servlet context. No-op if no async cycle has started.
 */
public void start(final Runnable run)
{
final AsyncEventState event=_event;
if (event!=null)
{
_connection.getServer().getThreadPool().dispatch(new Runnable()
{
public void run()
{
// enter the context so the task sees the right classloader etc.
((Context)event.getServletContext()).getContextHandler().handle(run);
}
});
}
}
/* ------------------------------------------------------------ */
/** @return true if suspend was given the connection's own request/response */
public boolean hasOriginalRequestAndResponse()
{
synchronized (this)
{
return (_event!=null && _event.getSuppliedRequest()==_connection._request && _event.getSuppliedResponse()==_connection._response);
}
}
/* ------------------------------------------------------------ */
/** @return the context handler of the suspend-time context, or null */
public ContextHandler getContextHandler()
{
    final AsyncEventState event=_event;
    return (event==null)
        ? null
        : ((Context)event.getServletContext()).getContextHandler();
}
/* ------------------------------------------------------------ */
/**
 * @return true if the continuation was resumed (dispatch called)
 * @see Continuation#isResumed()
 */
public boolean isResumed()
{
synchronized (this)
{
return _resumed;
}
}
/* ------------------------------------------------------------ */
/**
 * @return true if the suspend timeout fired
 * @see Continuation#isExpired()
 */
public boolean isExpired()
{
synchronized (this)
{
return _expired;
}
}
/* ------------------------------------------------------------ */
/**
 * Resume the continuation; alias for dispatch().
 * @see Continuation#resume()
 */
public void resume()
{
dispatch();
}
/* ------------------------------------------------------------ */
/**
 * Continuation-API suspend with an application-supplied response. If the
 * response is a wrapper, it is recorded so getResponse()/
 * getServletResponse() return it; otherwise the connection's response is
 * used.
 * @see Continuation#suspend()
 */
public void suspend(ServletResponse response)
{
_continuation=true;
if (response instanceof ServletResponseWrapper)
{
_responseWrapped=true;
AsyncContinuation.this.suspend(_connection.getRequest().getServletContext(),_connection.getRequest(),response);
}
else
{
_responseWrapped=false;
AsyncContinuation.this.suspend(_connection.getRequest().getServletContext(),_connection.getRequest(),_connection.getResponse());
}
}
/* ------------------------------------------------------------ */
/**
 * Continuation-API suspend using the connection's own request/response.
 * @see Continuation#suspend()
 */
public void suspend()
{
_responseWrapped=false;
_continuation=true;
AsyncContinuation.this.suspend(_connection.getRequest().getServletContext(),_connection.getRequest(),_connection.getResponse());
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#getServletResponse()
*/
public ServletResponse getServletResponse()
{
if (_responseWrapped && _event!=null && _event.getSuppliedResponse()!=null)
return _event.getSuppliedResponse();
return _connection.getResponse();
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#getAttribute(java.lang.String)
*/
public Object getAttribute(String name)
{
return _connection.getRequest().getAttribute(name);
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#removeAttribute(java.lang.String)
*/
public void removeAttribute(String name)
{
_connection.getRequest().removeAttribute(name);
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#setAttribute(java.lang.String, java.lang.Object)
*/
public void setAttribute(String name, Object attribute)
{
_connection.getRequest().setAttribute(name,attribute);
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#undispatch()
*/
public void undispatch()
{
if (isSuspended())
{
if (LOG.isDebugEnabled())
throw new ContinuationThrowable();
else
throw __exception;
}
throw new IllegalStateException("!suspended");
}
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /**
     * Timeout task armed while a request is suspended.  Whether fired
     * directly by the timer ({@link #expired()}) or executed as a dispatched
     * task ({@link #run()}), it signals the continuation that its timeout
     * has elapsed.
     */
    public class AsyncTimeout extends Timeout.Task implements Runnable
    {
        @Override
        public void expired()
        {
            AsyncContinuation.this.expired();
        }
        @Override
        public void run()
        {
            // Same action as expired(): notify the enclosing continuation.
            AsyncContinuation.this.expired();
        }
    }
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /**
     * State for a single suspend/dispatch cycle: the servlet context active
     * at suspension time, an optional dispatch target context and path, and
     * the timeout task guarding the cycle.
     */
    public class AsyncEventState extends AsyncEvent
    {
        // Context that was active when the request was suspended.
        private final ServletContext _suspendedContext;
        // Context to dispatch to, when an explicit target was given.
        private ServletContext _dispatchContext;
        // Path to dispatch to, or null to redispatch to the original URI.
        private String _path;
        // Timeout task armed while the request is suspended.
        private Timeout.Task _timeout= new AsyncTimeout();
        public AsyncEventState(ServletContext context, ServletRequest request, ServletResponse response)
        {
            super(AsyncContinuation.this, request,response);
            _suspendedContext=context;
        }
        public ServletContext getSuspendedContext()
        {
            return _suspendedContext;
        }
        public ServletContext getDispatchContext()
        {
            return _dispatchContext;
        }
        /**
         * @return the dispatch context when one was set, otherwise the
         *         context captured at suspension time.
         */
        public ServletContext getServletContext()
        {
            return _dispatchContext==null?_suspendedContext:_dispatchContext;
        }
        public String getPath()
        {
            return _path;
        }
    }
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.maven;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.jar.JarFile;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.springframework.boot.loader.tools.DefaultLaunchScript;
import org.springframework.boot.loader.tools.LaunchScript;
import org.springframework.boot.loader.tools.Layout;
import org.springframework.boot.loader.tools.Layouts;
import org.springframework.boot.loader.tools.Libraries;
import org.springframework.boot.loader.tools.Repackager;
/**
* Repackages existing JAR and WAR archives so that they can be executed from the command
* line using {@literal java -jar}. With <code>layout=NONE</code> can also be used simply
* to package a JAR with nested dependencies (and no main class, so not executable).
*
* @author Phillip Webb
* @author Dave Syer
* @author Stephane Nicoll
*/
@Mojo(name = "repackage", defaultPhase = LifecyclePhase.PACKAGE, requiresProject = true, threadSafe = true, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, requiresDependencyCollection = ResolutionScope.COMPILE_PLUS_RUNTIME)
public class RepackageMojo extends AbstractDependencyFilterMojo {

	/**
	 * Warn the user when searching for the main class takes longer than this
	 * (milliseconds); the mainClass parameter avoids the search entirely.
	 */
	private static final long FIND_WARNING_TIMEOUT = TimeUnit.SECONDS.toMillis(10);

	/**
	 * The Maven project.
	 * @since 1.0
	 */
	@Parameter(defaultValue = "${project}", readonly = true, required = true)
	private MavenProject project;

	/**
	 * Maven project helper utils.
	 * @since 1.0
	 */
	@Component
	private MavenProjectHelper projectHelper;

	/**
	 * Directory containing the generated archive.
	 * @since 1.0
	 */
	@Parameter(defaultValue = "${project.build.directory}", required = true)
	private File outputDirectory;

	/**
	 * Name of the generated archive.
	 * @since 1.0
	 */
	@Parameter(defaultValue = "${project.build.finalName}", required = true)
	private String finalName;

	/**
	 * Skip the execution.
	 * @since 1.2
	 */
	@Parameter(property = "skip", defaultValue = "false")
	private boolean skip;

	/**
	 * Classifier to add to the artifact generated. If given, the artifact will be
	 * attached. If this is not given, it will merely be written to the output directory
	 * according to the finalName. Attaching the artifact allows to deploy it alongside to
	 * the original one, see <a href=
	 * "http://maven.apache.org/plugins/maven-deploy-plugin/examples/deploying-with-classifiers.html"
	 * > the maven documentation for more details</a>.
	 * @since 1.0
	 */
	@Parameter
	private String classifier;

	/**
	 * The name of the main class. If not specified the first compiled class found that
	 * contains a 'main' method will be used.
	 * @since 1.0
	 */
	@Parameter
	private String mainClass;

	/**
	 * The type of archive (which corresponds to how the dependencies are laid out inside
	 * it). Possible values are JAR, WAR, ZIP, DIR, NONE. Defaults to a guess based on the
	 * archive type.
	 * @since 1.0
	 */
	@Parameter
	private LayoutType layout;

	/**
	 * A list of the libraries that must be unpacked from fat jars in order to run.
	 * Specify each library as a <code>&lt;dependency&gt;</code> with a
	 * <code>&lt;groupId&gt;</code> and a <code>&lt;artifactId&gt;</code> and they will
	 * be unpacked at runtime in <code>$TMPDIR/spring-boot-libs</code>.
	 * @since 1.1
	 */
	@Parameter
	private List<Dependency> requiresUnpack;

	/**
	 * Make a fully executable jar for *nix machines by prepending a launch script to the
	 * jar.
	 * @since 1.3
	 */
	@Parameter(defaultValue = "false")
	private boolean executable;

	/**
	 * The embedded launch script to prepend to the front of the jar if it is fully
	 * executable. If not specified the 'Spring Boot' default script will be used.
	 * @since 1.3
	 */
	@Parameter
	private File embeddedLaunchScript;

	/**
	 * Properties that should be expanded in the embedded launch script.
	 * @since 1.3
	 */
	@Parameter
	private Properties embeddedLaunchScriptProperties;

	/**
	 * Repackage the project archive: resolve the main class, lay out the
	 * dependencies according to the configured layout and either attach the
	 * result under a classifier or replace the main artifact.
	 */
	@Override
	public void execute() throws MojoExecutionException, MojoFailureException {
		// Repackaging only applies to projects that actually build an archive.
		if (this.project.getPackaging().equals("pom")) {
			getLog().debug("repackage goal could not be applied to pom project.");
			return;
		}
		if (this.skip) {
			getLog().debug("skipping repackaging as per configuration.");
			return;
		}
		File source = this.project.getArtifact().getFile();
		File target = getTargetFile();
		// Subclass only to warn when the main-class scan is unusually slow.
		Repackager repackager = new Repackager(source) {
			@Override
			protected String findMainMethod(JarFile source) throws IOException {
				long startTime = System.currentTimeMillis();
				try {
					return super.findMainMethod(source);
				}
				finally {
					long duration = System.currentTimeMillis() - startTime;
					if (duration > FIND_WARNING_TIMEOUT) {
						getLog().warn(
								"Searching for the main-class is taking some time, "
										+ "consider using the mainClass configuration "
										+ "parameter");
					}
				}
			}
		};
		repackager.setMainClass(this.mainClass);
		if (this.layout != null) {
			getLog().info("Layout: " + this.layout);
			repackager.setLayout(this.layout.layout());
		}
		// Runtime dependencies (after exclusion filters) become nested libs.
		Set<Artifact> artifacts = filterDependencies(this.project.getArtifacts(),
				getFilters());
		Libraries libraries = new ArtifactsLibraries(artifacts, this.requiresUnpack,
				getLog());
		try {
			LaunchScript launchScript = getLaunchScript();
			repackager.repackage(target, libraries, launchScript);
		}
		catch (IOException ex) {
			throw new MojoExecutionException(ex.getMessage(), ex);
		}
		if (this.classifier != null) {
			getLog().info(
					"Attaching archive: " + target + ", with classifier: "
							+ this.classifier);
			this.projectHelper.attachArtifact(this.project, this.project.getPackaging(),
					this.classifier, target);
		}
		else if (!source.equals(target)) {
			this.project.getArtifact().setFile(target);
			// Message fixed: previously read "Replacing ... to ..." which was
			// both ungrammatical and ambiguous about direction.
			getLog().info("Replacing main artifact " + source + " with " + target);
		}
	}

	/**
	 * Compute the file the repackaged archive is written to, based on the
	 * output directory, final name, optional classifier and the artifact's
	 * extension. Creates the output directory if necessary.
	 * @return the target archive file
	 */
	private File getTargetFile() {
		String classifier = (this.classifier == null ? "" : this.classifier.trim());
		if (classifier.length() > 0 && !classifier.startsWith("-")) {
			classifier = "-" + classifier;
		}
		if (!this.outputDirectory.exists()) {
			this.outputDirectory.mkdirs();
		}
		return new File(this.outputDirectory, this.finalName + classifier + "."
				+ this.project.getArtifact().getArtifactHandler().getExtension());
	}

	/**
	 * @return the launch script to prepend when the jar is fully executable or
	 * an explicit script was configured, otherwise {@code null}
	 * @throws IOException if the embedded script cannot be read
	 */
	private LaunchScript getLaunchScript() throws IOException {
		if (this.executable || this.embeddedLaunchScript != null) {
			return new DefaultLaunchScript(this.embeddedLaunchScript,
					this.embeddedLaunchScriptProperties);
		}
		return null;
	}

	/**
	 * Archive layout types supported by the {@code layout} parameter, each
	 * mapping to a loader-tools {@link Layout} implementation.
	 */
	public enum LayoutType {

		/**
		 * Jar Layout
		 */
		JAR(new Layouts.Jar()),

		/**
		 * War Layout
		 */
		WAR(new Layouts.War()),

		/**
		 * Zip Layout
		 */
		ZIP(new Layouts.Expanded()),

		/**
		 * Dir Layout
		 */
		DIR(new Layouts.Expanded()),

		/**
		 * Module Layout
		 */
		MODULE(new Layouts.Module()),

		/**
		 * No Layout
		 */
		NONE(new Layouts.None());

		private final Layout layout;

		private LayoutType(Layout layout) {
			this.layout = layout;
		}

		/**
		 * @return the loader-tools layout this type maps to
		 */
		public Layout layout() {
			return this.layout;
		}

	}

}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.nearcache.impl;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.internal.adapter.DataStructureAdapter;
import com.hazelcast.internal.nearcache.NearCache;
import com.hazelcast.internal.nearcache.NearCacheRecordStore;
import com.hazelcast.internal.nearcache.impl.store.NearCacheDataRecordStore;
import com.hazelcast.internal.nearcache.impl.store.NearCacheObjectRecordStore;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.serialization.SerializationService;
import com.hazelcast.nearcache.NearCacheStats;
import com.hazelcast.spi.impl.executionservice.TaskScheduler;
import com.hazelcast.spi.properties.HazelcastProperties;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.config.NearCacheConfig.DEFAULT_MEMORY_FORMAT;
import static com.hazelcast.internal.util.Preconditions.checkInstanceOf;
import static com.hazelcast.internal.util.Preconditions.checkNotInstanceOf;
/**
 * Default {@link NearCache} implementation backed by a
 * {@link NearCacheRecordStore}, with an optional periodic expiration task
 * when TTL or max-idle is configured.
 */
public class DefaultNearCache<K, V> implements NearCache<K, V> {

    protected final String name;
    protected final TaskScheduler scheduler;
    protected final ClassLoader classLoader;
    protected final NearCacheConfig nearCacheConfig;
    protected final SerializationService serializationService;

    // Fix: parameterized with a wildcard (was a raw ScheduledFuture). The
    // expiration task produces no result, so ScheduledFuture<?> is the
    // correct, warning-free type; erasure is unchanged.
    protected ScheduledFuture<?> expirationTaskFuture;
    protected NearCacheRecordStore<K, V> nearCacheRecordStore;

    private final boolean serializeKeys;
    private final HazelcastProperties properties;

    private volatile boolean preloadDone;

    public DefaultNearCache(String name, NearCacheConfig nearCacheConfig,
                            SerializationService serializationService, TaskScheduler scheduler,
                            ClassLoader classLoader, HazelcastProperties properties) {
        this(name, nearCacheConfig, null,
                serializationService, scheduler, classLoader, properties);
    }

    public DefaultNearCache(String name, NearCacheConfig nearCacheConfig,
                            NearCacheRecordStore<K, V> nearCacheRecordStore,
                            SerializationService serializationService, TaskScheduler scheduler,
                            ClassLoader classLoader, HazelcastProperties properties) {
        this.name = name;
        this.nearCacheConfig = nearCacheConfig;
        this.serializationService = serializationService;
        this.classLoader = classLoader;
        this.scheduler = scheduler;
        this.nearCacheRecordStore = nearCacheRecordStore;
        this.serializeKeys = nearCacheConfig.isSerializeKeys();
        this.properties = properties;
    }

    @Override
    public void initialize() {
        // Lazily create the record store unless one was injected (e.g. tests).
        if (nearCacheRecordStore == null) {
            nearCacheRecordStore = createNearCacheRecordStore(name, nearCacheConfig);
        }
        nearCacheRecordStore.initialize();
        expirationTaskFuture = createAndScheduleExpirationTask();
    }

    /**
     * Creates the record store matching the configured in-memory format
     * (BINARY keeps serialized {@code Data}, OBJECT keeps deserialized
     * values).
     */
    protected NearCacheRecordStore<K, V> createNearCacheRecordStore(String name, NearCacheConfig nearCacheConfig) {
        InMemoryFormat inMemoryFormat = nearCacheConfig.getInMemoryFormat();
        if (inMemoryFormat == null) {
            inMemoryFormat = DEFAULT_MEMORY_FORMAT;
        }
        switch (inMemoryFormat) {
            case BINARY:
                return new NearCacheDataRecordStore<K, V>(name, nearCacheConfig, serializationService, classLoader);
            case OBJECT:
                return new NearCacheObjectRecordStore<K, V>(name, nearCacheConfig, serializationService, classLoader);
            default:
                throw new IllegalArgumentException("Invalid in memory format: " + inMemoryFormat);
        }
    }

    /**
     * Schedules the periodic expiration task, but only when TTL or max-idle
     * is configured; otherwise no task is needed and {@code null} is
     * returned.
     */
    private ScheduledFuture<?> createAndScheduleExpirationTask() {
        if (nearCacheConfig.getMaxIdleSeconds() > 0L
                || nearCacheConfig.getTimeToLiveSeconds() > 0L) {
            return new ExpirationTask().schedule(scheduler);
        }
        return null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public NearCacheConfig getNearCacheConfig() {
        return nearCacheConfig;
    }

    @Override
    public V get(K key) {
        checkKeyFormat(key);
        return nearCacheRecordStore.get(key);
    }

    // only implemented for testing purposes
    @Override
    public void put(K key, Data keyData, V value, Data valueData) {
        checkKeyFormat(key);
        // Make room (non-forced eviction) before inserting the new entry.
        nearCacheRecordStore.doEviction(false);
        nearCacheRecordStore.put(key, keyData, value, valueData);
    }

    @Override
    public void invalidate(K key) {
        checkKeyFormat(key);
        nearCacheRecordStore.invalidate(key);
    }

    @Override
    public void clear() {
        nearCacheRecordStore.clear();
    }

    @Override
    public void destroy() {
        // Stop the expiration task (if any) before tearing down the store.
        if (expirationTaskFuture != null) {
            expirationTaskFuture.cancel(true);
        }
        nearCacheRecordStore.destroy();
    }

    @Override
    public NearCacheStats getNearCacheStats() {
        return nearCacheRecordStore.getNearCacheStats();
    }

    @Override
    public boolean isSerializeKeys() {
        return serializeKeys;
    }

    @Override
    public int size() {
        return nearCacheRecordStore.size();
    }

    @Override
    public void preload(DataStructureAdapter<Object, ?> adapter) {
        nearCacheRecordStore.loadKeys(adapter);
        preloadDone = true;
    }

    @Override
    public void storeKeys() {
        // we don't store new keys, until the pre-loader is done
        if (preloadDone) {
            nearCacheRecordStore.storeKeys();
        }
    }

    @Override
    public boolean isPreloadDone() {
        return preloadDone;
    }

    @Override
    public <T> T unwrap(Class<T> clazz) {
        if (clazz.isAssignableFrom(getClass())) {
            return clazz.cast(this);
        }
        throw new IllegalArgumentException("Unwrapping to " + clazz + " is not supported by this implementation");
    }

    @Override
    public long tryReserveForUpdate(K key, Data keyData, UpdateSemantic updateSemantic) {
        nearCacheRecordStore.doEviction(false);
        return nearCacheRecordStore.tryReserveForUpdate(key, keyData, updateSemantic);
    }

    @Override
    public V tryPublishReserved(K key, V value, long reservationId, boolean deserialize) {
        return nearCacheRecordStore.tryPublishReserved(key, value, reservationId, deserialize);
    }

    public NearCacheRecordStore<K, V> getNearCacheRecordStore() {
        return nearCacheRecordStore;
    }

    /**
     * Enforces the key representation contract: {@code Data} keys when
     * serializeKeys is enabled, deserialized keys otherwise.
     */
    private void checkKeyFormat(K key) {
        if (serializeKeys) {
            checkInstanceOf(Data.class, key, "key must be of type Data!");
        } else {
            checkNotInstanceOf(Data.class, key, "key cannot be of type Data!");
        }
    }

    /**
     * Periodic task that expires TTL/max-idle records. The CAS guard keeps
     * overlapping runs from doing concurrent expiration passes.
     */
    private class ExpirationTask implements Runnable {

        private final AtomicBoolean expirationInProgress = new AtomicBoolean(false);

        @Override
        public void run() {
            if (expirationInProgress.compareAndSet(false, true)) {
                try {
                    nearCacheRecordStore.doExpiration();
                } finally {
                    expirationInProgress.set(false);
                }
            }
        }

        private ScheduledFuture<?> schedule(TaskScheduler scheduler) {
            return scheduler.scheduleWithRepetition(this,
                    properties.getInteger(TASK_INITIAL_DELAY_SECONDS),
                    properties.getInteger(TASK_PERIOD_SECONDS),
                    TimeUnit.SECONDS);
        }
    }

    @Override
    public String toString() {
        return "DefaultNearCache{"
                + "name='" + name + '\''
                + ", nearCacheConfig=" + nearCacheConfig
                + ", preloadDone=" + preloadDone
                + '}';
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.lib.output;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import com.google.common.annotations.VisibleForTesting;
/** An {@link OutputCommitter} that commits files specified
* in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}.
**/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FileOutputCommitter extends OutputCommitter {
  private static final Log LOG = LogFactory.getLog(FileOutputCommitter.class);
  /**
   * Name of directory where pending data is placed. Data that has not been
   * committed yet.
   */
  public static final String PENDING_DIR_NAME = "_temporary";
  /**
   * Temporary directory name
   *
   * The static variable to be compatible with M/R 1.x
   */
  @Deprecated
  protected static final String TEMP_DIR_NAME = PENDING_DIR_NAME;
  // Zero-length marker created in the output dir on successful job commit
  // (when enabled via SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, default true).
  public static final String SUCCEEDED_FILE_NAME = "_SUCCESS";
  public static final String SUCCESSFUL_JOB_OUTPUT_DIR_MARKER =
    "mapreduce.fileoutputcommitter.marksuccessfuljobs";
  // Commit algorithm version (1 or 2); with v1 committed task output is
  // merged into the final dir during commitJob (see commitJobInternal).
  public static final String FILEOUTPUTCOMMITTER_ALGORITHM_VERSION =
    "mapreduce.fileoutputcommitter.algorithm.version";
  public static final int FILEOUTPUTCOMMITTER_ALGORITHM_VERSION_DEFAULT = 2;
  // Skip cleanup _temporary folders under job's output directory
  public static final String FILEOUTPUTCOMMITTER_CLEANUP_SKIPPED =
    "mapreduce.fileoutputcommitter.cleanup.skipped";
  public static final boolean
    FILEOUTPUTCOMMITTER_CLEANUP_SKIPPED_DEFAULT = false;
  // Ignore exceptions in cleanup _temporary folder under job's output directory
  public static final String FILEOUTPUTCOMMITTER_CLEANUP_FAILURES_IGNORED =
    "mapreduce.fileoutputcommitter.cleanup-failures.ignored";
  public static final boolean
    FILEOUTPUTCOMMITTER_CLEANUP_FAILURES_IGNORED_DEFAULT = false;
  // Number of attempts when failure happens in commit job
  public static final String FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS =
    "mapreduce.fileoutputcommitter.failures.attempts";
  // default value to be 1 to keep consistent with previous behavior
  public static final int FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS_DEFAULT = 1;
  // Qualified final output directory, or null when acting as a noop.
  private Path outputPath = null;
  // Per-task-attempt scratch directory (set by the TaskAttemptContext ctor).
  private Path workPath = null;
  private final int algorithmVersion;
  private final boolean skipCleanup;
  private final boolean ignoreCleanupFailures;
  /**
   * Create a file output committer
   * @param outputPath the job's output path, or null if you want the output
   * committer to act as a noop.
   * @param context the task's context
   * @throws IOException
   */
  public FileOutputCommitter(Path outputPath,
                             TaskAttemptContext context) throws IOException {
    this(outputPath, (JobContext)context);
    if (outputPath != null) {
      // Task attempts write into a per-attempt scratch dir under the job's
      // pending (_temporary) tree; it is promoted on commit.
      workPath = getTaskAttemptPath(context, outputPath);
    }
  }
  /**
   * Create a file output committer
   * @param outputPath the job's output path, or null if you want the output
   * committer to act as a noop.
   * @param context the task's context
   * @throws IOException
   */
  @Private
  public FileOutputCommitter(Path outputPath,
                             JobContext context) throws IOException {
    Configuration conf = context.getConfiguration();
    // Commit algorithm: only versions 1 and 2 exist; anything else is a
    // misconfiguration and is rejected up front.
    algorithmVersion =
        conf.getInt(FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
                    FILEOUTPUTCOMMITTER_ALGORITHM_VERSION_DEFAULT);
    LOG.info("File Output Committer Algorithm version is " + algorithmVersion);
    if (algorithmVersion != 1 && algorithmVersion != 2) {
      throw new IOException("Only 1 or 2 algorithm version is supported");
    }

    // if skip cleanup
    skipCleanup = conf.getBoolean(
        FILEOUTPUTCOMMITTER_CLEANUP_SKIPPED,
        FILEOUTPUTCOMMITTER_CLEANUP_SKIPPED_DEFAULT);

    // if ignore failures in cleanup
    ignoreCleanupFailures = conf.getBoolean(
        FILEOUTPUTCOMMITTER_CLEANUP_FAILURES_IGNORED,
        FILEOUTPUTCOMMITTER_CLEANUP_FAILURES_IGNORED_DEFAULT);

    LOG.info("FileOutputCommitter skip cleanup _temporary folders under " +
        "output directory:" + skipCleanup + ", ignore cleanup failures: " +
        ignoreCleanupFailures);

    if (outputPath != null) {
      // Qualify the path against its filesystem so later comparisons and
      // child-path constructions are unambiguous.
      FileSystem fs = outputPath.getFileSystem(context.getConfiguration());
      this.outputPath = fs.makeQualified(outputPath);
    }
  }
/**
* @return the path where final output of the job should be placed. This
* could also be considered the committed application attempt path.
*/
private Path getOutputPath() {
return this.outputPath;
}
/**
* @return true if we have an output path set, else false.
*/
private boolean hasOutputPath() {
return this.outputPath != null;
}
/**
* @return the path where the output of pending job attempts are
* stored.
*/
private Path getPendingJobAttemptsPath() {
return getPendingJobAttemptsPath(getOutputPath());
}
/**
* Get the location of pending job attempts.
* @param out the base output directory.
* @return the location of pending job attempts.
*/
private static Path getPendingJobAttemptsPath(Path out) {
return new Path(out, PENDING_DIR_NAME);
}
/**
* Get the Application Attempt Id for this job
* @param context the context to look in
* @return the Application Attempt Id for a given job.
*/
private static int getAppAttemptId(JobContext context) {
return context.getConfiguration().getInt(
MRJobConfig.APPLICATION_ATTEMPT_ID, 0);
}
/**
* Compute the path where the output of a given job attempt will be placed.
* @param context the context of the job. This is used to get the
* application attempt id.
* @return the path to store job attempt data.
*/
public Path getJobAttemptPath(JobContext context) {
return getJobAttemptPath(context, getOutputPath());
}
/**
* Compute the path where the output of a given job attempt will be placed.
* @param context the context of the job. This is used to get the
* application attempt id.
* @param out the output path to place these in.
* @return the path to store job attempt data.
*/
public static Path getJobAttemptPath(JobContext context, Path out) {
return getJobAttemptPath(getAppAttemptId(context), out);
}
/**
* Compute the path where the output of a given job attempt will be placed.
* @param appAttemptId the ID of the application attempt for this job.
* @return the path to store job attempt data.
*/
protected Path getJobAttemptPath(int appAttemptId) {
return getJobAttemptPath(appAttemptId, getOutputPath());
}
/**
* Compute the path where the output of a given job attempt will be placed.
* @param appAttemptId the ID of the application attempt for this job.
* @return the path to store job attempt data.
*/
private static Path getJobAttemptPath(int appAttemptId, Path out) {
return new Path(getPendingJobAttemptsPath(out), String.valueOf(appAttemptId));
}
/**
* Compute the path where the output of pending task attempts are stored.
* @param context the context of the job with pending tasks.
* @return the path where the output of pending task attempts are stored.
*/
private Path getPendingTaskAttemptsPath(JobContext context) {
return getPendingTaskAttemptsPath(context, getOutputPath());
}
/**
* Compute the path where the output of pending task attempts are stored.
* @param context the context of the job with pending tasks.
* @return the path where the output of pending task attempts are stored.
*/
private static Path getPendingTaskAttemptsPath(JobContext context, Path out) {
return new Path(getJobAttemptPath(context, out), PENDING_DIR_NAME);
}
/**
* Compute the path where the output of a task attempt is stored until
* that task is committed.
*
* @param context the context of the task attempt.
* @return the path where a task attempt should be stored.
*/
public Path getTaskAttemptPath(TaskAttemptContext context) {
return new Path(getPendingTaskAttemptsPath(context),
String.valueOf(context.getTaskAttemptID()));
}
/**
* Compute the path where the output of a task attempt is stored until
* that task is committed.
*
* @param context the context of the task attempt.
* @param out The output path to put things in.
* @return the path where a task attempt should be stored.
*/
public static Path getTaskAttemptPath(TaskAttemptContext context, Path out) {
return new Path(getPendingTaskAttemptsPath(context, out),
String.valueOf(context.getTaskAttemptID()));
}
/**
* Compute the path where the output of a committed task is stored until
* the entire job is committed.
* @param context the context of the task attempt
* @return the path where the output of a committed task is stored until
* the entire job is committed.
*/
public Path getCommittedTaskPath(TaskAttemptContext context) {
return getCommittedTaskPath(getAppAttemptId(context), context);
}
public static Path getCommittedTaskPath(TaskAttemptContext context, Path out) {
return getCommittedTaskPath(getAppAttemptId(context), context, out);
}
/**
* Compute the path where the output of a committed task is stored until the
* entire job is committed for a specific application attempt.
* @param appAttemptId the id of the application attempt to use
* @param context the context of any task.
* @return the path where the output of a committed task is stored.
*/
protected Path getCommittedTaskPath(int appAttemptId, TaskAttemptContext context) {
return new Path(getJobAttemptPath(appAttemptId),
String.valueOf(context.getTaskAttemptID().getTaskID()));
}
private static Path getCommittedTaskPath(int appAttemptId, TaskAttemptContext context, Path out) {
return new Path(getJobAttemptPath(appAttemptId, out),
String.valueOf(context.getTaskAttemptID().getTaskID()));
}
private static class CommittedTaskFilter implements PathFilter {
@Override
public boolean accept(Path path) {
return !PENDING_DIR_NAME.equals(path.getName());
}
}
/**
* Get a list of all paths where output from committed tasks are stored.
* @param context the context of the current job
* @return the list of these Paths/FileStatuses.
* @throws IOException
*/
private FileStatus[] getAllCommittedTaskPaths(JobContext context)
throws IOException {
Path jobAttemptPath = getJobAttemptPath(context);
FileSystem fs = jobAttemptPath.getFileSystem(context.getConfiguration());
return fs.listStatus(jobAttemptPath, new CommittedTaskFilter());
}
/**
* Get the directory that the task should write results into.
* @return the work directory
* @throws IOException
*/
public Path getWorkPath() throws IOException {
return workPath;
}
/**
* Create the temporary directory that is the root of all of the task
* work directories.
* @param context the job's context
*/
public void setupJob(JobContext context) throws IOException {
if (hasOutputPath()) {
Path jobAttemptPath = getJobAttemptPath(context);
FileSystem fs = jobAttemptPath.getFileSystem(
context.getConfiguration());
if (!fs.mkdirs(jobAttemptPath)) {
LOG.error("Mkdirs failed to create " + jobAttemptPath);
}
} else {
LOG.warn("Output Path is null in setupJob()");
}
}
  /**
   * The job has completed, so do works in commitJobInternal().
   * Could retry on failure if using algorithm 2.
   * @param context the job's context
   */
  public void commitJob(JobContext context) throws IOException {
    // Retrying is only safe when the commit is repeatable; otherwise the
    // attempt budget is fixed at 1.
    int maxAttemptsOnFailure = isCommitJobRepeatable(context) ?
        context.getConfiguration().getInt(FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS,
            FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS_DEFAULT) : 1;
    int attempt = 0;
    boolean jobCommitNotFinished = true;
    while (jobCommitNotFinished) {
      try {
        commitJobInternal(context);
        jobCommitNotFinished = false;
      } catch (Exception e) {
        // Rethrow once the budget is exhausted; otherwise log and retry.
        if (++attempt >= maxAttemptsOnFailure) {
          throw e;
        } else {
          LOG.warn("Exception get thrown in job commit, retry (" + attempt +
              ") time.", e);
        }
      }
    }
  }
  /**
   * The job has completed, so do following commit job, include:
   * Move all committed tasks to the final output dir (algorithm 1 only).
   * Delete the temporary directory, including all of the work directories.
   * Create a _SUCCESS file to make it as successful.
   * @param context the job's context
   */
  @VisibleForTesting
  protected void commitJobInternal(JobContext context) throws IOException {
    if (hasOutputPath()) {
      Path finalOutput = getOutputPath();
      FileSystem fs = finalOutput.getFileSystem(context.getConfiguration());

      // Algorithm v1: committed task output still sits under the job attempt
      // directory and must be merged into the final output dir here.
      if (algorithmVersion == 1) {
        for (FileStatus stat: getAllCommittedTaskPaths(context)) {
          mergePaths(fs, stat, finalOutput);
        }
      }

      if (skipCleanup) {
        LOG.info("Skip cleanup the _temporary folders under job's output " +
            "directory in commitJob.");
      } else {
        // delete the _temporary folder and create a _done file in the o/p
        // folder
        try {
          cleanupJob(context);
        } catch (IOException e) {
          if (ignoreCleanupFailures) {
            // swallow exceptions in cleanup as user configure to make sure
            // commitJob could be success even when cleanup get failure.
            LOG.error("Error in cleanup job, manually cleanup is needed.", e);
          } else {
            // throw back exception to fail commitJob.
            throw e;
          }
        }
      }

      // True if the job requires output.dir marked on successful job.
      // Note that by default it is set to true.
      if (context.getConfiguration().getBoolean(
          SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, true)) {
        Path markerPath = new Path(outputPath, SUCCEEDED_FILE_NAME);
        // If job commit is repeatable and previous/another AM could write
        // mark file already, we need to set overwritten to be true explicitly
        // in case other FS implementations don't overwritten by default.
        if (isCommitJobRepeatable(context)) {
          fs.create(markerPath, true).close();
        } else {
          fs.create(markerPath).close();
        }
      }
    } else {
      LOG.warn("Output Path is null in commitJob()");
    }
  }
/**
 * Merge two paths together. Anything in from will be moved into to; if there
 * are any name conflicts while merging, the files or directories in from win.
 * Recurses into subdirectories when both sides are directories.
 * @param fs the File System to use
 * @param from the path data is coming from.
 * @param to the path data is going to.
 * @throws IOException on any error
 */
private void mergePaths(FileSystem fs, final FileStatus from,
    final Path to) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Merging data from " + from + " to " + to);
  }
  // Probe the destination; absence is represented by a null status.
  FileStatus toStat;
  try {
    toStat = fs.getFileStatus(to);
  } catch (FileNotFoundException fnfe) {
    toStat = null;
  }
  if (from.isFile()) {
    // Source file wins: remove any existing destination before renaming.
    if (toStat != null) {
      if (!fs.delete(to, true)) {
        throw new IOException("Failed to delete " + to);
      }
    }
    if (!fs.rename(from.getPath(), to)) {
      throw new IOException("Failed to rename " + from + " to " + to);
    }
  } else if (from.isDirectory()) {
    if (toStat != null) {
      if (!toStat.isDirectory()) {
        // A file is blocking the destination directory: replace it.
        if (!fs.delete(to, true)) {
          throw new IOException("Failed to delete " + to);
        }
        renameOrMerge(fs, from, to);
      } else {
        //It is a directory so merge everything in the directories
        for (FileStatus subFrom : fs.listStatus(from.getPath())) {
          Path subTo = new Path(to, subFrom.getPath().getName());
          mergePaths(fs, subFrom, subTo);
        }
      }
    } else {
      // Destination does not exist yet; rename (v1) or merge children (v2).
      renameOrMerge(fs, from, to);
    }
  }
}
/**
 * Move a directory into place. Algorithm 1 performs a plain rename;
 * algorithm 2 creates the destination directory and merges each child into
 * it recursively via {@link #mergePaths}.
 * @param fs the File System to use
 * @param from the source directory status
 * @param to the destination path
 * @throws IOException if the rename fails or a recursive merge fails
 */
private void renameOrMerge(FileSystem fs, FileStatus from, Path to)
    throws IOException {
  if (algorithmVersion != 1) {
    // v2: materialize the destination and merge children one by one.
    fs.mkdirs(to);
    for (FileStatus child : fs.listStatus(from.getPath())) {
      mergePaths(fs, child, new Path(to, child.getPath().getName()));
    }
  } else if (!fs.rename(from.getPath(), to)) {
    throw new IOException("Failed to rename " + from + " to " + to);
  }
}
/**
 * Delete the job's pending attempts (_temporary) directory, including all
 * of the task work directories beneath it.
 * @param context the job's context
 * @throws IOException on delete failure; a missing directory is tolerated
 *         when job commit is repeatable, since a previous AM may already
 *         have removed it
 */
@Override
@Deprecated
public void cleanupJob(JobContext context) throws IOException {
  if (hasOutputPath()) {
    Path pendingJobAttemptsPath = getPendingJobAttemptsPath();
    FileSystem fs = pendingJobAttemptsPath
        .getFileSystem(context.getConfiguration());
    // if job allows repeatable commit and pendingJobAttemptsPath could be
    // deleted by previous AM, we should tolerate FileNotFoundException in
    // this case.
    try {
      fs.delete(pendingJobAttemptsPath, true);
    } catch (FileNotFoundException e) {
      if (!isCommitJobRepeatable(context)) {
        throw e;
      }
    }
  } else {
    LOG.warn("Output Path is null in cleanupJob()");
  }
}
/**
 * Abort the job by deleting the temporary directory, including all of the
 * work directories.
 * @param context the job's context
 * @param state the final state of the job (unused; cleanup is identical
 *        for all terminal states)
 * @throws IOException on any filesystem failure during cleanup
 */
@Override
public void abortJob(JobContext context, JobStatus.State state)
    throws IOException {
  // Aborting shares the cleanup implementation: remove the _temporary folder.
  cleanupJob(context);
}
/**
 * No task setup required.
 * @param context the task's context (unused)
 */
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  // Intentionally empty: the temporary task directory is created lazily,
  // on demand, once the task starts writing output.
}
/**
 * Move the files from the work directory to the job output directory.
 * @param context the task context
 * @throws IOException on any filesystem failure
 */
@Override
public void commitTask(TaskAttemptContext context)
    throws IOException {
  // Delegate with a null path; the attempt path is derived from the context.
  commitTask(context, null);
}
/**
 * Commit the output of a single task attempt.
 * Algorithm 1 renames the attempt directory to the per-task committed
 * location (merged into the final output later, at job commit); algorithm 2
 * merges the attempt's output directly into the final output directory.
 * @param context the task's context
 * @param taskAttemptPath the attempt's output path, or null to derive it
 *        from the context
 * @throws IOException on any filesystem failure
 */
@Private
public void commitTask(TaskAttemptContext context, Path taskAttemptPath)
    throws IOException {
  TaskAttemptID attemptId = context.getTaskAttemptID();
  if (hasOutputPath()) {
    context.progress();
    if(taskAttemptPath == null) {
      taskAttemptPath = getTaskAttemptPath(context);
    }
    FileSystem fs = taskAttemptPath.getFileSystem(context.getConfiguration());
    // A missing attempt directory means the task produced no output at all.
    FileStatus taskAttemptDirStatus;
    try {
      taskAttemptDirStatus = fs.getFileStatus(taskAttemptPath);
    } catch (FileNotFoundException e) {
      taskAttemptDirStatus = null;
    }
    if (taskAttemptDirStatus != null) {
      if (algorithmVersion == 1) {
        Path committedTaskPath = getCommittedTaskPath(context);
        // Replace any output left behind by a previous attempt of this task.
        if (fs.exists(committedTaskPath)) {
          if (!fs.delete(committedTaskPath, true)) {
            throw new IOException("Could not delete " + committedTaskPath);
          }
        }
        if (!fs.rename(taskAttemptPath, committedTaskPath)) {
          throw new IOException("Could not rename " + taskAttemptPath + " to "
              + committedTaskPath);
        }
        LOG.info("Saved output of task '" + attemptId + "' to " +
            committedTaskPath);
      } else {
        // directly merge everything from taskAttemptPath to output directory
        mergePaths(fs, taskAttemptDirStatus, outputPath);
        LOG.info("Saved output of task '" + attemptId + "' to " +
            outputPath);
      }
    } else {
      LOG.warn("No Output found for " + attemptId);
    }
  } else {
    LOG.warn("Output Path is null in commitTask()");
  }
}
/**
 * Delete the work directory for this task attempt.
 * @param context the task's context
 * @throws IOException on any filesystem failure
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  // Delegate with a null path; the attempt path is derived from the context.
  abortTask(context, null);
}
/**
 * Delete the given task attempt's work directory (best effort: a failed
 * delete is logged rather than thrown).
 * @param context the task's context
 * @param taskAttemptPath the attempt's output path, or null to derive it
 *        from the context
 * @throws IOException on filesystem access failure
 */
@Private
public void abortTask(TaskAttemptContext context, Path taskAttemptPath) throws IOException {
  if (!hasOutputPath()) {
    LOG.warn("Output Path is null in abortTask()");
    return;
  }
  context.progress();
  Path attemptDir =
      (taskAttemptPath != null) ? taskAttemptPath : getTaskAttemptPath(context);
  FileSystem fs = attemptDir.getFileSystem(context.getConfiguration());
  if (!fs.delete(attemptDir, true)) {
    LOG.warn("Could not delete " + attemptDir);
  }
}
/**
 * Did this task write any files in the work directory?
 * @param context the task's context
 * @return true if the task attempt directory exists
 * @throws IOException on any filesystem failure
 */
@Override
public boolean needsTaskCommit(TaskAttemptContext context
    ) throws IOException {
  return needsTaskCommit(context, null);
}
/**
 * Check whether the given task attempt produced any output.
 * @param context the task's context
 * @param taskAttemptPath the attempt's output path, or null to derive it
 *        from the context
 * @return true iff an output path is configured and the attempt directory
 *         exists
 * @throws IOException on filesystem access failure
 */
@Private
public boolean needsTaskCommit(TaskAttemptContext context, Path taskAttemptPath
    ) throws IOException {
  if (!hasOutputPath()) {
    return false;
  }
  Path attemptDir =
      (taskAttemptPath != null) ? taskAttemptPath : getTaskAttemptPath(context);
  FileSystem fs = attemptDir.getFileSystem(context.getConfiguration());
  return fs.exists(attemptDir);
}
/**
 * Task recovery is always supported by this committer.
 * @return always true
 */
@Override
@Deprecated
public boolean isRecoverySupported() {
  return true;
}
/**
 * Job commit is repeatable only for algorithm version 2, where task output
 * is merged into the final directory during task commit.
 * @param context the job's context (unused)
 * @return true iff algorithm version 2 is in use
 */
@Override
public boolean isCommitJobRepeatable(JobContext context) throws IOException {
  return algorithmVersion == 2;
}
/**
 * Recover task output committed by the previous application attempt.
 * Algorithm 1 renames the previously committed task directory into this
 * attempt's committed location; algorithm 2 is essentially a no-op, except
 * that leftover committed output (upgrade scenario) is merged into the
 * final output directory.
 * @param context the task's context
 * @throws IOException if this is the first attempt or on filesystem errors
 */
@Override
public void recoverTask(TaskAttemptContext context)
    throws IOException {
  if(hasOutputPath()) {
    context.progress();
    TaskAttemptID attemptId = context.getTaskAttemptID();
    // Recovery pulls output from the immediately preceding AM attempt.
    int previousAttempt = getAppAttemptId(context) - 1;
    if (previousAttempt < 0) {
      throw new IOException ("Cannot recover task output for first attempt...");
    }
    Path previousCommittedTaskPath = getCommittedTaskPath(
        previousAttempt, context);
    FileSystem fs = previousCommittedTaskPath.getFileSystem(context.getConfiguration());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Trying to recover task from " + previousCommittedTaskPath);
    }
    if (algorithmVersion == 1) {
      if (fs.exists(previousCommittedTaskPath)) {
        Path committedTaskPath = getCommittedTaskPath(context);
        // Replace anything already committed under this attempt's path.
        if (fs.exists(committedTaskPath)) {
          if (!fs.delete(committedTaskPath, true)) {
            throw new IOException("Could not delete "+committedTaskPath);
          }
        }
        //Rename can fail if the parent directory does not yet exist.
        Path committedParent = committedTaskPath.getParent();
        fs.mkdirs(committedParent);
        if (!fs.rename(previousCommittedTaskPath, committedTaskPath)) {
          throw new IOException("Could not rename " + previousCommittedTaskPath +
              " to " + committedTaskPath);
        }
      } else {
        LOG.warn(attemptId+" had no output to recover.");
      }
    } else {
      // essentially a no-op, but for backwards compatibility
      // after upgrade to the new fileOutputCommitter,
      // check if there are any output left in committedTaskPath
      if (fs.exists(previousCommittedTaskPath)) {
        LOG.info("Recovering task for upgrading scenario, moving files from "
            + previousCommittedTaskPath + " to " + outputPath);
        FileStatus from = fs.getFileStatus(previousCommittedTaskPath);
        mergePaths(fs, from, outputPath);
      }
      LOG.info("Done recovering task " + attemptId);
    }
  } else {
    LOG.warn("Output Path is null in recoverTask()");
  }
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.util;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.*;
import com.intellij.openapi.util.BooleanGetter;
import com.intellij.openapi.util.TextRange;
import com.intellij.ui.JBColor;
import com.intellij.util.DocumentUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.awt.geom.CubicCurve2D;
import java.awt.geom.Path2D;
import java.util.Collections;
import java.util.List;
import static com.intellij.diff.util.DiffUtil.getLineCount;
/**
 * Static utilities for painting diff decorations: divider shapes between
 * editors, chunk separator lines, and {@link RangeHighlighter} creation for
 * changed line ranges.
 */
public class DiffDrawUtil {
  // Highlighter layer ordering: error-stripe markers sit just below errors;
  // diff range/marker highlighters stay below the selection layer so the
  // selection remains visible on top of them.
  private static final int STRIPE_LAYER = HighlighterLayer.ERROR - 1;
  private static final int DEFAULT_LAYER = HighlighterLayer.SELECTION - 3;
  private static final int INLINE_LAYER = HighlighterLayer.SELECTION - 2;
  private static final int LINE_MARKER_LAYER = HighlighterLayer.SELECTION - 1;

  // Horizontal placement (as a fraction of the divider width) of the cubic
  // Bezier control points produced by makeCurve().
  private static final double CTRL_PROXIMITY_X = 0.3;

  /** Renderer that draws a border-colored line stretched to the right clip edge. */
  public static final LineSeparatorRenderer BORDER_LINE_RENDERER = new LineSeparatorRenderer() {
    @Override
    public void drawLine(Graphics g, int x1, int x2, int y) {
      // TODO: change LineSeparatorRenderer interface ?
      Rectangle clip = g.getClipBounds();
      x2 = clip.x + clip.width;
      g.setColor(JBColor.border());
      g.drawLine(x1, y, x2, y);
    }
  };

  // Utility class: no instances.
  private DiffDrawUtil() {
  }

  /** @return the divider color derived from the global color scheme. */
  @NotNull
  public static Color getDividerColor() {
    return getDividerColor(null);
  }

  /**
   * @return the divider color from the editor's scheme, or from the global
   *         scheme when {@code editor} is null
   */
  @NotNull
  public static Color getDividerColor(@Nullable Editor editor) {
    return getDividerColorFromScheme(editor != null ? editor.getColorsScheme() : EditorColorsManager.getInstance().getGlobalScheme());
  }

  /** @return the scheme's gutter background color, falling back to the default color. */
  @NotNull
  public static Color getDividerColorFromScheme(@NotNull EditorColorsScheme scheme) {
    Color gutterBackground = scheme.getColor(EditorColors.GUTTER_BACKGROUND);
    if (gutterBackground == null) {
      gutterBackground = EditorColors.GUTTER_BACKGROUND.getDefaultColor();
    }
    return gutterBackground;
  }

  /** Draws a connector separator between two editors, using the default scheme. */
  public static void drawConnectorLineSeparator(@NotNull Graphics2D g,
                                                int x1, int x2,
                                                int start1, int end1,
                                                int start2, int end2) {
    drawConnectorLineSeparator(g, x1, x2, start1, end1, start2, end2, null);
  }

  /**
   * Draws a connector separator between two editors.
   * NOTE(review): delegates the height as {@code end1 - start1}; {@code end2}
   * is not passed through to the renderer.
   */
  public static void drawConnectorLineSeparator(@NotNull Graphics2D g,
                                                int x1, int x2,
                                                int start1, int end1,
                                                int start2, int end2,
                                                @Nullable EditorColorsScheme scheme) {
    DiffLineSeparatorRenderer.drawConnectorLine(g, x1, x2, start1, start2, end1 - start1, scheme);
  }

  /**
   * Draws a horizontal chunk border at height {@code y}; the line may be
   * doubled and/or bold-dotted depending on the flags.
   */
  public static void drawChunkBorderLine(@NotNull Graphics2D g, int x1, int x2, int y, @NotNull Color color,
                                         boolean doubleLine, boolean dottedLine) {
    if (dottedLine && doubleLine) {
      UIUtil.drawBoldDottedLine(g, x1, x2, y - 1, null, color, false);
      UIUtil.drawBoldDottedLine(g, x1, x2, y, null, color, false);
    }
    else if (dottedLine) {
      UIUtil.drawBoldDottedLine(g, x1, x2, y - 1, null, color, false);
    }
    else if (doubleLine) {
      UIUtil.drawLine(g, x1, y, x2, y, null, color);
      UIUtil.drawLine(g, x1, y + 1, x2, y + 1, null, color);
    }
    else {
      UIUtil.drawLine(g, x1, y, x2, y, null, color);
    }
  }

  /**
   * Draws a straight-edged trapezium connecting range [start1, end1] on the
   * left edge (x1) with [start2, end2] on the right edge (x2); fill and/or
   * border are skipped when their color is null.
   */
  public static void drawTrapezium(@NotNull Graphics2D g,
                                   int x1, int x2,
                                   int start1, int end1,
                                   int start2, int end2,
                                   @Nullable Color fillColor,
                                   @Nullable Color borderColor) {
    if (fillColor != null) {
      final int[] xPoints = new int[]{x1, x2, x2, x1};
      // +1 so the fill covers the bottom border row as well.
      final int[] yPoints = new int[]{start1, start2, end2 + 1, end1 + 1};
      g.setColor(fillColor);
      g.fillPolygon(xPoints, yPoints, xPoints.length);
    }
    if (borderColor != null) {
      g.setColor(borderColor);
      g.drawLine(x1, start1, x2, start2);
      g.drawLine(x1, end1, x2, end2);
    }
  }

  /**
   * Same as {@link #drawTrapezium} but with cubic-curve top and bottom edges.
   * A separate (un-offset) lower curve is used for the border so the stroke
   * lands on the exact end coordinates.
   */
  public static void drawCurveTrapezium(@NotNull Graphics2D g,
                                        int x1, int x2,
                                        int start1, int end1,
                                        int start2, int end2,
                                        @Nullable Color fillColor,
                                        @Nullable Color borderColor) {
    Shape upperCurve = makeCurve(x1, x2, start1, start2, true);
    Shape lowerCurve = makeCurve(x1, x2, end1 + 1, end2 + 1, false);
    Shape lowerCurveBorder = makeCurve(x1, x2, end1, end2, false);

    if (fillColor != null) {
      // Append the forward upper curve and the reversed lower curve to get a
      // closed region to fill.
      Path2D path = new Path2D.Double();
      path.append(upperCurve, true);
      path.append(lowerCurve, true);
      g.setColor(fillColor);
      g.fill(path);
    }

    if (borderColor != null) {
      g.setColor(borderColor);
      g.draw(upperCurve);
      g.draw(lowerCurveBorder);
    }
  }

  /**
   * Builds a cubic Bezier from (x1, y1) to (x1 + width, y2); when
   * {@code forward} is false the same curve is produced in reverse direction
   * (used to close a fill path).
   */
  private static Shape makeCurve(int x1, int x2, int y1, int y2, boolean forward) {
    int width = x2 - x1;
    if (forward) {
      return new CubicCurve2D.Double(x1, y1,
                                     x1 + width * CTRL_PROXIMITY_X, y1,
                                     x1 + width * (1.0 - CTRL_PROXIMITY_X), y2,
                                     x1 + width, y2);
    }
    else {
      return new CubicCurve2D.Double(x1 + width, y2,
                                     x1 + width * (1.0 - CTRL_PROXIMITY_X), y2,
                                     x1 + width * CTRL_PROXIMITY_X, y1,
                                     x1, y1);
    }
  }

  //
  // Impl
  //

  /**
   * Maps a line number to its vertical pixel position in the editor;
   * lines past the end of the document are extrapolated by line height.
   */
  public static int lineToY(@NotNull Editor editor, int line) {
    Document document = editor.getDocument();
    if (line >= getLineCount(document)) {
      int y = lineToY(editor, getLineCount(document) - 1);
      return y + editor.getLineHeight() * (line - getLineCount(document) + 1);
    }
    return editor.logicalPositionToXY(editor.offsetToLogicalPosition(document.getLineStartOffset(line))).y;
  }

  /**
   * Attributes whose background is resolved lazily from the diff type
   * (ignored vs. regular color) at paint time.
   */
  @NotNull
  private static TextAttributes getTextAttributes(@NotNull final TextDiffType type,
                                                  @Nullable final Editor editor,
                                                  final boolean ignored) {
    return new TextAttributes() {
      @Override
      public Color getBackgroundColor() {
        return ignored ? type.getIgnoredColor(editor) : type.getColor(editor);
      }
    };
  }

  /** Attributes that only contribute the error-stripe marker color. */
  @NotNull
  private static TextAttributes getStripeTextAttributes(@NotNull final TextDiffType type,
                                                        @NotNull final Editor editor) {
    return new TextAttributes() {
      @Override
      public Color getErrorStripeColor() {
        return type.getMarkerColor(editor);
      }
    };
  }

  /** Attaches the gutter line-marker renderer for a diff range highlighter. */
  private static void installGutterRenderer(@NotNull RangeHighlighter highlighter,
                                            @NotNull TextDiffType type,
                                            boolean ignoredFoldingOutline,
                                            boolean resolved,
                                            boolean isEmptyRange,
                                            boolean isLastLine) {
    DiffLineMarkerRenderer renderer = new DiffLineMarkerRenderer(highlighter, type, ignoredFoldingOutline, resolved,
                                                                 isEmptyRange, isLastLine);
    highlighter.setLineMarkerRenderer(renderer);
  }

  /** Attaches the custom renderer used for zero-length (insertion point) ranges. */
  private static void installEmptyRangeRenderer(@NotNull RangeHighlighter highlighter,
                                                @NotNull TextDiffType type) {
    highlighter.setCustomRenderer(new DiffEmptyHighlighterRenderer(type));
  }

  /**
   * Separator renderer drawing a chunk border in the diff type's color,
   * stretched to the right clip edge.
   */
  @NotNull
  private static LineSeparatorRenderer createDiffLineRenderer(@NotNull final Editor editor,
                                                              @NotNull final TextDiffType type,
                                                              final boolean doubleLine,
                                                              final boolean resolved) {
    return new LineSeparatorRenderer() {
      @Override
      public void drawLine(Graphics g, int x1, int x2, int y) {
        // TODO: change LineSeparatorRenderer interface ?
        Rectangle clip = g.getClipBounds();
        x2 = clip.x + clip.width;
        // NOTE(review): the 'resolved' flag is forwarded as the dotted-line
        // argument — resolved chunks render dotted.
        drawChunkBorderLine((Graphics2D)g, x1, x2, y, type.getColor(editor), doubleLine, resolved);
      }
    };
  }

  //
  // Highlighters
  //

  // TODO: desync of range and 'border' line markers on typing

  /** Creates highlighters for a changed line range. */
  @NotNull
  public static List<RangeHighlighter> createHighlighter(@NotNull Editor editor, int startLine, int endLine, @NotNull TextDiffType type,
                                                         boolean ignored) {
    return new LineHighlighterBuilder(editor, startLine, endLine, type).withIgnored(ignored).done();
  }

  /** Creates highlighters for a changed line range, optionally in resolved state. */
  @NotNull
  public static List<RangeHighlighter> createHighlighter(@NotNull Editor editor, int startLine, int endLine, @NotNull TextDiffType type,
                                                         boolean ignored, boolean resolved) {
    return new LineHighlighterBuilder(editor, startLine, endLine, type).withIgnored(ignored).withResolved(resolved).done();
  }

  /** Creates a highlighter for an intra-line (word-level) changed offset range. */
  @NotNull
  public static List<RangeHighlighter> createInlineHighlighter(@NotNull Editor editor, int start, int end, @NotNull TextDiffType type) {
    return new InlineHighlighterBuilder(editor, start, end, type).done();
  }

  /**
   * Creates border line markers for the [line1, line2) range; an empty range
   * is rendered as a single double line below the preceding line.
   */
  @NotNull
  public static List<RangeHighlighter> createLineMarker(@NotNull final Editor editor, int line1, int line2,
                                                        @NotNull final TextDiffType type, final boolean resolved) {
    if (line1 == line2) {
      if (line1 == 0) return Collections.emptyList();
      return createLineMarker(editor, line1 - 1, type, SeparatorPlacement.BOTTOM, true, resolved);
    }
    else {
      return ContainerUtil.concat(
        createLineMarker(editor, line1, type, SeparatorPlacement.TOP, false, resolved),
        createLineMarker(editor, line2 - 1, type, SeparatorPlacement.BOTTOM, false, resolved)
      );
    }
  }

  /** Creates a single-line marker with the default diff line renderer. */
  @NotNull
  public static List<RangeHighlighter> createLineMarker(@NotNull Editor editor, int line, @NotNull final TextDiffType type,
                                                        @NotNull final SeparatorPlacement placement) {
    return new LineMarkerBuilder(editor, line, placement).withType(type).doneDefaultRenderer();
  }

  /** Creates a single-line marker, optionally double-lined and/or resolved. */
  @NotNull
  private static List<RangeHighlighter> createLineMarker(@NotNull final Editor editor, int line, @NotNull final TextDiffType type,
                                                         @NotNull final SeparatorPlacement placement,
                                                         final boolean doubleLine, final boolean resolved) {
    return new LineMarkerBuilder(editor, line, placement).withType(type).withResolved(resolved).doneDefaultRenderer(doubleLine);
  }

  /** Creates a single-line marker drawn with the plain border renderer. */
  @NotNull
  public static List<RangeHighlighter> createBorderLineMarker(@NotNull final Editor editor, int line,
                                                              @NotNull final SeparatorPlacement placement) {
    return new LineMarkerBuilder(editor, line, placement).withRenderer(BORDER_LINE_RENDERER).done();
  }

  /**
   * Creates a fold-like separator highlighter over [offset1, offset2] whose
   * visibility is controlled by {@code condition}.
   */
  @NotNull
  public static List<RangeHighlighter> createLineSeparatorHighlighter(@NotNull Editor editor,
                                                                      int offset1,
                                                                      int offset2,
                                                                      @NotNull BooleanGetter condition) {
    RangeHighlighter marker = editor.getMarkupModel()
      .addRangeHighlighter(offset1, offset2, LINE_MARKER_LAYER, null, HighlighterTargetArea.LINES_IN_RANGE);

    // The same renderer handles both the separator line and the gutter part.
    DiffLineSeparatorRenderer renderer = new DiffLineSeparatorRenderer(editor, condition);
    marker.setLineSeparatorPlacement(SeparatorPlacement.TOP);
    marker.setLineSeparatorRenderer(renderer);
    marker.setLineMarkerRenderer(renderer);

    return Collections.singletonList(marker);
  }

  /** Builder for line-range diff highlighters (background + gutter + stripe). */
  private static class LineHighlighterBuilder {
    @NotNull private final Editor editor;
    @NotNull private final TextDiffType type;
    private final int startLine;
    private final int endLine;

    private boolean ignored = false;
    private boolean resolved = false;

    private LineHighlighterBuilder(@NotNull Editor editor, int startLine, int endLine, @NotNull TextDiffType type) {
      this.editor = editor;
      this.type = type;
      this.startLine = startLine;
      this.endLine = endLine;
    }

    @NotNull
    public LineHighlighterBuilder withIgnored(boolean ignored) {
      this.ignored = ignored;
      return this;
    }

    @NotNull
    public LineHighlighterBuilder withResolved(boolean resolved) {
      this.resolved = resolved;
      return this;
    }

    /**
     * Creates the highlighters: one for the range itself and, unless the
     * range is empty or resolved, one for the error stripe.
     */
    @NotNull
    public List<RangeHighlighter> done() {
      boolean isEmptyRange = startLine == endLine;
      boolean isLastLine = endLine == getLineCount(editor.getDocument());

      TextRange offsets = DiffUtil.getLinesRange(editor.getDocument(), startLine, endLine);
      int start = offsets.getStartOffset();
      int end = offsets.getEndOffset();

      // Empty or resolved ranges get no background/stripe — only the gutter marker.
      TextAttributes attributes = isEmptyRange || resolved ? null : getTextAttributes(type, editor, ignored);
      TextAttributes stripeAttributes = isEmptyRange || resolved ? null : getStripeTextAttributes(type, editor);

      RangeHighlighter highlighter = editor.getMarkupModel()
        .addRangeHighlighter(start, end, DEFAULT_LAYER, attributes, HighlighterTargetArea.LINES_IN_RANGE);

      installGutterRenderer(highlighter, type, ignored, resolved, isEmptyRange, isLastLine);

      if (stripeAttributes == null) return Collections.singletonList(highlighter);

      RangeHighlighter stripeHighlighter = editor.getMarkupModel()
        .addRangeHighlighter(start, end, STRIPE_LAYER, stripeAttributes, HighlighterTargetArea.LINES_IN_RANGE);

      return ContainerUtil.list(highlighter, stripeHighlighter);
    }
  }

  /** Builder for intra-line (exact offset range) diff highlighters. */
  private static class InlineHighlighterBuilder {
    @NotNull private final Editor editor;
    @NotNull private final TextDiffType type;
    private final int start;
    private final int end;

    private InlineHighlighterBuilder(@NotNull Editor editor, int start, int end, @NotNull TextDiffType type) {
      this.editor = editor;
      this.type = type;
      this.start = start;
      this.end = end;
    }

    @NotNull
    public List<RangeHighlighter> done() {
      TextAttributes attributes = getTextAttributes(type, editor, false);

      RangeHighlighter highlighter = editor.getMarkupModel()
        .addRangeHighlighter(start, end, INLINE_LAYER, attributes, HighlighterTargetArea.EXACT_RANGE);

      // Zero-length ranges need a custom renderer to stay visible.
      if (start == end) installEmptyRangeRenderer(highlighter, type);

      return Collections.singletonList(highlighter);
    }
  }

  /** Builder for separator line markers (chunk borders) on a single line. */
  private static class LineMarkerBuilder {
    @NotNull private final Editor editor;
    @NotNull private final SeparatorPlacement placement;
    private final int line;

    private boolean resolved = false;
    @Nullable private TextDiffType type;
    @Nullable private LineSeparatorRenderer renderer;

    private LineMarkerBuilder(@NotNull Editor editor, int line, @NotNull SeparatorPlacement placement) {
      this.editor = editor;
      this.line = line;
      this.placement = placement;
    }

    @NotNull
    public LineMarkerBuilder withType(@NotNull TextDiffType type) {
      this.type = type;
      return this;
    }

    @NotNull
    public LineMarkerBuilder withResolved(boolean resolved) {
      this.resolved = resolved;
      return this;
    }

    @NotNull
    public LineMarkerBuilder withRenderer(@NotNull LineSeparatorRenderer renderer) {
      this.renderer = renderer;
      return this;
    }

    @NotNull
    public List<RangeHighlighter> doneDefaultRenderer() {
      return doneDefaultRenderer(false);
    }

    /** Finishes with the standard diff separator renderer; requires a type. */
    @NotNull
    public List<RangeHighlighter> doneDefaultRenderer(boolean doubleLine) {
      assert type != null;
      this.renderer = createDiffLineRenderer(editor, type, doubleLine, resolved);
      return done();
    }

    @NotNull
    public List<RangeHighlighter> done() {
      // We won't use addLineHighlighter as it will fail to add marker into empty document.
      //RangeHighlighter highlighter = editor.getMarkupModel().addLineHighlighter(line, HighlighterLayer.SELECTION - 1, null);
      int offset = DocumentUtil.getFirstNonSpaceCharOffset(editor.getDocument(), line);
      RangeHighlighter highlighter = editor.getMarkupModel()
        .addRangeHighlighter(offset, offset, LINE_MARKER_LAYER, null, HighlighterTargetArea.LINES_IN_RANGE);

      highlighter.setLineSeparatorPlacement(placement);
      highlighter.setLineSeparatorRenderer(renderer);

      // Untyped or resolved markers get no error-stripe companion.
      if (type == null || resolved) return Collections.singletonList(highlighter);

      TextAttributes stripeAttributes = getStripeTextAttributes(type, editor);
      RangeHighlighter stripeHighlighter = editor.getMarkupModel()
        .addRangeHighlighter(offset, offset, STRIPE_LAYER, stripeAttributes, HighlighterTargetArea.LINES_IN_RANGE);

      return ContainerUtil.list(highlighter, stripeHighlighter);
    }
  }
}
| |
package com.youdu.widget.adbrowser;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.net.Uri;
import android.text.TextUtils;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import com.youdu.adutil.LogUtils;
import java.net.URISyntaxException;
/**
 * Custom WebViewClient for AdBrowserWebView which handles different url schemes
 * (tel, mailto, geo, market, youtube, intent, http/https) by dispatching them
 * to external activities where appropriate.
 * Has listener to communicate with buttons on AdBrowserLayout.
 */
public class AdBrowserWebViewClient extends WebViewClient {

    private static final String LOG_TAG = AdBrowserWebViewClient.class.getSimpleName();

    public static final String PLAY_STORE_URL = "https://play.google.com/store/apps/details?id=";

    private static final String HEADER_PLAIN_TEXT = "plain/text";

    private static final String TEL_SCHEME = "tel";
    private static final String MAILTO_SCHEME = "mailto";
    private static final String GEO_SCHEME = "geo";
    private static final String MARKET_SCHEME = "market";
    private static final String YOUTUBE_SCHEME = "vnd.youtube";
    private static final String HTTP_SCHEME = "http";
    private static final String HTTPS_SCHEME = "https";
    private static final String INTENT_SCHEME = "intent";

    private static final String GEO_HOST = "maps.google.com";
    private static final String MARKET_HOST = "play.google.com";
    private static final String YOUTUBE_HOST1 = "www.youtube.com";
    private static final String YOUTUBE_HOST2 = "m.youtube.com";

    private Listener mListener;
    // No-op fallback so callbacks never need a null check.
    private Listener mEmptyListener = new Listener() {
        @Override
        public void onPageStarted() {
        }

        @Override
        public void onPageFinished(boolean canGoBack) {
        }

        @Override
        public void onReceiveError() {
        }

        @Override
        public void onLeaveApp() {
        }
    };

    /** Callbacks for page lifecycle and navigation events. */
    public interface Listener {
        void onPageStarted();

        void onPageFinished(boolean canGoBack);

        void onReceiveError();

        void onLeaveApp();
    }

    public AdBrowserWebViewClient(Listener listener) {
        if (listener == null) {
            LogUtils.i(LOG_TAG, "Error: Wrong listener");
            mListener = mEmptyListener;
        } else {
            mListener = listener;
        }
    }

    /**
     * Intercepts navigation and routes special schemes to external apps.
     * @return true when this client consumed the url (an external activity was
     *         launched or the attempt failed); false to let the WebView load it
     */
    @Override
    public boolean shouldOverrideUrlLoading(WebView view, String url) {
        LogUtils.i(LOG_TAG, "shouldOverrideUrlLoading url=" + url);
        Context context = view.getContext();
        Uri uri;
        try {
            uri = Uri.parse(url);
        } catch (NullPointerException ex) {
            ex.printStackTrace();
            return false;
        }
        if (uri == null) {
            return false;
        }
        String scheme = uri.getScheme();
        String host = uri.getHost();
        if (TextUtils.isEmpty(scheme)) {
            return false;
        }
        if (scheme.equalsIgnoreCase(TEL_SCHEME)) {
            Intent intent = new Intent(Intent.ACTION_DIAL, uri);
            resolveAndStartActivity(intent, context);
        } else if (scheme.equalsIgnoreCase(MAILTO_SCHEME)) {
            url = url.replaceFirst("mailto:", "");
            url = url.trim();
            Intent intent = new Intent(Intent.ACTION_SEND, uri);
            intent.setType(HEADER_PLAIN_TEXT).putExtra(Intent.EXTRA_EMAIL, new String[]{url});
            resolveAndStartActivity(intent, context);
        } else if (scheme.equalsIgnoreCase(GEO_SCHEME)) {
            Intent searchAddress = new Intent(Intent.ACTION_VIEW, uri);
            resolveAndStartActivity(searchAddress, context);
        } else if (scheme.equalsIgnoreCase(YOUTUBE_SCHEME)) {
            leaveApp(url, context);
        } else if (scheme.equalsIgnoreCase(HTTP_SCHEME)
                || scheme.equalsIgnoreCase(HTTPS_SCHEME)) {
            return checkHost(url, host, context);
        } else if (scheme.equalsIgnoreCase(INTENT_SCHEME)) {
            handleIntentScheme(url, context);
        } else if (scheme.equalsIgnoreCase(MARKET_SCHEME)) {
            handleMarketScheme(url, context);
        } else {
            return true;
        }
        return true;
    }

    /**
     * Checks host
     *
     * @param url - full url
     * @param host - host from url; may be null for malformed http(s) urls
     * @param context - context used to launch external activities
     * @return true - if param host equals with geo, market or youtube host
     * false - otherwise (including a missing host), letting the WebView load the url
     */
    private boolean checkHost(String url, String host, Context context) {
        if (host == null) {
            // Uri.getHost() can return null (e.g. "http:///path"); without this
            // guard the equalsIgnoreCase call below would throw an NPE.
            return false;
        }
        if (host.equalsIgnoreCase(GEO_HOST)) {
            Intent searchAddress = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
            resolveAndStartActivity(searchAddress, context);
        } else if (host.equalsIgnoreCase(MARKET_HOST)
                || host.equalsIgnoreCase(YOUTUBE_HOST1)
                || host.equalsIgnoreCase(YOUTUBE_HOST2)) {
            leaveApp(url, context);
        } else {
            return false;
        }
        return true;
    }

    /**
     * Handles "market://" urls: launches the store app when available,
     * otherwise falls back to the Play Store web url for the same package id.
     */
    private void handleMarketScheme(String url, Context context) {
        try {
            Intent intent = Intent.parseUri(url, 0);
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            if (isActivityResolved(intent, context)) {
                context.startActivity(intent);
            } else {
                Uri uri = Uri.parse(url);
                String id = uri.getQueryParameter("id");
                url = PLAY_STORE_URL + id;
                leaveApp(url, context);
            }
        } catch (Exception e) {
            mListener.onReceiveError();
        }
    }

    /**
     * Handles "intent://" urls: launches the target activity when available,
     * otherwise falls back to the Play Store page of the intent's package.
     */
    private void handleIntentScheme(String url, Context context) {
        try {
            Intent intent = Intent.parseUri(url, Intent.URI_INTENT_SCHEME);
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            if (isActivityResolved(intent, context)) {
                context.startActivity(intent);
            } else {
                url = PLAY_STORE_URL + intent.getPackage();
                leaveApp(url, context);
            }
        } catch (URISyntaxException e) {
            mListener.onReceiveError();
        }
    }

    /** Opens the url in an external app/browser and notifies the listener. */
    private void leaveApp(String url, Context context) {
        Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
        resolveAndStartActivity(intent, context);
        mListener.onLeaveApp();
    }

    /** Starts the intent if something can handle it; reports an error otherwise. */
    private void resolveAndStartActivity(Intent intent, Context context) {
        if (isActivityResolved(intent, context)) {
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(intent);
        } else {
            mListener.onReceiveError();
        }
    }

    private boolean isActivityResolved(Intent intent, Context context) {
        return context.getPackageManager()
                .resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY) != null;
    }

    @Override
    public final void onPageStarted(WebView view, String url, Bitmap favicon) {
        super.onPageStarted(view, url, favicon);
        mListener.onPageStarted();
    }

    @Override
    public final void onPageFinished(WebView view, String url) {
        super.onPageFinished(view, url);
        mListener.onPageFinished(view.canGoBack());
    }

    @Override
    public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) {
        super.onReceivedError(view, errorCode, description, failingUrl);
        String mess = "onReceivedError: " + description;
        LogUtils.i(LOG_TAG, mess);
        mListener.onReceiveError();
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure specifying a buffer image copy operation.
*
* <h5>Description</h5>
*
* <p>This structure is functionally identical to {@link VkBufferImageCopy}, but adds {@code sType} and {@code pNext} parameters, allowing it to be more easily extended.</p>
*
* <h5>Valid Usage</h5>
*
* <ul>
* <li>{@code bufferRowLength} <b>must</b> be 0, or greater than or equal to the {@code width} member of {@code imageExtent}</li>
* <li>{@code bufferImageHeight} <b>must</b> be 0, or greater than or equal to the {@code height} member of {@code imageExtent}</li>
* <li>The {@code aspectMask} member of {@code imageSubresource} <b>must</b> only have a single bit set</li>
* </ul>
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code sType} <b>must</b> be {@link VK13#VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2}</li>
* <li>{@code pNext} <b>must</b> be {@code NULL} or a pointer to a valid instance of {@link VkCopyCommandTransformInfoQCOM}</li>
* <li>The {@code sType} value of each struct in the {@code pNext} chain <b>must</b> be unique</li>
* <li>{@code imageSubresource} <b>must</b> be a valid {@link VkImageSubresourceLayers} structure</li>
* </ul>
*
* <h5>See Also</h5>
*
* <p>{@link VkCopyBufferToImageInfo2}, {@link VkCopyImageToBufferInfo2}, {@link VkExtent3D}, {@link VkImageSubresourceLayers}, {@link VkOffset3D}</p>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkBufferImageCopy2 {
* VkStructureType {@link #sType};
* void const * {@link #pNext};
* VkDeviceSize {@link #bufferOffset};
* uint32_t {@link #bufferRowLength};
* uint32_t {@link #bufferImageHeight};
* {@link VkImageSubresourceLayers VkImageSubresourceLayers} {@link #imageSubresource};
* {@link VkOffset3D VkOffset3D} {@link #imageOffset};
* {@link VkExtent3D VkExtent3D} {@link #imageExtent};
* }</code></pre>
*/
public class VkBufferImageCopy2 extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        STYPE,
        PNEXT,
        BUFFEROFFSET,
        BUFFERROWLENGTH,
        BUFFERIMAGEHEIGHT,
        IMAGESUBRESOURCE,
        IMAGEOFFSET,
        IMAGEEXTENT;

    static {
        // Member sizes/alignments mirror the native C declaration of VkBufferImageCopy2
        // (see the Layout section of the class javadoc). Offsets are resolved once at
        // class-load time for the current platform's pointer size and alignment rules.
        Layout layout = __struct(
            __member(4),            // sType             (VkStructureType, uint32_t)
            __member(POINTER_SIZE), // pNext             (void const *)
            __member(8),            // bufferOffset      (VkDeviceSize, uint64_t)
            __member(4),            // bufferRowLength   (uint32_t)
            __member(4),            // bufferImageHeight (uint32_t)
            __member(VkImageSubresourceLayers.SIZEOF, VkImageSubresourceLayers.ALIGNOF),
            __member(VkOffset3D.SIZEOF, VkOffset3D.ALIGNOF),
            __member(VkExtent3D.SIZEOF, VkExtent3D.ALIGNOF)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        STYPE = layout.offsetof(0);
        PNEXT = layout.offsetof(1);
        BUFFEROFFSET = layout.offsetof(2);
        BUFFERROWLENGTH = layout.offsetof(3);
        BUFFERIMAGEHEIGHT = layout.offsetof(4);
        IMAGESUBRESOURCE = layout.offsetof(5);
        IMAGEOFFSET = layout.offsetof(6);
        IMAGEEXTENT = layout.offsetof(7);
    }

    /**
     * Creates a {@code VkBufferImageCopy2} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkBufferImageCopy2(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the type of this structure. */
    @NativeType("VkStructureType")
    public int sType() { return nsType(address()); }
    /** {@code NULL} or a pointer to a structure extending this structure. */
    @NativeType("void const *")
    public long pNext() { return npNext(address()); }
    /** the offset in bytes from the start of the buffer object where the image data is copied from or to. */
    @NativeType("VkDeviceSize")
    public long bufferOffset() { return nbufferOffset(address()); }
    /** {@code bufferRowLength} and {@code bufferImageHeight} specify in texels a subregion of a larger two- or three-dimensional image in buffer memory, and control the addressing calculations. If either of these values is zero, that aspect of the buffer memory is considered to be tightly packed according to the {@code imageExtent}. */
    @NativeType("uint32_t")
    public int bufferRowLength() { return nbufferRowLength(address()); }
    /** see {@code bufferRowLength} */
    @NativeType("uint32_t")
    public int bufferImageHeight() { return nbufferImageHeight(address()); }
    /** a {@link VkImageSubresourceLayers} used to specify the specific image subresources of the image used for the source or destination image data. */
    public VkImageSubresourceLayers imageSubresource() { return nimageSubresource(address()); }
    /** selects the initial {@code x}, {@code y}, {@code z} offsets in texels of the sub-region of the source or destination image data. */
    public VkOffset3D imageOffset() { return nimageOffset(address()); }
    /** the size in texels of the image to copy in {@code width}, {@code height} and {@code depth}. */
    public VkExtent3D imageExtent() { return nimageExtent(address()); }

    /** Sets the specified value to the {@link #sType} field. */
    public VkBufferImageCopy2 sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
    /** Sets the {@link VK13#VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2} value to the {@link #sType} field. */
    public VkBufferImageCopy2 sType$Default() { return sType(VK13.VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2); }
    /** Sets the specified value to the {@link #pNext} field. */
    public VkBufferImageCopy2 pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
    /** Prepends the specified {@link VkCopyCommandTransformInfoQCOM} value to the {@code pNext} chain. */
    public VkBufferImageCopy2 pNext(VkCopyCommandTransformInfoQCOM value) { return this.pNext(value.pNext(this.pNext()).address()); }
    /** Sets the specified value to the {@link #bufferOffset} field. */
    public VkBufferImageCopy2 bufferOffset(@NativeType("VkDeviceSize") long value) { nbufferOffset(address(), value); return this; }
    /** Sets the specified value to the {@link #bufferRowLength} field. */
    public VkBufferImageCopy2 bufferRowLength(@NativeType("uint32_t") int value) { nbufferRowLength(address(), value); return this; }
    /** Sets the specified value to the {@link #bufferImageHeight} field. */
    public VkBufferImageCopy2 bufferImageHeight(@NativeType("uint32_t") int value) { nbufferImageHeight(address(), value); return this; }
    /** Copies the specified {@link VkImageSubresourceLayers} to the {@link #imageSubresource} field. */
    public VkBufferImageCopy2 imageSubresource(VkImageSubresourceLayers value) { nimageSubresource(address(), value); return this; }
    /** Passes the {@link #imageSubresource} field to the specified {@link java.util.function.Consumer Consumer}. */
    public VkBufferImageCopy2 imageSubresource(java.util.function.Consumer<VkImageSubresourceLayers> consumer) { consumer.accept(imageSubresource()); return this; }
    /** Copies the specified {@link VkOffset3D} to the {@link #imageOffset} field. */
    public VkBufferImageCopy2 imageOffset(VkOffset3D value) { nimageOffset(address(), value); return this; }
    /** Passes the {@link #imageOffset} field to the specified {@link java.util.function.Consumer Consumer}. */
    public VkBufferImageCopy2 imageOffset(java.util.function.Consumer<VkOffset3D> consumer) { consumer.accept(imageOffset()); return this; }
    /** Copies the specified {@link VkExtent3D} to the {@link #imageExtent} field. */
    public VkBufferImageCopy2 imageExtent(VkExtent3D value) { nimageExtent(address(), value); return this; }
    /** Passes the {@link #imageExtent} field to the specified {@link java.util.function.Consumer Consumer}. */
    public VkBufferImageCopy2 imageExtent(java.util.function.Consumer<VkExtent3D> consumer) { consumer.accept(imageExtent()); return this; }

    /** Initializes this struct with the specified values. */
    public VkBufferImageCopy2 set(
        int sType,
        long pNext,
        long bufferOffset,
        int bufferRowLength,
        int bufferImageHeight,
        VkImageSubresourceLayers imageSubresource,
        VkOffset3D imageOffset,
        VkExtent3D imageExtent
    ) {
        sType(sType);
        pNext(pNext);
        bufferOffset(bufferOffset);
        bufferRowLength(bufferRowLength);
        bufferImageHeight(bufferImageHeight);
        imageSubresource(imageSubresource);
        imageOffset(imageOffset);
        imageExtent(imageExtent);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkBufferImageCopy2 set(VkBufferImageCopy2 src) {
        // Shallow byte-wise copy of the whole native struct, including nested members.
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkBufferImageCopy2} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkBufferImageCopy2 malloc() {
        return wrap(VkBufferImageCopy2.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkBufferImageCopy2} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkBufferImageCopy2 calloc() {
        return wrap(VkBufferImageCopy2.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkBufferImageCopy2} instance allocated with {@link BufferUtils}. */
    public static VkBufferImageCopy2 create() {
        // Heap ByteBuffer-backed allocation; freed by the GC along with the container.
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkBufferImageCopy2.class, memAddress(container), container);
    }

    /** Returns a new {@code VkBufferImageCopy2} instance for the specified memory address. */
    public static VkBufferImageCopy2 create(long address) {
        return wrap(VkBufferImageCopy2.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkBufferImageCopy2 createSafe(long address) {
        return address == NULL ? null : wrap(VkBufferImageCopy2.class, address);
    }

    /**
     * Returns a new {@link VkBufferImageCopy2.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkBufferImageCopy2.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkBufferImageCopy2.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkBufferImageCopy2.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkBufferImageCopy2.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkBufferImageCopy2.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkBufferImageCopy2.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkBufferImageCopy2.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkBufferImageCopy2.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    /**
     * Returns a new {@code VkBufferImageCopy2} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkBufferImageCopy2 malloc(MemoryStack stack) {
        return wrap(VkBufferImageCopy2.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkBufferImageCopy2} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkBufferImageCopy2 calloc(MemoryStack stack) {
        return wrap(VkBufferImageCopy2.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkBufferImageCopy2.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkBufferImageCopy2.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkBufferImageCopy2.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkBufferImageCopy2.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------
    // Unsafe accessors: read/write struct members directly at a raw address,
    // bypassing any instance; callers are responsible for address validity.

    /** Unsafe version of {@link #sType}. */
    public static int nsType(long struct) { return UNSAFE.getInt(null, struct + VkBufferImageCopy2.STYPE); }
    /** Unsafe version of {@link #pNext}. */
    public static long npNext(long struct) { return memGetAddress(struct + VkBufferImageCopy2.PNEXT); }
    /** Unsafe version of {@link #bufferOffset}. */
    public static long nbufferOffset(long struct) { return UNSAFE.getLong(null, struct + VkBufferImageCopy2.BUFFEROFFSET); }
    /** Unsafe version of {@link #bufferRowLength}. */
    public static int nbufferRowLength(long struct) { return UNSAFE.getInt(null, struct + VkBufferImageCopy2.BUFFERROWLENGTH); }
    /** Unsafe version of {@link #bufferImageHeight}. */
    public static int nbufferImageHeight(long struct) { return UNSAFE.getInt(null, struct + VkBufferImageCopy2.BUFFERIMAGEHEIGHT); }
    /** Unsafe version of {@link #imageSubresource}. */
    public static VkImageSubresourceLayers nimageSubresource(long struct) { return VkImageSubresourceLayers.create(struct + VkBufferImageCopy2.IMAGESUBRESOURCE); }
    /** Unsafe version of {@link #imageOffset}. */
    public static VkOffset3D nimageOffset(long struct) { return VkOffset3D.create(struct + VkBufferImageCopy2.IMAGEOFFSET); }
    /** Unsafe version of {@link #imageExtent}. */
    public static VkExtent3D nimageExtent(long struct) { return VkExtent3D.create(struct + VkBufferImageCopy2.IMAGEEXTENT); }

    /** Unsafe version of {@link #sType(int) sType}. */
    public static void nsType(long struct, int value) { UNSAFE.putInt(null, struct + VkBufferImageCopy2.STYPE, value); }
    /** Unsafe version of {@link #pNext(long) pNext}. */
    public static void npNext(long struct, long value) { memPutAddress(struct + VkBufferImageCopy2.PNEXT, value); }
    /** Unsafe version of {@link #bufferOffset(long) bufferOffset}. */
    public static void nbufferOffset(long struct, long value) { UNSAFE.putLong(null, struct + VkBufferImageCopy2.BUFFEROFFSET, value); }
    /** Unsafe version of {@link #bufferRowLength(int) bufferRowLength}. */
    public static void nbufferRowLength(long struct, int value) { UNSAFE.putInt(null, struct + VkBufferImageCopy2.BUFFERROWLENGTH, value); }
    /** Unsafe version of {@link #bufferImageHeight(int) bufferImageHeight}. */
    public static void nbufferImageHeight(long struct, int value) { UNSAFE.putInt(null, struct + VkBufferImageCopy2.BUFFERIMAGEHEIGHT, value); }
    /** Unsafe version of {@link #imageSubresource(VkImageSubresourceLayers) imageSubresource}. */
    public static void nimageSubresource(long struct, VkImageSubresourceLayers value) { memCopy(value.address(), struct + VkBufferImageCopy2.IMAGESUBRESOURCE, VkImageSubresourceLayers.SIZEOF); }
    /** Unsafe version of {@link #imageOffset(VkOffset3D) imageOffset}. */
    public static void nimageOffset(long struct, VkOffset3D value) { memCopy(value.address(), struct + VkBufferImageCopy2.IMAGEOFFSET, VkOffset3D.SIZEOF); }
    /** Unsafe version of {@link #imageExtent(VkExtent3D) imageExtent}. */
    public static void nimageExtent(long struct, VkExtent3D value) { memCopy(value.address(), struct + VkBufferImageCopy2.IMAGEEXTENT, VkExtent3D.SIZEOF); }

    // -----------------------------------

    /** An array of {@link VkBufferImageCopy2} structs. */
    public static class Buffer extends StructBuffer<VkBufferImageCopy2, Buffer> implements NativeResource {

        // Placeholder instance used only as an element factory by StructBuffer;
        // its sentinel address (-1L) is never dereferenced.
        private static final VkBufferImageCopy2 ELEMENT_FACTORY = VkBufferImageCopy2.create(-1L);

        /**
         * Creates a new {@code VkBufferImageCopy2.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkBufferImageCopy2#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        // Wraps a raw address with no backing container; mark -1, position 0, limit == capacity.
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkBufferImageCopy2 getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link VkBufferImageCopy2#sType} field. */
        @NativeType("VkStructureType")
        public int sType() { return VkBufferImageCopy2.nsType(address()); }
        /** @return the value of the {@link VkBufferImageCopy2#pNext} field. */
        @NativeType("void const *")
        public long pNext() { return VkBufferImageCopy2.npNext(address()); }
        /** @return the value of the {@link VkBufferImageCopy2#bufferOffset} field. */
        @NativeType("VkDeviceSize")
        public long bufferOffset() { return VkBufferImageCopy2.nbufferOffset(address()); }
        /** @return the value of the {@link VkBufferImageCopy2#bufferRowLength} field. */
        @NativeType("uint32_t")
        public int bufferRowLength() { return VkBufferImageCopy2.nbufferRowLength(address()); }
        /** @return the value of the {@link VkBufferImageCopy2#bufferImageHeight} field. */
        @NativeType("uint32_t")
        public int bufferImageHeight() { return VkBufferImageCopy2.nbufferImageHeight(address()); }
        /** @return a {@link VkImageSubresourceLayers} view of the {@link VkBufferImageCopy2#imageSubresource} field. */
        public VkImageSubresourceLayers imageSubresource() { return VkBufferImageCopy2.nimageSubresource(address()); }
        /** @return a {@link VkOffset3D} view of the {@link VkBufferImageCopy2#imageOffset} field. */
        public VkOffset3D imageOffset() { return VkBufferImageCopy2.nimageOffset(address()); }
        /** @return a {@link VkExtent3D} view of the {@link VkBufferImageCopy2#imageExtent} field. */
        public VkExtent3D imageExtent() { return VkBufferImageCopy2.nimageExtent(address()); }

        /** Sets the specified value to the {@link VkBufferImageCopy2#sType} field. */
        public VkBufferImageCopy2.Buffer sType(@NativeType("VkStructureType") int value) { VkBufferImageCopy2.nsType(address(), value); return this; }
        /** Sets the {@link VK13#VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2} value to the {@link VkBufferImageCopy2#sType} field. */
        public VkBufferImageCopy2.Buffer sType$Default() { return sType(VK13.VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2); }
        /** Sets the specified value to the {@link VkBufferImageCopy2#pNext} field. */
        public VkBufferImageCopy2.Buffer pNext(@NativeType("void const *") long value) { VkBufferImageCopy2.npNext(address(), value); return this; }
        /** Prepends the specified {@link VkCopyCommandTransformInfoQCOM} value to the {@code pNext} chain. */
        public VkBufferImageCopy2.Buffer pNext(VkCopyCommandTransformInfoQCOM value) { return this.pNext(value.pNext(this.pNext()).address()); }
        /** Sets the specified value to the {@link VkBufferImageCopy2#bufferOffset} field. */
        public VkBufferImageCopy2.Buffer bufferOffset(@NativeType("VkDeviceSize") long value) { VkBufferImageCopy2.nbufferOffset(address(), value); return this; }
        /** Sets the specified value to the {@link VkBufferImageCopy2#bufferRowLength} field. */
        public VkBufferImageCopy2.Buffer bufferRowLength(@NativeType("uint32_t") int value) { VkBufferImageCopy2.nbufferRowLength(address(), value); return this; }
        /** Sets the specified value to the {@link VkBufferImageCopy2#bufferImageHeight} field. */
        public VkBufferImageCopy2.Buffer bufferImageHeight(@NativeType("uint32_t") int value) { VkBufferImageCopy2.nbufferImageHeight(address(), value); return this; }
        /** Copies the specified {@link VkImageSubresourceLayers} to the {@link VkBufferImageCopy2#imageSubresource} field. */
        public VkBufferImageCopy2.Buffer imageSubresource(VkImageSubresourceLayers value) { VkBufferImageCopy2.nimageSubresource(address(), value); return this; }
        /** Passes the {@link VkBufferImageCopy2#imageSubresource} field to the specified {@link java.util.function.Consumer Consumer}. */
        public VkBufferImageCopy2.Buffer imageSubresource(java.util.function.Consumer<VkImageSubresourceLayers> consumer) { consumer.accept(imageSubresource()); return this; }
        /** Copies the specified {@link VkOffset3D} to the {@link VkBufferImageCopy2#imageOffset} field. */
        public VkBufferImageCopy2.Buffer imageOffset(VkOffset3D value) { VkBufferImageCopy2.nimageOffset(address(), value); return this; }
        /** Passes the {@link VkBufferImageCopy2#imageOffset} field to the specified {@link java.util.function.Consumer Consumer}. */
        public VkBufferImageCopy2.Buffer imageOffset(java.util.function.Consumer<VkOffset3D> consumer) { consumer.accept(imageOffset()); return this; }
        /** Copies the specified {@link VkExtent3D} to the {@link VkBufferImageCopy2#imageExtent} field. */
        public VkBufferImageCopy2.Buffer imageExtent(VkExtent3D value) { VkBufferImageCopy2.nimageExtent(address(), value); return this; }
        /** Passes the {@link VkBufferImageCopy2#imageExtent} field to the specified {@link java.util.function.Consumer Consumer}. */
        public VkBufferImageCopy2.Buffer imageExtent(java.util.function.Consumer<VkExtent3D> consumer) { consumer.accept(imageExtent()); return this; }

    }

}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.service.mgt.ui;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.axiom.om.util.StAXUtils;
import org.apache.axis2.AxisFault;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.ServiceClient;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.service.mgt.stub.ServiceGroupAdminStub;
import org.wso2.carbon.service.mgt.stub.types.carbon.ServiceGroupMetaData;
import org.wso2.carbon.service.mgt.stub.types.carbon.ServiceGroupMetaDataWrapper;
import org.wso2.carbon.utils.xml.XMLPrettyPrinter;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.lang.Exception;
import java.nio.charset.StandardCharsets;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamReader;
/**
 * UI-side client for the backend {@code ServiceGroupAdmin} service. Wraps the generated
 * {@link ServiceGroupAdminStub} and converts remote failures into {@link AxisFault}s with
 * logged, human-readable messages.
 */
public class ServiceGroupAdminClient {
    private static final Log log = LogFactory.getLog(ServiceGroupAdminClient.class);
    private ServiceGroupAdminStub stub;
    private static final String BUNDLE = "org.wso2.carbon.service.mgt.ui.i18n.Resources";
    private ResourceBundle bundle;

    /**
     * Creates a client bound to {@code backendServerURL + "ServiceGroupAdmin"}.
     *
     * @param cookie           session cookie used to authenticate calls to the backend
     * @param backendServerURL base URL of the backend server services endpoint
     * @param configCtx        Axis2 configuration context used to create the stub
     * @param locale           locale used to resolve i18n messages from {@value #BUNDLE}
     * @throws AxisFault if the stub cannot be created
     */
    public ServiceGroupAdminClient(String cookie,
                                   String backendServerURL,
                                   ConfigurationContext configCtx,
                                   Locale locale) throws AxisFault {
        String serviceURL = backendServerURL + "ServiceGroupAdmin";
        bundle = ResourceBundle.getBundle(BUNDLE, locale);
        stub = new ServiceGroupAdminStub(configCtx, serviceURL);
        ServiceClient client = stub._getServiceClient();
        Options option = client.getOptions();
        // Reuse the authenticated session on every call made through this stub.
        option.setManageSession(true);
        option.setProperty(org.apache.axis2.transport.http.HTTPConstants.COOKIE_STRING, cookie);
    }

    /**
     * Lists service groups matching the given filters.
     *
     * @param serviceTypeFilter        service type to filter by
     * @param serviceGroupSearchString search string for service group names
     * @param pageNumber               zero-based page to fetch
     * @return the matching page of service groups, or {@code null} (unreachable in practice,
     *         since {@link #handleException(String, Exception)} always throws)
     * @throws RemoteException if the backend call fails
     */
    public ServiceGroupMetaDataWrapper getAllServiceGroups(String serviceTypeFilter,
                                                           String serviceGroupSearchString,
                                                           int pageNumber) throws RemoteException {
        try {
            return stub.listServiceGroups(serviceTypeFilter, serviceGroupSearchString, pageNumber);
        } catch (RemoteException e) {
            handleException(bundle.getString("cannot.get.all.services"), e);
        }
        return null;
    }

    /**
     * Fetches the parameters of a service group and parses each parameter's XML into a
     * {@link Parameter} (name, pretty-printed XML value, and {@code locked} flag).
     *
     * @param serviceGroupName name of the service group
     * @return the parameters (possibly empty); never returns {@code null} on success
     * @throws AxisFault if the backend call or XML parsing fails
     */
    public List<Parameter> getServiceGroupParameters(String serviceGroupName) throws AxisFault {
        List<Parameter> parameters = new ArrayList<Parameter>();
        try {
            String[] groupParameters = stub.getServiceGroupParameters(serviceGroupName);
            if (groupParameters != null) {
                for (String groupParameter : groupParameters) {
                    if (groupParameter == null) {
                        continue;
                    }
                    // Parse the parameter XML to extract its name and 'locked' attribute.
                    // Use an explicit charset: getBytes() without one depends on the platform default.
                    XMLStreamReader xmlSR = StAXUtils.createXMLStreamReader(
                            new ByteArrayInputStream(groupParameter.getBytes(StandardCharsets.UTF_8)));
                    OMElement paramEle = new StAXOMBuilder(xmlSR).getDocumentElement();
                    String paramName = paramEle.getAttribute(new QName("name")).getAttributeValue();

                    // Pretty-print the raw parameter XML for display in the UI.
                    InputStream xmlIn =
                            new ByteArrayInputStream(groupParameter.getBytes(StandardCharsets.UTF_8));
                    XMLPrettyPrinter xmlPrettyPrinter = new XMLPrettyPrinter(xmlIn, null);
                    Parameter parameter = new Parameter(paramName, xmlPrettyPrinter.xmlFormat());

                    OMAttribute lockedAttrib = paramEle.getAttribute(new QName("locked"));
                    parameter.setLocked(lockedAttrib != null
                            && "true".equals(lockedAttrib.getAttributeValue()));
                    parameters.add(parameter);
                }
            }
            return parameters;
        } catch (Exception e) {
            // Single handling point: the original nested try/catch logged and wrapped twice.
            handleException("Could not get parameters for service group: " + serviceGroupName, e);
        }
        return null;
    }

    /**
     * Enables or disables MTOM for a service group.
     *
     * @param mtomState        the new MTOM state
     * @param serviceGroupName name of the service group
     * @return the updated service group metadata
     * @throws AxisFault if the backend call fails
     */
    public ServiceGroupMetaData configureServiceGroupMTOM(String mtomState, String serviceGroupName)
            throws AxisFault {
        try {
            return stub.configureServiceGroupMTOM(mtomState, serviceGroupName);
        } catch (java.lang.Exception e) {
            // Fixed message: was "MOM state ... . Error is" (typo + spacing).
            String msg = "Cannot change MTOM state of Axis service group " + serviceGroupName
                    + ". Error is " + e.getMessage();
            handleException(msg, e);
        }
        return null;
    }

    /**
     * Fetches the metadata of a single service group.
     *
     * @param serviceGroupName name of the service group
     * @return the service group metadata
     * @throws AxisFault if the backend call fails
     */
    public ServiceGroupMetaData listServiceGroup(String serviceGroupName) throws AxisFault {
        try {
            return stub.listServiceGroup(serviceGroupName);
        } catch (java.lang.Exception e) {
            String msg = "Cannot get service group data. Error is " + e.getMessage();
            handleException(msg, e);
        }
        return null;
    }

    /**
     * Replaces the parameters of a service group.
     *
     * @param serviceGroupName name of the service group
     * @param params           parameter XML fragments to set
     * @throws AxisFault if the backend call fails
     */
    public void setServiceGroupParamters(String serviceGroupName, List<String> params)
            throws AxisFault {
        try {
            stub.setServiceGroupParameters(serviceGroupName,
                    params.toArray(new String[params.size()]));
        } catch (java.lang.Exception e) {
            // Fixed copy-pasted message: this method sets parameters, it does not get data.
            String msg = "Cannot set service group parameters. Error is " + e.getMessage();
            handleException(msg, e);
        }
    }

    /**
     * Removes a single parameter from a service group.
     *
     * @param serviceGroupName name of the service group
     * @param parameterName    name of the parameter to remove
     * @throws AxisFault if the backend call fails
     */
    public void removeServiceGroupParameter(String serviceGroupName, String parameterName)
            throws AxisFault {
        try {
            stub.removeServiceGroupParameter(serviceGroupName, parameterName);
        } catch (java.lang.Exception e) {
            handleException("Could not remove service group parameter. Service group: "
                    + serviceGroupName + ", parameterName=" + parameterName, e);
        }
    }

    /**
     * Asks the backend to create a service archive (AAR) for the given service group.
     *
     * @param serviceGroupName name of the service group
     * @return a value describing the created archive
     * @throws AxisFault if the backend call fails or archive creation is unsupported
     */
    public String dumpAAR(String serviceGroupName) throws AxisFault {
        try {
            String dumpValue = stub.dumpAAR(serviceGroupName);
            if (dumpValue != null) {
                return dumpValue;
            } else {
                handleException("Service archive creation is not supported by this service type");
            }
        } catch (java.lang.Exception e) {
            handleException(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Logs {@code msg} with its cause and rethrows as {@link AxisFault}, preserving the cause.
     *
     * @param msg message describing the failure context
     * @param e   underlying exception
     * @throws AxisFault always
     */
    private void handleException(String msg, Exception e) throws AxisFault {
        log.error(msg, e);
        throw new AxisFault(msg, e);
    }

    /** Logs {@code msg} and rethrows as {@link AxisFault}. Always throws. */
    private void handleException(String msg) throws AxisFault {
        log.error(msg);
        throw new AxisFault(msg);
    }
}
| |
package com.example.core.model.search.multi_search;
import android.os.Parcel;
import android.os.Parcelable;
import com.example.core.model.person_search.KnownFor;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.List;
public class MultiSearchResult implements Parcelable
{
@SerializedName("poster_path")
@Expose
private String posterPath;
@SerializedName("adult")
@Expose
private Boolean adult;
@SerializedName("overview")
@Expose
private String overview;
@SerializedName("release_date")
@Expose
private String releaseDate;
@SerializedName("original_title")
@Expose
private String originalTitle;
@SerializedName("genre_ids")
@Expose
private List<Integer> genreIds = null;
@SerializedName("id")
@Expose
private Integer id;
@SerializedName("media_type")
@Expose
private String mediaType;
@SerializedName("original_language")
@Expose
private String originalLanguage;
@SerializedName("title")
@Expose
private String title;
@SerializedName("backdrop_path")
@Expose
private String backdropPath;
@SerializedName("popularity")
@Expose
private Double popularity;
@SerializedName("vote_count")
@Expose
private Integer voteCount;
@SerializedName("video")
@Expose
private Boolean video;
@SerializedName("vote_average")
@Expose
private Double voteAverage;
@SerializedName("profile_path")
@Expose
private String profilePath;
@SerializedName("known_for")
@Expose
private List<KnownFor> knownFor = null;
@SerializedName("name")
@Expose
private String name;
@SerializedName("first_air_date")
@Expose
private String firstAirDate;
@SerializedName("origin_country")
@Expose
private List<String> originCountry = null;
@SerializedName("original_name")
@Expose
private String originalName;
public final static Creator<MultiSearchResult> CREATOR = new Creator<MultiSearchResult>() {
@SuppressWarnings({
"unchecked"
})
public MultiSearchResult createFromParcel(Parcel in) {
MultiSearchResult instance = new MultiSearchResult();
instance.posterPath = ((String) in.readValue((String.class.getClassLoader())));
instance.adult = ((Boolean) in.readValue((Boolean.class.getClassLoader())));
instance.overview = ((String) in.readValue((String.class.getClassLoader())));
instance.releaseDate = ((String) in.readValue((String.class.getClassLoader())));
instance.originalTitle = ((String) in.readValue((String.class.getClassLoader())));
in.readList(instance.genreIds, (Integer.class.getClassLoader()));
instance.id = ((Integer) in.readValue((Integer.class.getClassLoader())));
instance.mediaType = ((String) in.readValue((String.class.getClassLoader())));
instance.originalLanguage = ((String) in.readValue((String.class.getClassLoader())));
instance.title = ((String) in.readValue((String.class.getClassLoader())));
instance.backdropPath = ((String) in.readValue((String.class.getClassLoader())));
instance.popularity = ((Double) in.readValue((Double.class.getClassLoader())));
instance.voteCount = ((Integer) in.readValue((Integer.class.getClassLoader())));
instance.video = ((Boolean) in.readValue((Boolean.class.getClassLoader())));
instance.voteAverage = ((Double) in.readValue((Double.class.getClassLoader())));
instance.profilePath = ((String) in.readValue((String.class.getClassLoader())));
in.readList(instance.knownFor, (KnownFor.class.getClassLoader()));
instance.name = ((String) in.readValue((String.class.getClassLoader())));
instance.firstAirDate = ((String) in.readValue((String.class.getClassLoader())));
in.readList(instance.originCountry, (String.class.getClassLoader()));
instance.originalName = ((String) in.readValue((String.class.getClassLoader())));
return instance;
}
public MultiSearchResult[] newArray(int size) {
return (new MultiSearchResult[size]);
}
}
;
public String getPosterPath() {
return posterPath;
}
public void setPosterPath(String posterPath) {
this.posterPath = posterPath;
}
public Boolean getAdult() {
return adult;
}
public void setAdult(Boolean adult) {
this.adult = adult;
}
public String getOverview() {
return overview;
}
public void setOverview(String overview) {
this.overview = overview;
}
public String getReleaseDate() {
return releaseDate;
}
public void setReleaseDate(String releaseDate) {
this.releaseDate = releaseDate;
}
public String getOriginalTitle() {
return originalTitle;
}
public void setOriginalTitle(String originalTitle) {
this.originalTitle = originalTitle;
}
public List<Integer> getGenreIds() {
return genreIds;
}
public void setGenreIds(List<Integer> genreIds) {
this.genreIds = genreIds;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getMediaType() {
return mediaType;
}
public void setMediaType(String mediaType) {
this.mediaType = mediaType;
}
public String getOriginalLanguage() {
return originalLanguage;
}
public void setOriginalLanguage(String originalLanguage) {
this.originalLanguage = originalLanguage;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getBackdropPath() {
return backdropPath;
}
public void setBackdropPath(String backdropPath) {
this.backdropPath = backdropPath;
}
public Double getPopularity() {
return popularity;
}
public void setPopularity(Double popularity) {
this.popularity = popularity;
}
public Integer getVoteCount() {
return voteCount;
}
public void setVoteCount(Integer voteCount) {
this.voteCount = voteCount;
}
public Boolean getVideo() {
return video;
}
public void setVideo(Boolean video) {
this.video = video;
}
public Double getVoteAverage() {
return voteAverage;
}
public void setVoteAverage(Double voteAverage) {
this.voteAverage = voteAverage;
}
public String getProfilePath() {
return profilePath;
}
public void setProfilePath(String profilePath) {
this.profilePath = profilePath;
}
public List<KnownFor> getKnownFor() {
return knownFor;
}
public void setKnownFor(List<KnownFor> knownFor) {
this.knownFor = knownFor;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getFirstAirDate() {
return firstAirDate;
}
public void setFirstAirDate(String firstAirDate) {
this.firstAirDate = firstAirDate;
}
public List<String> getOriginCountry() {
return originCountry;
}
public void setOriginCountry(List<String> originCountry) {
this.originCountry = originCountry;
}
public String getOriginalName() {
return originalName;
}
public void setOriginalName(String originalName) {
this.originalName = originalName;
}
/**
 * Flattens every field of this model into {@code dest} using the generic
 * {@link android.os.Parcel#writeValue} / {@code writeList} calls.
 *
 * NOTE(review): the write order here is a binary contract with the matching
 * CREATOR / readFromParcel implementation, which is not visible in this
 * chunk - any reordering here must be mirrored there. Confirm the two stay
 * in sync before changing this method.
 */
public void writeToParcel(Parcel dest, int flags) {
dest.writeValue(posterPath);
dest.writeValue(adult);
dest.writeValue(overview);
dest.writeValue(releaseDate);
dest.writeValue(originalTitle);
dest.writeList(genreIds);
dest.writeValue(id);
dest.writeValue(mediaType);
dest.writeValue(originalLanguage);
dest.writeValue(title);
dest.writeValue(backdropPath);
dest.writeValue(popularity);
dest.writeValue(voteCount);
dest.writeValue(video);
dest.writeValue(voteAverage);
dest.writeValue(profilePath);
dest.writeList(knownFor);
dest.writeValue(name);
dest.writeValue(firstAirDate);
dest.writeList(originCountry);
dest.writeValue(originalName);
}
// No special contents (e.g. no file descriptors), hence 0 per the
// Parcelable contract.
public int describeContents() {
return 0;
}
}
| |
package net.techreadiness.plugin.action.reports.school;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import net.techreadiness.annotation.CoreSecured;
import net.techreadiness.plugin.action.reports.ReportAction;
import net.techreadiness.plugin.action.reports.ReportExport;
import net.techreadiness.plugin.action.reports.ReportExportCsv;
import net.techreadiness.plugin.action.reports.ReportExportPdf;
import net.techreadiness.plugin.action.reports.ReportItemProvider;
import net.techreadiness.plugin.action.reports.ReportItemProvider.ExportType;
import net.techreadiness.security.CorePermissionCodes;
import net.techreadiness.service.ServiceContext;
import net.techreadiness.service.object.Org;
import net.techreadiness.service.object.Scope;
import net.techreadiness.ui.tags.datagrid.DataGridState;
import net.techreadiness.ui.util.ConversationScoped;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.Result;
import org.springframework.beans.factory.annotation.Qualifier;
import com.google.common.collect.Lists;
/**
 * Struts action backing the School Exception Report: resolves the current
 * org/consortium context, configures the shared {@code ReportItemProvider},
 * and produces CSV/PDF exports of the report data.
 */
public class SchoolExceptionReportAction extends ReportAction {

	/** Column headers for the standard (summary) export layout. */
	private static final List<String> standardColumnLabels;
	/** Row-bean property keys matching {@link #standardColumnLabels}, index for index. */
	private static final List<String> standardColumnKeys;
	/** Column headers for the full-detail CSV export layout. */
	private static final List<String> fullDetailColumnLabels;
	/** Row-bean property keys matching {@link #fullDetailColumnLabels}, index for index. */
	private static final List<String> fullDetailColumnKeys;

	// Static initializer: these are class-level constants, so build them exactly
	// once at class-load time. (Previously this was an *instance* initializer
	// block, which re-assigned the static fields on every construction of the
	// action - wasted work and a data race under concurrent requests.)
	static {
		standardColumnLabels = Lists.newArrayList("Organization", "Org Code", "Parent Organization", "Completion Status",
				"Total Device Count", "Total # of Test Starts Needed Per School", "Device to Test-Taker Ratio",
				"Number of Unanswered Survey Questions");
		standardColumnKeys = Lists.newArrayList("orgName", "orgCode", "parentOrgName", "dataEntryComplete", "deviceCount",
				"testingTestStartCount", "deviceTestTakerRatio", "unansweredSurveyCount");
		fullDetailColumnLabels = Lists.newArrayList("Organization", "Org Code", "Parent Organization", "Completion Status",
				"Total Device Count", "Total # of Test Starts Needed Per School", "Device to Test-Taker Ratio",
				"Number of Unanswered Survey Questions", "Internal Network Bandwidth Utilization",
				"Max # of Simultaneous Test-Takers", "Wireless Access Points Count", "Number of Administrators",
				"Number of Technology Staff", "AdministratorTechnical Understanding", "Training of Administrators",
				"Training of Technology Staff", "Testing Window", "Sessions per Day", "Internal Network Bandwidth",
				"Internet Bandwidth", "Technical Staff Technical Understanding", "Internet Bandwidth Utilization",
				"Classification of School", "Grade K Enrollment Count", "Grade 1 Enrollment Count",
				"Grade 2 Enrollment Count", "Grade 3 Enrollment Count", "Grade 4 Enrollment Count",
				"Grade 5 Enrollment Count", "Grade 6 Enrollment Count", "Grade 7 Enrollment Count",
				"Grade 8 Enrollment Count", "Grade 9 Enrollment Count", "Grade 10 Enrollment Count",
				"Grade 11 Enrollment Count", "Grade 12 Enrollment Count");
		fullDetailColumnKeys = Lists.newArrayList("orgName", "orgCode", "parentOrgName", "dataEntryComplete", "deviceCount",
				"testingTestStartCount", "deviceTestTakerRatio", "unansweredSurveyCount", "networkUtilization",
				"simultaneousTesters", "wirelessAccessPoints", "surveyAdminCount", "surveyTechstaffCount",
				"surveyAdminUnderstanding", "surveyAdminTraining", "surveyTechstaffTraining", "testingWindowLength",
				"sessionsPerDay", "networkSpeed", "internetSpeed", "surveyTechstaffUnderstanding", "internetUtilization",
				"schoolType", "enrollmentCountK", "enrollmentCount1", "enrollmentCount2", "enrollmentCount3",
				"enrollmentCount4", "enrollmentCount5", "enrollmentCount6", "enrollmentCount7", "enrollmentCount8",
				"enrollmentCount9", "enrollmentCount10", "enrollmentCount11", "enrollmentCount12");
	}

	private static final long serialVersionUID = 1L;

	private String orgCode;
	private String stateCode;
	private String stateName;
	// Not implementing geoChartBean for School Exception Report
	// private GeoChartBean geoChartBean;
	/** When true, the CSV export uses the full-detail column set. */
	private boolean csvFullDetail = false;
	/** Currently selected exception-question key; defaults to the first entry of {@link #questions}. */
	private String question;
	/** Question key -> display label, in presentation order. */
	private Map<String, String> questions;
	private Org currentOrg;
	private boolean dualConsortium;
	private boolean importDeviceDataAllowed;
	/** Display-only "data as of" timestamp used in the PDF footer; may be null. */
	private String asOfDate;

	@javax.inject.Inject
	@Qualifier("SchoolReportItemProvider")
	private ReportItemProvider reportItemProvider;

	@ConversationScoped(value = "schoolDataGridState")
	private DataGridState<?> reportGrid;

	/**
	 * Entry action for the report tab: resolves context, builds breadcrumbs and
	 * renders the table view.
	 */
	@CoreSecured({ CorePermissionCodes.READY_CUSTOMER_SCHOOL_EXCEPTION_RPT })
	@Action(value = "schoolExceptionReport", results = { @Result(name = "map", location = "/net/techreadiness/plugin/action/reports/school/table.jsp") })
	public String schoolExceptionReport() throws Exception {
		getData();
		buildBreadcrumbs(currentOrg, consortium, "schoolExceptionReport");
		return "map";
	}

	/**
	 * Resolves the current org, consortium, paging state and permissions, and
	 * primes {@link #reportItemProvider} for the subsequent data fetch.
	 *
	 * @throws Exception if the org cannot be found or the user lacks access to it
	 */
	public void getData() throws Exception {
		ServiceContext context = getServiceContext();
		// Lazily build the fixed list of exception "questions" the user can pick.
		if (questions == null) {
			questions = new LinkedHashMap<>();
			questions.put("notComplete", "Not Complete");
			questions.put("noDeviceEntry", "No Device Entry");
			questions.put("noActivity", "No Activity");
			questions.put("potentiallyMissed", "Potentially Missed Survey Questions");
			questions.put("potentiallyMissedDevice", "Potentially Missed Device Entry");
			questions.put("potentiallyComplete", "Potentially Complete");
			questions.put("potentiallyUnderReported", "Potentially Not Complete");
		}
		// questions is guaranteed non-null/non-empty here; default to the first key.
		if (question == null && !questions.isEmpty()) {
			question = questions.keySet().iterator().next();
		}
		// check if we are passed an org code
		// we should get this passed to us on every request outside the initial request(clicking on tab)
		if (orgCode == null || orgCode.equals("")) {
			Org org = context.getOrg();
			orgCode = org.getCode();
		}
		currentOrg = orgService.getByCode(context, orgCode);
		if (currentOrg == null || !userService.hasAccessToOrg(context, context.getUserId(), currentOrg.getOrgId())) {
			if (currentOrg == null) {
				throw new Exception("Org could not be found: " + orgCode);
			}
			throw new Exception("User(" + context.getUserId() + ") doesn't have access to org (" + currentOrg.getOrgId()
					+ ")");
		}
		// get previous org off the session, if different, reset paging
		Map<String, Object> session = getSession();
		if (session.get("prevOrgSchool") == null || !currentOrg.getId().equals(session.get("prevOrgSchool"))) {
			reportGrid.setPage(1);
			reportGrid.setPageSize(10);
		}
		session.put("prevOrgSchool", currentOrg.getId());
		// check if user has access to dual consortium
		dualConsortium = userService.hasPermission(context, CorePermissionCodes.CORE_CUSTOMER_CHANGE_GLOBAL_SCOPE);
		// check if user has access to import device data
		setImportDeviceDataAllowed(userService.hasPermission(context, CorePermissionCodes.READY_CUSTOMER_DEVICE_CREATE));
		// if true, check to make sure the current org has access to both consortiums
		List<net.techreadiness.service.object.OrgPart> orgParts = orgPartService.findOrgPartsForOrg(context,
				currentOrg.getOrgId());
		if (dualConsortium) {
			dualConsortium = orgParts != null && orgParts.size() > 1;
		}
		// if consortium is null, set consortium to the consortium in the service context
		if (consortium == null) {
			consortium = Consortium.getConsortium(context.getScope().getCode());
		}
		// get the consortiums scope object
		Scope consortiumScope = scopeService.getByScopePath(consortium.path);
		// set appropriate data for the report item provider
		reportItemProvider.setConsortium(consortiumScope);
		reportItemProvider.setSnapshotWindow(getCurrentSnapshotWindow());
		reportItemProvider.setOrg(currentOrg);
		reportItemProvider.setMinimumRecommendedFlag(getMinimumRecommendedFlag());
		reportItemProvider.setQuestion(question);
	}

	/**
	 * Builds the export object for this report.
	 *
	 * NOTE(review): the switch keys off the inherited {@code fileType} field
	 * rather than the {@code type} parameter, which is only forwarded to
	 * {@code export()} - looks intentional but confirm the two always agree.
	 *
	 * @return a CSV or PDF export, or null for an unrecognized file type
	 */
	@Override
	protected ReportExport getReportExport(ExportType type) throws Exception {
		setFileName("SchoolExceptionReport");
		getData();
		switch (ExportType.valueOf(fileType)) {
		case csv: {
			ReportExportCsv csv = new ReportExportCsv();
			if (csvFullDetail) {
				csv.setColumnLabels(fullDetailColumnLabels);
				csv.setColumnKeys(fullDetailColumnKeys);
			} else {
				csv.setColumnLabels(standardColumnLabels);
				csv.setColumnKeys(standardColumnKeys);
			}
			csv.setData(reportItemProvider.export(type));
			return csv;
		}
		case pdf: {
			// pdf.setConditionalStyles(createConditionalStyles(pdf.getDetailStyle()));
			// UNFINISHED -
			// ASK TRACY ABOUT THE REPORT LAYOUT BEFORE FINISHING THIS
			ReportExportPdf pdf = new ReportExportPdf.Builder(standardColumnLabels, standardColumnKeys,
					reportItemProvider.export(type))
					.title("School Exception Report")
					.subtitle(questions.get(question))
					.consortium(consortium.toString())
					.aggregationMessage(
							asOfDate == null ? "" : "Data as of " + asOfDate + " - Reports are updated hourly").build();
			return pdf;
		}
		}
		return null;
	}

	/** All-schools export is not supported by this report. */
	@Override
	protected ReportExport getAllSchoolsReportExport(ExportType type) throws Exception {
		return null;
	}

	public String getOrgCode() {
		return orgCode;
	}

	public void setOrgCode(String selectedOrgCode) {
		orgCode = selectedOrgCode;
	}

	public String getStateCode() {
		return stateCode;
	}

	public void setStateCode(String stateCode) {
		this.stateCode = stateCode;
	}

	public String getStateName() {
		return stateName;
	}

	public void setStateName(String stateName) {
		this.stateName = stateName;
	}

	public String getQuestion() {
		return question;
	}

	public void setQuestion(String question) {
		this.question = question;
	}

	public Map<String, String> getQuestions() {
		return questions;
	}

	public Org getCurrentOrg() {
		return currentOrg;
	}

	public boolean isDualConsortium() {
		return dualConsortium;
	}

	public ReportItemProvider getReportItemProvider() {
		return reportItemProvider;
	}

	public DataGridState<?> getReportGrid() {
		return reportGrid;
	}

	public void setReportGrid(DataGridState<?> reportGrid) {
		this.reportGrid = reportGrid;
	}

	public boolean isCsvFullDetail() {
		return csvFullDetail;
	}

	public void setCsvFullDetail(boolean csvFullDetail) {
		this.csvFullDetail = csvFullDetail;
	}

	public boolean isImportDeviceDataAllowed() {
		return importDeviceDataAllowed;
	}

	public void setImportDeviceDataAllowed(boolean importDeviceDataAllowed) {
		this.importDeviceDataAllowed = importDeviceDataAllowed;
	}
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2012, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.keycloak.testsuite.composites;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.keycloak.OAuth2Constants;
import org.keycloak.common.enums.SslRequired;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserCredentialModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.representations.AccessToken;
import org.keycloak.services.managers.ClientManager;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.testsuite.ApplicationServlet;
import org.keycloak.testsuite.OAuthClient;
import org.keycloak.testsuite.OAuthClient.AccessTokenResponse;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.rule.AbstractKeycloakRule;
import org.keycloak.testsuite.rule.WebResource;
import org.keycloak.testsuite.rule.WebRule;
import org.openqa.selenium.WebDriver;
import java.security.PublicKey;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
/**
 * Integration tests for composite-role expansion in issued access tokens:
 * verifies that realm composites, app composites and mixed composites are
 * flattened into the expected realm/resource role sets.
 */
public class CompositeRoleTest {

    public static PublicKey realmPublicKey;

    @ClassRule
    public static AbstractKeycloakRule keycloakRule = new AbstractKeycloakRule(){
        @Override
        protected void configure(KeycloakSession session, RealmManager manager, RealmModel adminRealm) {
            RealmModel realm = manager.createRealm("test");
            KeycloakModelUtils.generateRealmKeys(realm);
            realmPublicKey = realm.getPublicKey();
            realm.setSsoSessionIdleTimeout(3000);
            realm.setAccessTokenLifespan(10000);
            realm.setSsoSessionMaxLifespan(10000);
            realm.setAccessCodeLifespanUserAction(1000);
            realm.setAccessCodeLifespan(1000);
            realm.setSslRequired(SslRequired.EXTERNAL);
            realm.setEnabled(true);
            realm.addRequiredCredential(UserCredentialModel.PASSWORD);

            // Realm roles plus a realm composite that expands to REALM_ROLE_1.
            final RoleModel realmRole1 = realm.addRole("REALM_ROLE_1");
            final RoleModel realmRole2 = realm.addRole("REALM_ROLE_2");
            final RoleModel realmRole3 = realm.addRole("REALM_ROLE_3");
            final RoleModel realmComposite1 = realm.addRole("REALM_COMPOSITE_1");
            realmComposite1.addCompositeRole(realmRole1);

            final UserModel realmComposite1User = session.users().addUser(realm, "REALM_COMPOSITE_1_USER");
            realmComposite1User.setEnabled(true);
            realmComposite1User.updateCredential(UserCredentialModel.password("password"));
            realmComposite1User.grantRole(realmComposite1);

            final UserModel realmRole1User = session.users().addUser(realm, "REALM_ROLE_1_USER");
            realmRole1User.setEnabled(true);
            realmRole1User.updateCredential(UserCredentialModel.password("password"));
            realmRole1User.grantRole(realmRole1);

            // Client scoped only to the realm composite.
            final ClientModel realmComposite1Application = new ClientManager(manager).createClient(realm, "REALM_COMPOSITE_1_APPLICATION");
            realmComposite1Application.setFullScopeAllowed(false);
            realmComposite1Application.setEnabled(true);
            realmComposite1Application.addScopeMapping(realmComposite1);
            realmComposite1Application.addRedirectUri("http://localhost:8081/app/*");
            realmComposite1Application.setBaseUrl("http://localhost:8081/app");
            realmComposite1Application.setManagementUrl("http://localhost:8081/app/logout");
            realmComposite1Application.setSecret("password");

            // Client scoped only to the plain realm role.
            final ClientModel realmRole1Application = new ClientManager(manager).createClient(realm, "REALM_ROLE_1_APPLICATION");
            realmRole1Application.setFullScopeAllowed(false);
            realmRole1Application.setEnabled(true);
            realmRole1Application.addScopeMapping(realmRole1);
            realmRole1Application.addRedirectUri("http://localhost:8081/app/*");
            realmRole1Application.setBaseUrl("http://localhost:8081/app");
            realmRole1Application.setManagementUrl("http://localhost:8081/app/logout");
            realmRole1Application.setSecret("password");

            // Client owning the application-level roles.
            final ClientModel appRoleApplication = new ClientManager(manager).createClient(realm, "APP_ROLE_APPLICATION");
            appRoleApplication.setFullScopeAllowed(false);
            appRoleApplication.setEnabled(true);
            appRoleApplication.addRedirectUri("http://localhost:8081/app/*");
            appRoleApplication.setBaseUrl("http://localhost:8081/app");
            appRoleApplication.setManagementUrl("http://localhost:8081/app/logout");
            appRoleApplication.setSecret("password");
            final RoleModel appRole1 = appRoleApplication.addRole("APP_ROLE_1");
            final RoleModel appRole2 = appRoleApplication.addRole("APP_ROLE_2");

            // Realm composite expanding to an application role.
            final RoleModel realmAppCompositeRole = realm.addRole("REALM_APP_COMPOSITE_ROLE");
            realmAppCompositeRole.addCompositeRole(appRole1);

            final UserModel realmAppCompositeUser = session.users().addUser(realm, "REALM_APP_COMPOSITE_USER");
            realmAppCompositeUser.setEnabled(true);
            realmAppCompositeUser.updateCredential(UserCredentialModel.password("password"));
            realmAppCompositeUser.grantRole(realmAppCompositeRole);

            final UserModel realmAppRoleUser = session.users().addUser(realm, "REALM_APP_ROLE_USER");
            realmAppRoleUser.setEnabled(true);
            realmAppRoleUser.updateCredential(UserCredentialModel.password("password"));
            realmAppRoleUser.grantRole(appRole2);

            // Client owning an app-level composite role that spans realm and app roles.
            final ClientModel appCompositeApplication = new ClientManager(manager).createClient(realm, "APP_COMPOSITE_APPLICATION");
            appCompositeApplication.setFullScopeAllowed(false);
            appCompositeApplication.setEnabled(true);
            appCompositeApplication.addRedirectUri("http://localhost:8081/app/*");
            appCompositeApplication.setBaseUrl("http://localhost:8081/app");
            appCompositeApplication.setManagementUrl("http://localhost:8081/app/logout");
            appCompositeApplication.setSecret("password");
            final RoleModel appCompositeRole = appCompositeApplication.addRole("APP_COMPOSITE_ROLE");
            appCompositeApplication.addScopeMapping(appRole2);
            appCompositeRole.addCompositeRole(realmRole1);
            appCompositeRole.addCompositeRole(realmRole2);
            appCompositeRole.addCompositeRole(realmRole3);
            appCompositeRole.addCompositeRole(appRole1);

            final UserModel appCompositeUser = session.users().addUser(realm, "APP_COMPOSITE_USER");
            appCompositeUser.setEnabled(true);
            appCompositeUser.updateCredential(UserCredentialModel.password("password"));
            appCompositeUser.grantRole(realmAppCompositeRole);
            appCompositeUser.grantRole(realmComposite1);

            deployServlet("app", "/app", ApplicationServlet.class);
        }
    };

    @Rule
    public WebRule webRule = new WebRule(this);

    @WebResource
    protected WebDriver driver;

    @WebResource
    protected OAuthClient oauth;

    @WebResource
    protected LoginPage loginPage;

    /**
     * Logs {@code username} (password "password") in against {@code clientId}
     * via the auth-code flow and exchanges the code for tokens, asserting a
     * 200 bearer response. Shared boilerplate for every test below.
     */
    private AccessTokenResponse loginAndGetToken(String clientId, String username) {
        oauth.realm("test");
        oauth.realmPublicKey(realmPublicKey);
        oauth.clientId(clientId);
        oauth.doLogin(username, "password");
        String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
        AccessTokenResponse response = oauth.doAccessTokenRequest(code, "password");
        Assert.assertEquals(200, response.getStatusCode());
        Assert.assertEquals("bearer", response.getTokenType());
        return response;
    }

    /** Asserts that the refresh token from {@code response} can be exchanged successfully. */
    private void assertRefreshSucceeds(AccessTokenResponse response) {
        AccessTokenResponse refreshResponse = oauth.doRefreshTokenRequest(response.getRefreshToken(), "password");
        Assert.assertEquals(200, refreshResponse.getStatusCode());
    }

    @Test
    public void testAppCompositeUser() throws Exception {
        AccessTokenResponse response = loginAndGetToken("APP_COMPOSITE_APPLICATION", "APP_COMPOSITE_USER");
        AccessToken token = oauth.verifyToken(response.getAccessToken());
        Assert.assertEquals(keycloakRule.getUser("test", "APP_COMPOSITE_USER").getId(), token.getSubject());
        // APP_COMPOSITE_ROLE expands to APP_ROLE_1 plus REALM_ROLE_1 (scope-limited).
        Assert.assertEquals(1, token.getResourceAccess("APP_ROLE_APPLICATION").getRoles().size());
        Assert.assertEquals(1, token.getRealmAccess().getRoles().size());
        Assert.assertTrue(token.getResourceAccess("APP_ROLE_APPLICATION").isUserInRole("APP_ROLE_1"));
        Assert.assertTrue(token.getRealmAccess().isUserInRole("REALM_ROLE_1"));
        assertRefreshSucceeds(response);
    }

    @Test
    public void testRealmAppCompositeUser() throws Exception {
        AccessTokenResponse response = loginAndGetToken("APP_ROLE_APPLICATION", "REALM_APP_COMPOSITE_USER");
        AccessToken token = oauth.verifyToken(response.getAccessToken());
        Assert.assertEquals(keycloakRule.getUser("test", "REALM_APP_COMPOSITE_USER").getId(), token.getSubject());
        // REALM_APP_COMPOSITE_ROLE expands to the application role APP_ROLE_1.
        Assert.assertEquals(1, token.getResourceAccess("APP_ROLE_APPLICATION").getRoles().size());
        Assert.assertTrue(token.getResourceAccess("APP_ROLE_APPLICATION").isUserInRole("APP_ROLE_1"));
        assertRefreshSucceeds(response);
    }

    @Test
    public void testRealmOnlyWithUserCompositeAppComposite() throws Exception {
        AccessTokenResponse response = loginAndGetToken("REALM_COMPOSITE_1_APPLICATION", "REALM_COMPOSITE_1_USER");
        AccessToken token = oauth.verifyToken(response.getAccessToken());
        Assert.assertEquals(keycloakRule.getUser("test", "REALM_COMPOSITE_1_USER").getId(), token.getSubject());
        // The composite itself plus its expansion REALM_ROLE_1.
        Assert.assertEquals(2, token.getRealmAccess().getRoles().size());
        Assert.assertTrue(token.getRealmAccess().isUserInRole("REALM_COMPOSITE_1"));
        Assert.assertTrue(token.getRealmAccess().isUserInRole("REALM_ROLE_1"));
        assertRefreshSucceeds(response);
    }

    @Test
    public void testRealmOnlyWithUserCompositeAppRole() throws Exception {
        AccessTokenResponse response = loginAndGetToken("REALM_ROLE_1_APPLICATION", "REALM_COMPOSITE_1_USER");
        AccessToken token = oauth.verifyToken(response.getAccessToken());
        Assert.assertEquals(keycloakRule.getUser("test", "REALM_COMPOSITE_1_USER").getId(), token.getSubject());
        // Client scope limits the token to the expanded REALM_ROLE_1 only.
        Assert.assertEquals(1, token.getRealmAccess().getRoles().size());
        Assert.assertTrue(token.getRealmAccess().isUserInRole("REALM_ROLE_1"));
        assertRefreshSucceeds(response);
    }

    @Test
    public void testRealmOnlyWithUserRoleAppComposite() throws Exception {
        AccessTokenResponse response = loginAndGetToken("REALM_COMPOSITE_1_APPLICATION", "REALM_ROLE_1_USER");
        AccessToken token = oauth.verifyToken(response.getAccessToken());
        Assert.assertEquals(keycloakRule.getUser("test", "REALM_ROLE_1_USER").getId(), token.getSubject());
        Assert.assertEquals(1, token.getRealmAccess().getRoles().size());
        Assert.assertTrue(token.getRealmAccess().isUserInRole("REALM_ROLE_1"));
        assertRefreshSucceeds(response);
    }
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.timeline;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.metamx.common.guava.Comparators;
import com.metamx.common.logger.Logger;
import io.druid.timeline.partition.ImmutablePartitionHolder;
import io.druid.timeline.partition.PartitionChunk;
import io.druid.timeline.partition.PartitionHolder;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* VersionedIntervalTimeline is a data structure that manages objects on a specific timeline.
*
* It associates a jodatime Interval and a generically-typed version with the object that is being stored.
*
* In the event of overlapping timeline entries, timeline intervals may be chunked. The underlying data associated
* with a timeline entry remains unchanged when chunking occurs.
*
* After loading objects via the add() method, the lookup(Interval) method can be used to get the list of the most
* recent objects (according to the version) that match the given interval. The intent is that objects represent
* a certain time period and when you do a lookup(), you are asking for all of the objects that you need to look
* at in order to get a correct answer about that time period.
*
* The findOvershadowed() method returns a list of objects that will never be returned by a call to lookup() because
* they are overshadowed by some other object. This can be used in conjunction with the add() and remove() methods
* to achieve "atomic" updates. First add new items, then check if those items caused anything to be overshadowed, if
* so, remove the overshadowed elements and you have effectively updated your data set without any user impact.
*/
public class VersionedIntervalTimeline<VersionType, ObjectType> implements TimelineLookup<VersionType, ObjectType>
{
private static final Logger log = new Logger(VersionedIntervalTimeline.class);

// Fair read/write lock guarding all three maps below.
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);

// Timeline view containing only entries whose PartitionHolder isComplete()
// at the time they were added (see add()); this is what lookup() serves from.
final NavigableMap<Interval, TimelineEntry> completePartitionsTimeline = new TreeMap<Interval, TimelineEntry>(
    Comparators.intervalsByStartThenEnd()
);
// Timeline view that add() inserts every entry into, complete or not.
final NavigableMap<Interval, TimelineEntry> incompletePartitionsTimeline = new TreeMap<Interval, TimelineEntry>(
    Comparators.intervalsByStartThenEnd()
);
// Ground truth: every (interval, version) -> entry ever added and not removed.
private final Map<Interval, TreeMap<VersionType, TimelineEntry>> allTimelineEntries = Maps.newHashMap();

// Orders versions; the greater version wins when intervals overlap.
private final Comparator<? super VersionType> versionComparator;

/**
 * @param versionComparator comparator used to decide which version shadows
 *                          another when their intervals overlap
 */
public VersionedIntervalTimeline(
    Comparator<? super VersionType> versionComparator
)
{
  this.versionComparator = versionComparator;
}
/**
 * Adds a partition chunk for the given interval/version, creating the
 * timeline entry if needed. Once the entry's holder reports complete it is
 * also published to the complete-partitions view used by lookup().
 *
 * @param interval interval the object covers
 * @param version  version of the object; higher versions shadow lower ones
 * @param object   partition chunk to add
 */
public void add(final Interval interval, VersionType version, PartitionChunk<ObjectType> object)
{
  // Acquire the lock *before* the try block (standard java.util.concurrent
  // idiom): if lock() ever failed inside the try, the finally clause would
  // attempt to unlock a lock we never held.
  lock.writeLock().lock();
  try {
    Map<VersionType, TimelineEntry> exists = allTimelineEntries.get(interval);
    TimelineEntry entry = null;
    if (exists == null) {
      // First entry for this interval: create the per-interval version map.
      entry = new TimelineEntry(interval, version, new PartitionHolder<ObjectType>(object));
      TreeMap<VersionType, TimelineEntry> versionEntry = new TreeMap<VersionType, TimelineEntry>(versionComparator);
      versionEntry.put(version, entry);
      allTimelineEntries.put(interval, versionEntry);
    } else {
      entry = exists.get(version);
      if (entry == null) {
        entry = new TimelineEntry(interval, version, new PartitionHolder<ObjectType>(object));
        exists.put(version, entry);
      } else {
        // Same interval+version already present: just accumulate the chunk.
        PartitionHolder<ObjectType> partitionHolder = entry.getPartitionHolder();
        partitionHolder.add(object);
      }
    }
    if (entry.getPartitionHolder().isComplete()) {
      add(completePartitionsTimeline, interval, entry);
    }
    add(incompletePartitionsTimeline, interval, entry);
  }
  finally {
    lock.writeLock().unlock();
  }
}
/**
 * Removes a partition chunk for the given interval/version. When the entry's
 * holder becomes empty the entry is dropped from all bookkeeping maps.
 *
 * @return the removed chunk, or null if the interval/version was not present
 */
public PartitionChunk<ObjectType> remove(Interval interval, VersionType version, PartitionChunk<ObjectType> chunk)
{
  // Lock before try so the finally clause never unlocks an unheld lock.
  lock.writeLock().lock();
  try {
    Map<VersionType, TimelineEntry> versionEntries = allTimelineEntries.get(interval);
    if (versionEntries == null) {
      return null;
    }
    TimelineEntry entry = versionEntries.get(version);
    if (entry == null) {
      return null;
    }
    PartitionChunk<ObjectType> retVal = entry.getPartitionHolder().remove(chunk);
    if (entry.getPartitionHolder().isEmpty()) {
      // Last chunk gone: retire the entry (and the interval if it was the
      // last version) and incrementally repair the incomplete view.
      versionEntries.remove(version);
      if (versionEntries.isEmpty()) {
        allTimelineEntries.remove(interval);
      }
      remove(incompletePartitionsTimeline, interval, entry, true);
    }
    remove(completePartitionsTimeline, interval, entry, false);
    return retVal;
  }
  finally {
    lock.writeLock().unlock();
  }
}
/**
 * Finds the partition holder registered for the given version whose interval
 * equals or contains the requested interval.
 *
 * @return an immutable view of the holder, or null if no match exists
 */
public PartitionHolder<ObjectType> findEntry(Interval interval, VersionType version)
{
  // Lock before try so the finally clause never unlocks an unheld lock.
  lock.readLock().lock();
  try {
    // Linear scan: allTimelineEntries is a plain HashMap, so containment
    // queries cannot use ordering.
    for (Map.Entry<Interval, TreeMap<VersionType, TimelineEntry>> entry : allTimelineEntries.entrySet()) {
      if (entry.getKey().equals(interval) || entry.getKey().contains(interval)) {
        TimelineEntry foundEntry = entry.getValue().get(version);
        if (foundEntry != null) {
          return new ImmutablePartitionHolder<ObjectType>(
              foundEntry.getPartitionHolder()
          );
        }
      }
    }
    return null;
  }
  finally {
    lock.readLock().unlock();
  }
}
/**
 * Does a lookup for the objects representing the given time interval. Will *only* return
 * PartitionHolders that are complete.
 *
 * @param interval interval to find objects for
 *
 * @return Holders representing the interval that the objects exist for, PartitionHolders
 *         are guaranteed to be complete
 */
public List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval)
{
  // Lock before try so the finally clause never unlocks an unheld lock.
  lock.readLock().lock();
  try {
    return lookup(interval, false);
  }
  finally {
    lock.readLock().unlock();
  }
}
/**
 * Returns every entry that can never be served by lookup(): anything in
 * allTimelineEntries that appears in neither the complete nor the incomplete
 * timeline view is, by construction, fully shadowed by newer versions.
 *
 * @return holders for all overshadowed entries
 */
public Set<TimelineObjectHolder<VersionType, ObjectType>> findOvershadowed()
{
  // Lock before try so the finally clause never unlocks an unheld lock.
  lock.readLock().lock();
  try {
    Set<TimelineObjectHolder<VersionType, ObjectType>> retVal = Sets.newHashSet();

    // Start from a mutable copy of every known (interval, version) pair...
    Map<Interval, Map<VersionType, TimelineEntry>> overShadowed = Maps.newHashMap();
    for (Map.Entry<Interval, TreeMap<VersionType, TimelineEntry>> versionEntry : allTimelineEntries.entrySet()) {
      Map<VersionType, TimelineEntry> versionCopy = Maps.newHashMap();
      versionCopy.putAll(versionEntry.getValue());
      overShadowed.put(versionEntry.getKey(), versionCopy);
    }

    // ...then cross off everything still reachable through either view.
    for (Map.Entry<Interval, TimelineEntry> entry : completePartitionsTimeline.entrySet()) {
      Map<VersionType, TimelineEntry> versionEntry = overShadowed.get(entry.getValue().getTrueInterval());
      if (versionEntry != null) {
        versionEntry.remove(entry.getValue().getVersion());
        if (versionEntry.isEmpty()) {
          overShadowed.remove(entry.getValue().getTrueInterval());
        }
      }
    }
    for (Map.Entry<Interval, TimelineEntry> entry : incompletePartitionsTimeline.entrySet()) {
      Map<VersionType, TimelineEntry> versionEntry = overShadowed.get(entry.getValue().getTrueInterval());
      if (versionEntry != null) {
        versionEntry.remove(entry.getValue().getVersion());
        if (versionEntry.isEmpty()) {
          overShadowed.remove(entry.getValue().getTrueInterval());
        }
      }
    }

    // Whatever survives is overshadowed; wrap it for the caller.
    for (Map.Entry<Interval, Map<VersionType, TimelineEntry>> versionEntry : overShadowed.entrySet()) {
      for (Map.Entry<VersionType, TimelineEntry> entry : versionEntry.getValue().entrySet()) {
        TimelineEntry object = entry.getValue();
        retVal.add(
            new TimelineObjectHolder<VersionType, ObjectType>(
                object.getTrueInterval(),
                object.getVersion(),
                object.getPartitionHolder()
            )
        );
      }
    }
    return retVal;
  }
  finally {
    lock.readLock().unlock();
  }
}
/**
 * Inserts {@code entry} into one of the timeline views, resolving version
 * conflicts: an exact-interval collision is won by the higher version, and
 * overlapping neighbors are handled by addAtKey(), which splits intervals
 * as needed. Callers must already hold the write lock.
 */
private void add(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    TimelineEntry entry
)
{
  TimelineEntry existsInTimeline = timeline.get(interval);

  if (existsInTimeline != null) {
    // Exact interval already present: replace only if our version is newer.
    int compare = versionComparator.compare(entry.getVersion(), existsInTimeline.getVersion());
    if (compare > 0) {
      addIntervalToTimeline(interval, entry, timeline);
    }
    return;
  }

  // No exact match - try to merge with the neighbor just below, then the
  // one just above; addAtKey() returns true once it has fully placed (or
  // discarded) the entry.
  Interval lowerKey = timeline.lowerKey(interval);

  if (lowerKey != null) {
    if (addAtKey(timeline, lowerKey, entry)) {
      return;
    }
  }

  Interval higherKey = timeline.higherKey(interval);

  if (higherKey != null) {
    if (addAtKey(timeline, higherKey, entry)) {
      return;
    }
  }

  // No overlapping neighbors: insert the interval as-is.
  addIntervalToTimeline(interval, entry, timeline);
}
/**
 * Attempts to place {@code entry} into the timeline starting at neighbor
 * {@code key}, walking rightwards across every existing interval it overlaps
 * and splitting intervals so that, at each point in time, the higher version
 * wins. Callers must already hold the write lock.
 *
 * @param timeline view being modified
 * @param key      existing key to start the overlap walk from
 * @param entry    entry being inserted
 * @return boolean flag indicating whether or not we inserted or discarded something
 */
private boolean addAtKey(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval key,
    TimelineEntry entry
)
{
  boolean retVal = false;
  Interval currKey = key;
  // entryInterval shrinks as pieces of it get placed or ceded; null means
  // the whole entry has been discarded.
  Interval entryInterval = entry.getTrueInterval();

  if (!currKey.overlaps(entryInterval)) {
    return false;
  }

  while (entryInterval != null && currKey != null && currKey.overlaps(entryInterval)) {
    Interval nextKey = timeline.higherKey(currKey);

    int versionCompare = versionComparator.compare(
        entry.getVersion(),
        timeline.get(currKey).getVersion()
    );

    if (versionCompare < 0) {
      // Existing interval has the newer version: it keeps its span, and we
      // only place the parts of entryInterval that stick out of it.
      if (currKey.contains(entryInterval)) {
        return true;
      } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
        entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
      } else {
        addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry, timeline);

        if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
          entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
        } else {
          entryInterval = null; // discard this entry
        }
      }
    } else if (versionCompare > 0) {
      // New entry has the newer version: carve the old interval down to the
      // parts the new entry does not cover.
      TimelineEntry oldEntry = timeline.remove(currKey);

      if (currKey.contains(entryInterval)) {
        addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
        addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
        addIntervalToTimeline(entryInterval, entry, timeline);

        return true;
      } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
        addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
      } else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
        addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
      }
    } else {
      // Equal versions: only legal if it is the exact same entry (segment
      // restore); overlapping different entries of one version are an error.
      if (timeline.get(currKey).equals(entry)) {
        // This occurs when restoring segments
        timeline.remove(currKey);
      } else {
        throw new UnsupportedOperationException(
            String.format(
                "Cannot add overlapping segments [%s and %s] with the same version [%s]",
                currKey,
                entryInterval,
                entry.getVersion()
            )
        );
      }
    }

    currKey = nextKey;
    retVal = true;
  }

  // Place whatever remains of the entry after the overlap walk.
  addIntervalToTimeline(entryInterval, entry, timeline);

  return retVal;
}
/**
 * Puts the entry into the timeline under the given interval, silently ignoring
 * null or zero-length intervals.
 *
 * @param interval the key interval; may be null or empty, in which case nothing happens
 * @param entry    the entry to store
 * @param timeline the timeline to mutate
 */
private void addIntervalToTimeline(
    Interval interval,
    TimelineEntry entry,
    NavigableMap<Interval, TimelineEntry> timeline
)
{
    // Empty intervals carry no data and would only pollute the map.
    if (interval == null || interval.toDurationMillis() <= 0) {
        return;
    }
    timeline.put(interval, entry);
}
/**
 * Removes the given entry from the timeline. If the entry is not stored under
 * {@code interval} directly (because it was split during insertion), every key
 * currently mapped to that exact entry instance is removed instead.
 *
 * @param timeline     the timeline to mutate
 * @param interval     the interval the entry was registered under
 * @param entry        the entry to remove
 * @param incompleteOk whether incomplete partition holders may backfill the gap
 */
private void remove(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    TimelineEntry entry,
    boolean incompleteOk
)
{
    final List<Interval> doomed = new ArrayList<Interval>();
    if (timeline.get(interval) != null) {
        // Fast path: the entry lives under exactly this interval.
        doomed.add(interval);
    } else {
        // Slow path: the entry may have been split across several keys;
        // collect every key mapped to this exact entry instance.
        for (Map.Entry<Interval, TimelineEntry> candidate : timeline.entrySet()) {
            if (candidate.getValue() == entry) {
                doomed.add(candidate.getKey());
            }
        }
    }
    for (Interval toRemove : doomed) {
        remove(timeline, toRemove, incompleteOk);
    }
}
/**
 * Removes the given interval from the timeline and backfills the vacated span
 * with the highest-versioned entry of every known interval that overlaps it
 * (restricted to complete partition holders unless {@code incompleteOk}).
 *
 * @param timeline     the timeline to mutate
 * @param interval     the interval to remove
 * @param incompleteOk whether incomplete partition holders may be re-added
 */
private void remove(
    NavigableMap<Interval, TimelineEntry> timeline,
    Interval interval,
    boolean incompleteOk
)
{
    timeline.remove(interval);
    // Removing the interval may expose previously overshadowed entries;
    // re-add the latest-version candidate for each overlapping interval.
    for (Map.Entry<Interval, TreeMap<VersionType, TimelineEntry>> versioned : allTimelineEntries.entrySet()) {
        if (versioned.getKey().overlap(interval) == null) {
            continue;
        }
        final TimelineEntry candidate = versioned.getValue().lastEntry().getValue();
        if (candidate.getPartitionHolder().isComplete() || incompleteOk) {
            add(timeline, versioned.getKey(), candidate);
        }
    }
}
/**
 * Returns the timeline objects whose intervals overlap the requested interval,
 * trimming the first and last holders so the returned intervals do not extend
 * beyond the requested one.
 *
 * @param interval     the interval to search for
 * @param incompleteOk whether to consult the incomplete-partitions timeline
 *                     instead of the complete-partitions timeline
 * @return the overlapping holders, in timeline (interval) order
 */
private List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval, boolean incompleteOk)
{
    List<TimelineObjectHolder<VersionType, ObjectType>> retVal = new ArrayList<TimelineObjectHolder<VersionType, ObjectType>>();
    // Choose which timeline to consult based on the completeness requirement.
    NavigableMap<Interval, TimelineEntry> timeline = (incompleteOk)
        ? incompletePartitionsTimeline
        : completePartitionsTimeline;
    // Collect every timeline entry whose keyed interval overlaps the query.
    for (Map.Entry<Interval, TimelineEntry> entry : timeline.entrySet()) {
        Interval timelineInterval = entry.getKey();
        TimelineEntry val = entry.getValue();
        if (timelineInterval.overlaps(interval)) {
            retVal.add(
                new TimelineObjectHolder<VersionType, ObjectType>(
                    timelineInterval,
                    val.getVersion(),
                    val.getPartitionHolder()
                )
            );
        }
    }
    if (retVal.isEmpty()) {
        return retVal;
    }
    // Trim the first holder so it does not start before the queried interval.
    TimelineObjectHolder<VersionType, ObjectType> firstEntry = retVal.get(0);
    if (interval.overlaps(firstEntry.getInterval()) && interval.getStart()
        .isAfter(firstEntry.getInterval().getStart())) {
        retVal.set(
            0,
            new TimelineObjectHolder<VersionType, ObjectType>(
                new Interval(interval.getStart(), firstEntry.getInterval().getEnd()),
                firstEntry.getVersion(),
                firstEntry.getObject()
            )
        );
    }
    // Trim the last holder so it does not end after the queried interval.
    TimelineObjectHolder<VersionType, ObjectType> lastEntry = retVal.get(retVal.size() - 1);
    if (interval.overlaps(lastEntry.getInterval()) && interval.getEnd().isBefore(lastEntry.getInterval().getEnd())) {
        retVal.set(
            retVal.size() - 1,
            new TimelineObjectHolder<VersionType, ObjectType>(
                new Interval(lastEntry.getInterval().getStart(), interval.getEnd()),
                lastEntry.getVersion(),
                lastEntry.getObject()
            )
        );
    }
    return retVal;
}
/**
 * Pairs a version and a partition holder with the full ("true") interval they
 * were registered for, independent of any trimmed interval under which they may
 * be keyed in a timeline after splitting.
 */
public class TimelineEntry
{
    private final Interval trueInterval;
    private final VersionType version;
    private final PartitionHolder<ObjectType> partitionHolder;

    public TimelineEntry(Interval trueInterval, VersionType version, PartitionHolder<ObjectType> partitionHolder)
    {
        this.trueInterval = trueInterval;
        this.version = version;
        this.partitionHolder = partitionHolder;
    }

    /** @return the untrimmed interval this entry was originally registered with */
    public Interval getTrueInterval()
    {
        return trueInterval;
    }

    /** @return the version associated with this entry */
    public VersionType getVersion()
    {
        return version;
    }

    /** @return the partition holder carrying this entry's payload objects */
    public PartitionHolder<ObjectType> getPartitionHolder()
    {
        return partitionHolder;
    }
}
}
| |
package org.apache.maven.execution;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.InvalidRepositoryException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.bridge.MavenRepositorySystem;
import org.apache.maven.repository.RepositorySystem;
import org.apache.maven.settings.Mirror;
import org.apache.maven.settings.Proxy;
import org.apache.maven.settings.Repository;
import org.apache.maven.settings.Server;
import org.apache.maven.settings.Settings;
import org.apache.maven.settings.SettingsUtils;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.util.StringUtils;
@Component( role = MavenExecutionRequestPopulator.class )
public class DefaultMavenExecutionRequestPopulator
    implements MavenExecutionRequestPopulator
{
    @Requirement
    private RepositorySystem repositorySystem;

    /**
     * Transfers the relevant parts of the user's settings (offline flag, interactivity,
     * plugin groups, local repository path, servers, active proxies, mirrors and
     * profiles with their repositories) onto the execution request.
     */
    public MavenExecutionRequest populateFromSettings( MavenExecutionRequest request, Settings settings )
        throws MavenExecutionRequestPopulationException
    {
        if ( settings == null )
        {
            return request;
        }

        request.setOffline( settings.isOffline() );

        request.setInteractiveMode( settings.isInteractiveMode() );

        request.setPluginGroups( settings.getPluginGroups() );

        request.setLocalRepositoryPath( settings.getLocalRepository() );

        // Clone each server so later mutation of the request cannot corrupt the settings.
        for ( Server server : settings.getServers() )
        {
            request.addServer( server.clone() );
        }

        // Only proxies marked active are propagated to the request.
        for ( Proxy proxy : settings.getProxies() )
        {
            if ( proxy.isActive() )
            {
                request.addProxy( proxy.clone() );
            }
        }

        for ( Mirror mirror : settings.getMirrors() )
        {
            request.addMirror( mirror.clone() );
        }

        request.setActiveProfiles( settings.getActiveProfiles() );

        for ( org.apache.maven.settings.Profile settingsProfile : settings.getProfiles() )
        {
            request.addProfile( SettingsUtils.convertFromSettingsProfile( settingsProfile ) );

            if ( !settings.getActiveProfiles().contains( settingsProfile.getId() ) )
            {
                continue;
            }

            // Repositories declared in active profiles become remote repositories of the request.
            for ( Repository remoteRepository : settingsProfile.getRepositories() )
            {
                try
                {
                    request.addRemoteRepository( MavenRepositorySystem.buildArtifactRepository( remoteRepository ) );
                }
                catch ( InvalidRepositoryException e )
                {
                    // do nothing for now
                }
            }
        }

        return request;
    }

    /** Ensures the canonical plugin groups are always available for plugin prefix resolution. */
    private void populateDefaultPluginGroups( MavenExecutionRequest request )
    {
        request.addPluginGroup( "org.apache.maven.plugins" );
        request.addPluginGroup( "org.codehaus.mojo" );
    }

    /** Adds the default remote repository unless the request already defines one by that id. */
    private void injectDefaultRepositories( MavenExecutionRequest request )
        throws MavenExecutionRequestPopulationException
    {
        if ( getRepoIds( request.getRemoteRepositories() ).contains( RepositorySystem.DEFAULT_REMOTE_REPO_ID ) )
        {
            return;
        }

        try
        {
            request.addRemoteRepository( repositorySystem.createDefaultRemoteRepository() );
        }
        catch ( InvalidRepositoryException e )
        {
            throw new MavenExecutionRequestPopulationException( "Cannot create default remote repository.", e );
        }
    }

    /** Adds the default plugin repository unless the request already defines one by that id. */
    private void injectDefaultPluginRepositories( MavenExecutionRequest request )
        throws MavenExecutionRequestPopulationException
    {
        if ( getRepoIds( request.getPluginArtifactRepositories() ).contains( RepositorySystem.DEFAULT_REMOTE_REPO_ID ) )
        {
            return;
        }

        try
        {
            request.addPluginArtifactRepository( repositorySystem.createDefaultRemoteRepository() );
        }
        catch ( InvalidRepositoryException e )
        {
            throw new MavenExecutionRequestPopulationException( "Cannot create default remote repository.", e );
        }
    }

    /**
     * Collects the ids of the given repositories.
     *
     * @param repositories the repositories to inspect, may be {@code null}
     * @return the set of repository ids, never {@code null}
     */
    private Set<String> getRepoIds( List<ArtifactRepository> repositories )
    {
        Set<String> ids = new HashSet<String>();

        if ( repositories != null )
        {
            for ( ArtifactRepository repository : repositories )
            {
                ids.add( repository.getId() );
            }
        }

        return ids;
    }

    /**
     * Applies mirrors, proxies and authentication from the request's settings to both
     * the remote and the plugin repositories, then collapses each list to its
     * effective (deduplicated) form.
     */
    private void processRepositoriesInSettings( MavenExecutionRequest request )
        throws MavenExecutionRequestPopulationException
    {
        // Project repositories first...
        repositorySystem.injectMirror( request.getRemoteRepositories(), request.getMirrors() );
        repositorySystem.injectProxy( request.getRemoteRepositories(), request.getProxies() );
        repositorySystem.injectAuthentication( request.getRemoteRepositories(), request.getServers() );
        request.setRemoteRepositories( repositorySystem.getEffectiveRepositories( request.getRemoteRepositories() ) );

        // ...then the same treatment for the plugin repositories.
        repositorySystem.injectMirror( request.getPluginArtifactRepositories(), request.getMirrors() );
        repositorySystem.injectProxy( request.getPluginArtifactRepositories(), request.getProxies() );
        repositorySystem.injectAuthentication( request.getPluginArtifactRepositories(), request.getServers() );
        request.setPluginArtifactRepositories( repositorySystem.getEffectiveRepositories( request.getPluginArtifactRepositories() ) );
    }

    /**
     * Resolves the local repository for the request.
     *
     * Precedence: an explicitly configured repository instance wins, then the
     * configured path, then the default user local repository.
     */
    private void localRepository( MavenExecutionRequest request )
        throws MavenExecutionRequestPopulationException
    {
        if ( request.getLocalRepository() == null )
        {
            request.setLocalRepository( createLocalRepository( request ) );
        }

        if ( request.getLocalRepositoryPath() == null )
        {
            request.setLocalRepositoryPath( new File( request.getLocalRepository().getBasedir() ).getAbsoluteFile() );
        }
    }

    // ------------------------------------------------------------------------
    // Artifact Transfer Mechanism
    // ------------------------------------------------------------------------

    /**
     * Creates the local repository from the request's configured path, falling back
     * to the default user local repository when no path is set.
     *
     * @param request the execution request, must not be {@code null}
     * @return the local repository, never {@code null}
     * @throws MavenExecutionRequestPopulationException if the repository cannot be created
     */
    public ArtifactRepository createLocalRepository( MavenExecutionRequest request )
        throws MavenExecutionRequestPopulationException
    {
        String basedir = ( request.getLocalRepositoryPath() != null )
            ? request.getLocalRepositoryPath().getAbsolutePath()
            : null;

        if ( StringUtils.isEmpty( basedir ) )
        {
            basedir = RepositorySystem.defaultUserLocalRepository.getAbsolutePath();
        }

        try
        {
            return repositorySystem.createLocalRepository( new File( basedir ) );
        }
        catch ( InvalidRepositoryException e )
        {
            throw new MavenExecutionRequestPopulationException( "Cannot create local repository.", e );
        }
    }

    /** Derives the base directory from the POM location when none was explicitly given. */
    private void baseDirectory( MavenExecutionRequest request )
    {
        if ( request.getBaseDirectory() == null && request.getPom() != null )
        {
            request.setBaseDirectory( request.getPom().getAbsoluteFile().getParentFile() );
        }
    }

    /**
     * Fills in every default the request still lacks: base directory, local
     * repository, plugin groups, default repositories, and finally applies the
     * settings-driven repository post-processing.
     */
    public MavenExecutionRequest populateDefaults( MavenExecutionRequest request )
        throws MavenExecutionRequestPopulationException
    {
        baseDirectory( request );

        localRepository( request );

        populateDefaultPluginGroups( request );

        injectDefaultRepositories( request );

        injectDefaultPluginRepositories( request );

        processRepositoriesInSettings( request );

        return request;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.server.op.session;
import io.netty.channel.Channel;
import org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutor;
import org.apache.tinkerpop.gremlin.groovy.jsr223.GroovyCompilerGremlinPlugin;
import org.apache.tinkerpop.gremlin.jsr223.GremlinScriptEngine;
import org.apache.tinkerpop.gremlin.server.Context;
import org.apache.tinkerpop.gremlin.server.GraphManager;
import org.apache.tinkerpop.gremlin.server.Settings;
import org.apache.tinkerpop.gremlin.server.util.MetricManager;
import org.apache.tinkerpop.gremlin.server.util.ThreadFactoryUtil;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.Bindings;
import javax.script.SimpleBindings;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.apache.tinkerpop.gremlin.server.op.session.SessionOpProcessor.CONFIG_GLOBAL_FUNCTION_CACHE_ENABLED;
import static org.apache.tinkerpop.gremlin.server.op.session.SessionOpProcessor.CONFIG_PER_GRAPH_CLOSE_TIMEOUT;
import static org.apache.tinkerpop.gremlin.server.op.session.SessionOpProcessor.CONFIG_SESSION_TIMEOUT;
import static org.apache.tinkerpop.gremlin.server.op.session.SessionOpProcessor.DEFAULT_PER_GRAPH_CLOSE_TIMEOUT;
import static org.apache.tinkerpop.gremlin.server.op.session.SessionOpProcessor.DEFAULT_SESSION_TIMEOUT;
/**
* Defines a "session" for the {@link SessionOpProcessor} which preserves state between requests made to Gremlin
* Server. Since transactions are bound to a single thread the "session" maintains its own thread to process Gremlin
* statements so that each request can be executed within it to preserve the transaction state from one request to
* the next.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class Session {
    private static final Logger logger = LoggerFactory.getLogger(Session.class);
    // Script-engine bindings preserved across requests made within this session.
    private final Bindings bindings;
    private final Settings settings;
    private final GraphManager graphManager;
    // The session identifier supplied by the client.
    private final String session;
    private final ScheduledExecutorService scheduledExecutorService;
    // Idle time in milliseconds after which the session is scheduled for closing.
    private final long configuredSessionTimeout;
    // Per-graph time budget in milliseconds for rolling back transactions on close.
    private final long configuredPerGraphCloseTimeout;
    private final boolean globalFunctionCacheEnabled;
    // The channel this session is bound to; when it closes, the session is force-killed.
    private final Channel boundChannel;
    // Flipped when kill processing begins; used to refuse further requests.
    private AtomicBoolean killing = new AtomicBoolean(false);
    // The currently scheduled idle-timeout kill task, if any.
    private AtomicReference<ScheduledFuture> kill = new AtomicReference<>();
    /**
     * Each session gets its own ScriptEngine so as to isolate its configuration and the classes loaded to it.
     * This is important as it enables user interfaces built on Gremlin Server to have isolation in what
     * libraries they use and what classes exist.
     */
    private final GremlinExecutor gremlinExecutor;
    private final ThreadFactory threadFactoryWorker = ThreadFactoryUtil.create("session-%d");
    /**
     * By binding the session to run ScriptEngine evaluations in a specific thread, each request will respect
     * the ThreadLocal nature of Graph implementations.
     */
    private final ExecutorService executor = Executors.newSingleThreadExecutor(threadFactoryWorker);
    // Registry of all live sessions, shared with the SessionOpProcessor; this session
    // removes itself from it on kill.
    private final ConcurrentHashMap<String, Session> sessions;
    /**
     * Creates a new session bound to the channel of the given request context, reading
     * its timeout and function-cache configuration from the {@link SessionOpProcessor}
     * settings (falling back to the processor defaults).
     *
     * @param session  the session identifier
     * @param context  the request context the session is created from
     * @param sessions the shared registry of live sessions to join
     */
    public Session(final String session, final Context context, final ConcurrentHashMap<String, Session> sessions) {
        logger.debug("New session established for {}", session);
        this.session = session;
        this.bindings = new SimpleBindings();
        this.settings = context.getSettings();
        this.graphManager = context.getGraphManager();
        this.scheduledExecutorService = context.getScheduledExecutorService();
        this.sessions = sessions;
        // Processor-specific configuration overrides the defaults where present.
        final Settings.ProcessorSettings processorSettings = this.settings.optionalProcessor(SessionOpProcessor.class).
                orElse(SessionOpProcessor.DEFAULT_SETTINGS);
        this.configuredSessionTimeout = Long.parseLong(processorSettings.config.getOrDefault(
                CONFIG_SESSION_TIMEOUT, DEFAULT_SESSION_TIMEOUT).toString());
        this.configuredPerGraphCloseTimeout = Long.parseLong(processorSettings.config.getOrDefault(
                CONFIG_PER_GRAPH_CLOSE_TIMEOUT, DEFAULT_PER_GRAPH_CLOSE_TIMEOUT).toString());
        this.globalFunctionCacheEnabled = Boolean.parseBoolean(
                processorSettings.config.getOrDefault(CONFIG_GLOBAL_FUNCTION_CACHE_ENABLED, true).toString());
        this.gremlinExecutor = initializeGremlinExecutor().create();
        settings.scriptEngines.keySet().forEach(this::registerMetrics);
        // Tie the session lifetime to the channel: a closed channel force-kills the session.
        boundChannel = context.getChannelHandlerContext().channel();
        boundChannel.closeFuture().addListener(future -> manualKill(true));
    }
    /**
     * Determines if the supplied {@code Channel} object is the same as the one bound to the {@code Session}.
     */
    public boolean isBoundTo(final Channel channel) {
        return channel == boundChannel;
    }
    /** @return the executor that evaluates Gremlin for this session */
    public GremlinExecutor getGremlinExecutor() {
        return gremlinExecutor;
    }
    /** @return the bindings preserved between requests of this session */
    public Bindings getBindings() {
        return bindings;
    }
    /** @return the single-threaded executor all evaluations for this session run on */
    public ExecutorService getExecutor() {
        return executor;
    }
    /** @return the session identifier */
    public String getSessionId() {
        return session;
    }
    /** @return false once the session has started its kill processing */
    public boolean acceptingRequests() {
        return !killing.get();
    }
    /**
     * Resets the idle-timeout clock for this session, (re)scheduling the task that
     * will close the session after {@code configuredSessionTimeout} milliseconds
     * of inactivity.
     */
    public void touch() {
        // if the task of killing is cancelled successfully then reset the session monitor. otherwise this session
        // has already been killed and there's nothing left to do with this session.
        kill.updateAndGet(future -> {
            if (null == future || !future.isDone()) {
                if (future != null) future.cancel(false);
                return this.scheduledExecutorService.schedule(() -> {
                    logger.info("Session {} has been idle for more than {} milliseconds - preparing to close",
                            this.session, this.configuredSessionTimeout);
                    kill(false);
                }, this.configuredSessionTimeout, TimeUnit.MILLISECONDS);
            }
            return future;
        });
    }
    /**
     * Stops the session with call to {@link #kill(boolean)} but also stops the session expiration call which ensures
     * that the session is only killed once. See {@link #kill(boolean)} for information on what "forcing" the
     * session kill will mean.
     */
    public void manualKill(final boolean force) {
        // seems there is a situation where kill can get nulled. seems to only happen in travis as a result of test
        // runs and i'm guessing it has something to do with a combination of shutdown and session close though i'm
        // not sure why. perhaps this "fix" just masks up a deeper problem but as i reason on it now, it seems mostly
        // bound to shutdown situations which basically means the forced end of the session anyway, so perhaps the
        // root cause isn't something that needs immediate chasing (at least until it can be shown otherwise anyway)
        Optional.ofNullable(kill.get()).ifPresent(f -> f.cancel(true));
        kill(force);
    }
    /**
     * Kills the session and rollback any uncommitted changes on transactional graphs. When "force" closed, the
     * session won't bother to try to submit transaction close commands. It will be up to the underlying graph
     * implementation to determine how it will clean up orphaned transactions. The force will try to cancel scheduled
     * jobs and interrupt any currently running ones. Interruption is not guaranteed, but an attempt will be made.
     */
    public synchronized void kill(final boolean force) {
        killing.set(true);
        // if the session has already been removed then there's no need to do this process again. it's possible that
        // the manualKill and the kill future could have both called kill at roughly the same time. this prevents
        // kill() from being called more than once
        if (!sessions.containsKey(session)) return;
        if (!force) {
            // when the session is killed open transaction should be rolled back
            graphManager.getGraphNames().forEach(gName -> {
                final Graph g = graphManager.getGraph(gName);
                if (g.features().graph().supportsTransactions()) {
                    // have to execute the rollback in the executor because the transaction is associated with
                    // that thread of execution from this session
                    try {
                        // bounded wait: a graph must roll back within configuredPerGraphCloseTimeout ms
                        executor.submit(() -> {
                            if (g.tx().isOpen()) {
                                logger.debug("Rolling back open transactions on {} before killing session: {}", gName, session);
                                g.tx().rollback();
                            }
                        }).get(configuredPerGraphCloseTimeout, TimeUnit.MILLISECONDS);
                    } catch (Exception ex) {
                        logger.warn(String.format("An error occurred while attempting rollback on %s when closing session: %s", gName, session), ex);
                    }
                }
            });
        } else {
            logger.debug("Skipped attempt to close open graph transactions on {} - close was forced", session);
        }
        // prevent any additional requests from processing. if the kill was not "forced" then jobs were scheduled to
        // try to rollback open transactions. those jobs either timed-out or completed successfully. either way, no
        // additional jobs will be allowed, running jobs will be cancelled (if possible) and any scheduled jobs will
        // be cancelled
        executor.shutdownNow();
        sessions.remove(session);
        // once a session is dead release the gauges in the registry for it
        MetricManager.INSTANCE.getRegistry().removeMatching((s, metric) -> s.contains(session));
        logger.debug("Session {} closed", session);
    }
    /**
     * Builds (but does not create) the session's {@link GremlinExecutor}, wiring the
     * session bindings, the session executor, and any configured script-engine plugins.
     */
    private GremlinExecutor.Builder initializeGremlinExecutor() {
        final GremlinExecutor.Builder gremlinExecutorBuilder = GremlinExecutor.build()
                .evaluationTimeout(settings.getEvaluationTimeout())
                .afterTimeout(b -> {
                    // on timeout, roll everything back and restore the pre-evaluation bindings
                    graphManager.rollbackAll();
                    this.bindings.clear();
                    this.bindings.putAll(b);
                })
                .afterSuccess(b -> {
                    // carry the post-evaluation bindings over to the next request
                    this.bindings.clear();
                    this.bindings.putAll(b);
                })
                .globalBindings(graphManager.getAsBindings())
                .executorService(executor)
                .scheduledExecutorService(scheduledExecutorService);
        settings.scriptEngines.forEach((k, v) -> {
            // use plugins if they are present
            if (!v.plugins.isEmpty()) {
                // make sure that server related classes are available at init. the LifeCycleHook stuff will be
                // added explicitly via configuration using GremlinServerGremlinModule in the yaml. need to override
                // scriptengine settings with SessionOpProcessor specific ones as the processing for sessions is
                // different and a global setting may not make sense for a session
                if (v.plugins.containsKey(GroovyCompilerGremlinPlugin.class.getName())) {
                    v.plugins.get(GroovyCompilerGremlinPlugin.class.getName()).put(CONFIG_GLOBAL_FUNCTION_CACHE_ENABLED, globalFunctionCacheEnabled);
                } else {
                    final Map<String,Object> pluginConf = new HashMap<>();
                    pluginConf.put(CONFIG_GLOBAL_FUNCTION_CACHE_ENABLED, globalFunctionCacheEnabled);
                    v.plugins.put(GroovyCompilerGremlinPlugin.class.getName(), pluginConf);
                }
                gremlinExecutorBuilder.addPlugins(k, v.plugins);
            }
        });
        return gremlinExecutorBuilder;
    }
    // Registers class-cache gauges for the named script engine; the matching
    // removal happens in kill() via removeMatching on the session id.
    private void registerMetrics(final String engineName) {
        final GremlinScriptEngine engine = gremlinExecutor.getScriptEngineManager().getEngineByName(engineName);
        MetricManager.INSTANCE.registerGremlinScriptEngineMetrics(engine, engineName, "session", session, "class-cache");
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.cli;
import java.io.File;
import java.io.FileInputStream;
import java.net.URL;
import java.util.Date;
/**
* <p>Allows Options to be created from a single String.
* The pattern contains various single character flags and via
* an optional punctuation character, their expected type.
* </p>
*
* <table border="1">
* <caption>Overview of PatternOptionBuilder patterns</caption>
* <tr><td>a</td><td>-a flag</td></tr>
* <tr><td>b@</td><td>-b [classname]</td></tr>
* <tr><td>c></td><td>-c [filename]</td></tr>
* <tr><td>d+</td><td>-d [classname] (creates object via empty constructor)</td></tr>
 * <tr><td>e%</td><td>-e [number] (creates Double/Long instance depending on the existence of a '.')</td></tr>
* <tr><td>f/</td><td>-f [url]</td></tr>
* <tr><td>g:</td><td>-g [string]</td></tr>
* </table>
*
* <p>
* For example, the following allows command line flags of '-v -p string-value -f /dir/file'.
 * The exclamation mark marks a mandatory option.
* </p>
*
* <pre>
* Options options = PatternOptionBuilder.parsePattern("vp:!f/");
* </pre>
*
* <p>
* TODO: These need to break out to OptionType and also to be pluggable.
* </p>
*
* @version $Id: PatternOptionBuilder.java 1677406 2015-05-03 14:27:31Z britter $
*/
public class PatternOptionBuilder
{
    /** String class */
    public static final Class<String> STRING_VALUE = String.class;

    /** Object class */
    public static final Class<Object> OBJECT_VALUE = Object.class;

    /** Number class */
    public static final Class<Number> NUMBER_VALUE = Number.class;

    /** Date class */
    public static final Class<Date> DATE_VALUE = Date.class;

    /** Class class */
    public static final Class<?> CLASS_VALUE = Class.class;

    /// can we do this one??
    // is meant to check that the file exists, else it errors.
    // ie) it's for reading not writing.

    /** FileInputStream class */
    public static final Class<FileInputStream> EXISTING_FILE_VALUE = FileInputStream.class;

    /** File class */
    public static final Class<File> FILE_VALUE = File.class;

    /** File array class */
    public static final Class<File[]> FILES_VALUE = File[].class;

    /** URL class */
    public static final Class<URL> URL_VALUE = URL.class;

    /** Every character that encodes a value type, plus the '!' required marker. */
    private static final String VALUE_CODES = "@:%+#<>*/!";

    /**
     * Retrieve the class that <code>ch</code> represents.
     *
     * @param ch the specified character
     * @return The class that <code>ch</code> represents, or null if it is not a known code
     */
    public static Object getValueClass(char ch)
    {
        switch (ch)
        {
            case '@':
                return OBJECT_VALUE;
            case ':':
                return STRING_VALUE;
            case '%':
                return NUMBER_VALUE;
            case '+':
                return CLASS_VALUE;
            case '#':
                return DATE_VALUE;
            case '<':
                return EXISTING_FILE_VALUE;
            case '>':
                return FILE_VALUE;
            case '*':
                return FILES_VALUE;
            case '/':
                return URL_VALUE;
            default:
                return null;
        }
    }

    /**
     * Returns whether <code>ch</code> is a value code, i.e.
     * whether it represents a class in a pattern.
     *
     * @param ch the specified character
     * @return true if <code>ch</code> is a value code, otherwise false.
     */
    public static boolean isValueCode(char ch)
    {
        return VALUE_CODES.indexOf(ch) != -1;
    }

    /**
     * Returns the {@link Options} instance represented by <code>pattern</code>.
     *
     * @param pattern the pattern string
     * @return The {@link Options} instance
     */
    public static Options parsePattern(String pattern)
    {
        Options options = new Options();

        // State accumulated for the option letter currently being parsed.
        char pendingOpt = ' ';
        boolean pendingRequired = false;
        Class<?> pendingType = null;

        for (int i = 0; i < pattern.length(); i++)
        {
            char ch = pattern.charAt(i);

            if (!isValueCode(ch))
            {
                // A new option letter: flush the previous option, then remember this one.
                if (pendingOpt != ' ')
                {
                    options.addOption(buildOption(pendingOpt, pendingRequired, pendingType));
                    pendingRequired = false;
                    pendingType = null;
                }
                pendingOpt = ch;
            }
            else if (ch == '!')
            {
                pendingRequired = true;
            }
            else
            {
                // A value code following an option letter defines its argument type.
                pendingType = (Class<?>) getValueClass(ch);
            }
        }

        // Flush the final pending option, if any.
        if (pendingOpt != ' ')
        {
            options.addOption(buildOption(pendingOpt, pendingRequired, pendingType));
        }

        return options;
    }

    /** Builds a single-letter {@link Option} with the accumulated pattern state. */
    private static Option buildOption(char opt, boolean required, Class<?> type)
    {
        return Option.builder(String.valueOf(opt))
            .hasArg(type != null)
            .required(required)
            .type(type)
            .build();
    }
}
| |
// ========================================================================
// Copyright (c) 1996-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.util.resource;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import java.text.DateFormat;
import java.util.Arrays;
import java.util.Date;
import org.eclipse.jetty.util.IO;
import org.eclipse.jetty.util.Loader;
import org.eclipse.jetty.util.StringUtil;
import org.eclipse.jetty.util.URIUtil;
import org.eclipse.jetty.util.log.Log;
/* ------------------------------------------------------------ */
/**
* Abstract resource class.
*/
public abstract class Resource implements ResourceFactory
{
public static boolean __defaultUseCaches = true;
volatile Object _associate;
/**
 * Change the default setting for url connection caches.
 * Subsequent URLConnections will use this default.
 * @param useCaches true to enable URLConnection caching by default, false to disable it
 */
public static void setDefaultUseCaches (boolean useCaches)
{
    __defaultUseCaches=useCaches;
}
/* ------------------------------------------------------------ */
/**
 * @return the current default setting for URLConnection caching
 */
public static boolean getDefaultUseCaches ()
{
    return __defaultUseCaches;
}
/* ------------------------------------------------------------ */
/** Construct a resource from a uri.
* @param uri A URI.
* @return A Resource object.
* @throws IOException Problem accessing URI
*/
public static Resource newResource(URI uri)
throws IOException
{
return newResource(uri.toURL());
}
/* ------------------------------------------------------------ */
/** Construct a resource from a url.
* @param url A URL.
* @return A Resource object.
* @throws IOException Problem accessing URL
*/
public static Resource newResource(URL url)
throws IOException
{
return newResource(url, __defaultUseCaches);
}
/* ------------------------------------------------------------ */
/**
* Construct a resource from a url.
* @param url the url for which to make the resource
* @param useCaches true enables URLConnection caching if applicable to the type of resource
* @return
*/
static Resource newResource(URL url, boolean useCaches)
{
if (url==null)
return null;
String url_string=url.toExternalForm();
if( url_string.startsWith( "file:"))
{
try
{
FileResource fileResource= new FileResource(url);
return fileResource;
}
catch(Exception e)
{
Log.debug(Log.EXCEPTION,e);
return new BadResource(url,e.toString());
}
}
else if( url_string.startsWith( "jar:file:"))
{
return new JarFileResource(url, useCaches);
}
else if( url_string.startsWith( "jar:"))
{
return new JarResource(url, useCaches);
}
return new URLResource(url,null,useCaches);
}
/* ------------------------------------------------------------ */
/** Construct a resource from a string.
* @param resource A URL or filename.
* @return A Resource object.
*/
public static Resource newResource(String resource)
throws MalformedURLException, IOException
{
return newResource(resource, __defaultUseCaches);
}
/* ------------------------------------------------------------ */
/** Construct a resource from a string.
* @param resource A URL or filename.
* @param useCaches controls URLConnection caching
* @return A Resource object.
*/
public static Resource newResource (String resource, boolean useCaches)
throws MalformedURLException, IOException
{
URL url=null;
try
{
// Try to format as a URL?
url = new URL(resource);
}
catch(MalformedURLException e)
{
if(!resource.startsWith("ftp:") &&
!resource.startsWith("file:") &&
!resource.startsWith("jar:"))
{
try
{
// It's a file.
if (resource.startsWith("./"))
resource=resource.substring(2);
File file=new File(resource).getCanonicalFile();
return new FileResource(file);
}
catch(Exception e2)
{
Log.debug(Log.EXCEPTION,e2);
throw e;
}
}
else
{
Log.warn("Bad Resource: "+resource);
throw e;
}
}
// Make sure that any special characters stripped really are ignorable.
String nurl=url.toString();
if (nurl.length()>0 && nurl.charAt(nurl.length()-1)!=resource.charAt(resource.length()-1))
{
if ((nurl.charAt(nurl.length()-1)!='/' ||
nurl.charAt(nurl.length()-2)!=resource.charAt(resource.length()-1))
&&
(resource.charAt(resource.length()-1)!='/' ||
resource.charAt(resource.length()-2)!=nurl.charAt(nurl.length()-1)
))
{
return new BadResource(url,"Trailing special characters stripped by URL in "+resource);
}
}
return newResource(url);
}
/* ------------------------------------------------------------ */
/** Construct a system resource from a string.
* The resource is tried as classloader resource before being
* treated as a normal resource.
* @param resource Resource as string representation
* @return The new Resource
* @throws IOException Problem accessing resource.
*/
public static Resource newSystemResource(String resource)
throws IOException
{
URL url=null;
// Try to format as a URL?
ClassLoader
loader=Thread.currentThread().getContextClassLoader();
if (loader!=null)
{
try
{
url = loader.getResource(resource);
if (url == null && resource.startsWith("/"))
url = loader.getResource(resource.substring(1));
}
catch (IllegalArgumentException e)
{
// Catches scenario where a bad Windows path like "C:\dev" is
// improperly escaped, which various downstream classloaders
// tend to have a problem with
url = null;
}
}
if (url==null)
{
loader=Resource.class.getClassLoader();
if (loader!=null)
{
url=loader.getResource(resource);
if (url==null && resource.startsWith("/"))
url=loader.getResource(resource.substring(1));
}
}
if (url==null)
{
url=ClassLoader.getSystemResource(resource);
if (url==null && resource.startsWith("/"))
url=loader.getResource(resource.substring(1));
}
if (url==null)
return null;
return newResource(url);
}
/* ------------------------------------------------------------ */
/** Find a classpath resource.
*/
public static Resource newClassPathResource(String resource)
{
return newClassPathResource(resource,true,false);
}
/* ------------------------------------------------------------ */
/** Find a classpath resource.
* The {@link java.lang.Class#getResource(String)} method is used to lookup the resource. If it is not
* found, then the {@link Loader#getResource(Class, String, boolean)} method is used.
* If it is still not found, then {@link ClassLoader#getSystemResource(String)} is used.
* Unlike {@link ClassLoader#getSystemResource(String)} this method does not check for normal resources.
* @param name The relative name of the resource
* @param useCaches True if URL caches are to be used.
* @param checkParents True if forced searching of parent Classloaders is performed to work around
* loaders with inverted priorities
* @return Resource or null
*/
public static Resource newClassPathResource(String name,boolean useCaches,boolean checkParents)
{
URL url=Resource.class.getResource(name);
if (url==null)
{
try
{
url=Loader.getResource(Resource.class,name,checkParents);
}
catch(ClassNotFoundException e)
{
url=ClassLoader.getSystemResource(name);
}
}
if (url==null)
return null;
return newResource(url,useCaches);
}
/* ------------------------------------------------------------ */
public static boolean isContainedIn (Resource r, Resource containingResource) throws MalformedURLException
{
return r.isContainedIn(containingResource);
}
/* ------------------------------------------------------------ */
@Override
protected void finalize()
{
release();
}
/* ------------------------------------------------------------ */
public abstract boolean isContainedIn (Resource r) throws MalformedURLException;
/* ------------------------------------------------------------ */
/** Release any temporary resources held by the resource.
*/
public abstract void release();
/* ------------------------------------------------------------ */
/**
* Returns true if the respresened resource exists.
*/
public abstract boolean exists();
/* ------------------------------------------------------------ */
/**
* Returns true if the respresenetd resource is a container/directory.
* If the resource is not a file, resources ending with "/" are
* considered directories.
*/
public abstract boolean isDirectory();
/* ------------------------------------------------------------ */
/**
* Returns the last modified time
*/
public abstract long lastModified();
/* ------------------------------------------------------------ */
/**
* Return the length of the resource
*/
public abstract long length();
/* ------------------------------------------------------------ */
/**
* Returns an URL representing the given resource
*/
public abstract URL getURL();
/* ------------------------------------------------------------ */
/**
* Returns an URI representing the given resource
*/
public URI getURI()
{
try
{
return getURL().toURI();
}
catch(Exception e)
{
throw new RuntimeException(e);
}
}
/* ------------------------------------------------------------ */
/**
* Returns an File representing the given resource or NULL if this
* is not possible.
*/
public abstract File getFile()
throws IOException;
/* ------------------------------------------------------------ */
/**
* Returns the name of the resource
*/
public abstract String getName();
/* ------------------------------------------------------------ */
/**
* Returns an input stream to the resource
*/
public abstract InputStream getInputStream()
throws java.io.IOException;
/* ------------------------------------------------------------ */
/**
* Returns an output stream to the resource
*/
public abstract OutputStream getOutputStream()
throws java.io.IOException, SecurityException;
/* ------------------------------------------------------------ */
/**
* Deletes the given resource
*/
public abstract boolean delete()
throws SecurityException;
/* ------------------------------------------------------------ */
/**
* Rename the given resource
*/
public abstract boolean renameTo( Resource dest)
throws SecurityException;
/* ------------------------------------------------------------ */
/**
* Returns a list of resource names contained in the given resource
* The resource names are not URL encoded.
*/
public abstract String[] list();
/* ------------------------------------------------------------ */
/**
* Returns the resource contained inside the current resource with the
* given name.
* @param path The path segment to add, which should be encoded by the
* encode method.
*/
public abstract Resource addPath(String path)
throws IOException,MalformedURLException;
/* ------------------------------------------------------------ */
/** Get a resource from withing this resource.
* <p>
* This method is essentially an alias for {@link #addPath(String)}, but without checked exceptions.
* This method satisfied the {@link ResourceFactory} interface.
* @see org.eclipse.jetty.util.resource.ResourceFactory#getResource(java.lang.String)
*/
public Resource getResource(String path)
{
try
{
return addPath(path);
}
catch(Exception e)
{
Log.debug(e);
return null;
}
}
/* ------------------------------------------------------------ */
/** Encode according to this resource type.
* The default implementation calls URI.encodePath(uri)
* @param uri
* @return String encoded for this resource type.
*/
public String encode(String uri)
{
return URIUtil.encodePath(uri);
}
/* ------------------------------------------------------------ */
public Object getAssociate()
{
return _associate;
}
/* ------------------------------------------------------------ */
public void setAssociate(Object o)
{
_associate=o;
}
/* ------------------------------------------------------------ */
/**
* @return The canonical Alias of this resource or null if none.
*/
public URL getAlias()
{
return null;
}
/* ------------------------------------------------------------ */
/** Get the resource list as a HTML directory listing.
* @param base The base URL
* @param parent True if the parent directory should be included
* @return String of HTML
*/
public String getListHTML(String base,boolean parent)
throws IOException
{
base=URIUtil.canonicalPath(base);
if (base==null || !isDirectory())
return null;
String[] ls = list();
if (ls==null)
return null;
Arrays.sort(ls);
String decodedBase = URIUtil.decodePath(base);
String title = "Directory: "+deTag(decodedBase);
StringBuilder buf=new StringBuilder(4096);
buf.append("<HTML><HEAD>");
buf.append("<LINK HREF=\"").append("jetty-dir.css").append("\" REL=\"stylesheet\" TYPE=\"text/css\"/><TITLE>");
buf.append(title);
buf.append("</TITLE></HEAD><BODY>\n<H1>");
buf.append(title);
buf.append("</H1>\n<TABLE BORDER=0>\n");
if (parent)
{
buf.append("<TR><TD><A HREF=\"");
buf.append(URIUtil.addPaths(base,"../"));
buf.append("\">Parent Directory</A></TD><TD></TD><TD></TD></TR>\n");
}
String encodedBase = hrefEncodeURI(base);
DateFormat dfmt=DateFormat.getDateTimeInstance(DateFormat.MEDIUM,
DateFormat.MEDIUM);
for (int i=0 ; i< ls.length ; i++)
{
Resource item = addPath(ls[i]);
buf.append("\n<TR><TD><A HREF=\"");
String path=URIUtil.addPaths(encodedBase,URIUtil.encodePath(ls[i]));
buf.append(path);
if (item.isDirectory() && !path.endsWith("/"))
buf.append(URIUtil.SLASH);
// URIUtil.encodePath(buf,path);
buf.append("\">");
buf.append(deTag(ls[i]));
buf.append(" ");
buf.append("</A></TD><TD ALIGN=right>");
buf.append(item.length());
buf.append(" bytes </TD><TD>");
buf.append(dfmt.format(new Date(item.lastModified())));
buf.append("</TD></TR>");
}
buf.append("</TABLE>\n");
buf.append("</BODY></HTML>\n");
return buf.toString();
}
/**
* Encode any characters that could break the URI string in an HREF.
* Such as <a href="/path/to;<script>Window.alert("XSS"+'%20'+"here");</script>">Link</a>
*
* The above example would parse incorrectly on various browsers as the "<" or '"' characters
* would end the href attribute value string prematurely.
*
* @param raw the raw text to encode.
* @return the defanged text.
*/
private static String hrefEncodeURI(String raw)
{
StringBuffer buf = null;
loop:
for (int i=0;i<raw.length();i++)
{
char c=raw.charAt(i);
switch(c)
{
case '\'':
case '"':
case '<':
case '>':
buf=new StringBuffer(raw.length()<<1);
break loop;
}
}
if (buf==null)
return raw;
for (int i=0;i<raw.length();i++)
{
char c=raw.charAt(i);
switch(c)
{
case '"':
buf.append("%22");
continue;
case '\'':
buf.append("%27");
continue;
case '<':
buf.append("%3C");
continue;
case '>':
buf.append("%3E");
continue;
default:
buf.append(c);
continue;
}
}
return buf.toString();
}
private static String deTag(String raw)
{
return StringUtil.replace( StringUtil.replace(raw,"<","<"), ">", ">");
}
/* ------------------------------------------------------------ */
/**
* @param out
* @param start First byte to write
* @param count Bytes to write or -1 for all of them.
*/
public void writeTo(OutputStream out,long start,long count)
throws IOException
{
InputStream in = getInputStream();
try
{
in.skip(start);
if (count<0)
IO.copy(in,out);
else
IO.copy(in,out,count);
}
finally
{
in.close();
}
}
/* ------------------------------------------------------------ */
public void copyTo(File destination)
throws IOException
{
if (destination.exists())
throw new IllegalArgumentException(destination+" exists");
writeTo(new FileOutputStream(destination),0,-1);
}
}
| |
package com.jenkov.container.script;
import com.jenkov.container.ContainerException;
import com.jenkov.container.IContainer;
import com.jenkov.container.impl.factory.*;
import com.jenkov.container.itf.factory.IGlobalFactory;
import com.jenkov.container.itf.factory.ILocalFactory;
import java.io.*;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;
/**
* A ScriptFactoryBuilder is capable of parsing Butterfly Container Script into factories and add them
* to an IContainer instance.
* @author Jakob Jenkov - Copyright 2004-2006 Jenkov Development
*/
public class ScriptFactoryBuilder {
FactoryBuilder builder = new FactoryBuilder();
IContainer container = null;
// public ScriptFactoryBuilder() {
// }
/**
* Creates a new ScriptFactoryBuilder that adds its factories to the given container.
* @param container The container the ScriptFactoryBuilder is to add factories to.
*/
public ScriptFactoryBuilder(IContainer container) {
if(container == null) throw new IllegalArgumentException("The container parameter must be non-null");
this.container = container;
}
/**
* Parses the given script and adds the corresponding factory to the container.
* Note: The script should only define a single factory.
* @param factoryScript The script defining the factory to add.
*/
public void addFactory(String factoryScript){
validateContainer();
buildGlobalFactory(this.container, new StringReader(factoryScript));
}
/**
* Parses the given script string and replaces the corresponding factory. If no factory exists
* with the given name, a new factory is created.
*
* @param factoryScript The script defining the factory to replace.
*/
public void replaceFactory(String factoryScript){
ScriptFactoryParser parser = new ScriptFactoryParser();
FactoryDefinition definition = parser.parseFactory(new ParserInput(new StringReader(factoryScript)));
IGlobalFactory factory = buildGlobalFactory(container, definition);
container.replaceFactory(definition.getName(), factory);
}
/**
* Parses the script read from the given InputStream and adds the corresponding factories
* to the container. The script can container as many factories as you like.
* <br/><br/>
* This method takes a name used to identify the stream.
* The name is only used if errors are found in the script
* read from the InputStream. In that case an exception is thrown, and the name of the stream
* is included. This is handy when reading scripts from more than one file or network location.
* That way you will be told what file/location the error is found in.
*
* @deprecated Use the methods that take a Reader instead, so you can control the character set of the script.
* @param input The InputStream connected to the script to parse.
* @param name A name used to identify this stream. The name is only used if errors are found in the script
* read from the InputStream. In that case an exception is thrown, and the name of the stream
* is included.
* @param closeInputStream Set to true if you want the method to close the InputStream when it is done
* parsing the factories. False if not.
*/
public void addFactories(InputStream input, String name, boolean closeInputStream){
validateContainer();
ContainerException parserException = null;
Exception exception = null;
try{
addFactories(this.container, input);
} catch (ContainerException e){
parserException = e;
e.addInfo("ScriptFactoryBuilder", "ERROR_IN_INPUT_STREAM", "An error occurred in the stream (file?) named " + name);
throw e;
} catch (Exception e){
exception = e;
throw new ParserException("ScriptFactoryBuilder", "ERROR_IN_INPUT_STREAM", "An error occurred in the stream (file?) named " + name, e);
} finally {
if(closeInputStream){
try {
input.close();
} catch (IOException e) {
if(parserException == null && exception == null){
throw new ParserException("ScriptFactoryBuilder", "ERROR_CLOSING_INPUT_STREAM",
"An exception occurred when attempting to close InputStream", e);
} else {
//ignore the close exception. A more important exception has already been thrown earlier.
}
}
}
}
}
/**
* Parses the script read from the given InputStream and adds the corresponding factories
* to the container. The script can container as many factories as you like.
* <br/><br/>
* Note: Look at the newer method addFactories(InputStream, String, boolean)
* for a more user friendly method that does the same thing.
*
* @deprecated Use the methods that take a Reader instead, so you can control the character set of the script.
* @param input The InputStream connected to the script to parse.
*/
public void addFactories(InputStream input){
validateContainer();
addFactories(this.container, input);
}
/**
* Parses the script read from the given InputStream and adds the corresponding factories
* to the container. The script can container as many factories as you like.
*
* <br/><br/>
* Use this method when you want to control the character set used to interprete the
* script file. For instance, if the script file is encoded in UTF-16, you can create
* a Reader instance that understands UTF-16.
*
* <br/><br/>
* This method takes a name used to identify the stream.
* The name is only used if errors are found in the script
* read from the InputStream. In that case an exception is thrown, and the name of the stream
* is included. This is handy when reading scripts from more than one file or network location.
* That way you will be told what file/location the error is found in.
*
* @param reader The Reader connected to the script to parse.
* @param name A name used to identify this stream. The name is only used if errors are found in the script
* read from the InputStream. In that case an exception is thrown, and the name of the stream
* is included.
* @param closeReader Set to true if you want the method to close the Reader when the ScriptFactoryBuilder is done
* parsing the factories. False if not.
*/
public void addFactories(Reader reader, String name, boolean closeReader){
validateContainer();
ContainerException parserException = null;
Exception exception = null;
try{
addFactories(this.container, reader);
} catch (ContainerException e){
parserException = e;
e.addInfo("ScriptFactoryBuilder", "ERROR_IN_INPUT_STREAM", "An error occurred in the stream (file?) named " + name);
throw e;
} catch (Exception e){
exception = e;
throw new ParserException("ScriptFactoryBuilder", "ERROR_IN_INPUT_STREAM", "An error occurred in the stream (file?) named " + name, e);
} finally {
if(closeReader){
try {
reader.close();
} catch (IOException e) {
if(parserException == null && exception == null){
throw new ParserException("ScriptFactoryBuilder", "ERROR_CLOSING_INPUT_STREAM",
"An exception occurred when attempting to close Reader", e);
} else {
//ignore the close exception. A more important exception has already been thrown earlier.
}
}
}
}
}
/**
* Parses the script read from the given Reader and adds the corresponding factories
* to the container. The script can container as many factories as you like.
*
* <br/><br/>
* Use this method when you want to control the character set used to interprete the
* script file. For instance, if the script file is encoded in UTF-16, you can create
* a Reader instance that understands UTF-16.
*
* <br/><br/>
* Note: Look at the newer method addFactories(InputStream, String, boolean)
* for a more user friendly method that does the same thing.
*
* @param reader The Reader connected to the script to parse.
*/
public void addFactories(Reader reader){
validateContainer();
addFactories(this.container, reader);
}
private void addFactories(IContainer container, Reader reader){
ScriptFactoryParser parser = new ScriptFactoryParser();
ParserInput parserInput = new ParserInput(reader);
FactoryDefinition definition = parser.parseFactory(parserInput);
while(definition != null){
IGlobalFactory factory = buildGlobalFactory(container, definition);
container.addFactory(definition.getName(), factory);
definition = parser.parseFactory(parserInput);
}
}
/**
* @deprecated Use the methods that take a Reader instead.
*
* @param container
* @param input
*/
private void addFactories(IContainer container, InputStream input){
ScriptFactoryParser parser = new ScriptFactoryParser();
ParserInput parserInput = new ParserInput(input);
FactoryDefinition definition = parser.parseFactory(parserInput);
while(definition != null){
IGlobalFactory factory = buildGlobalFactory(container, definition);
container.addFactory(definition.getName(), factory);
definition = parser.parseFactory(parserInput);
}
}
/**
* @deprecated Use the methods that take a Reader instead.
* @param input The InputStream from which to load the butterfly container script.
*/
public void replaceFactories(InputStream input){
validateContainer();
replaceFactories(this.container, new InputStreamReader(input));
}
/**
* Parses the script stream and replaces all existing factories with same
* names as factories found in the script file. Factories in the container that have
* no new definition found in the script file are kept as is. Factories in the script
* file that has no counterpart in the container are just added.
*
* <br/><br/>
* Use this method when you want to control the character set used to interprete the
* script file. For instance, if the script file is encoded in UTF-16, you can create
* a Reader instance that understands UTF-16.
*
* @param reader The Reader connected to the script to parse and add factories from.
*/
public void replaceFactories(Reader reader){
validateContainer();
replaceFactories(this.container, reader);
}
private void replaceFactories(IContainer container, Reader reader){
ScriptFactoryParser parser = new ScriptFactoryParser();
ParserInput parserInput = new ParserInput(reader);
FactoryDefinition definition = parser.parseFactory(parserInput);
while(definition != null){
IGlobalFactory factory = buildGlobalFactory(container, definition);
container.replaceFactory(definition.getName(), factory);
definition = parser.parseFactory(parserInput);
}
}
protected void buildGlobalFactory(IContainer container, Reader input){
ScriptFactoryParser parser = new ScriptFactoryParser();
FactoryDefinition definition = parser.parseFactory(new ParserInput(input));
IGlobalFactory factory = buildGlobalFactory(container, definition);
container.addFactory(definition.getName(), factory);
}
protected IGlobalFactory buildGlobalFactory(IContainer container, FactoryDefinition definition){
ILocalFactory instantiationFactory = buildLocalFactoryRecursively(container, definition);
definition.setLocalProductType(definition.getName(), instantiationFactory.getReturnType());
GlobalFactoryBase globalFactory = null;
if(definition.isNewInstance() || definition.isLocalizedMap()){
globalFactory = new GlobalNewInstanceFactory();
} else if(definition.isSingleton()){
globalFactory = new GlobalSingletonFactory();
} else if(definition.isThreadSingleton()){
globalFactory = new GlobalThreadSingletonFactory();
} else if(definition.isFlyweight()){
globalFactory = new GlobalFlyweightFactory();
}
int namedLocalProductCount = definition.getNamedLocalProductCount();
globalFactory.setLocalProductCount(namedLocalProductCount);
/* todo optimize this... so far all global factories have at least 1 named local product (the returned product)... but far from
all global factories actually reference it from life cycle phases. */
if(definition.getNamedLocalProductCount() > 0){
instantiationFactory = new LocalProductProducerFactory(instantiationFactory, 0);
}
globalFactory.setLocalInstantiationFactory(instantiationFactory);
if(definition.getPhaseFactories() != null && definition.getPhaseFactories().size() > 0){
for(String phase : definition.getPhaseFactories().keySet()){
List<FactoryDefinition> phaseFactories = definition.getPhaseFactories().get(phase);
globalFactory.setPhase(phase, buildLocalFactories(container, phaseFactories));
}
}
return globalFactory;
}
protected List<ILocalFactory> buildLocalFactories(IContainer container, List<FactoryDefinition> factoryDefinitions){
List<ILocalFactory> factories = new ArrayList<ILocalFactory>();
if(factoryDefinitions != null){
for(FactoryDefinition definition : factoryDefinitions){
factories.add(buildLocalFactoryRecursively(container, definition));
}
}
return factories;
}
protected ILocalFactory buildLocalFactoryRecursively(IContainer container, FactoryDefinition definition){
ILocalFactory factory = null;
try {
List<ILocalFactory> argumentFactories = buildLocalFactories(container, definition.getInstantiationArgFactories());
//todo get this from castings in factory definition. Casting will force a specific return type.
Class[] forcedArgumentTypes = getForcedArgumentTypes(argumentFactories, definition);
if(definition.isConstructorFactory()){ //constructor factory
factory = builder.createConstructorFactory(definition.getIdentifierOwnerClass(), argumentFactories, forcedArgumentTypes);
} else if(definition.isStaticMethodFactory()){ //method invocation factory
factory = builder.createStaticMethodFactory(definition.getIdentifier(), definition.getIdentifierOwnerClass(), argumentFactories, forcedArgumentTypes);
} else if(definition.isInstanceMethodFactory()) {
ILocalFactory methodInvocationTargetFactory = buildLocalFactoryRecursively(container, definition.getIdentifierTargetFactory());
factory = builder.createInstanceMethodFactory(definition.getIdentifier(), methodInvocationTargetFactory, argumentFactories, forcedArgumentTypes);
} else if(definition.isInstanceFieldFactory()){
ILocalFactory fieldTargetFactory = buildLocalFactoryRecursively(container, definition.getIdentifierTargetFactory());
factory = builder.createFieldFactory(definition.getIdentifier(), fieldTargetFactory);
} else if(definition.isStaticFieldFactory()){
factory = builder.createFieldFactory(definition.getIdentifier(), definition.getIdentifierOwnerClass());
} else if(definition.isInstanceFieldAssignmentFactory()){
ILocalFactory assignmentTargetFactory = buildLocalFactoryRecursively(container, definition.getIdentifierTargetFactory());
factory = builder.createFieldAssignmentFactory(definition.getIdentifier(), assignmentTargetFactory, argumentFactories.get(0));
} else if(definition.isStaticFieldAssignmentFactory()){
factory = builder.createFieldAssignmentFactory(definition.getIdentifier(), definition.getIdentifierOwnerClass(), argumentFactories.get(0));
} else if(definition.isFactoryCallFactory()){ //existing factory reference
if(container.getFactory(definition.getIdentifier()) == null) throw
new ParserException(
"ScriptFactoryBuilder", "UNKNOWN_FACTORY",
"Error in factory definition " + definition.getRoot().getName() + ": Unknown Factory: " + definition.getIdentifier());
factory = new InputAdaptingFactory(container.getFactory(definition.getIdentifier()), argumentFactories);
} else if(definition.isFactoryFactory()){
factory = new FactoryFactory(container, definition.getIdentifier());
} else if(definition.isCollectionFactory()){
factory = new CollectionFactory(argumentFactories);
} else if(definition.isMapFactory()){
List<ILocalFactory> keyFactories = buildLocalFactories(container, definition.getInstantiationArgKeyFactories());
factory = new MapFactory(keyFactories, argumentFactories);
if(definition.isLocalizedMap()){
((MapFactory) factory).setFactoryMap(true);
IGlobalFactory localeFactory = container.getFactory("locale");
if(localeFactory == null){
new ParserException(
"ScriptFactoryBuilder", "NO_LOCALE_FACTORY_FOUND",
"Error in factory definition " + definition.getRoot().getName() + ": No 'locale' factory found. " +
"A 'locale' factory must be present in order to use localized resource factories");
}
factory = new LocalizedResourceFactory(factory, localeFactory);
}
} else if(definition.isValueFactory()){ // value factory
if(isString(definition.getIdentifier()))
factory = new ValueFactory(definition.getIdentifier().substring(1, definition.getIdentifier().length()-1));
else if("null".equals(definition.getIdentifier())){
factory = new ValueFactory(null);
} else factory = new ValueFactory(definition.getIdentifier());
} else if(definition.isInputParameterFactory()){ // input consuming factory
factory = new InputConsumerFactory(Integer.parseInt(definition.getIdentifier()));
} else if(definition.isLocalProductFactory()){
factory = new LocalProductConsumerFactory(definition.getLocalProductType(), definition.getLocalProductIndex());
}
//only local factories with a name (named local factories) can be something else than "new instance" factories.
if(definition.isNamedLocalFactory()){
if(definition.isSingleton()){
factory = new LocalSingletonFactory(factory);
} else if(definition.isThreadSingleton()){
factory = new LocalThreadSingletonFactory(factory);
} else if(definition.isFlyweight()){
factory = new LocalFlyweightFactory(factory);
}
factory = new LocalProductProducerFactory(factory, definition.getLocalProductIndex());
definition.setLocalProductType(definition.getName(), factory.getReturnType());
}
return factory;
} catch (ContainerException e) {
if(e.getCode().indexOf("ScriptFactoryBuilder") == -1){
e.addInfo("ScriptFactoryBuilder", "ERROR_CREATING_FACTORY", "Error in factory definition " + definition.getRoot().getName());
}
throw e;
}
}
private Class[] getForcedArgumentTypes(List<ILocalFactory> arguments, FactoryDefinition definition) {
Class[] forcedArgumentTypes = new Class[arguments.size()];
if(definition.getInstantiationArgFactories() != null){
for(int i=0; i<forcedArgumentTypes.length; i++){
String forcedReturnType = definition.getInstantiationArgFactories().get(i).getForcedReturnType();
if(forcedReturnType != null){
if(forcedReturnType.endsWith("[]")){
Class componentType = FactoryUtil.getClassForName(forcedReturnType.substring(0, forcedReturnType.length()-2).trim());
if(componentType == null){
throw new ParserException(
"ScriptFactoryBuilder", "INVALID_PARAMETER_CAST",
"Error in factory definition " + definition.getRoot().getName() +
": Invalid parameter casting - class not found: " +
definition.getInstantiationArgFactories().get(i).getForcedReturnType());
}
forcedArgumentTypes[i] = Array.newInstance(componentType, 0).getClass();
} else {
forcedArgumentTypes[i] = FactoryUtil.getClassForName(forcedReturnType);
if(forcedArgumentTypes[i] == null){
throw new ParserException(
"ScriptFactoryBuilder", "INVALID_PARAMETER_CAST",
"Error in factory definition " + definition.getRoot().getName() +
": Invalid parameter casting - class not found: " +
definition.getInstantiationArgFactories().get(i).getForcedReturnType());
}
}
}
}
}
return forcedArgumentTypes;
}
/**
 * Reports whether the raw value is a quoted string literal, i.e. it opens
 * with a double or single quote.
 *
 * @param value the raw token to inspect (must not be null)
 * @return true when the value starts with {@code "} or {@code '}
 */
private boolean isString(String value) {
    if (value.isEmpty()) {
        return false;
    }
    char opening = value.charAt(0);
    return opening == '"' || opening == '\'';
}
/**
 * Guard for container-dependent methods: verifies this builder was created
 * with a Container instance.
 *
 * @throws IllegalStateException when no Container was supplied at construction
 */
private void validateContainer() {
    if (this.container != null) {
        return;
    }
    throw new IllegalStateException("You cannot use this method unless the ScriptFactoryBuilder" +
            " was instantiated with a Container instance in the constructor");
}
}
| |
//Test Bed 2
// RobotBuilder Version: 2.0
//
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.
package org.usfirst.frc5902.robot;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;
import org.usfirst.frc5902.robot.commands.autoBaseLine;
import org.usfirst.frc5902.robot.commands.autoNothing;
import org.usfirst.frc5902.robot.subsystems.agitator;
import org.usfirst.frc5902.robot.subsystems.cameraControl;
import org.usfirst.frc5902.robot.subsystems.driveTrain;
import org.usfirst.frc5902.robot.subsystems.encoderDev;
import org.usfirst.frc5902.robot.subsystems.gyro;
import org.usfirst.frc5902.robot.subsystems.intake;
import org.usfirst.frc5902.robot.subsystems.sensorBase;
import org.usfirst.frc5902.robot.subsystems.shooter;
import org.usfirst.frc5902.robot.subsystems.trackerPipeline;
import edu.wpi.cscore.UsbCamera;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.CommandGroup;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.vision.VisionThread;
/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the IterativeRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the
 * resource directory.
 */
@SuppressWarnings("unused")
public class Robot extends IterativeRobot {

    /** Command started in autonomousInit(); cancelled when teleop begins. */
    Command autonomousCommand;
    /** Dashboard chooser used to pick the autonomous routine before a match. */
    @SuppressWarnings("rawtypes")
    SendableChooser autoChooser;

    public static OI oi;

    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
    public static driveTrain driveTrain;
    public static sensorBase sensorBase;
    public static intake intake;
    public static shooter shooter;
    public static cameraControl cameraControl;
    public static encoderDev leftDriveEncoder;
    public static agitator agitator;
    public static double speed;
    // 6" wheel: circumference (6 * pi) inches per 4096 encoder pulses/rev.
    public final double pulseToInches = (6 * Math.PI) / 4096.0;
    //agitator
    // Make Gyro
    public static gyro gyro;

    // GRIP Defines
    private static final int IMG_WIDTH = 640;
    private static final int IMG_HEIGHT = 380;
    private VisionThread visionThread;
    // Horizontal centre of the most recent vision target; guarded by imgLock.
    private double centerX = 0.0;
    private RobotDrive drive;
    private final Object imgLock = new Object();
    // GRIP END Defines

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void robotInit() {
        RobotMap.init();
        // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
        driveTrain = new driveTrain();
        sensorBase = new sensorBase();
        intake = new intake();
        shooter = new shooter();
        cameraControl = new cameraControl();
        agitator = new agitator();
        // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

        // Gyro reset/calibration is performed inside the gyro subsystem's init.
        gyro = new gyro();
        leftDriveEncoder = new encoderDev(RobotMap.driveTrainleftDriveLead);

        // GRIP CAMERA CODE: stream the USB camera and run the GRIP pipeline on
        // a background thread, publishing the bounding-box centre into centerX.
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        camera.setResolution(IMG_WIDTH, IMG_HEIGHT);
        visionThread = new VisionThread(camera, new trackerPipeline(), pipeline -> {
            if (!pipeline.filterContoursOutput().isEmpty()) {
                Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
                synchronized (imgLock) {
                    // Floating-point division so the centre is not truncated to
                    // a whole pixel (r.x and r.width are ints).
                    centerX = r.x + (r.width / 2.0);
                }
            }
        });
        visionThread.start();
        // END GRIP CAMERA CODE

        // OI must be constructed after subsystems. If the OI creates Commands
        // (which it very likely will), subsystems are not guaranteed to be
        // constructed yet. Thus, their requires() statements may grab null
        // pointers. Bad news. Don't move it.
        oi = new OI();

        // instantiate the command used for the autonomous period
        // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
        // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

        // Autonomous chooser, published to the SmartDashboard so the drive
        // team can select the routine before the match starts.
        autoChooser = new SendableChooser();
        autoChooser.addDefault("Sprint for Baseline", new autoBaseLine());
        autoChooser.addObject("Auto Do Nothing", new autoNothing());
        SmartDashboard.putData("Autonomous Mode Chooser", autoChooser);
    }

    /**
     * This function is called when the disabled button is hit.
     * You can use it to reset subsystems before shutting down.
     */
    public void disabledInit(){

    }

    /**
     * Called periodically while disabled; keeps the scheduler running so
     * dashboard data stays fresh.
     */
    public void disabledPeriodic() {
        Scheduler.getInstance().run();
    }

    /**
     * Starts the autonomous routine selected on the SmartDashboard.
     */
    public void autonomousInit() {
        // Cast to Command (not CommandGroup): the chooser may hold any Command
        // subtype, and the narrower cast could throw ClassCastException.
        autonomousCommand = (Command) autoChooser.getSelected();
        if (autonomousCommand != null) {
            autonomousCommand.start();
        }
    }

    /**
     * This function is called periodically during autonomous
     */
    public void autonomousPeriodic() {
        Scheduler.getInstance().run();
    }

    /**
     * This makes sure that the autonomous stops running when
     * teleop starts running. If you want the autonomous to
     * continue until interrupted by another command, remove
     * this line or comment it out.
     */
    public void teleopInit() {
        if (autonomousCommand != null) autonomousCommand.cancel();
    }

    /**
     * This function is called periodically during operator control
     */
    public void teleopPeriodic() {
        Scheduler.getInstance().run();
        SmartDashboard.putNumber("Xbox X axis", oi.driverXbox.getX());
        SmartDashboard.putNumber("Gyro Reading", gyro.getAngle());
        SmartDashboard.putNumber("Throttle Reading", oi.logitech.getThrottle());
        // Round to two decimal places. (The previous "* 100 / (int) 100"
        // expression was a no-op: it multiplied and divided without rounding.)
        double driveDistanceInches =
                Math.round(leftDriveEncoder.pulseWidthPos * pulseToInches * 100.0) / 100.0;
        SmartDashboard.putNumber("Left Drive Distance Inches", driveDistanceInches);
        SmartDashboard.putNumber("Pan Servo Position Inches", cameraControl.panServo.getPosition());
        leftDriveEncoder.run();
        gyro.run(gyro);
    }

    /**
     * This function is called periodically during test mode
     */
    public void testPeriodic() {
        LiveWindow.run();
    }

    /**
     * To move the robot with a GRIP input stream we have to create a loop in
     * teleop, or a method of its own (possibly called by a button), that runs
     * GRIP over and over again. We grab data from a network table our
     * algorithm is running to see if it gives us the blob with proper spec
     * (area, position); when it meets the spec we can enter the logic to move
     * the robot. - Harrison
     * https://www.chiefdelphi.com/forums/showthread.php?t=141283
     */
    public static void RunCode() {

    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexing.firehose;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import com.google.inject.Binder;
import com.google.inject.Module;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.MapInputRowParser;
import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.data.input.impl.TimeAndDimsParseSpec;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.guice.GuiceAnnotationIntrospector;
import io.druid.guice.GuiceInjectableValues;
import io.druid.guice.GuiceInjectors;
import io.druid.indexing.common.SegmentLoaderFactory;
import io.druid.indexing.common.TaskToolboxFactory;
import io.druid.indexing.common.TestUtils;
import io.druid.indexing.common.actions.LocalTaskActionClientFactory;
import io.druid.indexing.common.actions.TaskActionToolbox;
import io.druid.indexing.common.config.TaskConfig;
import io.druid.indexing.common.config.TaskStorageConfig;
import io.druid.indexing.common.task.NoopTask;
import io.druid.indexing.common.task.Task;
import io.druid.indexing.overlord.HeapMemoryTaskStorage;
import io.druid.indexing.overlord.TaskLockbox;
import io.druid.indexing.overlord.TaskStorage;
import io.druid.indexing.overlord.supervisor.SupervisorManager;
import io.druid.java.util.common.IOE;
import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.JodaUtils;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.logger.Logger;
import io.druid.java.util.emitter.service.ServiceEmitter;
import io.druid.math.expr.ExprMacroTable;
import io.druid.metadata.IndexerSQLMetadataStorageCoordinator;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.filter.SelectorDimFilter;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMergerV9;
import io.druid.segment.IndexSpec;
import io.druid.segment.TestHelper;
import io.druid.segment.column.Column;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.loading.DataSegmentArchiver;
import io.druid.segment.loading.DataSegmentKiller;
import io.druid.segment.loading.DataSegmentMover;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.segment.loading.LocalDataSegmentPuller;
import io.druid.segment.loading.LocalLoadSpec;
import io.druid.segment.loading.SegmentLoaderConfig;
import io.druid.segment.loading.SegmentLoaderLocalCacheManager;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.segment.loading.StorageLocationConfig;
import io.druid.segment.realtime.firehose.IngestSegmentFirehose;
import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory;
import io.druid.segment.transform.ExpressionTransform;
import io.druid.segment.transform.TransformSpec;
import io.druid.server.metrics.NoopServiceEmitter;
import io.druid.timeline.DataSegment;
import io.druid.timeline.TimelineObjectHolder;
import io.druid.timeline.partition.NumberedPartitionChunk;
import io.druid.timeline.partition.NumberedShardSpec;
import io.druid.timeline.partition.PartitionChunk;
import io.druid.timeline.partition.PartitionHolder;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Parameterized tests for IngestSegmentFirehoseFactory: each parameter set
 * pairs a configured firehose factory (varying requested dimensions and
 * metrics) with an input row parser.
 */
@RunWith(Parameterized.class)
public class IngestSegmentFirehoseFactoryTest
{
// Shared fixtures, built once for all parameterized cases.
private static final ObjectMapper MAPPER;
private static final IndexMergerV9 INDEX_MERGER_V9;
private static final IndexIO INDEX_IO;
private static final TaskStorage TASK_STORAGE;
private static final TaskLockbox TASK_LOCKBOX;
private static final Task TASK;
// The task must be registered with the lockbox before any factory built in
// constructorFeeder() tries to acquire segment locks on its behalf.
static {
TestUtils testUtils = new TestUtils();
MAPPER = setupInjectablesInObjectMapper(TestHelper.makeJsonMapper());
INDEX_MERGER_V9 = testUtils.getTestIndexMergerV9();
INDEX_IO = testUtils.getTestIndexIO();
TASK_STORAGE = new HeapMemoryTaskStorage(
new TaskStorageConfig(null)
{
}
);
TASK_LOCKBOX = new TaskLockbox(TASK_STORAGE);
TASK = NoopTask.create();
TASK_LOCKBOX.add(TASK);
}
/**
 * Builds the parameter matrix: persists one incremental index to disk, wires
 * a TaskToolboxFactory around stubbed storage/pusher/killer components, and
 * produces one IngestSegmentFirehoseFactory per combination of
 * (parser, dimension list, metric list).
 */
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() throws IOException
{
final IndexSpec indexSpec = new IndexSpec();
final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
.withMinTimestamp(JodaUtils.MIN_INSTANT)
.withDimensionsSpec(ROW_PARSER)
.withMetrics(
new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
)
.build();
final IncrementalIndex index = new IncrementalIndex.Builder()
.setIndexSchema(schema)
.setMaxRowCount(MAX_ROWS * MAX_SHARD_NUMBER)
.buildOnheap();
// Populate the index with MAX_ROWS identical-shaped rows (timestamps 0..MAX_ROWS-1).
for (Integer i = 0; i < MAX_ROWS; ++i) {
index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));
}
if (!persistDir.mkdirs() && !persistDir.exists()) {
throw new IOE("Could not create directory at [%s]", persistDir.getAbsolutePath());
}
INDEX_MERGER_V9.persist(index, persistDir, indexSpec, null);
// In-memory metadata coordinator stub: serves segmentSet as the "used"
// segments and records publishes/deletes without touching a database.
final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null)
{
final private Set<DataSegment> published = Sets.newHashSet();
final private Set<DataSegment> nuked = Sets.newHashSet();
@Override
public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException
{
return ImmutableList.copyOf(segmentSet);
}
@Override
public List<DataSegment> getUsedSegmentsForIntervals(String dataSource, List<Interval> interval)
throws IOException
{
return ImmutableList.copyOf(segmentSet);
}
@Override
public List<DataSegment> getUnusedSegmentsForInterval(String dataSource, Interval interval)
{
return ImmutableList.of();
}
@Override
public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments)
{
Set<DataSegment> added = Sets.newHashSet();
for (final DataSegment segment : segments) {
if (published.add(segment)) {
added.add(segment);
}
}
return ImmutableSet.copyOf(added);
}
@Override
public void deleteSegments(Set<DataSegment> segments)
{
nuked.addAll(segments);
}
};
final LocalTaskActionClientFactory tac = new LocalTaskActionClientFactory(
TASK_STORAGE,
new TaskActionToolbox(TASK_LOCKBOX, mdc, newMockEmitter(), EasyMock.createMock(SupervisorManager.class))
);
SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
EasyMock.replay(notifierFactory);
SegmentLoaderConfig segmentLoaderConfig = new SegmentLoaderConfig()
{
@Override
public List<StorageLocationConfig> getLocations()
{
return Lists.newArrayList();
}
};
// Toolbox with minimal no-op implementations of the pusher/killer/mover/
// archiver; nulls are components the firehose factory never touches.
final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory(
new TaskConfig(tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null),
tac,
newMockEmitter(),
new DataSegmentPusher()
{
@Deprecated
@Override
public String getPathForHadoop(String dataSource)
{
return getPathForHadoop();
}
@Override
public String getPathForHadoop()
{
throw new UnsupportedOperationException();
}
@Override
public DataSegment push(File file, DataSegment segment, boolean useUniquePath)
{
return segment;
}
@Override
public Map<String, Object> makeLoadSpec(URI uri)
{
throw new UnsupportedOperationException();
}
},
new DataSegmentKiller()
{
@Override
public void kill(DataSegment segments) throws SegmentLoadingException
{
}
@Override
public void killAll() throws IOException
{
throw new UnsupportedOperationException("not implemented");
}
},
new DataSegmentMover()
{
@Override
public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec)
throws SegmentLoadingException
{
return dataSegment;
}
},
new DataSegmentArchiver()
{
@Override
public DataSegment archive(DataSegment segment) throws SegmentLoadingException
{
return segment;
}
@Override
public DataSegment restore(DataSegment segment) throws SegmentLoadingException
{
return segment;
}
},
null, // segment announcer
null,
notifierFactory,
null, // query runner factory conglomerate corporation unionized collective
null, // query executor service
null, // monitor scheduler
new SegmentLoaderFactory(
new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, MAPPER)
),
MAPPER,
INDEX_IO,
null,
null,
INDEX_MERGER_V9,
null,
null,
null,
null
);
// Cross product: 2 parsers x 2 dimension lists x 2 metric lists = 8 cases.
Collection<Object[]> values = new LinkedList<>();
for (InputRowParser parser : Arrays.<InputRowParser>asList(
ROW_PARSER,
new MapInputRowParser(
new JSONParseSpec(
new TimestampSpec(TIME_COLUMN, "auto", null),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(ImmutableList.<String>of()),
ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME),
ImmutableList.<SpatialDimensionSchema>of()
),
null,
null
)
)
)) {
for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
for (List<String> metric_names : Arrays.<List<String>>asList(
null,
ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME)
)) {
final IngestSegmentFirehoseFactory factory = new IngestSegmentFirehoseFactory(
TASK.getDataSource(),
Intervals.ETERNITY,
new SelectorDimFilter(DIM_NAME, DIM_VALUE, null),
dim_names,
metric_names,
INDEX_IO
);
factory.setTaskToolbox(taskToolboxFactory.build(TASK));
values.add(
new Object[]{
StringUtils.format(
"DimNames[%s]MetricNames[%s]ParserDimNames[%s]",
dim_names == null ? "null" : "dims",
metric_names == null ? "null" : "metrics",
parser == ROW_PARSER ? "dims" : "null"
),
factory,
parser
}
);
}
}
}
return values;
}
/**
 * Registers LocalLoadSpec and Guice-aware introspectors/injectable values on
 * the mapper so segment load specs can be deserialized without a full server.
 */
public static ObjectMapper setupInjectablesInObjectMapper(ObjectMapper objectMapper)
{
objectMapper.registerModule(
new SimpleModule("testModule").registerSubtypes(LocalLoadSpec.class)
);
final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector();
objectMapper.setAnnotationIntrospectors(
new AnnotationIntrospectorPair(
guiceIntrospector, objectMapper.getSerializationConfig().getAnnotationIntrospector()
),
new AnnotationIntrospectorPair(
guiceIntrospector, objectMapper.getDeserializationConfig().getAnnotationIntrospector()
)
);
objectMapper.setInjectableValues(
new GuiceInjectableValues(
GuiceInjectors.makeStartupInjectorWithModules(
ImmutableList.of(
new Module()
{
@Override
public void configure(Binder binder)
{
binder.bind(LocalDataSegmentPuller.class);
}
}
)
)
)
);
return objectMapper;
}
// Parameterized constructor: receives one (name, factory, parser) tuple
// produced by constructorFeeder().
public IngestSegmentFirehoseFactoryTest(
String testName,
IngestSegmentFirehoseFactory factory,
InputRowParser rowParser
)
{
this.factory = factory;
// Must decorate the parser, since IngestSegmentFirehoseFactory will undecorate it.
this.rowParser = TransformSpec.NONE.decorate(rowParser);
}
private static final Logger log = new Logger(IngestSegmentFirehoseFactoryTest.class);
// Names/values shared by the generated rows, segments and assertions below.
private static final String DATA_SOURCE_NAME = "testDataSource";
private static final String DATA_SOURCE_VERSION = "version";
private static final Integer BINARY_VERSION = -1;
private static final String DIM_NAME = "testDimName";
private static final String DIM_VALUE = "testDimValue";
private static final String DIM_LONG_NAME = "testDimLongName";
private static final String DIM_FLOAT_NAME = "testDimFloatName";
private static final String METRIC_LONG_NAME = "testLongMetric";
private static final String METRIC_FLOAT_NAME = "testFloatMetric";
private static final Long METRIC_LONG_VALUE = 1L;
private static final Float METRIC_FLOAT_VALUE = 1.0f;
private static final String TIME_COLUMN = "ts";
private static final Integer MAX_SHARD_NUMBER = 10;
private static final Integer MAX_ROWS = 10;
// On-disk locations for the persisted index; cleaned up in tearDownStatic().
private static final File tmpDir = Files.createTempDir();
private static final File persistDir = Paths.get(tmpDir.getAbsolutePath(), "indexTestMerger").toFile();
// All shards share the same persisted data; populated in setUpStatic().
private static final List<DataSegment> segmentSet = new ArrayList<>(MAX_SHARD_NUMBER);
private final IngestSegmentFirehoseFactory factory;
private final InputRowParser rowParser;
private static final InputRowParser<Map<String, Object>> ROW_PARSER = new MapInputRowParser(
new TimeAndDimsParseSpec(
new TimestampSpec(TIME_COLUMN, "auto", null),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(ImmutableList.of(DIM_NAME)),
ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME),
ImmutableList.of()
)
)
);
/**
 * Builds one input row with the given timestamp and the fixed test
 * dimension/metric values.
 */
private static Map<String, Object> buildRow(Long ts)
{
return ImmutableMap.<String, Object>of(
TIME_COLUMN, ts,
DIM_NAME, DIM_VALUE,
DIM_FLOAT_NAME, METRIC_FLOAT_VALUE,
DIM_LONG_NAME, METRIC_LONG_VALUE
);
}
/**
 * Builds a DataSegment for the given shard number, pointing its load spec at
 * the locally persisted index directory.
 */
private static DataSegment buildSegment(Integer shardNumber)
{
Preconditions.checkArgument(shardNumber < MAX_SHARD_NUMBER);
Preconditions.checkArgument(shardNumber >= 0);
return new DataSegment(
DATA_SOURCE_NAME,
Intervals.ETERNITY,
DATA_SOURCE_VERSION,
ImmutableMap.<String, Object>of(
"type", "local",
"path", persistDir.getAbsolutePath()
),
ImmutableList.of(DIM_NAME),
ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME),
new NumberedShardSpec(
shardNumber,
MAX_SHARD_NUMBER
),
BINARY_VERSION,
0L
);
}
// Populates segmentSet with one segment per shard number.
@BeforeClass
public static void setUpStatic() throws IOException, InterruptedException
{
for (int i = 0; i < MAX_SHARD_NUMBER; ++i) {
segmentSet.add(buildSegment(i));
}
}
// Removes the temporary index files created by constructorFeeder().
@AfterClass
public static void tearDownStatic()
{
recursivelyDelete(tmpDir);
}
// Depth-first delete; failures are logged rather than thrown so teardown
// never fails the test run. NOTE(review): directories themselves are never
// deleted (only their files) - presumably intentional best-effort cleanup.
private static void recursivelyDelete(final File dir)
{
if (dir != null) {
if (dir.isDirectory()) {
final File[] files = dir.listFiles();
if (files != null) {
for (File file : files) {
recursivelyDelete(file);
}
}
} else {
if (!dir.delete()) {
log.warn("Could not delete file at [%s]", dir.getAbsolutePath());
}
}
}
}
// Verifies the factory echoes back the configuration it was built with.
@Test
public void sanityTest()
{
Assert.assertEquals(TASK.getDataSource(), factory.getDataSource());
if (factory.getDimensions() != null) {
Assert.assertArrayEquals(new String[]{DIM_NAME}, factory.getDimensions().toArray());
}
Assert.assertEquals(Intervals.ETERNITY, factory.getInterval());
if (factory.getMetrics() != null) {
Assert.assertEquals(
ImmutableSet.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME),
ImmutableSet.copyOf(factory.getMetrics())
);
}
}
// Reads every row from the firehose and checks values plus the total count
// (each of the MAX_SHARD_NUMBER shards replays the same MAX_ROWS rows).
@Test
public void simpleFirehoseReadingTest() throws IOException
{
Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), segmentSet.size());
Integer rowcount = 0;
try (final IngestSegmentFirehose firehose =
(IngestSegmentFirehose)
factory.connect(rowParser, null)) {
while (firehose.hasMore()) {
InputRow row = firehose.nextRow();
Assert.assertArrayEquals(new String[]{DIM_NAME}, row.getDimensions().toArray());
Assert.assertArrayEquals(new String[]{DIM_VALUE}, row.getDimension(DIM_NAME).toArray());
Assert.assertEquals(METRIC_LONG_VALUE.longValue(), row.getMetric(METRIC_LONG_NAME));
Assert.assertEquals(
METRIC_FLOAT_VALUE,
row.getMetric(METRIC_FLOAT_NAME).floatValue(),
METRIC_FLOAT_VALUE * 0.0001
);
++rowcount;
}
}
Assert.assertEquals((int) MAX_SHARD_NUMBER * MAX_ROWS, (int) rowcount);
}
// Verifies a TransformSpec decorating the parser is honored: the time filter
// keeps only rows with ts == 1 (10 of 100; the other 90 come back null and
// are counted as skipped) and the expression multiplies the float metric.
@Test
public void testTransformSpec() throws IOException
{
Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), segmentSet.size());
Integer rowcount = 0;
final TransformSpec transformSpec = new TransformSpec(
new SelectorDimFilter(Column.TIME_COLUMN_NAME, "1", null),
ImmutableList.of(
new ExpressionTransform(METRIC_FLOAT_NAME, METRIC_FLOAT_NAME + " * 10", ExprMacroTable.nil())
)
);
int skipped = 0;
try (final IngestSegmentFirehose firehose =
(IngestSegmentFirehose)
factory.connect(transformSpec.decorate(rowParser), null)) {
while (firehose.hasMore()) {
InputRow row = firehose.nextRow();
if (row == null) {
skipped++;
continue;
}
Assert.assertArrayEquals(new String[]{DIM_NAME}, row.getDimensions().toArray());
Assert.assertArrayEquals(new String[]{DIM_VALUE}, row.getDimension(DIM_NAME).toArray());
Assert.assertEquals(METRIC_LONG_VALUE.longValue(), row.getMetric(METRIC_LONG_NAME).longValue());
Assert.assertEquals(
METRIC_FLOAT_VALUE * 10,
row.getMetric(METRIC_FLOAT_NAME).floatValue(),
METRIC_FLOAT_VALUE * 0.0001
);
++rowcount;
}
}
Assert.assertEquals(90, skipped);
Assert.assertEquals((int) MAX_ROWS, (int) rowcount);
}
// Checks that dimension/metric de-duplication across timeline segments keeps
// most-recent-first ordering.
@Test
public void testGetUniqueDimensionsAndMetrics()
{
final int numSegmentsPerPartitionChunk = 5;
final int numPartitionChunksPerTimelineObject = 10;
final int numSegments = numSegmentsPerPartitionChunk * numPartitionChunksPerTimelineObject;
final List<DataSegment> segments = new ArrayList<>(numSegments);
final Interval interval = Intervals.of("2017-01-01/2017-01-02");
final String version = "1";
final List<TimelineObjectHolder<String, DataSegment>> timelineSegments = new ArrayList<>();
for (int i = 0; i < numPartitionChunksPerTimelineObject; i++) {
final List<PartitionChunk<DataSegment>> chunks = new ArrayList<>();
for (int j = 0; j < numSegmentsPerPartitionChunk; j++) {
final List<String> dims = IntStream.range(i, i + numSegmentsPerPartitionChunk)
.mapToObj(suffix -> "dim" + suffix)
.collect(Collectors.toList());
final List<String> metrics = IntStream.range(i, i + numSegmentsPerPartitionChunk)
.mapToObj(suffix -> "met" + suffix)
.collect(Collectors.toList());
final DataSegment segment = new DataSegment(
"ds",
interval,
version,
ImmutableMap.of(),
dims,
metrics,
new NumberedShardSpec(numPartitionChunksPerTimelineObject, i),
1,
1
);
segments.add(segment);
final PartitionChunk<DataSegment> partitionChunk = new NumberedPartitionChunk<>(
i,
numPartitionChunksPerTimelineObject,
segment
);
chunks.add(partitionChunk);
}
final TimelineObjectHolder<String, DataSegment> timelineHolder = new TimelineObjectHolder<>(
interval,
version,
new PartitionHolder<>(chunks)
);
timelineSegments.add(timelineHolder);
}
final String[] expectedDims = new String[]{
"dim9",
"dim10",
"dim11",
"dim12",
"dim13",
"dim8",
"dim7",
"dim6",
"dim5",
"dim4",
"dim3",
"dim2",
"dim1",
"dim0"
};
final String[] expectedMetrics = new String[]{
"met9",
"met10",
"met11",
"met12",
"met13",
"met8",
"met7",
"met6",
"met5",
"met4",
"met3",
"met2",
"met1",
"met0"
};
Assert.assertEquals(
Arrays.asList(expectedDims),
IngestSegmentFirehoseFactory.getUniqueDimensions(timelineSegments, null)
);
Assert.assertEquals(
Arrays.asList(expectedMetrics),
IngestSegmentFirehoseFactory.getUniqueMetrics(timelineSegments)
);
}
// Emitter stub: tests never inspect emitted metrics.
private static ServiceEmitter newMockEmitter()
{
return new NoopServiceEmitter();
}
}
| |
package biz.c24.io.training.statements;
import java.util.Arrays;
/**
* CurrencyAndAmount. <p/>
* This object is composed of the following <i>attribute</i>:
* <ul>
* <li><b>Ccy</b> of type {@link java.lang.String} (required)</li>
* </ul>
* @author C24 Integration Objects;
* @see biz.c24.io.training.statements.CurrencyAndAmountClass
**/
public class CurrencyAndAmount extends biz.c24.io.api.data.ComplexDataObject
{
private static final java.lang.String[] NATIVE_ATTRIBUTES = new String[] {"Ccy"};
private java.lang.String ccy;
/**
* Constructs a new instance defined by the default element.
**/
public CurrencyAndAmount()
{
this(biz.c24.io.training.statements.CurrencyAndAmountClass.getInstance().getNullDefiningElementDecl());
}
/**
* Constructs a new instance defined by the specified element.
* @param definingElementDecl The element which defines the object.
**/
public CurrencyAndAmount(biz.c24.io.api.data.Element definingElementDecl)
{
super(definingElementDecl);
}
/**
* Constructs a new instance defined by the specified element and type.
* @param definingElementDecl The element which defines the object.
* @param type The type which defines the object.
**/
public CurrencyAndAmount(biz.c24.io.api.data.Element definingElementDecl, biz.c24.io.api.data.ComplexDataType type)
{
super(definingElementDecl, type);
}
/**
* Constructs a new instance cloned from the specified object.
* @param clone The object to be cloned.
**/
public CurrencyAndAmount(biz.c24.io.training.statements.CurrencyAndAmount clone)
{
super(clone);
}
/**
* Creates and returns a shallow clone of this object.
* @see #cloneDeep()
**/
public java.lang.Object clone()
{
return new biz.c24.io.training.statements.CurrencyAndAmount(this);
}
/**
* Creates and returns a deep clone of this object.
* @return The new object.
* @see #clone()
**/
public biz.c24.io.api.data.ComplexDataObject cloneDeep() throws java.lang.CloneNotSupportedException
{
biz.c24.io.api.data.ComplexDataObject obj = new biz.c24.io.training.statements.CurrencyAndAmount(this);
cloneDeep(obj);
return obj;
}
protected void cloneDeep(biz.c24.io.api.data.ComplexDataObject clone) throws java.lang.CloneNotSupportedException
{
biz.c24.io.training.statements.CurrencyAndAmount obj = (biz.c24.io.training.statements.CurrencyAndAmount) clone;
obj.ccy = (java.lang.String) biz.c24.io.api.Utils.cloneDeep(this.ccy, obj, "Ccy");
}
public boolean equals(java.lang.Object obj)
{
if(obj instanceof biz.c24.io.api.data.ComplexDataObject)
return equalContents((biz.c24.io.api.data.ComplexDataObject) obj, true, true, true, true);
else
return obj.equals(this);
}
/**
* Gets the attribute called <code>name</code>.<p>
* The legal value(s) for <code>name</code> are: <b>ccy</b>.
**/
public java.lang.Object getAttr(java.lang.String name)
{
switch (Arrays.binarySearch(NATIVE_ATTRIBUTES, name))
{
case 0:
return this.ccy;
default:
return super.getAttr(name);
}
}
/**
* Gets the value of Ccy (required).
**/
public java.lang.String getCcy()
{
return this.ccy;
}
public int getTotalAttrCount()
{
int count = super.getTotalAttrCount();
count += this.ccy == null ? 0 : 1;
return count;
}
/**
* Get content.
* @return The value.
**/
public float getValue()
{
return biz.c24.io.api.Utils.floatValue(getContent());
}
public int hashCode()
{
return this.toString().length();
}
/**
* Returns whether the attribute called <code>name</code> is present.<p>
* The legal value(s) for <code>name</code> are defined in {@link #getAttr}.
**/
public boolean isAttrPresent(java.lang.String name)
{
switch (Arrays.binarySearch(NATIVE_ATTRIBUTES, name))
{
case 0:
return this.ccy == null ? false : true;
default:
return super.isAttrPresent(name);
}
}
/**
* Removes the attribute called <code>name</code>.<p>
* The legal value(s) for <code>name</code> are defined in {@link #getAttr}.
**/
public void removeAttr(java.lang.String name)
{
switch (Arrays.binarySearch(NATIVE_ATTRIBUTES, name))
{
case 0:
this.ccy = null;
return;
default:
super.removeAttr(name);
}
}
/**
* Sets the attribute called <code>name</code> to <code>value<code>.<p>
* The legal value(s) for <code>name</code> are defined in {@link #getAttr}.
**/
public void setAttr(java.lang.String name, java.lang.Object value)
{
switch (Arrays.binarySearch(NATIVE_ATTRIBUTES, name))
{
case 0:
setCcy((java.lang.String) value);
return;
default:
super.setAttr(name, value);
}
}
/**
* Sets the value of Ccy (required).
* @param value The value to use.
**/
public void setCcy(java.lang.String value)
{
this.ccy = value;
}
public void setContent(java.lang.Object value)
{
if (value instanceof java.lang.Float || value == null)
super.setContent(value);
else
throw new ClassCastException("Expecting instance of float");
}
/**
 * Set content.
 * @param value The value to use.
 **/
public void setValue(float value)
{
    // Float.valueOf replaces the Float(float) constructor, deprecated since
    // Java 9, and may reuse cached instances; the explicit (float) cast on an
    // already-float parameter was redundant.
    setContent(java.lang.Float.valueOf(value));
}
// Custom serialization hook: persists only the ccy attribute. Note it does
// not call out.defaultWriteObject() -- presumably the remaining state is
// handled by the superclass; TODO confirm against the generated base class.
// Must stay symmetric with readObject below (same field order).
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
{
out.writeObject(this.ccy);
}
// Custom deserialization hook: restores the ccy attribute written by
// writeObject above. Field order here must mirror writeObject exactly, or
// the stream desynchronizes.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException
{
this.ccy = (java.lang.String) in.readObject();
}
}
| |
/*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sun.org.apache.xml.internal.security.keys.keyresolver;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import javax.crypto.SecretKey;
import com.sun.org.apache.xml.internal.security.keys.storage.StorageResolver;
import org.w3c.dom.Element;
/**
 * This class is an abstract base class for a resolver of a child KeyInfo
 * element.
 *
 * To supply your own KeyResolver, first extend this class and then register
 * it as follows in config.xml:
 * <PRE>
 * &lt;KeyResolver URI="http://www.w3.org/2000/09/xmldsig#KeyValue"
 * JAVACLASS="MyPackage.MyKeyValueImpl"/&gt;
 * </PRE>
 */
public abstract class KeyResolverSpi {

    /** Free-form configuration properties, created lazily on first use. */
    protected java.util.Map<String, String> properties = null;

    /**
     * Whether this instance is registered globally. A global resolver is
     * re-instantiated before each resolution (see {@link #cloneIfNeeded()})
     * so per-use state is not shared.
     */
    protected boolean globalResolver = false;

    /** Whether secure validation is enabled. */
    protected boolean secureValidation;

    /**
     * Set whether secure validation is enabled or not. The default is false.
     *
     * @param secureValidation true to enable secure validation
     */
    public void setSecureValidation(boolean secureValidation) {
        this.secureValidation = secureValidation;
    }

    /**
     * This method returns whether the KeyResolverSpi is able to perform the
     * requested action.
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return whether the KeyResolverSpi is able to perform the requested action.
     */
    public boolean engineCanResolve(Element element, String baseURI, StorageResolver storage) {
        throw new UnsupportedOperationException();
    }

    /**
     * Method engineResolvePublicKey
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return resolved public key from the element.
     *
     * @throws KeyResolverException if resolution fails
     */
    public PublicKey engineResolvePublicKey(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        throw new UnsupportedOperationException();
    }

    /**
     * Method engineLookupAndResolvePublicKey
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return the resolved public key, or null when this resolver cannot
     *         handle the element
     *
     * @throws KeyResolverException if resolution fails
     */
    public PublicKey engineLookupAndResolvePublicKey(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        KeyResolverSpi tmp = cloneIfNeeded();
        if (!tmp.engineCanResolve(element, baseURI, storage)) {
            return null;
        }
        return tmp.engineResolvePublicKey(element, baseURI, storage);
    }

    /**
     * Returns a fresh instance of this resolver's class when it is registered
     * globally (so concurrent resolutions never share mutable state);
     * otherwise returns this instance.
     *
     * @return this resolver, or a newly constructed instance of its class
     * @throws KeyResolverException if a new instance cannot be created
     */
    private KeyResolverSpi cloneIfNeeded() throws KeyResolverException {
        KeyResolverSpi tmp = this;
        if (globalResolver) {
            try {
                // getDeclaredConstructor().newInstance() replaces the
                // deprecated Class.newInstance(). Implementations still need
                // an accessible zero-argument constructor, as before.
                tmp = getClass().getDeclaredConstructor().newInstance();
            } catch (ReflectiveOperationException e) {
                throw new KeyResolverException("", e);
            }
        }
        return tmp;
    }

    /**
     * Method engineResolveX509Certificate
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return resolved X509Certificate from the element
     *
     * @throws KeyResolverException if resolution fails
     */
    public X509Certificate engineResolveX509Certificate(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        throw new UnsupportedOperationException();
    }

    /**
     * Method engineLookupResolveX509Certificate
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return the resolved X509Certificate, or null when this resolver cannot
     *         handle the element
     *
     * @throws KeyResolverException if resolution fails
     */
    public X509Certificate engineLookupResolveX509Certificate(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        KeyResolverSpi tmp = cloneIfNeeded();
        if (!tmp.engineCanResolve(element, baseURI, storage)) {
            return null;
        }
        return tmp.engineResolveX509Certificate(element, baseURI, storage);
    }

    /**
     * Method engineResolveSecretKey
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return resolved SecretKey from the element
     *
     * @throws KeyResolverException if resolution fails
     */
    public SecretKey engineResolveSecretKey(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        throw new UnsupportedOperationException();
    }

    /**
     * Method engineLookupAndResolveSecretKey
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return the resolved SecretKey, or null when this resolver cannot
     *         handle the element
     *
     * @throws KeyResolverException if resolution fails
     */
    public SecretKey engineLookupAndResolveSecretKey(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        KeyResolverSpi tmp = cloneIfNeeded();
        if (!tmp.engineCanResolve(element, baseURI, storage)) {
            return null;
        }
        return tmp.engineResolveSecretKey(element, baseURI, storage);
    }

    /**
     * Method engineLookupAndResolvePrivateKey
     *
     * @param element the KeyInfo child element
     * @param baseURI the base URI of the containing document
     * @param storage certificate storage available to implementations
     * @return the resolved PrivateKey, or null (the default)
     *
     * @throws KeyResolverException if resolution fails
     */
    public PrivateKey engineLookupAndResolvePrivateKey(
        Element element, String baseURI, StorageResolver storage
    ) throws KeyResolverException {
        // This method was added later, it has no equivalent
        // engineResolvePrivateKey() in the old API.
        // We cannot throw UnsupportedOperationException because
        // KeyResolverSpi implementations who don't know about
        // this method would stop the search too early.
        return null;
    }

    /**
     * Method engineSetProperty
     *
     * @param key the property name
     * @param value the property value
     */
    public void engineSetProperty(String key, String value) {
        if (properties == null) {
            properties = new HashMap<String, String>();
        }
        properties.put(key, value);
    }

    /**
     * Method engineGetProperty
     *
     * @param key the property name
     * @return the property value, or null if not set
     */
    public String engineGetProperty(String key) {
        if (properties == null) {
            return null;
        }
        return properties.get(key);
    }

    /**
     * Method understandsProperty
     *
     * @param propertyToTest the property name to check
     * @return true if the property has been set to a non-null value
     */
    public boolean understandsProperty(String propertyToTest) {
        if (properties == null) {
            return false;
        }
        return properties.get(propertyToTest) != null;
    }

    /**
     * Marks this resolver as globally registered, forcing a fresh instance
     * per resolution in the engineLookup* methods.
     *
     * @param globalResolver whether this resolver is registered globally
     */
    public void setGlobalResolver(boolean globalResolver) {
        this.globalResolver = globalResolver;
    }
}
| |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.players;
import org.lwjgl.input.Mouse;
import org.terasology.config.Config;
import org.terasology.engine.GameEngine;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.event.EventPriority;
import org.terasology.entitySystem.event.ReceiveEvent;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.entitySystem.systems.RegisterMode;
import org.terasology.entitySystem.systems.RegisterSystem;
import org.terasology.input.Keyboard;
import org.terasology.input.binds.general.HideHUDButton;
import org.terasology.input.events.KeyDownEvent;
import org.terasology.input.events.KeyEvent;
import org.terasology.input.events.MouseXAxisEvent;
import org.terasology.input.events.MouseYAxisEvent;
import org.terasology.logic.characters.CharacterComponent;
import org.terasology.logic.console.ConsoleMessageEvent;
import org.terasology.logic.debug.DebugProperties;
import org.terasology.logic.health.DoDamageEvent;
import org.terasology.network.ClientComponent;
import org.terasology.registry.In;
import org.terasology.rendering.nui.NUIManager;
import org.terasology.rendering.nui.layers.ingame.metrics.DebugOverlay;
import org.terasology.rendering.world.ViewDistance;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.world.WorldProvider;
/**
 * Client-side system handling debug input: in-game time manipulation,
 * rendering debug toggles, view-distance changes and mouse-grab control.
 *
 * @author Benjamin Glatzel
 * @author Immortius
 */
@RegisterSystem(RegisterMode.CLIENT)
public class DebugControlSystem extends BaseComponentSystem {

    @In
    private GameEngine engine;

    @In
    private WorldProvider world;

    @In
    private WorldRenderer worldRenderer;

    @In
    private Config config;

    @In
    private NUIManager nuiManager;

    private DebugOverlay overlay;

    // Whether the mouse is captured by the game window (toggled with F1).
    private boolean mouseGrabbed = true;

    @Override
    public void initialise() {
        overlay = nuiManager.addOverlay("engine:debugOverlay", DebugOverlay.class);
    }

    /**
     * Toggles HUD and first-person element visibility together so they are
     * always both shown or both hidden.
     */
    @ReceiveEvent(components = ClientComponent.class)
    public void onHideHUD(HideHUDButton event, EntityRef entity) {
        if (event.isDown()) {
            // Make sure both are either visible or hidden
            final boolean hide = !(config.getRendering().getDebug().isHudHidden() && config.getRendering().getDebug().isFirstPersonElementsHidden());
            config.getRendering().getDebug().setFirstPersonElementsHidden(hide);
            config.getRendering().getDebug().setHudHidden(hide);
            event.consume();
        }
    }

    /** Bumps the view distance up one step, clamped to the largest setting. */
    @ReceiveEvent(components = ClientComponent.class)
    public void onIncreaseViewDistance(IncreaseViewDistanceButton button, EntityRef entity) {
        int viewDistance = config.getRendering().getViewDistance().getIndex();
        int maxViewDistance = ViewDistance.values().length - 1;
        if (viewDistance != maxViewDistance) {
            config.getRendering().setViewDistance(ViewDistance.forIndex(viewDistance + 1));
        }
        button.consume();
    }

    /** Drops the view distance down one step, clamped to the smallest setting. */
    @ReceiveEvent(components = ClientComponent.class)
    public void onDecreaseViewDistance(DecreaseViewDistanceButton button, EntityRef entity) {
        int viewDistance = config.getRendering().getViewDistance().getIndex();
        int minViewDistance = 0;
        if (viewDistance != minViewDistance) {
            config.getRendering().setViewDistance(ViewDistance.forIndex(viewDistance - 1));
        }
        button.consume();
    }

    /**
     * Debug-mode-only time manipulation: arrow keys advance or rewind the
     * world clock in small (up/down) or large (right/left) steps.
     */
    @ReceiveEvent(components = ClientComponent.class)
    public void onKeyEvent(KeyEvent event, EntityRef entity) {
        boolean debugEnabled = config.getSystem().isDebugEnabled();
        // Features for debug mode only
        if (debugEnabled && event.isDown()) {
            switch (event.getKey().getId()) {
                case Keyboard.KeyId.UP:
                    world.getTime().setDays(world.getTime().getDays() + 0.005f);
                    event.consume();
                    break;
                case Keyboard.KeyId.DOWN:
                    world.getTime().setDays(world.getTime().getDays() - 0.005f);
                    event.consume();
                    break;
                case Keyboard.KeyId.RIGHT:
                    world.getTime().setDays(world.getTime().getDays() + 0.02f);
                    event.consume();
                    break;
                case Keyboard.KeyId.LEFT:
                    world.getTime().setDays(world.getTime().getDays() - 0.02f);
                    event.consume();
                    break;
                default:
                    break;
            }
        }
    }

    /**
     * Handles single key presses: debug-mode shortcuts (K, F6-F9) and the
     * always-available shortcuts (F1 mouse grab, F3 debug toggle, F4 metrics).
     */
    @ReceiveEvent(components = ClientComponent.class)
    public void onKeyDown(KeyDownEvent event, EntityRef entity) {
        boolean debugEnabled = config.getSystem().isDebugEnabled();
        // Features for debug mode only
        if (debugEnabled) {
            switch (event.getKey().getId()) {
                case Keyboard.KeyId.K:
                    // NOTE(review): unlike the other cases, K deliberately(?)
                    // does not consume the event -- confirm before changing.
                    entity.send(new DoDamageEvent(9999, null));
                    break;
                case Keyboard.KeyId.F6:
                    config.getRendering().getDebug().setEnabled(!config.getRendering().getDebug().isEnabled());
                    event.consume();
                    break;
                case Keyboard.KeyId.F7:
                    config.getRendering().getDebug().cycleStage();
                    entity.send(new ConsoleMessageEvent("Set debug stage to: " + config.getRendering().getDebug().getStage()));
                    event.consume();
                    break;
                case Keyboard.KeyId.F8:
                    config.getRendering().getDebug().setRenderChunkBoundingBoxes(!config.getRendering().getDebug().isRenderChunkBoundingBoxes());
                    event.consume();
                    break;
                case Keyboard.KeyId.F9:
                    config.getRendering().getDebug().setWireframe(!config.getRendering().getDebug().isWireframe());
                    event.consume();
                    break;
                default:
                    break;
            }
        }
        switch (event.getKey().getId()) {
            case Keyboard.KeyId.F1:
                mouseGrabbed = !mouseGrabbed;
                DebugProperties debugProperties = (DebugProperties) nuiManager.getHUD().getHUDElement("engine:DebugProperties");
                // Guard added: if the debug-properties HUD element is not
                // registered, getHUDElement presumably returns null and the
                // previous unconditional dereference would NPE here.
                if (debugProperties != null) {
                    debugProperties.setVisible(!mouseGrabbed);
                }
                Mouse.setGrabbed(mouseGrabbed);
                event.consume();
                break;
            case Keyboard.KeyId.F3:
                config.getSystem().setDebugEnabled(!config.getSystem().isDebugEnabled());
                event.consume();
                break;
            case Keyboard.KeyId.F4:
                overlay.toggleMetricsMode();
                event.consume();
                break;
            default:
                break;
        }
    }

    /** Swallows horizontal mouse-look while the mouse is released (F1). */
    @ReceiveEvent(components = CharacterComponent.class, priority = EventPriority.PRIORITY_HIGH)
    public void onMouseX(MouseXAxisEvent event, EntityRef entity) {
        if (!mouseGrabbed) {
            event.consume();
        }
    }

    /** Swallows vertical mouse-look while the mouse is released (F1). */
    @ReceiveEvent(components = CharacterComponent.class, priority = EventPriority.PRIORITY_HIGH)
    public void onMouseY(MouseYAxisEvent event, EntityRef entity) {
        if (!mouseGrabbed) {
            event.consume();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.utils;
/**
* This Code is adapted from main/java/org/apache/mahout/math/Varint.java
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
/**
 * <p>
 * Encodes signed and unsigned values using a common variable-length scheme,
 * found for example in <a
 * href="http://code.google.com/apis/protocolbuffers/docs/encoding.html">
 * Google's Protocol Buffers</a>. It uses fewer bytes to encode smaller values,
 * but will use slightly more bytes to encode large values.
 * </p>
 * <p>
 * Signed values are further encoded using so-called zig-zag encoding in order
 * to make them "compatible" with variable-length encoding.
 * </p>
 */
public final class Varint {

  /** Utility class; no instances. */
  private Varint() {
  }

  /**
   * Writes {@code value} in the Protocol Buffers variable-length format:
   * seven payload bits per byte, least-significant group first, with the
   * high bit of each byte marking that more bytes follow.
   *
   * @param value to encode
   * @param out to write bytes to
   * @throws IOException if {@link DataOutput} throws {@link IOException}
   */
  private static void writeVarLong(long value, DataOutput out) throws IOException {
    long remaining = value;
    while ((remaining & ~0x7fL) != 0L) {
      out.writeByte((byte) ((remaining & 0x7f) | 0x80));
      remaining >>>= 7;
    }
    out.writeByte((byte) remaining);
  }

  /**
   * Writes a non-negative long as an unsigned varint.
   *
   * @param value to encode; must be non-negative
   * @param out to write bytes to
   * @throws IOException if {@link DataOutput} throws {@link IOException}
   */
  public static void writeUnsignedVarLong(long value, DataOutput out) throws IOException {
    if (value < 0) {
      throw new IllegalStateException(
          "Negative value passed into writeUnsignedVarLong - " + value);
    }
    writeVarLong(value, out);
  }

  /**
   * Zig-zag encoding for signed longs: maps small-magnitude values (positive
   * or negative) to small unsigned values before varint encoding.
   *
   * @param value to encode
   * @param out to write bytes to
   * @throws IOException if {@link DataOutput} throws {@link IOException}
   */
  public static void writeSignedVarLong(long value, DataOutput out) throws IOException {
    writeVarLong((value << 1) ^ (value >> 63), out);
  }

  /**
   * @see #writeVarLong(long, DataOutput)
   * @param value to encode
   * @param out to write bytes to
   * @throws IOException if {@link DataOutput} throws {@link IOException}
   */
  private static void writeVarInt(int value, DataOutput out) throws IOException {
    int remaining = value;
    while ((remaining & ~0x7f) != 0) {
      out.writeByte((byte) ((remaining & 0x7f) | 0x80));
      remaining >>>= 7;
    }
    out.writeByte((byte) remaining);
  }

  /**
   * Writes a non-negative int as an unsigned varint.
   *
   * @param value to encode; must be non-negative
   * @param out to write bytes to
   * @throws IOException if {@link DataOutput} throws {@link IOException}
   */
  public static void writeUnsignedVarInt(int value, DataOutput out) throws IOException {
    if (value < 0) {
      throw new IllegalStateException(
          "Negative value passed into writeUnsignedVarInt - " + value);
    }
    writeVarInt(value, out);
  }

  /**
   * Zig-zag encoding for signed ints.
   *
   * @see #writeUnsignedVarInt(int, DataOutput)
   * @param value to encode
   * @param out to write bytes to
   * @throws IOException if {@link DataOutput} throws {@link IOException}
   */
  public static void writeSignedVarInt(int value, DataOutput out) throws IOException {
    writeVarInt((value << 1) ^ (value >> 31), out);
  }

  /**
   * Reads an unsigned varint-encoded long, consuming at most ten bytes.
   *
   * @param in to read bytes from
   * @return decoded value
   * @throws IOException if {@link DataInput} throws {@link IOException}
   */
  public static long readUnsignedVarLong(DataInput in) throws IOException {
    long result = 0;
    for (int shift = 0; ; shift += 7) {
      long b = in.readByte();
      if (shift == 63) {
        // Tenth and final byte: after shifting by 63 only its lowest bit
        // survives, matching the encoder's last emitted bit.
        return result | (b << 63);
      }
      if (b >= 0) {
        // Continuation bit clear: this byte terminates the encoding.
        return result | (b << shift);
      }
      result |= (b & 0x7f) << shift;
    }
  }

  /**
   * Reads a zig-zag signed varint-encoded long.
   *
   * @param in to read bytes from
   * @return decoded value
   * @throws IOException if {@link DataInput} throws {@link IOException}
   */
  public static long readSignedVarLong(DataInput in) throws IOException {
    long raw = readUnsignedVarLong(in);
    // Undo zig-zag: the low bit carries the sign, the rest the magnitude.
    return (raw >>> 1) ^ -(raw & 1);
  }

  /**
   * Reads an unsigned varint-encoded int, consuming at most five bytes.
   *
   * @param in to read bytes from
   * @return decoded value
   * @throws IOException if {@link DataInput} throws {@link IOException}
   */
  public static int readUnsignedVarInt(DataInput in) throws IOException {
    int result = 0;
    for (int shift = 0; ; shift += 7) {
      int b = in.readByte();
      if (shift == 28) {
        // Fifth and final byte is shifted into the top four bits.
        return result | (b << 28);
      }
      if (b >= 0) {
        return result | (b << shift);
      }
      result |= (b & 0x7f) << shift;
    }
  }

  /**
   * Reads a zig-zag signed varint-encoded int.
   *
   * @param in to read bytes from
   * @return decoded value
   * @throws IOException if {@link DataInput} throws {@link IOException}
   */
  public static int readSignedVarInt(DataInput in) throws IOException {
    int raw = readUnsignedVarInt(in);
    return (raw >>> 1) ^ -(raw & 1);
  }

  /**
   * Simulation for what will happen when writing an unsigned long value
   * as varlong.
   *
   * @param value to consider
   * @return the number of bytes needed to write value.
   * @throws IOException never thrown; declared for signature compatibility
   */
  public static long sizeOfUnsignedVarLong(long value) throws IOException {
    // Seven payload bits per byte; (value | 1) makes zero occupy one byte.
    return (63 - Long.numberOfLeadingZeros(value | 1)) / 7 + 1;
  }

  /**
   * Simulation for what will happen when writing a signed long value
   * as varlong.
   *
   * @param value to consider
   * @return the number of bytes needed to write value.
   * @throws IOException never thrown; declared for signature compatibility
   */
  public static long sizeOfSignedVarLong(long value) throws IOException {
    return sizeOfUnsignedVarLong((value << 1) ^ (value >> 63));
  }

  /**
   * Simulation for what will happen when writing an unsigned int value
   * as varint.
   *
   * @param value to consider
   * @return the number of bytes needed to write value.
   * @throws IOException never thrown; declared for signature compatibility
   */
  public static int sizeOfUnsignedVarInt(int value) throws IOException {
    return (31 - Integer.numberOfLeadingZeros(value | 1)) / 7 + 1;
  }

  /**
   * Simulation for what will happen when writing a signed int value
   * as varint.
   *
   * @param value to consider
   * @return the number of bytes needed to write value.
   * @throws IOException never thrown; declared for signature compatibility
   */
  public static int sizeOfSignedVarInt(int value) throws IOException {
    return sizeOfUnsignedVarInt((value << 1) ^ (value >> 31));
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.guice.testing;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.base.Preconditions;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import org.apache.camel.guice.inject.Injectors;
import org.apache.camel.guice.support.CloseErrors;
import org.apache.camel.guice.support.CloseFailedException;
import org.apache.camel.guice.support.internal.CloseErrorsImpl;
import org.apache.camel.guice.util.CloseableScope;
/**
 * Used to manage the injectors for the various injection points.
 */
public class InjectorManager {

    /** Name of the nested module class looked up on test classes. */
    private static final String NESTED_MODULE_CLASS = "TestModule";

    /** Injectors keyed both by module type and by test class. */
    private Map<Object, Injector> injectors = new ConcurrentHashMap<Object, Injector>();
    private AtomicInteger initializeCounter = new AtomicInteger(0);
    private CloseableScope testScope = new CloseableScope(TestScoped.class);
    private CloseableScope classScope = new CloseableScope(ClassScoped.class);
    private boolean closeSingletonsAfterClasses;
    private boolean runFinalizer = true;
    private Class<? extends Module> moduleType;

    /**
     * Called once before the test classes run; installs a JVM shutdown hook
     * (at most once) that closes singleton-scoped objects.
     */
    public void beforeClasses() {
        int counter = initializeCounter.incrementAndGet();
        if (counter > 1) {
            System.out.println("WARNING! Initialised more than once! Counter: " + counter);
        } else {
            if (runFinalizer) {
                Runtime.getRuntime().addShutdownHook(new Thread() {
                    @Override
                    public void run() {
                        try {
                            closeSingletons();
                        } catch (Throwable e) {
                            System.out.println("Failed to shut down Guice Singletons: " + e);
                            e.printStackTrace();
                        }
                    }
                });
            }
        }
    }

    /** Lets close all of the injectors we have created so far */
    public void afterClasses() throws CloseFailedException {
        Injector injector = injectors.get(moduleType);
        if (injector != null) {
            classScope.close(injector);
        } else {
            System.out.println("Could not close Class scope as there is no Injector for module type");
        }
        // NOTE that we don't have any good hooks yet to call complete()
        // when the JVM is completed to ensure real singletons shut down
        // correctly
        //
        if (isCloseSingletonsAfterClasses()) {
            closeInjectors();
        }
    }

    /**
     * Resolves (and caches) the injector for the given test instance's module
     * and injects the test's members.
     *
     * @param test the test instance about to run; must not be null
     */
    public void beforeTest(Object test) throws Exception {
        Preconditions.checkNotNull(test, "test");
        Class<? extends Object> testType = test.getClass();
        moduleType = getModuleForTestClass(testType);
        Injector classInjector = injectors.get(moduleType);
        if (classInjector == null) {
            classInjector = createInjector(moduleType);
            Preconditions.checkNotNull(classInjector, "classInjector");
            injectors.put(moduleType, classInjector);
        }
        // Also index by test class so afterTest() can find the same injector.
        injectors.put(testType, classInjector);
        classInjector.injectMembers(test);
    }

    /**
     * Closes the test scope for the injector associated with the given test.
     *
     * @param test the test instance that just finished
     */
    public void afterTest(Object test) throws Exception {
        Injector injector = injectors.get(test.getClass());
        if (injector == null) {
            System.out.println("Warning - no injector available for: " + test);
        } else {
            testScope.close(injector);
        }
    }

    /**
     * Closes down any JVM level singletons used in this testing JVM
     */
    public void closeSingletons() throws CloseFailedException {
        closeInjectors();
    }

    public boolean isCloseSingletonsAfterClasses() {
        return closeSingletonsAfterClasses;
    }

    public void setCloseSingletonsAfterClasses(boolean closeSingletonsAfterClasses) {
        this.closeSingletonsAfterClasses = closeSingletonsAfterClasses;
    }

    /** Binds the class and test scopes for every created injector. */
    protected class TestModule extends AbstractModule {
        protected void configure() {
            bindScope(ClassScoped.class, classScope);
            bindScope(TestScoped.class, testScope);
        }
    }

    /** Closes every cached injector, collecting errors before rethrowing. */
    protected void closeInjectors() throws CloseFailedException {
        CloseErrors errors = new CloseErrorsImpl(this);
        Set<Entry<Object, Injector>> entries = injectors.entrySet();
        for (Entry<Object, Injector> entry : entries) {
            Injector injector = entry.getValue();
            Injectors.close(injector, errors);
        }
        injectors.clear();
        errors.throwIfNecessary();
    }

    /**
     * Factory method to return the module type that will be used to create an
     * injector.
     *
     * The default implementation will use the system property
     * <code>org.guiceyfruit.modules</code> (see
     * {@link Injectors#MODULE_CLASS_NAMES}) otherwise if that is not set it will
     * look for the {@link UseModule} annotation and use the module defined on
     * that, otherwise it will look for the nested public static class
     * "TestModule"
     *
     * @see org.apache.camel.guice.testing.UseModule
     * @see #NESTED_MODULE_CLASS
     */
    @SuppressWarnings("unchecked")
    protected Class<? extends Module> getModuleForTestClass(Class<?> objectType)
        throws IllegalAccessException, InstantiationException, ClassNotFoundException {
        String modules = System.getProperty(Injectors.MODULE_CLASS_NAMES);
        if (modules != null) {
            modules = modules.trim();
            if (modules.length() > 0) {
                System.out.println("Overloading Guice Modules: " + modules);
                return null;
            }
        }
        Class<? extends Module> moduleType;
        UseModule config = objectType.getAnnotation(UseModule.class);
        if (config != null) {
            moduleType = config.value();
        } else {
            String name = objectType.getName() + "$" + NESTED_MODULE_CLASS;
            Class<?> type;
            try {
                type = objectType.getClassLoader().loadClass(name);
            } catch (ClassNotFoundException e) {
                try {
                    type = Thread.currentThread().getContextClassLoader().loadClass(name);
                } catch (ClassNotFoundException e2) {
                    throw new ClassNotFoundException(
                        "Class "
                            + objectType.getName()
                            + " does not have a @UseModule annotation nor does it have a nested class called "
                            + NESTED_MODULE_CLASS
                            + " available on the classpath. Please see: http://code.google.com/p/guiceyfruit/wiki/Testing"
                            + e, e);
                }
            }
            // The original wrapped the cast below in a try/catch, but an
            // unchecked cast of a Class reference never throws (erasure), so
            // the invalid-module case slipped through to fail later inside
            // Guice. Check explicitly and fail fast with the intended message.
            if (!Module.class.isAssignableFrom(type)) {
                throw new IllegalArgumentException("Class " + type.getName() + " is not a Guice Module!");
            }
            moduleType = (Class<? extends Module>) type;
        }
        int modifiers = moduleType.getModifiers();
        if (Modifier.isAbstract(modifiers) || !Modifier.isPublic(modifiers)) {
            throw new IllegalArgumentException("Class " + moduleType.getName()
                + " must be a public class which is non abstract");
        }
        try {
            moduleType.getConstructor();
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException("Class " + moduleType.getName()
                + " must have a zero argument constructor", e);
        }
        return moduleType;
    }

    /**
     * Creates the injector for the given key
     */
    protected Injector createInjector(Class<? extends Module> moduleType) throws InstantiationException,
        IllegalAccessException, ClassNotFoundException {
        if (moduleType == null) {
            return Injectors.createInjector(System.getProperties(), new TestModule());
        }
        // NOTE(review): Class.newInstance() is deprecated, but its checked
        // exceptions are part of this method's public signature, so it is
        // retained to avoid a breaking change for subclasses/callers.
        Module module = moduleType.newInstance();
        return Guice.createInjector(module, new TestModule());
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.opencompare.ui;
import org.opencompare.explorable.Conflict;
import org.opencompare.explorable.Explorable;
public class DetailsDialog extends javax.swing.JDialog {
private static final long serialVersionUID = 1L;
private final Explorable explorable;
private final String fullId;
/**
 * Creates new form DetailsDialog.
 *
 * @param parent the owning frame, used for modality and platform positioning
 * @param modal whether the dialog blocks input to the parent while open
 * @param explorable the explored item whose details are shown
 * @param fullId the fully qualified identifier of the item
 */
public DetailsDialog(java.awt.Frame parent, boolean modal, Explorable explorable, String fullId) {
super(parent, modal);
// Store state before building the UI; refreshFields() (defined elsewhere)
// presumably populates the generated text fields from these two fields, so
// they must be assigned first.
this.explorable = explorable;
this.fullId = fullId;
initComponents();
refreshFields();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
buttonClose = new javax.swing.JButton();
labelName = new javax.swing.JLabel();
editName = new javax.swing.JTextField();
labelFullName = new javax.swing.JLabel();
editFullName = new javax.swing.JTextField();
labelReferenceValue = new javax.swing.JLabel();
editReferenceValue = new javax.swing.JTextField();
labelActualValue = new javax.swing.JLabel();
editActualValue = new javax.swing.JTextField();
editReferenceCrc = new javax.swing.JTextField();
labelReferenceCrc = new javax.swing.JLabel();
labelActualCrc = new javax.swing.JLabel();
editActualCrc = new javax.swing.JTextField();
editSha = new javax.swing.JTextField();
labelSha = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setTitle("Details");
setLocationByPlatform(true);
buttonClose.setText("Close");
buttonClose.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
buttonCloseActionPerformed(evt);
}
});
labelName.setText("Name:");
editName.setEditable(false);
labelFullName.setText("Full name:");
editFullName.setEditable(false);
labelReferenceValue.setText("Reference value:");
editReferenceValue.setEditable(false);
labelActualValue.setText("Actual value:");
editActualValue.setEditable(false);
editReferenceCrc.setEditable(false);
labelReferenceCrc.setText("CRC:");
labelActualCrc.setText("CRC:");
editActualCrc.setEditable(false);
editSha.setEditable(false);
labelSha.setText("SHA:");
org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(layout.createSequentialGroup()
.addContainerGap()
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(layout.createSequentialGroup()
.add(labelFullName)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(editFullName))
.add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(labelReferenceValue)
.add(labelActualValue))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)
.add(editActualValue)
.add(editReferenceValue, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 515, Short.MAX_VALUE))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(layout.createSequentialGroup()
.add(labelReferenceCrc)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(editReferenceCrc, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 115, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
.add(labelActualCrc)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(editActualCrc, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 115, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))))
.add(layout.createSequentialGroup()
.add(labelName)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(editName)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(labelSha)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(editSha, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 115, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
.add(0, 0, Short.MAX_VALUE)
.add(buttonClose)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(labelName)
.add(editName, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(labelSha)
.add(editSha, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(labelFullName)
.add(editFullName, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(labelReferenceValue)
.add(editReferenceValue, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(editReferenceCrc, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(labelReferenceCrc))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(labelActualValue)
.add(editActualValue, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(editActualCrc, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(labelActualCrc))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 18, Short.MAX_VALUE)
.add(buttonClose)
.addContainerGap())
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void buttonCloseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_buttonCloseActionPerformed
setVisible(false);
}//GEN-LAST:event_buttonCloseActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton buttonClose;
private javax.swing.JTextField editActualCrc;
private javax.swing.JTextField editActualValue;
private javax.swing.JTextField editFullName;
private javax.swing.JTextField editName;
private javax.swing.JTextField editReferenceCrc;
private javax.swing.JTextField editReferenceValue;
private javax.swing.JTextField editSha;
private javax.swing.JLabel labelActualCrc;
private javax.swing.JLabel labelActualValue;
private javax.swing.JLabel labelFullName;
private javax.swing.JLabel labelName;
private javax.swing.JLabel labelReferenceCrc;
private javax.swing.JLabel labelReferenceValue;
private javax.swing.JLabel labelSha;
// End of variables declaration//GEN-END:variables
private void refreshFields() {
editName.setText(explorable.getRelativeId());
editSha.setText(explorable.getSha());
editFullName.setText(fullId);
if (explorable instanceof Conflict) {
Conflict conflict = (Conflict) explorable;
Explorable actual = conflict.getActual();
if (actual != null) {
editActualValue.setText(actual.getValue());
editActualCrc.setText(Long.toString(actual.getValueHashCode()));
}
Explorable ref = conflict.getReference();
if (ref != null) {
editReferenceValue.setText(ref.getValue());
editReferenceCrc.setText(Long.toString(ref.getValueHashCode()));
}
} else {
editActualValue.setText(explorable.getValue());
editActualCrc.setText(Long.toString(explorable.getValueHashCode()));
editReferenceValue.setEnabled(false);
editReferenceCrc.setEnabled(false);
}
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Portions copyright 2006-2009 James Murty. Please see LICENSE.txt
* for applicable license terms and NOTICE.txt for applicable notices.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.s3.internal;
import static com.amazonaws.util.IOUtils.closeQuietly;
import static com.amazonaws.util.StringUtils.UTF8;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.net.URL;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.net.ssl.SSLProtocolException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.transfer.exception.FileLockException;
import com.amazonaws.util.BinaryUtils;
import com.amazonaws.util.DateUtils;
import com.amazonaws.util.HttpUtils;
import com.amazonaws.util.Md5Utils;
/**
* General utility methods used throughout the AWS S3 Java client.
*/
public class ServiceUtils {
    private static final Log log = LogFactory.getLog(ServiceUtils.class);

    /** Passed to {@link #downloadObjectToFile} to append to an existing file. */
    public static final boolean APPEND_MODE = true;

    /** Passed to {@link #downloadObjectToFile} to overwrite any existing file. */
    public static final boolean OVERWRITE_MODE = false;

    @Deprecated
    protected static final DateUtils dateUtils = new DateUtils();

    /** Parses an ISO-8601 formatted date string into a {@link Date}. */
    public static Date parseIso8601Date(String dateString) {
        return DateUtils.parseISO8601Date(dateString);
    }

    /** Formats the given date as an ISO-8601 date string. */
    public static String formatIso8601Date(Date date) {
        return DateUtils.formatISO8601Date(date);
    }

    /** Parses an RFC-822 formatted date string into a {@link Date}. */
    public static Date parseRfc822Date(String dateString) {
        return DateUtils.parseRFC822Date(dateString);
    }

    /** Formats the given date as an RFC-822 date string. */
    public static String formatRfc822Date(Date date) {
        return DateUtils.formatRFC822Date(date);
    }

    /**
     * Returns true if the specified ETag was from a multipart upload.
     *
     * @param eTag
     *            The ETag to test.
     *
     * @return True if the specified ETag was from a multipart upload, otherwise
     *         false it if belongs to an object that was uploaded in a single
     *         part.
     */
    public static boolean isMultipartUploadETag(String eTag) {
        // Multipart ETags have the form "<md5>-<partCount>".
        return eTag.contains("-");
    }

    /**
     * Converts a string to a byte array using the SDK's standard encoding
     * (UTF-8).
     *
     * @param s
     *            The string to convert to a byte array.
     *
     * @return The byte array contents of the specified string.
     */
    public static byte[] toByteArray(String s) {
        return s.getBytes(UTF8);
    }

    /**
     * Removes any surrounding quotes from the specified string and returns a
     * new string.
     *
     * @param s
     *            The string to check for surrounding quotes.
     *
     * @return A new string created from the specified string, minus any
     *         surrounding quotes, or {@code null} if the input was null.
     */
    public static String removeQuotes(String s) {
        if (s == null) return null;

        s = s.trim();
        if (s.startsWith("\"")) s = s.substring(1);
        if (s.endsWith("\"")) s = s.substring(0, s.length() - 1);

        return s;
    }

    /**
     * Converts the specified request object into a URL, containing all the
     * specified parameters, the specified request endpoint, etc.
     *
     * @param request
     *            The request to convert into a URL.
     * @return A new URL representing the specified request.
     *
     * @throws AmazonClientException
     *             If the request cannot be converted to a well formed URL.
     */
    public static URL convertRequestToUrl(Request<?> request) {
        // To be backward compatible, this method by default does not
        // remove the leading slash in the request resource-path.
        return convertRequestToUrl(request, false);
    }

    /**
     * Converts the specified request object into a URL, containing all the
     * specified parameters, the specified request endpoint, etc.
     *
     * @param request
     *            The request to convert into a URL.
     * @param removeLeadingSlashInResourcePath
     *            Whether the leading slash in resource-path should be removed
     *            before appending to the endpoint.
     * @return A new URL representing the specified request.
     *
     * @throws AmazonClientException
     *             If the request cannot be converted to a well formed URL.
     */
    public static URL convertRequestToUrl(Request<?> request, boolean removeLeadingSlashInResourcePath) {
        String resourcePath = HttpUtils.urlEncode(request.getResourcePath(), true);

        // Removed the padding "/" that was already added into the request's resource path.
        if (removeLeadingSlashInResourcePath
                && resourcePath.startsWith("/")) {
            resourcePath = resourcePath.substring(1);
        }

        // Some http client libraries (e.g. Apache HttpClient) cannot handle
        // consecutive "/"s between URL authority and path components.
        // So we escape "////..." into "/%2F%2F%2F...", in the same way as how
        // we treat consecutive "/"s in AmazonS3Client#presignRequest(...)
        String urlPath = "/" + resourcePath;
        urlPath = urlPath.replaceAll("(?<=/)/", "%2F");
        StringBuilder url = new StringBuilder(request.getEndpoint().toString());
        url.append(urlPath);

        StringBuilder queryParams = new StringBuilder();
        Map<String, List<String>> requestParams = request.getParameters();
        for (Map.Entry<String, List<String>> entry : requestParams.entrySet()) {
            for (String value : entry.getValue()) {
                // '?' before the first parameter, '&' between subsequent ones.
                queryParams.append(queryParams.length() > 0 ? "&" : "?")
                           .append(entry.getKey())
                           .append("=")
                           .append(HttpUtils.urlEncode(value, false));
            }
        }
        url.append(queryParams.toString());

        try {
            return new URL(url.toString());
        } catch (MalformedURLException e) {
            throw new AmazonClientException(
                    "Unable to convert request to well formed URL: " + e.getMessage(), e);
        }
    }

    /**
     * Returns a new string created by joining each of the strings in the
     * specified list together, with a comma between them.
     *
     * @param strings
     *            The list of strings to join into a single, comma delimited
     *            string list.
     * @return A new string created by joining each of the strings in the
     *         specified list together, with a comma between strings.
     */
    public static String join(List<String> strings) {
        StringBuilder result = new StringBuilder();

        boolean first = true;
        for (String s : strings) {
            if (!first) result.append(", ");

            result.append(s);
            first = false;
        }

        return result.toString();
    }

    /**
     * Downloads an S3Object, as returned from
     * {@link AmazonS3Client#getObject(com.amazonaws.services.s3.model.GetObjectRequest)},
     * to the specified file.
     *
     * @param s3Object
     *            The S3Object containing a reference to an InputStream
     *            containing the object's data.
     * @param destinationFile
     *            The file to store the object's data in.
     * @param performIntegrityCheck
     *            Boolean valuable to indicate whether to perform integrity check
     * @param appendData
     *            appends the data to end of the file.
     */
    public static void downloadObjectToFile(S3Object s3Object,
            final File destinationFile, boolean performIntegrityCheck,
            boolean appendData) {
        downloadToFile(s3Object, destinationFile, performIntegrityCheck, appendData, -1);
    }

    /**
     * Same as {@link #downloadObjectToFile(S3Object, File, boolean, boolean)}
     * but has an additional expected file length parameter for integrity
     * checking purposes.
     *
     * @param expectedFileLength
     *            applicable only when appendData is true; the expected length
     *            of the file to append to.
     */
    public static void downloadToFile(S3Object s3Object,
            final File dstfile, boolean performIntegrityCheck,
            final boolean appendData,
            final long expectedFileLength)
    {
        // attempt to create the parent if it doesn't exist
        File parentDirectory = dstfile.getParentFile();
        if ( parentDirectory != null && !parentDirectory.exists() ) {
            if (!(parentDirectory.mkdirs())) {
                throw new AmazonClientException(
                        "Unable to create directory in the path: "
                                + parentDirectory.getAbsolutePath());
            }
        }

        if (!FileLocks.lock(dstfile)) {
            throw new FileLockException("Fail to lock " + dstfile
                    + " for appendData=" + appendData);
        }
        OutputStream outputStream = null;
        try {
            final long actualLen = dstfile.length();
            if (appendData && actualLen != expectedFileLength) {
                // Fail fast to prevent data corruption
                throw new IllegalStateException(
                        "Expected file length to append is "
                        + expectedFileLength + " but actual length is "
                        + actualLen + " for file " + dstfile);
            }
            outputStream = new BufferedOutputStream(new FileOutputStream(
                    dstfile, appendData));
            byte[] buffer = new byte[1024*10];
            int bytesRead;
            while ((bytesRead = s3Object.getObjectContent().read(buffer)) > -1) {
                outputStream.write(buffer, 0, bytesRead);
            }
        } catch (IOException e) {
            s3Object.getObjectContent().abort();
            throw new AmazonClientException(
                    "Unable to store object contents to disk: " + e.getMessage(), e);
        } finally {
            closeQuietly(outputStream, log);
            FileLocks.unlock(dstfile);
            closeQuietly(s3Object.getObjectContent(), log);
        }

        if (performIntegrityCheck) {
            byte[] clientSideHash = null;
            byte[] serverSideHash = null;
            FileInputStream fileContent = null;
            try {
                // Multipart Uploads don't have an MD5 calculated on the service
                // side.
                // Server Side encryption with AWS KMS enabled objects has MD5 of
                // cipher text. So the MD5 validation needs to be skipped.
                final ObjectMetadata metadata = s3Object.getObjectMetadata();
                if (metadata != null) {
                    final String etag = metadata.getETag();
                    if (!ServiceUtils.isMultipartUploadETag(etag)
                            && !skipMd5CheckPerResponse(metadata))
                    {
                        fileContent = new FileInputStream(dstfile);
                        clientSideHash = Md5Utils.computeMD5Hash(fileContent);
                        serverSideHash = BinaryUtils.fromHex(etag);
                    }
                }
            } catch (Exception e) {
                log.warn("Unable to calculate MD5 hash to validate download: " + e.getMessage(), e);
            } finally {
                // This stream was previously never closed, leaking one file
                // handle per integrity-checked download.
                if (fileContent != null) {
                    closeQuietly(fileContent, log);
                }
            }

            if (clientSideHash != null && serverSideHash != null && !Arrays.equals(clientSideHash, serverSideHash)) {
                throw new AmazonClientException("Unable to verify integrity of data download. " +
                        "Client calculated content hash didn't match hash calculated by Amazon S3. " +
                        "The data stored in '" + dstfile.getAbsolutePath() + "' may be corrupt.");
            }
        }
    }

    /**
     * Interface for the task of downloading object from S3 to a specific file,
     * enabling one-time retry mechanism after integrity check failure
     * on the downloaded file.
     */
    public interface RetryableS3DownloadTask {
        /**
         * User defines how to get the S3Object from S3 for this RetryableS3DownloadTask.
         *
         * @return
         *         The S3Object containing a reference to an InputStream
         *         containing the object's data.
         */
        public S3Object getS3ObjectStream ();

        /**
         * User defines whether integrity check is needed for this RetryableS3DownloadTask.
         *
         * @return
         *         Boolean value indicating whether this task requires integrity check
         *         after downloading the S3 object to file.
         */
        public boolean needIntegrityCheck ();
    }

    /**
     * Gets an object stored in S3 and downloads it into the specified file.
     * This method includes the one-time retry mechanism after integrity check failure
     * on the downloaded file. It will also return immediately after getting null valued
     * S3Object (when getObject request does not meet the specified constraints).
     *
     * @param file
     *            The file to store the object's data in.
     * @param retryableS3DownloadTask
     *            The implementation of SafeS3DownloadTask interface which allows user to
     *            get access to all the visible variables at the calling site of this method.
     */
    public static S3Object retryableDownloadS3ObjectToFile(File file,
            RetryableS3DownloadTask retryableS3DownloadTask, boolean appendData) {
        boolean hasRetried = false;
        boolean needRetry;
        S3Object s3Object;
        do {
            needRetry = false;
            s3Object = retryableS3DownloadTask.getS3ObjectStream();
            if ( s3Object == null )
                return null;

            try {
                ServiceUtils.downloadObjectToFile(s3Object, file,
                        retryableS3DownloadTask.needIntegrityCheck(),
                        appendData);
            } catch (AmazonClientException ace) {
                if (!ace.isRetryable()) {
                    s3Object.getObjectContent().abort();
                    throw ace;
                }
                // Determine whether an immediate retry is needed according to the captured AmazonClientException.
                // (There are three cases when downloadObjectToFile() throws AmazonClientException:
                //      1) SocketException or SSLProtocolException when writing to disk (e.g. when user aborts the download)
                //      2) Other IOException when writing to disk
                //      3) MD5 hashes don't match
                // The current code will retry the download only when case 2) or 3) happens.
                if (ace.getCause() instanceof SocketException || ace.getCause() instanceof SSLProtocolException) {
                    throw ace;
                } else {
                    needRetry = true;
                    if ( hasRetried ) {
                        s3Object.getObjectContent().abort();
                        throw ace;
                    } else {
                        log.info("Retry the download of object " + s3Object.getKey() + " (bucket " + s3Object.getBucketName() + ")", ace);
                        hasRetried = true;
                    }
                }
            }
        } while ( needRetry );
        return s3Object;
    }

    /**
     * Based on the given metadata of an S3 response,
     * Returns whether the specified request should skip MD5 check on the
     * requested object content. Specifically, MD5 check should be skipped if
     * either SSE-KMS or SSE-C is involved.
     * <p>
     * The reason is that when SSE-KMS or SSE-C is involved, the MD5 returned
     * from the server side is the MD5 of the ciphertext, which will by definition
     * mismatch the MD5 on the client side which is computed based on the plaintext.
     */
    public static boolean skipMd5CheckPerResponse(ObjectMetadata metadata) {
        return metadata != null
            && (metadata.getSSEAwsKmsKeyId() != null
                    || metadata.getSSECustomerAlgorithm() != null);
    }

    /**
     * Based on the given request object, returns whether the specified request
     * should skip MD5 check on the requested object content. Specifically, MD5
     * check should be skipped if one of the following condition is true:
     * <ol>
     * <li>The system property
     *
     * <pre>
     * -Dcom.amazonaws.services.s3.disableGetObjectMD5Validation
     * </pre>
     *
     * is specified;</li>
     * <li>The request is a range-get operation</li>
     * <li>The request is a GET object operation that involves SSE-C</li>
     * <li>The request is a PUT object operation that involves SSE-C</li>
     * <li>The request is a PUT object operation that involves SSE-KMS</li>
     * <li>The request is an upload-part operation that involves SSE-C</li>
     * </ol>
     * Otherwise, MD5 check should not be skipped.
     */
    public static boolean skipMd5CheckPerRequest(AmazonWebServiceRequest request) {
        if (request instanceof GetObjectRequest) {
            if (System.getProperty("com.amazonaws.services.s3.disableGetObjectMD5Validation") != null)
                return true;
            GetObjectRequest getObjectRequest = (GetObjectRequest)request;
            // Skip MD5 check for range get
            if (getObjectRequest.getRange() != null)
                return true;
            if (getObjectRequest.getSSECustomerKey() != null)
                return true;
        } else if (request instanceof PutObjectRequest) {
            PutObjectRequest putObjectRequest = (PutObjectRequest)request;
            return putObjectRequest.getSSECustomerKey() != null
                || putObjectRequest.getSSEAwsKeyManagementParams() != null;
        } else if (request instanceof UploadPartRequest) {
            UploadPartRequest uploadPartRequest = (UploadPartRequest)request;
            return uploadPartRequest.getSSECustomerKey() != null;
        }
        return false;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.search.spell.DirectSpellChecker;
import org.apache.lucene.search.spell.JaroWinklerDistance;
import org.apache.lucene.search.spell.LevensteinDistance;
import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
import org.apache.lucene.search.spell.NGramDistance;
import org.apache.lucene.search.spell.StringDistance;
import org.apache.lucene.search.spell.SuggestMode;
import org.apache.lucene.search.spell.SuggestWord;
import org.apache.lucene.search.spell.SuggestWordFrequencyComparator;
import org.apache.lucene.search.spell.SuggestWordQueue;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.automaton.LevenshteinAutomata;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.FastCharArrayReader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.CustomAnalyzer;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
import java.util.Comparator;
import java.util.Locale;
public final class SuggestUtils {
    // Orders candidate suggestions by term frequency.
    public static final Comparator<SuggestWord> LUCENE_FREQUENCY = new SuggestWordFrequencyComparator();
    // Orders candidate suggestions by score (Lucene's default comparator).
    public static final Comparator<SuggestWord> SCORE_COMPARATOR = SuggestWordQueue.DEFAULT_COMPARATOR;

    private SuggestUtils() {
        // static utility class -- not meant to be instantiated
    }
public static DirectSpellChecker getDirectSpellChecker(DirectSpellcheckerSettings suggestion) {
DirectSpellChecker directSpellChecker = new DirectSpellChecker();
directSpellChecker.setAccuracy(suggestion.accuracy());
Comparator<SuggestWord> comparator;
switch (suggestion.sort()) {
case SCORE:
comparator = SCORE_COMPARATOR;
break;
case FREQUENCY:
comparator = LUCENE_FREQUENCY;
break;
default:
throw new IllegalArgumentException("Illegal suggest sort: " + suggestion.sort());
}
directSpellChecker.setComparator(comparator);
directSpellChecker.setDistance(suggestion.stringDistance());
directSpellChecker.setMaxEdits(suggestion.maxEdits());
directSpellChecker.setMaxInspections(suggestion.maxInspections());
directSpellChecker.setMaxQueryFrequency(suggestion.maxTermFreq());
directSpellChecker.setMinPrefix(suggestion.prefixLength());
directSpellChecker.setMinQueryLength(suggestion.minWordLength());
directSpellChecker.setThresholdFrequency(suggestion.minDocFreq());
directSpellChecker.setLowerCaseTerms(false);
return directSpellChecker;
}
public static BytesRef join(BytesRef separator, BytesRefBuilder result, BytesRef... toJoin) {
result.clear();
for (int i = 0; i < toJoin.length - 1; i++) {
result.append(toJoin[i]);
result.append(separator);
}
result.append(toJoin[toJoin.length-1]);
return result.get();
}
    /**
     * Callback invoked for each token while a {@link TokenStream} is consumed
     * by {@code analyze(TokenStream, TokenConsumer)}.
     */
    public static abstract class TokenConsumer {
        // Attribute views bound to the current stream by reset().
        protected CharTermAttribute charTermAttr;
        protected PositionIncrementAttribute posIncAttr;
        protected OffsetAttribute offsetAttr;

        /** Binds this consumer's attribute views to the given stream. */
        public void reset(TokenStream stream) {
            charTermAttr = stream.addAttribute(CharTermAttribute.class);
            posIncAttr = stream.addAttribute(PositionIncrementAttribute.class);
            offsetAttr = stream.addAttribute(OffsetAttribute.class);
        }

        /** Copies the current token's chars into {@code spare} and returns them as bytes. */
        protected BytesRef fillBytesRef(BytesRefBuilder spare) {
            spare.copyChars(charTermAttr);
            return spare.get();
        }

        /** Called once per token; the attribute views reflect the current token. */
        public abstract void nextToken() throws IOException;

        /** Called after the last token has been consumed. */
        public void end() {}
    }
public static int analyze(Analyzer analyzer, BytesRef toAnalyze, String field, TokenConsumer consumer, CharsRefBuilder spare) throws IOException {
spare.copyUTF8Bytes(toAnalyze);
return analyze(analyzer, spare.get(), field, consumer);
}
public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException {
try (TokenStream ts = analyzer.tokenStream(
field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) {
return analyze(ts, consumer);
}
}
/** NOTE: this method closes the TokenStream, even on exception, which is awkward
* because really the caller who called {@link Analyzer#tokenStream} should close it,
* but when trying that there are recursion issues when we try to use the same
* TokenStrem twice in the same recursion... */
public static int analyze(TokenStream stream, TokenConsumer consumer) throws IOException {
int numTokens = 0;
boolean success = false;
try {
stream.reset();
consumer.reset(stream);
while (stream.incrementToken()) {
consumer.nextToken();
numTokens++;
}
consumer.end();
} finally {
if (success) {
stream.close();
} else {
IOUtils.closeWhileHandlingException(stream);
}
}
return numTokens;
}
public static SuggestMode resolveSuggestMode(String suggestMode) {
suggestMode = suggestMode.toLowerCase(Locale.US);
if ("missing".equals(suggestMode)) {
return SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
} else if ("popular".equals(suggestMode)) {
return SuggestMode.SUGGEST_MORE_POPULAR;
} else if ("always".equals(suggestMode)) {
return SuggestMode.SUGGEST_ALWAYS;
} else {
throw new IllegalArgumentException("Illegal suggest mode " + suggestMode);
}
}
public static Suggest.Suggestion.Sort resolveSort(String sortVal) {
if ("score".equals(sortVal)) {
return Suggest.Suggestion.Sort.SCORE;
} else if ("frequency".equals(sortVal)) {
return Suggest.Suggestion.Sort.FREQUENCY;
} else {
throw new IllegalArgumentException("Illegal suggest sort " + sortVal);
}
}
public static StringDistance resolveDistance(String distanceVal) {
if ("internal".equals(distanceVal)) {
return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
} else if ("damerau_levenshtein".equals(distanceVal) || "damerauLevenshtein".equals(distanceVal)) {
return new LuceneLevenshteinDistance();
} else if ("levenstein".equals(distanceVal)) {
return new LevensteinDistance();
//TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein
} else if ("jarowinkler".equals(distanceVal)) {
return new JaroWinklerDistance();
} else if ("ngram".equals(distanceVal)) {
return new NGramDistance();
} else {
throw new IllegalArgumentException("Illegal distance option " + distanceVal);
}
}
    /** Parse field names (with their deprecated aliases) shared by suggesters. */
    public static class Fields {
        public static final ParseField STRING_DISTANCE = new ParseField("string_distance");
        public static final ParseField SUGGEST_MODE = new ParseField("suggest_mode");
        public static final ParseField MAX_EDITS = new ParseField("max_edits");
        public static final ParseField MAX_INSPECTIONS = new ParseField("max_inspections");
        // TODO some of these constants are the same as MLT constants and
        // could be moved to a shared class for maintaining consistency across
        // the platform
        public static final ParseField MAX_TERM_FREQ = new ParseField("max_term_freq");
        public static final ParseField PREFIX_LENGTH = new ParseField("prefix_length", "prefix_len");
        public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len");
        public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
        public static final ParseField SHARD_SIZE = new ParseField("shard_size");
    }
    /**
     * Parses a single field of a direct-spellchecker definition into
     * {@code suggestion}.
     *
     * @return {@code true} if {@code fieldName} was recognized and consumed,
     *         {@code false} if the caller should handle it itself
     * @throws IllegalArgumentException if max_edits is outside the supported range
     */
    public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName,
            DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
        if ("accuracy".equals(fieldName)) {
            suggestion.accuracy(parser.floatValue());
        } else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) {
            suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text()));
        } else if ("sort".equals(fieldName)) {
            suggestion.sort(SuggestUtils.resolveSort(parser.text()));
        } else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) {
            suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
        } else if (parseFieldMatcher.match(fieldName, Fields.MAX_EDITS)) {
            suggestion.maxEdits(parser.intValue());
            // Lucene's automata only support a bounded number of edits.
            if (suggestion.maxEdits() < 1 || suggestion.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
                throw new IllegalArgumentException("Illegal max_edits value " + suggestion.maxEdits());
            }
        } else if (parseFieldMatcher.match(fieldName, Fields.MAX_INSPECTIONS)) {
            suggestion.maxInspections(parser.intValue());
        } else if (parseFieldMatcher.match(fieldName, Fields.MAX_TERM_FREQ)) {
            suggestion.maxTermFreq(parser.floatValue());
        } else if (parseFieldMatcher.match(fieldName, Fields.PREFIX_LENGTH)) {
            suggestion.prefixLength(parser.intValue());
        } else if (parseFieldMatcher.match(fieldName, Fields.MIN_WORD_LENGTH)) {
            suggestion.minQueryLength(parser.intValue());
        } else if (parseFieldMatcher.match(fieldName, Fields.MIN_DOC_FREQ)) {
            suggestion.minDocFreq(parser.floatValue());
        } else {
            // Unrecognized field -- let the caller decide how to handle it.
            return false;
        }
        return true;
    }
public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName,
SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
if ("analyzer".equals(fieldName)) {
String analyzerName = parser.text();
Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
suggestion.setAnalyzer(analyzer);
} else if ("field".equals(fieldName)) {
suggestion.setField(parser.text());
} else if ("size".equals(fieldName)) {
suggestion.setSize(parser.intValue());
} else if (parseFieldMatcher.match(fieldName, Fields.SHARD_SIZE)) {
suggestion.setShardSize(parser.intValue());
} else {
return false;
}
return true;
}
public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) {
// Verify options and set defaults
if (suggestion.getField() == null) {
throw new IllegalArgumentException("The required field option is missing");
}
if (suggestion.getText() == null) {
if (globalText == null) {
throw new IllegalArgumentException("The required text option is missing");
}
suggestion.setText(globalText);
}
if (suggestion.getAnalyzer() == null) {
suggestion.setAnalyzer(mapperService.searchAnalyzer());
}
if (suggestion.getShardSize() == -1) {
suggestion.setShardSize(Math.max(suggestion.getSize(), 5));
}
}
public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) {
if (analyzer instanceof NamedAnalyzer) {
analyzer = ((NamedAnalyzer)analyzer).analyzer();
}
if (analyzer instanceof CustomAnalyzer) {
final CustomAnalyzer a = (CustomAnalyzer) analyzer;
final TokenFilterFactory[] tokenFilters = a.tokenFilters();
for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
if (tokenFilterFactory instanceof ShingleTokenFilterFactory) {
return ((ShingleTokenFilterFactory)tokenFilterFactory).getInnerFactory();
} else if (tokenFilterFactory instanceof ShingleTokenFilterFactory.Factory) {
return (ShingleTokenFilterFactory.Factory) tokenFilterFactory;
}
}
}
return null;
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dx.ssa.back;
import com.android.dx.rop.code.BasicBlock;
import com.android.dx.rop.code.BasicBlockList;
import com.android.dx.rop.code.InsnList;
import com.android.dx.rop.code.RegisterSpec;
import com.android.dx.rop.code.RegisterSpecList;
import com.android.dx.rop.code.Rop;
import com.android.dx.rop.code.RopMethod;
import com.android.dx.rop.code.Rops;
import com.android.dx.ssa.BasicRegisterMapper;
import com.android.dx.ssa.PhiInsn;
import com.android.dx.ssa.RegisterMapper;
import com.android.dx.ssa.SsaBasicBlock;
import com.android.dx.ssa.SsaInsn;
import com.android.dx.ssa.SsaMethod;
import com.android.dx.util.Hex;
import com.android.dx.util.IntList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Comparator;
/**
* Converts a method in SSA form to ROP form.
*/
public class SsaToRop {
/** local debug flag */
private static final boolean DEBUG = false;
/** {@code non-null;} method to process */
private final SsaMethod ssaMeth;
/**
* {@code true} if the converter should attempt to minimize
* the rop-form register count
*/
private final boolean minimizeRegisters;
/** {@code non-null;} interference graph */
private final InterferenceGraph interference;
/**
* Converts a method in SSA form to ROP form.
*
* @param ssaMeth {@code non-null;} method to process
* @param minimizeRegisters {@code true} if the converter should
* attempt to minimize the rop-form register count
* @return {@code non-null;} rop-form output
*/
public static RopMethod convertToRopMethod(SsaMethod ssaMeth,
boolean minimizeRegisters) {
return new SsaToRop(ssaMeth, minimizeRegisters).convert();
}
/**
* Constructs an instance.
*
* @param ssaMeth {@code non-null;} method to process
* @param minimizeRegisters {@code true} if the converter should
* attempt to minimize the rop-form register count
*/
private SsaToRop(SsaMethod ssaMethod, boolean minimizeRegisters) {
this.minimizeRegisters = minimizeRegisters;
this.ssaMeth = ssaMethod;
this.interference =
LivenessAnalyzer.constructInterferenceGraph(ssaMethod);
}
/**
* Performs the conversion.
*
* @return {@code non-null;} rop-form output
*/
private RopMethod convert() {
if (DEBUG) {
interference.dumpToStdout();
}
// These are other allocators for debugging or historical comparison:
// allocator = new NullRegisterAllocator(ssaMeth, interference);
// allocator = new FirstFitAllocator(ssaMeth, interference);
RegisterAllocator allocator =
new FirstFitLocalCombiningAllocator(ssaMeth, interference,
minimizeRegisters);
RegisterMapper mapper = allocator.allocateRegisters();
if (DEBUG) {
System.out.println("Printing reg map");
System.out.println(((BasicRegisterMapper)mapper).toHuman());
}
ssaMeth.setBackMode();
ssaMeth.mapRegisters(mapper);
removePhiFunctions();
if (allocator.wantsParamsMovedHigh()) {
moveParametersToHighRegisters();
}
removeEmptyGotos();
RopMethod ropMethod = new RopMethod(convertBasicBlocks(),
ssaMeth.blockIndexToRopLabel(ssaMeth.getEntryBlockIndex()));
ropMethod = new IdenticalBlockCombiner(ropMethod).process();
return ropMethod;
}
/**
* Removes all blocks containing only GOTOs from the control flow.
* Although much of this work will be done later when converting
* from rop to dex, not all simplification cases can be handled
* there. Furthermore, any no-op block between the exit block and
* blocks containing the real return or throw statements must be
* removed.
*/
private void removeEmptyGotos() {
final ArrayList<SsaBasicBlock> blocks = ssaMeth.getBlocks();
ssaMeth.forEachBlockDepthFirst(false, new SsaBasicBlock.Visitor() {
public void visitBlock(SsaBasicBlock b, SsaBasicBlock parent) {
ArrayList<SsaInsn> insns = b.getInsns();
if ((insns.size() == 1)
&& (insns.get(0).getOpcode() == Rops.GOTO)) {
BitSet preds = (BitSet) b.getPredecessors().clone();
for (int i = preds.nextSetBit(0); i >= 0;
i = preds.nextSetBit(i + 1)) {
SsaBasicBlock pb = blocks.get(i);
pb.replaceSuccessor(b.getIndex(),
b.getPrimarySuccessorIndex());
}
}
}
});
}
/**
* See Appel 19.6. To remove the phi instructions in an edge-split
* SSA representation we know we can always insert a move in a
* predecessor block.
*/
private void removePhiFunctions() {
ArrayList<SsaBasicBlock> blocks = ssaMeth.getBlocks();
for (SsaBasicBlock block : blocks) {
// Add moves in all the pred blocks for each phi insn.
block.forEachPhiInsn(new PhiVisitor(blocks));
// Delete the phi insns.
block.removeAllPhiInsns();
}
/*
* After all move insns have been added, sort them so they don't
* destructively interfere.
*/
for (SsaBasicBlock block : blocks) {
block.scheduleMovesFromPhis();
}
}
/**
* Helper for {@link #removePhiFunctions}: PhiSuccessorUpdater for
* adding move instructions to predecessors based on phi insns.
*/
private static class PhiVisitor implements PhiInsn.Visitor {
private final ArrayList<SsaBasicBlock> blocks;
public PhiVisitor(ArrayList<SsaBasicBlock> blocks) {
this.blocks = blocks;
}
public void visitPhiInsn(PhiInsn insn) {
RegisterSpecList sources = insn.getSources();
RegisterSpec result = insn.getResult();
int sz = sources.size();
for (int i = 0; i < sz; i++) {
RegisterSpec source = sources.get(i);
SsaBasicBlock predBlock = blocks.get(
insn.predBlockIndexForSourcesIndex(i));
predBlock.addMoveToEnd(result, source);
}
}
}
/**
* Moves the parameter registers, which allocateRegisters() places
* at the bottom of the frame, up to the top of the frame to match
* Dalvik calling convention.
*/
private void moveParametersToHighRegisters() {
int paramWidth = ssaMeth.getParamWidth();
BasicRegisterMapper mapper
= new BasicRegisterMapper(ssaMeth.getRegCount());
int regCount = ssaMeth.getRegCount();
for (int i = 0; i < regCount; i++) {
if (i < paramWidth) {
mapper.addMapping(i, regCount - paramWidth + i, 1);
} else {
mapper.addMapping(i, i - paramWidth, 1);
}
}
if (DEBUG) {
System.out.printf("Moving %d registers from 0 to %d\n",
paramWidth, regCount - paramWidth);
}
ssaMeth.mapRegisters(mapper);
}
/**
* @return rop-form basic block list
*/
private BasicBlockList convertBasicBlocks() {
ArrayList<SsaBasicBlock> blocks = ssaMeth.getBlocks();
// Exit block may be null.
SsaBasicBlock exitBlock = ssaMeth.getExitBlock();
ssaMeth.computeReachability();
int ropBlockCount = ssaMeth.getCountReachableBlocks();
// Don't count the exit block, if it exists and is reachable.
ropBlockCount -= (exitBlock != null && exitBlock.isReachable()) ? 1 : 0;
BasicBlockList result = new BasicBlockList(ropBlockCount);
// Convert all the reachable blocks except the exit block.
int ropBlockIndex = 0;
for (SsaBasicBlock b : blocks) {
if (b.isReachable() && b != exitBlock) {
result.set(ropBlockIndex++, convertBasicBlock(b));
}
}
// The exit block, which is discarded, must do nothing.
if (exitBlock != null && exitBlock.getInsns().size() != 0) {
throw new RuntimeException(
"Exit block must have no insns when leaving SSA form");
}
return result;
}
/**
* Validates that a basic block is a valid end predecessor. It must
* end in a RETURN or a THROW. Throws a runtime exception on error.
*
* @param b {@code non-null;} block to validate
* @throws RuntimeException on error
*/
private void verifyValidExitPredecessor(SsaBasicBlock b) {
ArrayList<SsaInsn> insns = b.getInsns();
SsaInsn lastInsn = insns.get(insns.size() - 1);
Rop opcode = lastInsn.getOpcode();
if (opcode.getBranchingness() != Rop.BRANCH_RETURN
&& opcode != Rops.THROW) {
throw new RuntimeException("Exit predecessor must end"
+ " in valid exit statement.");
}
}
/**
* Converts a single basic block to rop form.
*
* @param block SSA block to process
* @return {@code non-null;} ROP block
*/
private BasicBlock convertBasicBlock(SsaBasicBlock block) {
IntList successorList = block.getRopLabelSuccessorList();
int primarySuccessorLabel = block.getPrimarySuccessorRopLabel();
// Filter out any reference to the SSA form's exit block.
// Exit block may be null.
SsaBasicBlock exitBlock = ssaMeth.getExitBlock();
int exitRopLabel = (exitBlock == null) ? -1 : exitBlock.getRopLabel();
if (successorList.contains(exitRopLabel)) {
if (successorList.size() > 1) {
throw new RuntimeException(
"Exit predecessor must have no other successors"
+ Hex.u2(block.getRopLabel()));
} else {
successorList = IntList.EMPTY;
primarySuccessorLabel = -1;
verifyValidExitPredecessor(block);
}
}
successorList.setImmutable();
BasicBlock result = new BasicBlock(
block.getRopLabel(), convertInsns(block.getInsns()),
successorList,
primarySuccessorLabel);
return result;
}
/**
* Converts an insn list to rop form.
*
* @param ssaInsns {@code non-null;} old instructions
* @return {@code non-null;} immutable instruction list
*/
private InsnList convertInsns(ArrayList<SsaInsn> ssaInsns) {
int insnCount = ssaInsns.size();
InsnList result = new InsnList(insnCount);
for (int i = 0; i < insnCount; i++) {
result.set(i, ssaInsns.get(i).toRopInsn());
}
result.setImmutable();
return result;
}
/**
* <b>Note:</b> This method is not presently used.
*
* @return a list of registers ordered by most-frequently-used to
* least-frequently-used. Each register is listed once and only
* once.
*/
public int[] getRegistersByFrequency() {
int regCount = ssaMeth.getRegCount();
Integer[] ret = new Integer[regCount];
for (int i = 0; i < regCount; i++) {
ret[i] = i;
}
Arrays.sort(ret, new Comparator<Integer>() {
public int compare(Integer o1, Integer o2) {
return ssaMeth.getUseListForRegister(o2).size()
- ssaMeth.getUseListForRegister(o1).size();
}
});
int result[] = new int[regCount];
for (int i = 0; i < regCount; i++) {
result[i] = ret[i];
}
return result;
}
}
| |
/*
* $Id$
* This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc
*
* Copyright (c) 2000-2012 Stephane GALLAND.
* Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports,
* Universite de Technologie de Belfort-Montbeliard.
* Copyright (c) 2013-2020 The original authors, and other authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.arakhne.afc.math.geometry.d1.afp;
import org.eclipse.xtext.xbase.lib.Pure;
import org.arakhne.afc.math.geometry.d1.Point1D;
import org.arakhne.afc.math.geometry.d1.Segment1D;
import org.arakhne.afc.math.geometry.d1.Vector1D;
import org.arakhne.afc.math.geometry.d2.Point2D;
import org.arakhne.afc.vmutil.asserts.AssertMessages;
/** Interface that represents a rectangular shape on a plane.
 *
 * @param <ST> is the type of the general implementation.
 * @param <IT> is the type of the implementation of this shape.
 * @param <P> is the type of the points.
 * @param <V> is the type of the vectors.
 * @param <S> is the type of the segment.
 * @param <B> is the type of the bounding boxes.
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 * @since 14.0
 */
public interface RectangularShape1afp<
        ST extends Shape1afp<?, ?, P, V, S, B>,
        IT extends RectangularShape1afp<?, ?, P, V, S, B>,
        P extends Point1D<? super P, ? super V, ? super S>,
        V extends Vector1D<? super V, ? super P, ? super S>,
        S extends Segment1D<?, ?>,
        B extends Rectangle1afp<?, ?, P, V, S, B>>
        extends Shape1afp<ST, IT, P, V, S, B> {

    @Override
    default void toBoundingBox(B box) {
        assert box != null : AssertMessages.notNullParameter();
        // The bounding box of a rectangular shape is the shape itself.
        box.setSegment(getSegment());
        box.setFromCorners(getMinX(), getMinY(), getMaxX(), getMaxY());
    }

    @Override
    default void clear() {
        // Collapse the shape to a zero-size box at the origin.
        setFromCorners(0, 0, 0, 0);
    }

    /** Change the frame of the rectangle.
     *
     * @param x x coordinate of the lower corner of the rectangular shape.
     * @param y y coordinate of the lower corner of the rectangular shape.
     * @param width width of the rectangular shape.
     * @param height height of the rectangular shape.
     */
    default void set(double x, double y, double width, double height) {
        assert width >= 0. : AssertMessages.positiveOrZeroParameter(2);
        assert height >= 0. : AssertMessages.positiveOrZeroParameter(3);
        setFromCorners(x, y, x + width, y + height);
    }

    @Override
    default void set(IT shape) {
        assert shape != null : AssertMessages.notNullParameter();
        setFromCorners(shape.getMinX(), shape.getMinY(), shape.getMaxX(), shape.getMaxY());
    }

    /** Change the frame of the rectangle.
     *
     * @param min is the min corner of the rectangle.
     * @param max is the max corner of the rectangle.
     */
    default void set(Point2D<?, ?> min, Point2D<?, ?> max) {
        assert min != null : AssertMessages.notNullParameter(0);
        assert max != null : AssertMessages.notNullParameter(1);
        setFromCorners(min.getX(), min.getY(), max.getX(), max.getY());
    }

    /** Change the width of the rectangle, not the min corner.
     *
     * @param width width of the rectangular shape.
     */
    default void setWidth(double width) {
        assert width >= 0. : AssertMessages.positiveOrZeroParameter();
        setMaxX(getMinX() + width);
    }

    /** Change the height of the rectangle, not the min corner.
     *
     * @param height height of the rectangular shape.
     */
    default void setHeight(double height) {
        assert height >= 0. : AssertMessages.positiveOrZeroParameter();
        setMaxY(getMinY() + height);
    }

    /** Change the frame of the rectangle conserving previous min and max if needed.
     *
     * @param x1 is the coordinate of the first corner.
     * @param y1 is the coordinate of the first corner.
     * @param x2 is the coordinate of the second corner.
     * @param y2 is the coordinate of the second corner.
     */
    // This function has no default implementation for allowing implementation to be atomic.
    void setFromCorners(double x1, double y1, double x2, double y2);

    /** Change the frame of the rectangle conserving previous min and max if needed.
     *
     * @param p1 the first corner.
     * @param p2 the second corner.
     */
    // Convenience overload: delegates to the coordinate-based (atomic) variant.
    default void setFromCorners(Point2D<?, ?> p1, Point2D<?, ?> p2) {
        assert p1 != null : AssertMessages.notNullParameter(0);
        assert p2 != null : AssertMessages.notNullParameter(1);
        setFromCorners(p1.getX(), p1.getY(), p2.getX(), p2.getY());
    }

    /**
     * Sets the framing rectangle of this <code>Shape</code>
     * based on the specified center point coordinates and corner point
     * coordinates. The framing rectangle is used by the subclasses of
     * <code>RectangularShape</code> to define their geometry.
     *
     * @param centerX the X coordinate of the specified center point
     * @param centerY the Y coordinate of the specified center point
     * @param cornerX the X coordinate of the specified corner point
     * @param cornerY the Y coordinate of the specified corner point
     */
    default void setFromCenter(double centerX, double centerY, double cornerX, double cornerY) {
        // "demi" = half: the corner is mirrored through the center.
        final double demiWidth = Math.abs(centerX - cornerX);
        final double demiHeight = Math.abs(centerY - cornerY);
        setFromCorners(centerX - demiWidth, centerY - demiHeight, centerX + demiWidth, centerY + demiHeight);
    }

    /**
     * Sets the framing rectangle of this <code>Shape</code>
     * based on the specified center point coordinates and corner point
     * coordinates. The framing rectangle is used by the subclasses of
     * <code>RectangularShape</code> to define their geometry.
     *
     * @param center the specified center point
     * @param corner the specified corner point
     */
    default void setFromCenter(Point2D<?, ?> center, Point2D<?, ?> corner) {
        assert center != null : AssertMessages.notNullParameter(0);
        assert corner != null : AssertMessages.notNullParameter(1);
        setFromCenter(center.getX(), center.getY(), corner.getX(), corner.getY());
    }

    /** Replies the min X.
     *
     * @return the min x.
     */
    @Pure
    double getMinX();

    /** Set the min X conserving previous min if needed.
     *
     * @param x the min x.
     */
    void setMinX(double x);

    /** Replies the center.
     *
     * @return the center.
     */
    @Pure
    default P getCenter() {
        return getGeomFactory().newPoint(getSegment(), getCenterX(), getCenterY());
    }

    /** Replies the center x.
     *
     * @return the center x.
     */
    @Pure
    default double getCenterX() {
        return (getMinX() + getMaxX()) / 2;
    }

    /** Replies the max x.
     *
     * @return the max x.
     */
    @Pure
    double getMaxX();

    /** Set the max X conserving previous max if needed.
     *
     * @param x the max x.
     */
    void setMaxX(double x);

    /** Replies the min y.
     *
     * @return the min y.
     */
    @Pure
    double getMinY();

    /** Set the min Y conserving previous min if needed.
     *
     * @param y the min y.
     */
    void setMinY(double y);

    /** Replies the center y.
     *
     * @return the center y.
     */
    @Pure
    default double getCenterY() {
        return (getMinY() + getMaxY()) / 2;
    }

    /** Replies the max y.
     *
     * @return the max y.
     */
    @Pure
    double getMaxY();

    /** Set the max Y conserving previous max if needed.
     *
     * @param y the max y.
     */
    void setMaxY(double y);

    /** Replies the width.
     *
     * @return the width.
     */
    @Pure
    default double getWidth() {
        return getMaxX() - getMinX();
    }

    /** Replies the height.
     *
     * @return the height.
     */
    @Pure
    default double getHeight() {
        return getMaxY() - getMinY();
    }

    @Override
    default void translate(double dx, double dy) {
        setFromCorners(getMinX() + dx, getMinY() + dy, getMaxX() + dx, getMaxY() + dy);
    }

    @Pure
    @Override
    default boolean isEmpty() {
        // Empty means both the width and the height are exactly zero.
        return getMinX() == getMaxX() && getMinY() == getMaxY();
    }

    /** Inflate this rectangle with the given amounts.
     *
     * <p>The four borders may be inflated. If the value associated to a border
     * is positive, the border is moved outside the current rectangle.
     * If the value is negative, the border is moved inside the rectangle.
     *
     * @param minXBorder the value to subtract from the minimum x.
     * @param minYBorder the value to subtract from the minimum y.
     * @param maxXBorder the value to add to the maximum x.
     * @param maxYBorder the value to add to the maximum y.
     */
    default void inflate(double minXBorder, double minYBorder, double maxXBorder, double maxYBorder) {
        setFromCorners(
                getMinX() - minXBorder,
                getMinY() - minYBorder,
                getMaxX() + maxXBorder,
                getMaxY() + maxYBorder);
    }
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf.nano;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
/**
 * The classes contained within are used internally by the Protocol Buffer
 * library and generated message implementations. They are public only because
 * those generated messages do not reside in the {@code protobuf} package.
 * Others should not use this class directly.
 *
 * @author kenton@google.com (Kenton Varda)
 */
public final class InternalNano {

  private InternalNano() {}

  /**
   * An object to provide synchronization when lazily initializing static fields
   * of {@link MessageNano} subclasses.
   * <p>
   * To enable earlier versions of ProGuard to inline short methods from a
   * generated MessageNano subclass to the call sites, that class must not have
   * a class initializer, which will be created if there is any static variable
   * initializers. To lazily initialize the static variables in a thread-safe
   * manner, the initialization code will synchronize on this object.
   */
  public static final Object LAZY_INIT_LOCK = new Object();

  /**
   * Helper called by generated code to construct default values for string
   * fields.
   * <p>
   * The protocol compiler does not actually contain a UTF-8 decoder -- it
   * just pushes UTF-8-encoded text around without touching it.  The one place
   * where this presents a problem is when generating Java string literals.
   * Unicode characters in the string literal would normally need to be encoded
   * using a Unicode escape sequence, which would require decoding them.
   * To get around this, protoc instead embeds the UTF-8 bytes into the
   * generated code and leaves it to the runtime library to decode them.
   * <p>
   * It gets worse, though.  If protoc just generated a byte array, like:
   *   new byte[] {0x12, 0x34, 0x56, 0x78}
   * Java actually generates *code* which allocates an array and then fills
   * in each value.  This is much less efficient than just embedding the bytes
   * directly into the bytecode.  To get around this, we need another
   * work-around.  String literals are embedded directly, so protoc actually
   * generates a string literal corresponding to the bytes.  The easiest way
   * to do this is to use the ISO-8859-1 character set, which corresponds to
   * the first 256 characters of the Unicode range.  Protoc can then use
   * good old CEscape to generate the string.
   * <p>
   * So we have a string literal which represents a set of bytes which
   * represents another string.  This function -- stringDefaultValue --
   * converts from the generated string to the string we actually want.  The
   * generated code calls this automatically.
   */
  public static String stringDefaultValue(String bytes) {
    try {
      return new String(bytes.getBytes("ISO-8859-1"), "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // This should never happen since all JVMs are required to implement
      // both of the above character sets.
      throw new IllegalStateException(
          "Java VM does not support a standard character set.", e);
    }
  }

  /**
   * Helper called by generated code to construct default values for bytes
   * fields.
   * <p>
   * This is a lot like {@link #stringDefaultValue}, but for bytes fields.
   * In this case we only need the second of the two hacks -- allowing us to
   * embed raw bytes as a string literal with ISO-8859-1 encoding.
   */
  public static byte[] bytesDefaultValue(String bytes) {
    try {
      return bytes.getBytes("ISO-8859-1");
    } catch (UnsupportedEncodingException e) {
      // This should never happen since all JVMs are required to implement
      // ISO-8859-1.
      throw new IllegalStateException(
          "Java VM does not support a standard character set.", e);
    }
  }

  /**
   * Helper function to convert a string into UTF-8 while turning the
   * UnsupportedEncodingException to a RuntimeException.
   */
  public static byte[] copyFromUtf8(final String text) {
    try {
      return text.getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
      // Keep the original cause attached so the (impossible) failure is
      // diagnosable instead of silently losing the stack trace.
      throw new RuntimeException("UTF-8 not supported?", e);
    }
  }

  /**
   * Checks repeated int field equality; null-value and 0-length fields are
   * considered equal.
   */
  public static boolean equals(int[] field1, int[] field2) {
    if (field1 == null || field1.length == 0) {
      return field2 == null || field2.length == 0;
    } else {
      return Arrays.equals(field1, field2);
    }
  }

  /**
   * Checks repeated long field equality; null-value and 0-length fields are
   * considered equal.
   */
  public static boolean equals(long[] field1, long[] field2) {
    if (field1 == null || field1.length == 0) {
      return field2 == null || field2.length == 0;
    } else {
      return Arrays.equals(field1, field2);
    }
  }

  /**
   * Checks repeated float field equality; null-value and 0-length fields are
   * considered equal.
   */
  public static boolean equals(float[] field1, float[] field2) {
    if (field1 == null || field1.length == 0) {
      return field2 == null || field2.length == 0;
    } else {
      return Arrays.equals(field1, field2);
    }
  }

  /**
   * Checks repeated double field equality; null-value and 0-length fields are
   * considered equal.
   */
  public static boolean equals(double[] field1, double[] field2) {
    if (field1 == null || field1.length == 0) {
      return field2 == null || field2.length == 0;
    } else {
      return Arrays.equals(field1, field2);
    }
  }

  /**
   * Checks repeated boolean field equality; null-value and 0-length fields are
   * considered equal.
   */
  public static boolean equals(boolean[] field1, boolean[] field2) {
    if (field1 == null || field1.length == 0) {
      return field2 == null || field2.length == 0;
    } else {
      return Arrays.equals(field1, field2);
    }
  }

  /**
   * Checks repeated bytes field equality. Only non-null elements are tested.
   * Returns true if the two fields have the same sequence of non-null
   * elements. Null-value fields and fields of any length with only null
   * elements are considered equal.
   */
  public static boolean equals(byte[][] field1, byte[][] field2) {
    int index1 = 0;
    int length1 = field1 == null ? 0 : field1.length;
    int index2 = 0;
    int length2 = field2 == null ? 0 : field2.length;
    while (true) {
      // Skip null elements in both arrays before comparing.
      while (index1 < length1 && field1[index1] == null) {
        index1++;
      }
      while (index2 < length2 && field2[index2] == null) {
        index2++;
      }
      boolean atEndOf1 = index1 >= length1;
      boolean atEndOf2 = index2 >= length2;
      if (atEndOf1 && atEndOf2) {
        // no more non-null elements to test in both arrays
        return true;
      } else if (atEndOf1 != atEndOf2) {
        // one of the arrays have extra non-null elements
        return false;
      } else if (!Arrays.equals(field1[index1], field2[index2])) {
        // element mismatch
        return false;
      }
      index1++;
      index2++;
    }
  }

  /**
   * Checks repeated string/message field equality. Only non-null elements are
   * tested. Returns true if the two fields have the same sequence of non-null
   * elements. Null-value fields and fields of any length with only null
   * elements are considered equal.
   */
  public static boolean equals(Object[] field1, Object[] field2) {
    int index1 = 0;
    int length1 = field1 == null ? 0 : field1.length;
    int index2 = 0;
    int length2 = field2 == null ? 0 : field2.length;
    while (true) {
      // Skip null elements in both arrays before comparing.
      while (index1 < length1 && field1[index1] == null) {
        index1++;
      }
      while (index2 < length2 && field2[index2] == null) {
        index2++;
      }
      boolean atEndOf1 = index1 >= length1;
      boolean atEndOf2 = index2 >= length2;
      if (atEndOf1 && atEndOf2) {
        // no more non-null elements to test in both arrays
        return true;
      } else if (atEndOf1 != atEndOf2) {
        // one of the arrays have extra non-null elements
        return false;
      } else if (!field1[index1].equals(field2[index2])) {
        // element mismatch
        return false;
      }
      index1++;
      index2++;
    }
  }

  /**
   * Computes the hash code of a repeated int field. Null-value and 0-length
   * fields have the same hash code.
   */
  public static int hashCode(int[] field) {
    return field == null || field.length == 0 ? 0 : Arrays.hashCode(field);
  }

  /**
   * Computes the hash code of a repeated long field. Null-value and 0-length
   * fields have the same hash code.
   */
  public static int hashCode(long[] field) {
    return field == null || field.length == 0 ? 0 : Arrays.hashCode(field);
  }

  /**
   * Computes the hash code of a repeated float field. Null-value and 0-length
   * fields have the same hash code.
   */
  public static int hashCode(float[] field) {
    return field == null || field.length == 0 ? 0 : Arrays.hashCode(field);
  }

  /**
   * Computes the hash code of a repeated double field. Null-value and 0-length
   * fields have the same hash code.
   */
  public static int hashCode(double[] field) {
    return field == null || field.length == 0 ? 0 : Arrays.hashCode(field);
  }

  /**
   * Computes the hash code of a repeated boolean field. Null-value and 0-length
   * fields have the same hash code.
   */
  public static int hashCode(boolean[] field) {
    return field == null || field.length == 0 ? 0 : Arrays.hashCode(field);
  }

  /**
   * Computes the hash code of a repeated bytes field. Only the sequence of all
   * non-null elements are used in the computation. Null-value fields and fields
   * of any length with only null elements have the same hash code.
   */
  public static int hashCode(byte[][] field) {
    int result = 0;
    for (int i = 0, size = field == null ? 0 : field.length; i < size; i++) {
      byte[] element = field[i];
      if (element != null) {
        result = 31 * result + Arrays.hashCode(element);
      }
    }
    return result;
  }

  /**
   * Computes the hash code of a repeated string/message field. Only the
   * sequence of all non-null elements are used in the computation. Null-value
   * fields and fields of any length with only null elements have the same hash
   * code.
   */
  public static int hashCode(Object[] field) {
    int result = 0;
    for (int i = 0, size = field == null ? 0 : field.length; i < size; i++) {
      Object element = field[i];
      if (element != null) {
        result = 31 * result + element.hashCode();
      }
    }
    return result;
  }
}
| |
package org.atlasapi.system.legacy;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import org.atlasapi.media.channel.Channel;
import org.atlasapi.media.channel.ChannelResolver;
import org.atlasapi.media.entity.Actor;
import org.atlasapi.media.entity.BlackoutRestriction;
import org.atlasapi.media.entity.Broadcast;
import org.atlasapi.media.entity.Clip;
import org.atlasapi.media.entity.CrewMember;
import org.atlasapi.media.entity.Described;
import org.atlasapi.media.entity.Encoding;
import org.atlasapi.media.entity.Identified;
import org.atlasapi.media.entity.Item;
import org.atlasapi.media.entity.Location;
import org.atlasapi.media.entity.ParentRef;
import org.atlasapi.media.entity.Publisher;
import org.atlasapi.media.entity.Rating;
import org.atlasapi.media.entity.Restriction;
import org.atlasapi.media.entity.Review;
import org.atlasapi.media.entity.Series;
import org.atlasapi.media.entity.Version;
import com.metabroadcast.common.base.Maybe;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.joda.time.DateTime;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.Optional;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link LegacyContentTransformer}: verifies that legacy content
 * entities ({@code org.atlasapi.media.entity.*}) are translated into the new
 * content model ({@code org.atlasapi.content.*}), covering broadcasts, people,
 * clips, reviews and ratings.
 *
 * <p>Collaborators are Mockito mocks injected into the object under test via
 * {@code @InjectMocks}.
 */
@RunWith(MockitoJUnitRunner.class)
public class LegacyContentTransformerTest {

    // Resolves broadcast channel URIs; stubbed per test.
    @Mock
    private ChannelResolver channelResolver;
    @Mock
    private LegacySegmentMigrator legacySegmentMigrator;
    @Mock
    private GenreToTagMapper genreToTagMapper;
    @InjectMocks
    private LegacyContentTransformer objectUnderTest;

    /**
     * Regression test: a parent ref whose id is null (URI-only constructor)
     * must not make the transformation blow up. No assertions — the test
     * passes if apply() completes without throwing.
     */
    @Test
    public void testTransformSeriesWithParentRefWithNullId() {
        Series legacy = new Series();
        legacy.setId(1L);
        legacy.setParentRef(new ParentRef("parentUrl"));
        objectUnderTest.apply(legacy);
    }

    /** Broadcast flags (new-episode, blackout restriction) survive the transform. */
    @Test
    public void broadcastFieldsAreTransformed() {
        Broadcast broadcast = new Broadcast(
                "channelId",
                DateTime.now(),
                DateTime.now().plusHours(1)
        ).withId("broadcastId");
        // Tested fields
        broadcast.setNewEpisode(true);
        broadcast.setBlackoutRestriction(new BlackoutRestriction(true));
        Version version = new Version();
        version.setBroadcasts(ImmutableSet.of(broadcast));
        version.setRestriction(new Restriction());
        Item legacyItem = new Item("uri", "", Publisher.METABROADCAST);
        legacyItem.setId(1L);
        legacyItem.setVersions(ImmutableSet.of(version));
        // Channel must resolve, otherwise the broadcast would be dropped
        // (see testIgnoreBroadcastsWithUnknownChannel below).
        when(channelResolver.fromUri("channelId")).thenReturn(Maybe.just(mock(Channel.class)));
        org.atlasapi.content.Item transformed = (org.atlasapi.content.Item) objectUnderTest.apply(
                legacyItem);
        org.atlasapi.content.Broadcast actualBroadcast =
                Iterables.getOnlyElement(transformed.getBroadcasts());
        assertThat(
                actualBroadcast.getNewEpisode(),
                is(broadcast.getNewEpisode())
        );
        assertThat(
                actualBroadcast.getBlackoutRestriction().get().getAll(),
                is(broadcast.getBlackoutRestriction().getAll())
        );
    }

    /** Broadcasts whose channel URI cannot be resolved are silently dropped. */
    @Test
    public void testIgnoreBroadcastsWithUnknownChannel() {
        String channelId = "nonexistentChannel";
        Item legacy = new Item();
        legacy.setId(1L);
        legacy.setParentRef(new ParentRef("parentUrl", 2L));
        legacy.setPublisher(Publisher.PA);
        Version version = new Version();
        Broadcast broadcast = new Broadcast(channelId, DateTime.now(), DateTime.now().plusHours(1));
        broadcast.withId("sourceId");
        version.setBroadcasts(ImmutableSet.of(broadcast));
        version.setRestriction(new Restriction());
        legacy.setVersions(ImmutableSet.of(version));
        legacy.setAliases(ImmutableSet.of());
        // Maybe.nothing() simulates an unknown channel.
        when(channelResolver.fromUri(channelId)).thenReturn(Maybe.nothing());
        org.atlasapi.content.Item transformed = (org.atlasapi.content.Item) objectUnderTest.apply(
                legacy);
        assertThat(transformed.getBroadcasts().size(), is(0));
    }

    /** People (actors and other crew members) are carried over with id, name, character/role. */
    @Test
    public void testContentWithPeople() {
        Item legacy = new Item();
        legacy.setId(666L);
        legacy.setPeople(Lists.newArrayList(
                fluentifySetId(new Actor(), 123L).withName("Robert Smith").withCharacter("Himself"),
                fluentifySetId(new CrewMember(), 321L).withName("Salesman McSaleFace").withRole(CrewMember.Role.ADVERTISER)
        ));
        org.atlasapi.content.Item expected = new org.atlasapi.content.Item();
        // Different id on purpose: only people() is compared below.
        expected.setId(555L);
        expected.setPeople(Lists.newArrayList(
                fluentifySetId(new org.atlasapi.content.Actor(), 123L).withName("Robert Smith").withCharacter("Himself"),
                fluentifySetId(new org.atlasapi.content.CrewMember(), 321L).withName("Salesman McSaleFace").withRole(org.atlasapi.content.CrewMember.Role.ADVERTISER)
        ));
        org.atlasapi.content.Content actual = objectUnderTest.apply(legacy);
        assertThat(actual instanceof org.atlasapi.content.Item, is(true));
        assertThat(actual.people().size(), is(expected.people().size()));
        assertThat(actual.people().containsAll(expected.people()), is(true));
    }

    /** A standalone clip (with locations/encodings) transforms into an equal new-model clip. */
    @Test
    public void testClip() {
        Clip legacy = new Clip("uri", "curie", Publisher.PREVIEW_NETWORKS);
        legacy.setTitle("trailer for film");
        legacy.setVersions(ImmutableSet.of(
                createVersionTestData("http://aka-m-p.maxplatform.com/15/59/28/xxlarge_640x360_HirsyF_1_uk_1_13826_27634_50544_1139.flv"),
                createVersionTestData("http://aka-m-p.maxplatform.com/15/59/28/HD-1080p_1920x1080_IchmLh_1_uk_1_13826_27634_50544_1139.webm")
        ));
        org.atlasapi.content.Clip expected = new org.atlasapi.content.Clip("uri", "curie", Publisher.PREVIEW_NETWORKS);
        expected.setTitle("trailer for film");
        org.atlasapi.content.Content actual = objectUnderTest.apply(legacy);
        assertThat(actual instanceof org.atlasapi.content.Clip, is(true));
        assertClipsEqual((org.atlasapi.content.Clip) actual, expected);
    }

    /** Builds a Version holding a single Encoding/Location pointing at the given URL. */
    private Version createVersionTestData(String contentUrl) {
        Location location = new Location();
        location.setUri(contentUrl);
        Encoding encoding = new Encoding();
        encoding.addAvailableAt(location);
        Version result = new Version();
        result.setRestriction(new Restriction());
        result.addManifestedAs(encoding);
        return result;
    }

    /** Clips attached to an item are all transformed; comparison is order-insensitive. */
    @Test
    public void testContentWithClips() {
        Item legacy = new Item();
        legacy.setId(666L);
        legacy.setClips(Lists.newArrayList(
                fluentifySetTitle(new Clip("uri1", "curie", Publisher.PREVIEW_NETWORKS), "film trailer 1"),
                fluentifySetTitle(new Clip("uri2", "curie", Publisher.PREVIEW_NETWORKS), "film trailer 2")
        ));
        org.atlasapi.content.Item expected = new org.atlasapi.content.Item();
        // Deliberately listed in reverse order: assertAllClipsEqual matches
        // clips by equality rather than by position.
        expected.setClips(Lists.newArrayList(
                fluentifySetTitle(new org.atlasapi.content.Clip("uri2", "curie", Publisher.PREVIEW_NETWORKS), "film trailer 2"),
                fluentifySetTitle(new org.atlasapi.content.Clip("uri1", "curie", Publisher.PREVIEW_NETWORKS), "film trailer 1")
        ));
        org.atlasapi.content.Content actual = objectUnderTest.apply(legacy);
        assertThat(actual instanceof org.atlasapi.content.Item, is(true));
        assertAllClipsEqual(actual.getClips(), expected.getClips());
    }

    /**
     * Asserts the two clip lists contain the same clips, ignoring order.
     * Each matched expected clip is removed so duplicates must match 1:1.
     */
    private void assertAllClipsEqual(List<org.atlasapi.content.Clip> actual, List<org.atlasapi.content.Clip> expected) {
        assertThat(actual.size(), is(expected.size()));
        List<org.atlasapi.content.Clip> expectedClips = Lists.newArrayList(expected);
        actual.stream().forEach(
                actualClip -> {
                    assertThat(expectedClips.size(), greaterThan(0));
                    for (int i = 0; i < expectedClips.size(); i++) {
                        org.atlasapi.content.Clip needle = expectedClips.get(i);
                        if (needle.equals(actualClip)) {
                            assertClipsEqual(actualClip, needle);
                            expectedClips.remove(i);
                            break;
                        }
                    }
                });
        assertThat("expectedClips has items that are not matched by actual.getClips()", expectedClips.size(), is(0));
    }

    private void assertClipsEqual(org.atlasapi.content.Clip actual, org.atlasapi.content.Clip expected) {
        // Clip.equals() is sparse, check fields that are referenced by API writer
        assertThat(actual.getTitle(), is(expected.getTitle()));
        assertThat(actual, is(expected));
    }

    // Fluent-style setId helper for legacy Identified subtypes. Overload below
    // covers the new-model Identified; erasures differ, so both may coexist.
    private <C extends Identified> C fluentifySetId(C identifiedChild, long id) {
        identifiedChild.setId(id);
        return identifiedChild;
    }

    private <C extends org.atlasapi.entity.Identified> C fluentifySetId(C identifiedChild, long id) {
        identifiedChild.setId(id);
        return identifiedChild;
    }

    /** Reviews: none → empty; otherwise locale/text/publisher-key are mapped (null key → empty source). */
    @Test
    public void testReviews() {
        Item legacyItem;
        org.atlasapi.content.Item transformedItem;
        legacyItem = new Item();
        legacyItem.setId(1L);
        legacyItem.setPublisher(Publisher.AMAZON_UK);
        transformedItem = (org.atlasapi.content.Item) objectUnderTest.apply(legacyItem);
        assertThat(transformedItem.getReviews().size(), is(0));
        legacyItem = new Item();
        legacyItem.setId(2L);
        legacyItem.setPublisher(Publisher.AMAZON_UK);
        legacyItem.setReviews(Arrays.asList(
                Review.builder()
                        .withLocale(Locale.CHINESE)
                        .withReview("hen hao")
                        .withPublisherKey(Publisher.METABROADCAST.key())
                        .build(),
                Review.builder()
                        .withLocale(Locale.ENGLISH)
                        .withReview("dog's bolls")
                        .withPublisherKey(Publisher.RADIO_TIMES.key())
                        .build(),
                Review.builder()
                        .withLocale(Locale.FRENCH)
                        .withReview("tres bien")
                        .withPublisherKey(null)
                        .build()
        ));
        transformedItem = (org.atlasapi.content.Item) objectUnderTest.apply(legacyItem);
        assertThat(transformedItem.getReviews().size(), is(3));
        assertThat(transformedItem.getReviews().containsAll(Arrays.asList(
                org.atlasapi.entity.Review.builder("hen hao")
                        .withLocale(Locale.CHINESE)
                        .withSource(Optional.of(Publisher.METABROADCAST))
                        .build(),
                org.atlasapi.entity.Review.builder("dog's bolls")
                        .withLocale(Locale.ENGLISH)
                        .withSource(Optional.of(Publisher.RADIO_TIMES))
                        .build(),
                org.atlasapi.entity.Review.builder("tres bien")
                        .withLocale(Locale.FRENCH)
                        .withSource(Optional.empty())
                        .build()
        )), is(true));
    }

    /** Ratings: none → empty; otherwise all ratings are mapped (order irrelevant). */
    @Test
    public void testRatings() {
        Item legacyItem;
        org.atlasapi.content.Item transformedItem;
        legacyItem = new Item();
        legacyItem.setId(1L);
        transformedItem = (org.atlasapi.content.Item) objectUnderTest.apply(legacyItem);
        assertThat(transformedItem.getRatings().size(), is(0));
        legacyItem = new Item();
        legacyItem.setId(2L);
        legacyItem.setRatings(Arrays.asList(
                new Rating("5STAR", 3.0f, Publisher.RADIO_TIMES, 1234L),
                new Rating("MOOSE", 1.0f, Publisher.BBC, 1234L)
        ));
        transformedItem = (org.atlasapi.content.Item) objectUnderTest.apply(legacyItem);
        assertThat(transformedItem.getRatings().size(), is(2));
        assertThat(transformedItem.getRatings().containsAll(Arrays.asList(
                new org.atlasapi.entity.Rating("MOOSE", 1.0f, Publisher.BBC, 1234L),
                new org.atlasapi.entity.Rating("5STAR", 3.0f, Publisher.RADIO_TIMES, 1234L)
        )), is(true));
    }

    /**
     * A review with a null body is expected to make the transform throw NPE.
     * NOTE(review): because the test expects NullPointerException, execution
     * stops at the first apply() that throws — everything after it only runs
     * if no NPE occurs, so the later assertions look like dead code. Confirm
     * which apply() is actually intended to throw.
     */
    @Test(expected = NullPointerException.class)
    public void testWithBrokenReview() {
        org.atlasapi.content.Item transformedItem;
        Item legacyItem = new Item();
        legacyItem.setId(2L);
        legacyItem.setReviews(Arrays.asList(
                Review.builder().withLocale(Locale.ENGLISH).withReview(null).build() // this is broken Review
        ));
        transformedItem = (org.atlasapi.content.Item) objectUnderTest.apply(legacyItem);
        assertThat(transformedItem.getReviews().size(), is(0));
        legacyItem.setReviews(Arrays.asList(
                Review.builder().withLocale(Locale.CHINESE).withReview("hen hao").build(),
                Review.builder().withLocale(Locale.ENGLISH).withReview( null).build() // this is broken Review
        ));
        transformedItem = (org.atlasapi.content.Item) objectUnderTest.apply(legacyItem);
        assertThat(transformedItem.getReviews().size(), is(1));
    }

    // Fluent-style setTitle helpers, one per Described hierarchy (legacy vs new model).
    private <C extends Described> C fluentifySetTitle(C describedChild, String title) {
        describedChild.setTitle(title);
        return describedChild;
    }

    private <C extends org.atlasapi.content.Described> C fluentifySetTitle(C describedChild, String title) {
        describedChild.setTitle(title);
        return describedChild;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.connector.thrift.server;
import com.facebook.presto.connector.thrift.api.PrestoThriftBlock;
import com.facebook.presto.connector.thrift.api.PrestoThriftId;
import com.facebook.presto.connector.thrift.api.PrestoThriftNullableToken;
import com.facebook.presto.connector.thrift.api.PrestoThriftPageResult;
import com.facebook.presto.connector.thrift.api.PrestoThriftSchemaTableName;
import com.facebook.presto.connector.thrift.api.PrestoThriftServiceException;
import com.facebook.presto.connector.thrift.api.PrestoThriftSplit;
import com.facebook.presto.connector.thrift.api.PrestoThriftSplitBatch;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.RecordPageSource;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.split.MappedRecordSet;
import com.facebook.presto.tests.tpch.TpchIndexedData;
import com.facebook.presto.tests.tpch.TpchIndexedData.IndexedTable;
import com.facebook.presto.tests.tpch.TpchScaledTable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static com.facebook.presto.connector.thrift.server.SplitInfo.indexSplit;
import static com.facebook.presto.tests.AbstractTestIndexedQueries.INDEX_SPEC;
import static com.google.common.base.Preconditions.checkArgument;
/**
 * Thrift TPCH service used by index-join tests: extends {@link ThriftTpchService}
 * with index-lookup support backed by in-memory {@link TpchIndexedData}.
 *
 * <p>Lookup keys are partitioned into a fixed number of splits; each split
 * carries its share of the keys serialized inside the split id.
 */
public class ThriftIndexedTpchService
        extends ThriftTpchService
{
    // Fixed fan-out for lookup splits; remainder keys go to the last split.
    private static final int NUMBER_OF_INDEX_SPLITS = 2;
    private final TpchIndexedData indexedData = new TpchIndexedData("tpchindexed", INDEX_SPEC);

    /** Returns the sets of columns that can be used as index keys for the given table. */
    @Override
    protected List<Set<String>> getIndexableKeys(String schemaName, String tableName)
    {
        TpchScaledTable tpchScaledTable = new TpchScaledTable(tableName, schemaNameToScaleFactor(schemaName));
        return ImmutableList.copyOf(INDEX_SPEC.getColumnIndexes(tpchScaledTable));
    }

    /**
     * Partitions the incoming lookup keys into at most NUMBER_OF_INDEX_SPLITS
     * splits. No pagination: the whole batch is returned at once (null token).
     *
     * @throws IllegalArgumentException if maxSplitCount is too small or a
     *         continuation token is supplied
     */
    @Override
    protected PrestoThriftSplitBatch getIndexSplitsSync(
            PrestoThriftSchemaTableName schemaTableName,
            List<String> indexColumnNames,
            PrestoThriftPageResult keys,
            int maxSplitCount,
            PrestoThriftNullableToken nextToken)
            throws PrestoThriftServiceException
    {
        checkArgument(NUMBER_OF_INDEX_SPLITS <= maxSplitCount, "maxSplitCount for lookup splits is too low");
        checkArgument(nextToken.getToken() == null, "no continuation is supported for lookup splits");
        int totalKeys = keys.getRowCount();
        // Integer division: each split gets partSize rows, the remainder is
        // folded into the last split below.
        int partSize = totalKeys / NUMBER_OF_INDEX_SPLITS;
        List<PrestoThriftSplit> splits = new ArrayList<>(NUMBER_OF_INDEX_SPLITS);
        for (int splitIndex = 0; splitIndex < NUMBER_OF_INDEX_SPLITS; splitIndex++) {
            int begin = partSize * splitIndex;
            int end = partSize * (splitIndex + 1);
            if (splitIndex + 1 == NUMBER_OF_INDEX_SPLITS) {
                // add remainder to the last split
                end = totalKeys;
            }
            if (begin == end) {
                // split is empty, skip it
                continue;
            }
            // Keys for this split are serialized into the split id itself.
            SplitInfo splitInfo = indexSplit(
                    schemaTableName.getSchemaName(),
                    schemaTableName.getTableName(),
                    indexColumnNames,
                    thriftPageToList(keys, begin, end));
            splits.add(new PrestoThriftSplit(new PrestoThriftId(SPLIT_INFO_CODEC.toJsonBytes(splitInfo)), ImmutableList.of()));
        }
        return new PrestoThriftSplitBatch(splits, null);
    }

    /**
     * Builds a page source that looks up the split's keys in the in-memory
     * index and re-maps the resulting columns into the requested output order.
     */
    @Override
    protected ConnectorPageSource createLookupPageSource(SplitInfo splitInfo, List<String> outputColumnNames)
    {
        IndexedTable indexedTable = indexedData.getIndexedTable(
                splitInfo.getTableName(),
                schemaNameToScaleFactor(splitInfo.getSchemaName()),
                ImmutableSet.copyOf(splitInfo.getLookupColumnNames()))
                .orElseThrow(() -> new IllegalArgumentException(String.format("No such index: %s%s", splitInfo.getTableName(), splitInfo.getLookupColumnNames())));
        List<Type> lookupColumnTypes = types(splitInfo.getTableName(), splitInfo.getLookupColumnNames());
        // Re-map the split's key columns into the column order the index expects.
        RecordSet keyRecordSet = new MappedRecordSet(new ListBasedRecordSet(splitInfo.getKeys(), lookupColumnTypes), computeRemap(splitInfo.getLookupColumnNames(), indexedTable.getKeyColumns()));
        RecordSet outputRecordSet = lookupIndexKeys(keyRecordSet, indexedTable, outputColumnNames);
        return new RecordPageSource(outputRecordSet);
    }

    /**
     * Get lookup result and re-map output columns based on requested order.
     */
    private static RecordSet lookupIndexKeys(RecordSet keys, IndexedTable table, List<String> outputColumnNames)
    {
        RecordSet allColumnsOutputRecordSet = table.lookupKeys(keys);
        List<Integer> outputRemap = computeRemap(table.getOutputColumns(), outputColumnNames);
        return new MappedRecordSet(allColumnsOutputRecordSet, outputRemap);
    }

    /**
     * Converts rows [begin, end) of a thrift page into a column-major list of
     * string values (one inner list per column; null entries for null cells).
     */
    private static List<List<String>> thriftPageToList(PrestoThriftPageResult page, int begin, int end)
    {
        checkArgument(begin <= end, "invalid interval");
        if (begin == end) {
            // empty interval
            return ImmutableList.of();
        }
        List<PrestoThriftBlock> blocks = page.getColumnBlocks();
        List<List<String>> result = new ArrayList<>(blocks.size());
        for (PrestoThriftBlock block : blocks) {
            result.add(blockAsList(block, begin, end));
        }
        return result;
    }

    /**
     * Renders rows [begin, end) of a single block as strings. Supports bigint,
     * integer and varchar blocks only; null cells become null entries.
     */
    private static List<String> blockAsList(PrestoThriftBlock block, int begin, int end)
    {
        List<String> result = new ArrayList<>(end - begin);
        if (block.getBigintData() != null) {
            boolean[] nulls = block.getBigintData().getNulls();
            long[] longs = block.getBigintData().getLongs();
            for (int index = begin; index < end; index++) {
                if (nulls != null && nulls[index]) {
                    result.add(null);
                }
                else {
                    checkArgument(longs != null, "block structure is incorrect");
                    result.add(String.valueOf(longs[index]));
                }
            }
        }
        else if (block.getIntegerData() != null) {
            boolean[] nulls = block.getIntegerData().getNulls();
            int[] ints = block.getIntegerData().getInts();
            for (int index = begin; index < end; index++) {
                if (nulls != null && nulls[index]) {
                    result.add(null);
                }
                else {
                    checkArgument(ints != null, "block structure is incorrect");
                    result.add(String.valueOf(ints[index]));
                }
            }
        }
        else if (block.getVarcharData() != null) {
            // Varchar values are packed back-to-back in one byte array; sizes[]
            // gives per-row byte lengths (null rows occupy zero bytes).
            boolean[] nulls = block.getVarcharData().getNulls();
            int[] sizes = block.getVarcharData().getSizes();
            byte[] bytes = block.getVarcharData().getBytes();
            int startOffset = 0;
            // calculate cumulative offset before the starting position
            if (sizes != null) {
                for (int index = 0; index < begin; index++) {
                    if (nulls == null || !nulls[index]) {
                        startOffset += sizes[index];
                    }
                }
            }
            for (int index = begin; index < end; index++) {
                if (nulls != null && nulls[index]) {
                    result.add(null);
                }
                else {
                    checkArgument(sizes != null, "block structure is incorrect");
                    if (sizes[index] == 0) {
                        result.add("");
                    }
                    else {
                        checkArgument(bytes != null);
                        // NOTE(review): decodes with the platform default
                        // charset — confirm the data is ASCII/UTF-8-safe.
                        result.add(new String(bytes, startOffset, sizes[index]));
                        startOffset += sizes[index];
                    }
                }
            }
        }
        else {
            throw new IllegalArgumentException("Only bigint, integer and varchar blocks are supported");
        }
        return result;
    }

    /**
     * For each column name in endSchema, returns its index in startSchema.
     *
     * @throws IllegalArgumentException if a name in endSchema is absent from startSchema
     */
    private static List<Integer> computeRemap(List<String> startSchema, List<String> endSchema)
    {
        ImmutableList.Builder<Integer> builder = ImmutableList.builder();
        for (String columnName : endSchema) {
            int index = startSchema.indexOf(columnName);
            checkArgument(index != -1, "Column name in end that is not in the start: %s", columnName);
            builder.add(index);
        }
        return builder.build();
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.security;
import groovy.lang.Binding;
import hudson.ExtensionPoint;
import hudson.DescriptorExtensionList;
import hudson.Extension;
import hudson.cli.CLICommand;
import hudson.model.AbstractDescribableImpl;
import hudson.model.Descriptor;
import jenkins.model.IdStrategy;
import jenkins.model.Jenkins;
import hudson.security.FederatedLoginService.FederatedIdentity;
import hudson.security.captcha.CaptchaSupport;
import hudson.util.DescriptorList;
import hudson.util.PluginServletFilter;
import hudson.util.spring.BeanBuilder;
import org.acegisecurity.Authentication;
import org.acegisecurity.AuthenticationManager;
import org.acegisecurity.GrantedAuthorityImpl;
import org.acegisecurity.GrantedAuthority;
import org.acegisecurity.context.SecurityContext;
import org.acegisecurity.context.SecurityContextHolder;
import org.acegisecurity.ui.rememberme.RememberMeServices;
import static org.acegisecurity.ui.rememberme.TokenBasedRememberMeServices.ACEGI_SECURITY_HASHED_REMEMBER_ME_COOKIE_KEY;
import org.acegisecurity.userdetails.UserDetailsService;
import org.acegisecurity.userdetails.UserDetails;
import org.acegisecurity.userdetails.UsernameNotFoundException;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.DoNotUse;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.dao.DataAccessException;
import javax.servlet.Filter;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpSession;
import javax.servlet.http.Cookie;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
* Pluggable security realm that connects external user database to Hudson.
*
* <p>
* If additional views/URLs need to be exposed,
* an active {@link SecurityRealm} is bound to {@code CONTEXT_ROOT/securityRealm/}
* through {@link jenkins.model.Jenkins#getSecurityRealm()}, so you can define additional pages and
* operations on your {@link SecurityRealm}.
*
* <h2>How do I implement this class?</h2>
* <p>
* For compatibility reasons, there are two somewhat different ways to implement a custom SecurityRealm.
*
* <p>
* One is to override the {@link #createSecurityComponents()} and create key Acegi components
* that control the authentication process.
* The default {@link SecurityRealm#createFilter(FilterConfig)} implementation then assembles them
* into a chain of {@link Filter}s. All the incoming requests to Hudson go through this filter chain,
* and when the filter chain is done, {@link SecurityContext#getAuthentication()} would tell us
* who the current user is.
*
* <p>
* If your {@link SecurityRealm} needs to touch the default {@link Filter} chain configuration
* (e.g., adding new ones), then you can also override {@link #createFilter(FilterConfig)} to do so.
*
* <p>
* This model is expected to fit most {@link SecurityRealm} implementations.
*
*
* <p>
* The other way of doing this is to ignore {@link #createSecurityComponents()} completely (by returning
* {@link SecurityComponents} created by the default constructor) and just concentrate on {@link #createFilter(FilterConfig)}.
* As long as the resulting filter chain properly sets up {@link Authentication} object at the end of the processing,
* Hudson doesn't really need you to fit the standard Acegi models like {@link AuthenticationManager} and
* {@link UserDetailsService}.
*
* <p>
* This model is for those "weird" implementations.
*
*
* <h2>Views</h2>
* <dl>
* <dt>loginLink.jelly</dt>
* <dd>
* This view renders the login link on the top right corner of every page, when the user
* is anonymous. For {@link SecurityRealm}s that support user sign-up, this is a good place
* to show a "sign up" link. See {@link HudsonPrivateSecurityRealm} implementation
* for an example of this.
*
* <dt>config.jelly</dt>
* <dd>
* This view is used to render the configuration page in the system config screen.
* </dl>
*
* @author Kohsuke Kawaguchi
* @since 1.160
* @see PluginServletFilter
*/
public abstract class SecurityRealm extends AbstractDescribableImpl<SecurityRealm> implements ExtensionPoint {
/**
 * Captcha Support to be used with this SecurityRealm for User Signup.
 * May be {@code null} when no captcha is configured.
 */
private CaptchaSupport captchaSupport;
/**
 * Creates fully-configured {@link AuthenticationManager} that performs authentication
 * against the user realm. The implementation hides how such authentication manager
 * is configured.
 *
 * <p>
 * {@link AuthenticationManager} instantiation often depends on the user-specified parameters
 * (for example, if the authentication is based on LDAP, the user needs to specify
 * the host name of the LDAP server.) Such configuration is expected to be
 * presented to the user via {@code config.jelly} and then
 * captured as instance variables inside the {@link SecurityRealm} implementation.
 *
 * <p>
 * Your {@link SecurityRealm} may also wants to alter {@link Filter} set up by
 * overriding {@link #createFilter(FilterConfig)}.
 *
 * @return the security components; never null.
 */
public abstract SecurityComponents createSecurityComponents();
/**
 * Returns the {@link IdStrategy} that should be used for turning
 * {@link org.acegisecurity.userdetails.UserDetails#getUsername()} into an ID.
 * Mostly this should be {@link IdStrategy.CaseInsensitive} but there may be occasions when either
 * {@link IdStrategy.CaseSensitive} or {@link IdStrategy.CaseSensitiveEmailAddress} are the correct approach.
 *
 * @return the {@link IdStrategy} that should be used for turning
 *         {@link org.acegisecurity.userdetails.UserDetails#getUsername()} into an ID.
 * @since 1.566
 */
public IdStrategy getUserIdStrategy() {
    // Default: user IDs are compared case-insensitively.
    return IdStrategy.CASE_INSENSITIVE;
}
/**
 * Returns the {@link IdStrategy} that should be used for turning {@link hudson.security.GroupDetails#getName()}
 * into an ID.
 * Note: Mostly this should be the same as {@link #getUserIdStrategy()} but some security realms may have legitimate
 * reasons for a different strategy.
 *
 * @return the {@link IdStrategy} that should be used for turning {@link hudson.security.GroupDetails#getName()}
 *         into an ID.
 * @since 1.566
 */
public IdStrategy getGroupIdStrategy() {
    // By default, groups follow the same ID strategy as users.
    return getUserIdStrategy();
}
/**
 * Creates a {@link CliAuthenticator} object that authenticates an invocation of a CLI command.
 * See {@link CliAuthenticator} for more details.
 *
 * @param command
 *      The command about to be executed.
 * @return
 *      never null. By default, this method returns a no-op authenticator that always authenticates
 *      the session as authenticated by the transport (which is often just {@link jenkins.model.Jenkins#ANONYMOUS}.)
 * @deprecated See {@link CliAuthenticator}.
 */
@Deprecated
public CliAuthenticator createCliAuthenticator(final CLICommand command) {
    // No-op default: trust whatever identity the transport already established.
    return new CliAuthenticator() {
        @Override
        public Authentication authenticate() {
            return command.getTransportAuthentication();
        }
    };
}
/**
 * {@inheritDoc}
 *
 * <p>
 * {@link SecurityRealm} is a singleton resource in Hudson, and therefore
 * it's always configured through {@code config.jelly} and never with
 * {@code global.jelly}.
 */
@Override
public Descriptor<SecurityRealm> getDescriptor() {
    // Overridden only to narrow the documented contract; delegates to the default lookup.
    return super.getDescriptor();
}
/**
 * Returns the URL to submit a form for the authentication.
 * There's no need to override this, except for {@link LegacySecurityRealm}.
 *
 * @return relative URL (Acegi's standard form-login processing endpoint).
 */
public String getAuthenticationGatewayUrl() {
    return "j_acegi_security_check";
}
/**
 * Gets the target URL of the "login" link.
 * There's no need to override this, except for {@link LegacySecurityRealm}.
 * On legacy implementation this should point to {@code loginEntry}, which
 * is protected by {@code web.xml}, so that the user can be eventually authenticated
 * by the container.
 *
 * <p>
 * Path is relative from the context root of the Hudson application.
 * The URL returned by this method will get the "from" query parameter indicating
 * the page that the user was at.
 */
public String getLoginUrl() {
    return "login";
}
/**
 * Returns true if this {@link SecurityRealm} supports explicit logout operation.
 *
 * <p>
 * If the method returns false, "logout" link will not be displayed. This is useful
 * when authentication doesn't require an explicit login activity (such as NTLM authentication
 * or Kerberos authentication, where Hudson has no ability to log off the current user.)
 *
 * <p>
 * By default, this method returns true.
 *
 * @since 1.307
 */
public boolean canLogOut() {
    return true;
}
/**
 * Controls where the user is sent to after a logout. By default, it's the top page
 * of Hudson, but you can return arbitrary URL.
 *
 * @param req
 *      {@link StaplerRequest} that represents the current request. Primarily so that
 *      you can get the context path. By the time this method is called, the session
 *      is already invalidated. Never null.
 * @param auth
 *      The {@link Authentication} object that represents the user that was logging in.
 *      This parameter allows you to redirect people to different pages depending on who they are.
 * @return
 *      never null.
 * @since 1.314
 * @see #doLogout(StaplerRequest, StaplerResponse)
 */
protected String getPostLogOutUrl(StaplerRequest req, Authentication auth) {
    // Default destination: the application's top page.
    String contextPath = req.getContextPath();
    return contextPath + "/";
}
/**
 * Returns the configured {@link CaptchaSupport}, or {@code null} if none was set.
 */
public CaptchaSupport getCaptchaSupport() {
    return this.captchaSupport;
}
/**
 * Sets the {@link CaptchaSupport} used for user sign-up on this realm.
 *
 * @param captchaSupport the captcha implementation; may be {@code null}.
 */
public void setCaptchaSupport(CaptchaSupport captchaSupport) {
    this.captchaSupport = captchaSupport;
}
/**
 * Returns all registered {@link CaptchaSupport} descriptors.
 */
public List<Descriptor<CaptchaSupport>> getCaptchaSupportDescriptors() {
    return CaptchaSupport.all();
}
/**
 * Handles the logout processing.
 *
 * <p>
 * The default implementation invalidates the HTTP session, clears the
 * security context, expires the remember-me cookie, and finally redirects
 * the user to the URL given by {@link #getPostLogOutUrl(StaplerRequest, Authentication)}.
 *
 * @since 1.314
 */
public void doLogout(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
    HttpSession httpSession = req.getSession(false);
    if (httpSession != null) {
        httpSession.invalidate();
    }
    // Capture who was logged in before wiping the context, so the
    // post-logout URL can still depend on the user.
    Authentication who = SecurityContextHolder.getContext().getAuthentication();
    SecurityContextHolder.clearContext();

    // Expire the remember-me cookie so the browser does not silently
    // re-authenticate the user on the next request.
    Cookie rememberMe = new Cookie(ACEGI_SECURITY_HASHED_REMEMBER_ME_COOKIE_KEY, "");
    rememberMe.setMaxAge(0);
    rememberMe.setSecure(req.isSecure());
    rememberMe.setHttpOnly(true);
    String contextPath = req.getContextPath();
    rememberMe.setPath(contextPath.length() > 0 ? contextPath : "/");
    rsp.addCookie(rememberMe);

    rsp.sendRedirect2(getPostLogOutUrl(req, who));
}
/**
 * Returns true if this {@link SecurityRealm} allows online sign-up.
 * This creates a hyperlink that redirects users to {@code CONTEXT_ROOT/signUp},
 * which will be served by the {@code signup.jelly} view of this class.
 *
 * <p>
 * If the implementation needs to redirect the user to a different URL
 * for signing up, use the following jelly script as {@code signup.jelly}
 *
 * <pre>{@code <xmp>
 * <st:redirect url="http://www.sun.com/" xmlns:st="jelly:stapler"/>
 * </xmp>}</pre>
 */
public boolean allowsSignup() {
    // Use the wildcard-parameterized Class<?> instead of the raw type.
    // Signup is considered supported iff a signup.jelly view resource is
    // packaged next to the concrete realm class.
    Class<?> clz = getClass();
    return clz.getClassLoader().getResource(clz.getName().replace('.', '/') + "/signup.jelly") != null;
}
/**
 * Shortcut for {@link UserDetailsService#loadUserByUsername(String)}.
 *
 * @throws UserMayOrMayNotExistException
 *      If the security realm cannot even tell if the user exists or not.
 * @throws UsernameNotFoundException if the user does not exist.
 * @throws DataAccessException if the backing user store could not be reached.
 * @return
 *      never null.
 */
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException, DataAccessException {
    // Delegates to the UserDetailsService created by createSecurityComponents().
    return getSecurityComponents().userDetails.loadUserByUsername(username);
}
/**
 * If this {@link SecurityRealm} supports a look up of {@link GroupDetails} by their names, override this method
 * to provide the look up.
 *
 * <p>
 * This information, when available, can be used by {@link AuthorizationStrategy}s to improve the UI and
 * error diagnostics for the user.
 *
 * @throws UserMayOrMayNotExistException always, in this default implementation:
 *         the base class cannot determine group existence.
 */
public GroupDetails loadGroupByGroupname(String groupname) throws UsernameNotFoundException, DataAccessException {
    throw new UserMayOrMayNotExistException(groupname);
}
/**
 * If this {@link SecurityRealm} supports a look up of {@link GroupDetails} by their names, override this method
 * to provide the look up.
 * <p>
 * This information, when available, can be used by {@link AuthorizationStrategy}s to improve the UI and
 * error diagnostics for the user.
 *
 * @param groupname the name of the group to fetch
 * @param fetchMembers if {@code true} then try and fetch the members of the group if it exists. Trying does not
 * imply that the members will be fetched and {@link hudson.security.GroupDetails#getMembers()}
 * may still return {@code null}
 * @throws UserMayOrMayNotExistException if no conclusive result could be determined regarding the group existence.
 * @throws UsernameNotFoundException if the group does not exist.
 * @throws DataAccessException if the backing security realm could not be connected to.
 * @since 1.549
 */
public GroupDetails loadGroupByGroupname(String groupname, boolean fetchMembers)
    throws UsernameNotFoundException, DataAccessException {
    // Default implementation ignores fetchMembers and defers to the
    // single-argument overload; overriders may honor the flag.
    return loadGroupByGroupname(groupname);
}
/**
 * Starts the user registration process for a new user that has the given verified identity.
 *
 * <p>
 * If the user logs in through a {@link FederatedLoginService}, verified that the current user
 * owns an {@linkplain FederatedIdentity identity}, but no existing user account has claimed that identity,
 * then this method is invoked.
 *
 * <p>
 * The expected behaviour is to confirm that the user would like to create a new account, and
 * associate this federated identity to the newly created account (via {@link FederatedIdentity#addToCurrentUser()}.
 *
 * @throws UnsupportedOperationException
 * If this implementation doesn't support the signup through this mechanism.
 * This is the default implementation.
 *
 * @since 1.394
 */
public HttpResponse commenceSignup(FederatedIdentity identity) {
    // Default: federated sign-up is not supported; realms that do must override.
    throw new UnsupportedOperationException();
}
/**
 * Generates a captcha image for the current session and writes it to the
 * response as a non-cacheable PNG. Does nothing when no {@code captchaSupport}
 * is configured.
 */
public final void doCaptcha(StaplerRequest req, StaplerResponse rsp) throws IOException {
    if (captchaSupport == null) {
        return; // no captcha provider configured; nothing to render
    }
    String sessionId = req.getSession().getId();
    rsp.setContentType("image/png");
    // source: https://stackoverflow.com/a/3414217
    rsp.setHeader("Cache-Control", "no-cache, no-store, must-revalidate");
    rsp.setHeader("Pragma", "no-cache");
    rsp.setHeader("Expires", "0");
    captchaSupport.generateImage(sessionId, rsp.getOutputStream());
}
/**
 * Validates the captcha text entered by the user against the image previously
 * generated for this session. Always succeeds when no captcha support is set.
 */
protected final boolean validateCaptcha(String text) {
    if (captchaSupport == null) {
        // If no Captcha Support then bogus validation always returns true
        return true;
    }
    String sessionId = Stapler.getCurrentRequest().getSession().getId();
    return captchaSupport.validateCaptcha(sessionId, text);
}
/**
 * Picks up the instance of the given type from the spring context.
 * If there are multiple beans of the same type or if there are none,
 * this method treats that as an {@link IllegalArgumentException}.
 *
 * This method is intended to be used to pick up a Acegi object from
 * spring once the bean definition file is parsed.
 *
 * @param type bean type to look up; exactly one bean of this type must exist
 * @param context parsed spring application context to search
 * @throws IllegalArgumentException if zero or more than one matching bean exists
 */
public static <T> T findBean(Class<T> type, ApplicationContext context) {
    // Parameterize the map by the requested type instead of using the raw type.
    Map<String, T> m = context.getBeansOfType(type);
    switch(m.size()) {
    case 0:
        throw new IllegalArgumentException("No beans of "+type+" are defined");
    case 1:
        // type.cast is kept as a defensive runtime check of the single match.
        return type.cast(m.values().iterator().next());
    default:
        throw new IllegalArgumentException("Multiple beans of "+type+" are defined: "+m);
    }
}
/**
 * Holder for the SecurityComponents.
 */
private transient SecurityComponents securityComponents;

/**
 * Use this function to get the security components, without necessarily
 * recreating them: the components are built lazily on first access and
 * cached for subsequent calls.
 */
public synchronized SecurityComponents getSecurityComponents() {
    if (securityComponents == null) {
        securityComponents = createSecurityComponents();
    }
    return securityComponents;
}
/**
 * Creates {@link Filter} that all the incoming HTTP requests will go through
 * for authentication.
 *
 * <p>
 * The default implementation uses {@link #getSecurityComponents()} and builds
 * a standard filter chain from /WEB-INF/security/SecurityFilters.groovy.
 * But subclasses can override this to completely change the filter sequence.
 *
 * <p>
 * For other plugins that want to contribute {@link Filter}, see
 * {@link PluginServletFilter}.
 *
 * @since 1.271
 */
public Filter createFilter(FilterConfig filterConfig) {
    LOGGER.entering(SecurityRealm.class.getName(), "createFilter");
    // Expose this realm and its security components as variables the Groovy
    // script can reference while assembling the chain.
    Binding binding = new Binding();
    SecurityComponents sc = getSecurityComponents();
    binding.setVariable("securityComponents", sc);
    binding.setVariable("securityRealm",this);
    // Parse the Groovy bean-definition script into an application context and
    // pull out the fully assembled "filter" bean.
    BeanBuilder builder = new BeanBuilder();
    builder.parse(filterConfig.getServletContext().getResourceAsStream("/WEB-INF/security/SecurityFilters.groovy"),binding);
    WebApplicationContext context = builder.createApplicationContext();
    return (Filter) context.getBean("filter");
}
/**
 * Singleton constant that represents "no authentication."
 */
public static final SecurityRealm NO_AUTHENTICATION = new None();

/**
 * Perform a calculation where we should go back after successful login
 *
 * @return Encoded URI where we should go back after successful login
 * or "/" if no way back or an issue occurred
 *
 * @since 2.4
 */
@Restricted(DoNotUse.class)
public static String getFrom() {
    String from = null, returnValue = null;
    final StaplerRequest request = Stapler.getCurrentRequest();
    // Try to obtain a return point either from the Session
    // or from the QueryParameter in this order
    if (request != null
        && request.getSession(false) != null) {
        from = (String) request.getSession().getAttribute("from");
    } else if (request != null) {
        from = request.getParameter("from");
    }
    // If entry point was not found, try to deduce it from the request URI
    // except pages related to login process
    if (from == null
        && request != null
        && request.getRequestURI() != null
        && !request.getRequestURI().equals("/loginError")
        && !request.getRequestURI().equals("/login")) {
        from = request.getRequestURI();
    }
    // If deduced entry point isn't deduced yet or the content is a blank value
    // use the root web point "/" as a fallback
    from = StringUtils.defaultIfBlank(from, "/").trim();
    // Encode the return value
    try {
        returnValue = java.net.URLEncoder.encode(from, "UTF-8");
    } catch (UnsupportedEncodingException e) { } // cannot happen for UTF-8; blank fallback below covers it anyway
    // Return encoded value or at least "/" in the case exception occurred during encode()
    // or if the encoded content is blank value
    return StringUtils.isBlank(returnValue) ? "/" : returnValue;
}
private static class None extends SecurityRealm {
    public SecurityComponents createSecurityComponents() {
        // An authentication manager that accepts whatever it is given, paired
        // with a user lookup that never finds anyone: effectively "no security".
        return new SecurityComponents(new AuthenticationManager() {
            public Authentication authenticate(Authentication authentication) {
                return authentication;
            }
        }, new UserDetailsService() {
            public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException, DataAccessException {
                throw new UsernameNotFoundException(username);
            }
        });
    }
    /**
     * This special instance is not configurable explicitly,
     * so it doesn't have a descriptor.
     */
    @Override
    public Descriptor<SecurityRealm> getDescriptor() {
        return null;
    }
    /**
     * There's no group.
     */
    @Override
    public GroupDetails loadGroupByGroupname(String groupname) throws UsernameNotFoundException, DataAccessException {
        throw new UsernameNotFoundException(groupname);
    }
    /**
     * We don't need any filter for this {@link SecurityRealm}.
     */
    @Override
    public Filter createFilter(FilterConfig filterConfig) {
        return new ChainedServletFilter();
    }
    /**
     * Maintain singleton semantics.
     */
    private Object readResolve() {
        return NO_AUTHENTICATION;
    }
}
/**
 * Just a tuple so that we can create various inter-related security related objects and
 * return them all at once.
 *
 * <p>
 * None of the fields are ever null.
 *
 * @see SecurityRealm#createSecurityComponents()
 */
public static final class SecurityComponents {
    public final AuthenticationManager manager;
    public final UserDetailsService userDetails;
    public final RememberMeServices rememberMe;
    public SecurityComponents() {
        // we use AuthenticationManagerProxy here just as an implementation that fails all the time,
        // not as a proxy. No one is supposed to use this as a proxy.
        this(new AuthenticationManagerProxy());
    }
    public SecurityComponents(AuthenticationManager manager) {
        // we use UserDetailsServiceProxy here just as an implementation that fails all the time,
        // not as a proxy. No one is supposed to use this as a proxy.
        this(manager,new UserDetailsServiceProxy());
    }
    public SecurityComponents(AuthenticationManager manager, UserDetailsService userDetails) {
        this(manager,userDetails,createRememberMeService(userDetails));
    }
    public SecurityComponents(AuthenticationManager manager, UserDetailsService userDetails, RememberMeServices rememberMe) {
        // All three components are required; the convenience constructors above
        // fill in default implementations for the missing pieces.
        assert manager!=null && userDetails!=null && rememberMe!=null;
        this.manager = manager;
        this.userDetails = userDetails;
        this.rememberMe = rememberMe;
    }
    @SuppressWarnings("deprecation")
    private static RememberMeServices createRememberMeService(UserDetailsService uds) {
        // create our default TokenBasedRememberMeServices, which depends on the availability of the secret key
        TokenBasedRememberMeServices2 rms = new TokenBasedRememberMeServices2();
        rms.setUserDetailsService(uds);
        /*
        TokenBasedRememberMeServices needs to be used in conjunction with RememberMeAuthenticationProvider,
        and both needs to use the same key (this is a reflection of a poor design in Acegi Security, if you ask me)
        and various security plugins have its own groovy script that configures them.
        So if we change this, it creates a painful situation for those plugins by forcing them to choose
        to work with earlier version of Jenkins or newer version of Jenkins, and not both.
        So we keep this here.
        */
        rms.setKey(Jenkins.getInstance().getSecretKey());
        rms.setParameter("remember_me"); // this is the form field name in login.jelly
        return rms;
    }
}
/**
 * All registered {@link SecurityRealm} implementations.
 *
 * @deprecated as of 1.286
 * Use {@link #all()} for read access, and use {@link Extension} for registration.
 */
@Deprecated
public static final DescriptorList<SecurityRealm> LIST = new DescriptorList<SecurityRealm>(SecurityRealm.class);
/**
 * Returns all the registered {@link SecurityRealm} descriptors.
 */
public static DescriptorExtensionList<SecurityRealm,Descriptor<SecurityRealm>> all() {
    return Jenkins.getInstance().<SecurityRealm,Descriptor<SecurityRealm>>getDescriptorList(SecurityRealm.class);
}
// Class-wide logger, also used by createFilter() tracing.
private static final Logger LOGGER = Logger.getLogger(SecurityRealm.class.getName());
/**
 * {@link GrantedAuthority} that represents the built-in "authenticated" role, which is granted to
 * anyone non-anonymous.
 */
public static final GrantedAuthority AUTHENTICATED_AUTHORITY = new GrantedAuthorityImpl("authenticated");
}
| |
/*
*
* * Licensed to the Apache Software Foundation (ASF) under one or more
* * contributor license agreements. See the NOTICE file distributed with
* * this work for additional information regarding copyright ownership.
* * The ASF licenses this file to You under the Apache License, Version 2.0
* * (the "License"); you may not use this file except in compliance with
* * the License. You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.apache.ignite.internal.commandline;
import java.util.Arrays;
import junit.framework.TestCase;
import org.apache.ignite.internal.visor.tx.VisorTxProjection;
import org.apache.ignite.internal.visor.tx.VisorTxSortOrder;
import org.apache.ignite.internal.visor.tx.VisorTxTaskArg;
import static java.util.Arrays.asList;
import static org.apache.ignite.internal.commandline.CommandHandler.DFLT_HOST;
import static org.apache.ignite.internal.commandline.CommandHandler.DFLT_PORT;
/**
* Tests Command Handler parsing arguments.
*/
/**
 * Tests Command Handler parsing arguments.
 */
public class CommandHandlerParsingTest extends TestCase {
    /**
     * Test parsing and validation for user and password arguments.
     */
    public void testParseAndValidateUserAndPassword() {
        CommandHandler hnd = new CommandHandler();

        for (Command cmd : Command.values()) {
            // --user without a value is rejected.
            try {
                hnd.parseAndValidate(asList("--user"));

                fail("expected exception: Expected user name");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }

            // --password without a value is rejected.
            try {
                hnd.parseAndValidate(asList("--password"));

                fail("expected exception: Expected password");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }

            // A user name without a password is rejected.
            try {
                hnd.parseAndValidate(asList("--user", "testUser", cmd.text()));

                fail("expected exception: Both user and password should be specified");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }

            // A password without a user name is rejected.
            try {
                hnd.parseAndValidate(asList("--password", "testPass", cmd.text()));

                fail("expected exception: Both user and password should be specified");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }

            // Both credentials supplied: parsing succeeds and values are preserved.
            Arguments args = hnd.parseAndValidate(asList("--user", "testUser", "--password", "testPass", cmd.text()));

            assertEquals("testUser", args.user());
            assertEquals("testPass", args.password());
            assertEquals(cmd, args.command());
        }
    }

    /**
     * Tests connection settings arguments.
     */
    public void testConnectionSettings() {
        CommandHandler hnd = new CommandHandler();

        for (Command cmd : Command.values()) {
            // Defaults apply when no connection options are given.
            Arguments args = hnd.parseAndValidate(asList(cmd.text()));

            assertEquals(cmd, args.command());
            assertEquals(DFLT_HOST, args.host());
            assertEquals(DFLT_PORT, args.port());

            // Explicit connection options override the defaults.
            args = hnd.parseAndValidate(asList("--port", "12345", "--host", "test-host", "--ping-interval", "5000",
                "--ping-timeout", "40000", cmd.text()));

            assertEquals(cmd, args.command());
            assertEquals("test-host", args.host());
            assertEquals("12345", args.port());
            assertEquals(5000, args.pingInterval());
            assertEquals(40000, args.pingTimeout());

            // A non-numeric port value is rejected.
            try {
                hnd.parseAndValidate(asList("--port", "wrong-port", cmd.text()));

                fail("expected exception: Invalid value for port:");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }

            // A negative ping interval is rejected.
            try {
                hnd.parseAndValidate(asList("--ping-interval", "-10", cmd.text()));

                fail("expected exception: Ping interval must be specified");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }

            // A negative ping timeout is rejected.
            try {
                hnd.parseAndValidate(asList("--ping-timeout", "-20", cmd.text()));

                fail("expected exception: Ping timeout must be specified");
            }
            catch (IllegalArgumentException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Tests parsing of dump transaction arguments.
     */
    public void testTransactionArguments() {
        CommandHandler hnd = new CommandHandler();
        Arguments args;

        // A bare --tx is valid: all filter arguments are optional.
        args = hnd.parseAndValidate(asList("--tx"));

        // Each filter keyword requires a valid value.
        try {
            hnd.parseAndValidate(asList("--tx", "minDuration"));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        try {
            hnd.parseAndValidate(asList("--tx", "minDuration", "-1"));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        try {
            hnd.parseAndValidate(asList("--tx", "minSize"));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        try {
            hnd.parseAndValidate(asList("--tx", "minSize", "-1"));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        try {
            hnd.parseAndValidate(asList("--tx", "label"));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        try {
            hnd.parseAndValidate(asList("--tx", "label", "tx123["));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        // "servers" and "nodes" cannot be combined.
        try {
            hnd.parseAndValidate(asList("--tx", "servers", "nodes", "1,2,3"));

            fail("Expected exception");
        }
        catch (IllegalArgumentException ignored) {
        }

        // Full argument set targeting server nodes; minDuration is given in
        // seconds and stored as milliseconds.
        args = hnd.parseAndValidate(asList("--tx", "minDuration", "120", "minSize", "10", "limit", "100", "order", "SIZE",
            "servers"));

        VisorTxTaskArg arg = args.transactionArguments();

        assertEquals(Long.valueOf(120 * 1000L), arg.getMinDuration());
        assertEquals(Integer.valueOf(10), arg.getMinSize());
        assertEquals(Integer.valueOf(100), arg.getLimit());
        assertEquals(VisorTxSortOrder.SIZE, arg.getSortOrder());
        assertEquals(VisorTxProjection.SERVER, arg.getProjection());

        // Same shape, targeting client nodes.
        args = hnd.parseAndValidate(asList("--tx", "minDuration", "130", "minSize", "1", "limit", "60", "order", "DURATION",
            "clients"));

        arg = args.transactionArguments();

        assertEquals(Long.valueOf(130 * 1000L), arg.getMinDuration());
        assertEquals(Integer.valueOf(1), arg.getMinSize());
        assertEquals(Integer.valueOf(60), arg.getLimit());
        assertEquals(VisorTxSortOrder.DURATION, arg.getSortOrder());
        assertEquals(VisorTxProjection.CLIENT, arg.getProjection());

        // Explicit node list: no projection; consistent ids parsed from the CSV.
        args = hnd.parseAndValidate(asList("--tx", "nodes", "1,2,3"));

        arg = args.transactionArguments();

        assertNull(arg.getProjection());
        assertEquals(Arrays.asList("1", "2", "3"), arg.getConsistentIds());
    }
}
| |
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.book;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import br.com.intelligence.R;
import android.app.Activity;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.webkit.CookieManager;
import android.webkit.CookieSyncManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import com.google.zxing.client.android.HttpHelper;
import com.google.zxing.client.android.Intents;
import com.google.zxing.client.android.LocaleManager;
/**
* Uses Google Book Search to find a word or phrase in the requested book.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
public final class SearchBookContentsActivity extends Activity {
private static final String TAG = SearchBookContentsActivity.class
.getSimpleName();
private static final Pattern TAG_PATTERN = Pattern.compile("\\<.*?\\>");
private static final Pattern LT_ENTITY_PATTERN = Pattern.compile("<");
private static final Pattern GT_ENTITY_PATTERN = Pattern.compile(">");
private static final Pattern QUOTE_ENTITY_PATTERN = Pattern
.compile("'");
private static final Pattern QUOT_ENTITY_PATTERN = Pattern
.compile(""");
private String isbn;
private EditText queryTextView;
private View queryButton;
private ListView resultListView;
private TextView headerView;
private AsyncTask<String, ?, ?> networkTask;
private final Button.OnClickListener buttonListener = new Button.OnClickListener() {
@Override
public void onClick(View view) {
launchSearch();
}
};
private final View.OnKeyListener keyListener = new View.OnKeyListener() {
@Override
public boolean onKey(View view, int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_ENTER
&& event.getAction() == KeyEvent.ACTION_DOWN) {
launchSearch();
return true;
}
return false;
}
};
String getISBN() {
return isbn;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
// Make sure that expired cookies are removed on launch.
CookieSyncManager.createInstance(this);
CookieManager.getInstance().removeExpiredCookie();
Intent intent = getIntent();
if (intent == null
|| !intent.getAction()
.equals(Intents.SearchBookContents.ACTION)) {
finish();
return;
}
isbn = intent.getStringExtra(Intents.SearchBookContents.ISBN);
if (LocaleManager.isBookSearchUrl(isbn)) {
setTitle(getString(R.string.sbc_name));
} else {
setTitle(getString(R.string.sbc_name) + ": ISBN " + isbn);
}
setContentView(R.layout.search_book_contents);
queryTextView = (EditText) findViewById(R.id.query_text_view);
String initialQuery = intent
.getStringExtra(Intents.SearchBookContents.QUERY);
if (initialQuery != null && !initialQuery.isEmpty()) {
// Populate the search box but don't trigger the search
queryTextView.setText(initialQuery);
}
queryTextView.setOnKeyListener(keyListener);
queryButton = findViewById(R.id.query_button);
queryButton.setOnClickListener(buttonListener);
resultListView = (ListView) findViewById(R.id.result_list_view);
LayoutInflater factory = LayoutInflater.from(this);
headerView = (TextView) factory.inflate(
R.layout.search_book_contents_header, resultListView, false);
resultListView.addHeaderView(headerView);
}
@Override
protected void onResume() {
super.onResume();
queryTextView.selectAll();
}
@Override
protected void onPause() {
AsyncTask<?, ?, ?> oldTask = networkTask;
if (oldTask != null) {
oldTask.cancel(true);
networkTask = null;
}
super.onPause();
}
private void launchSearch() {
String query = queryTextView.getText().toString();
if (query != null && !query.isEmpty()) {
AsyncTask<?, ?, ?> oldTask = networkTask;
if (oldTask != null) {
oldTask.cancel(true);
}
networkTask = new NetworkTask();
networkTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR,
query, isbn);
headerView.setText(R.string.msg_sbc_searching_book);
resultListView.setAdapter(null);
queryTextView.setEnabled(false);
queryButton.setEnabled(false);
}
}
private final class NetworkTask extends
AsyncTask<String, Object, JSONObject> {
@Override
protected JSONObject doInBackground(String... args) {
try {
// These return a JSON result which describes if and where the
// query was found. This API may
// break or disappear at any time in the future. Since this is
// an API call rather than a
// website, we don't use LocaleManager to change the TLD.
String theQuery = args[0];
String theIsbn = args[1];
String uri;
if (LocaleManager.isBookSearchUrl(theIsbn)) {
int equals = theIsbn.indexOf('=');
String volumeId = theIsbn.substring(equals + 1);
uri = "http://www.google.com/books?id=" + volumeId
+ "&jscmd=SearchWithinVolume2&q=" + theQuery;
} else {
uri = "http://www.google.com/books?vid=isbn" + theIsbn
+ "&jscmd=SearchWithinVolume2&q=" + theQuery;
}
CharSequence content = HttpHelper.downloadViaHttp(uri,
HttpHelper.ContentType.JSON);
return new JSONObject(content.toString());
} catch (IOException ioe) {
Log.w(TAG, "Error accessing book search", ioe);
return null;
} catch (JSONException je) {
Log.w(TAG, "Error accessing book search", je);
return null;
}
}
@Override
protected void onPostExecute(JSONObject result) {
if (result == null) {
headerView.setText(R.string.msg_sbc_failed);
} else {
handleSearchResults(result);
}
queryTextView.setEnabled(true);
queryTextView.selectAll();
queryButton.setEnabled(true);
}
// Currently there is no way to distinguish between a query which had no
// results and a book
// which is not searchable - both return zero results.
private void handleSearchResults(JSONObject json) {
try {
int count = json.getInt("number_of_results");
headerView.setText(getString(R.string.msg_sbc_results) + " : "
+ count);
if (count > 0) {
JSONArray results = json.getJSONArray("search_results");
SearchBookContentsResult.setQuery(queryTextView.getText()
.toString());
List<SearchBookContentsResult> items = new ArrayList<SearchBookContentsResult>(
count);
for (int x = 0; x < count; x++) {
items.add(parseResult(results.getJSONObject(x)));
}
resultListView
.setOnItemClickListener(new BrowseBookListener(
SearchBookContentsActivity.this, items));
resultListView.setAdapter(new SearchBookContentsAdapter(
SearchBookContentsActivity.this, items));
} else {
String searchable = json.optString("searchable");
if ("false".equals(searchable)) {
headerView
.setText(R.string.msg_sbc_book_not_searchable);
}
resultListView.setAdapter(null);
}
} catch (JSONException e) {
Log.w(TAG, "Bad JSON from book search", e);
resultListView.setAdapter(null);
headerView.setText(R.string.msg_sbc_failed);
}
}
// Available fields: page_id, page_number, snippet_text
private SearchBookContentsResult parseResult(JSONObject json) {
String pageId;
String pageNumber;
String snippet;
try {
pageId = json.getString("page_id");
pageNumber = json.optString("page_number");
snippet = json.optString("snippet_text");
} catch (JSONException e) {
Log.w(TAG, e);
// Never seen in the wild, just being complete.
return new SearchBookContentsResult(
getString(R.string.msg_sbc_no_page_returned), "", "",
false);
}
if (pageNumber == null || pageNumber.isEmpty()) {
// This can happen for text on the jacket, and possibly other
// reasons.
pageNumber = "";
} else {
pageNumber = getString(R.string.msg_sbc_page) + ' '
+ pageNumber;
}
boolean valid = snippet != null && !snippet.isEmpty();
if (valid) {
// Remove all HTML tags and encoded characters.
snippet = TAG_PATTERN.matcher(snippet).replaceAll("");
snippet = LT_ENTITY_PATTERN.matcher(snippet).replaceAll("<");
snippet = GT_ENTITY_PATTERN.matcher(snippet).replaceAll(">");
snippet = QUOTE_ENTITY_PATTERN.matcher(snippet).replaceAll("'");
snippet = QUOT_ENTITY_PATTERN.matcher(snippet).replaceAll("\"");
} else {
snippet = '(' + getString(R.string.msg_sbc_snippet_unavailable) + ')';
}
return new SearchBookContentsResult(pageId, pageNumber, snippet,
valid);
}
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.event.BuckEventListener;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import com.martiansoftware.nailgun.NGContext;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
@RunWith(Parameterized.class)
public class CxxPreprocessAndCompileIntegrationTest {
/**
 * Parameter sets for the runner: every test is executed once per preprocess
 * mode, covering combined, separate, and piped preprocessing.
 */
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> data() {
    return ImmutableList.of(
        new Object[] {CxxPreprocessMode.COMBINED},
        new Object[] {CxxPreprocessMode.SEPARATE},
        new Object[] {CxxPreprocessMode.PIPED});
}
// Preprocess mode under test; injected by the Parameterized runner from data().
@Parameterized.Parameter
public CxxPreprocessMode mode;

@Rule
public DebuggableTemporaryFolder tmp = new DebuggableTemporaryFolder();

// Scratch project populated from the "step_test" scenario in setUp().
private ProjectWorkspace workspace;

@Before
public void setUp() throws IOException {
    // Materialize the test scenario and force the preprocess mode under test
    // (plus -g debug flags, which embed paths relevant to the sanitize tests)
    // via the workspace's .buckconfig.
    workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "step_test", tmp);
    workspace.setUp();
    workspace.writeContentsToPath(
        "[cxx]\n" +
        " preprocess_mode = " + mode.toString().toLowerCase() + "\n" +
        " asflags = -g\n" +
        " cppflags = -g\n" +
        " cflags = -g\n" +
        " cxxppflags = -g\n" +
        " cxxflags = -g\n" +
        "[build]\n" +
        " depfiles = disabled\n",
        ".buckconfig");
}
@Test
public void sanitizeWorkingDirectory() throws IOException {
    // Build a static library and assert that the archive embeds no absolute
    // path to the build root.
    BuildTarget target = BuildTargetFactory.newInstance("//:simple#default,static");
    workspace.runBuckBuild(target.getFullyQualifiedName()).assertSuccess();
    Path lib = workspace.getPath(BuildTargets.getGenPath(target, "%s/libsimple.a"));
    // ISO-8859-1 maps every byte to a char, so arbitrary binary content
    // survives the byte-to-String conversion for the substring check.
    String contents =
        Files.asByteSource(lib.toFile())
            .asCharSource(Charsets.ISO_8859_1)
            .read();
    assertFalse(lib.toString(), contents.contains(tmp.getRootPath().toString()));
}
@Test
public void sanitizeWorkingDirectoryWhenBuildingAssembly() throws IOException {
    // Same check as sanitizeWorkingDirectory(), but for a target built from
    // assembly sources.
    BuildTarget target = BuildTargetFactory.newInstance("//:simple_assembly#default,static");
    workspace.runBuckBuild(target.getFullyQualifiedName()).assertSuccess();
    Path lib = workspace.getPath(BuildTargets.getGenPath(target, "%s/libsimple_assembly.a"));
    // ISO-8859-1 preserves every byte of the binary archive for the substring check.
    String contents =
        Files.asByteSource(lib.toFile())
            .asCharSource(Charsets.ISO_8859_1)
            .read();
    assertFalse(lib.toString(), contents.contains(tmp.getRootPath().toString()));
}
@Test
public void sanitizeSymlinkedWorkingDirectory() throws IOException {
    TemporaryFolder folder = new TemporaryFolder();
    folder.create();
    // try/finally guarantees the scratch folder is removed even when the build
    // or one of the assertions below fails (the original leaked it on failure).
    try {
        // Setup up a symlink to our working directory.
        Path symlinkedRoot = folder.getRoot().toPath().resolve("symlinked-root");
        java.nio.file.Files.createSymbolicLink(symlinkedRoot, tmp.getRootPath());

        // Run the build, setting PWD to the above symlink. Typically, this causes compilers to use
        // the symlinked directory, even though it's not the right project root.
        Map<String, String> envCopy = Maps.newHashMap(System.getenv());
        envCopy.put("PWD", symlinkedRoot.toString());
        BuildTarget target = BuildTargetFactory.newInstance("//:simple#default,static");
        workspace
            .runBuckCommandWithEnvironmentAndContext(
                tmp.getRootPath(),
                Optional.<NGContext>absent(),
                Optional.<BuckEventListener>absent(),
                Optional.of(ImmutableMap.copyOf(envCopy)),
                "build",
                target.getFullyQualifiedName())
            .assertSuccess();

        // Verify that we still sanitized this path correctly.
        Path lib = workspace.getPath(BuildTargets.getGenPath(target, "%s/libsimple.a"));
        String contents =
            Files.asByteSource(lib.toFile())
                .asCharSource(Charsets.ISO_8859_1)
                .read();
        assertFalse(lib.toString(), contents.contains(tmp.getRootPath().toString()));
        assertFalse(lib.toString(), contents.contains(symlinkedRoot.toString()));
    } finally {
        folder.delete();
    }
}
@Test
public void inputBasedRuleKeyAvoidsRerunningIfGeneratedSourceDoesNotChange() throws Exception {
    CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
        new CxxBuckConfig(FakeBuckConfig.builder().build()));
    BuildTarget target = BuildTargetFactory.newInstance(
        workspace.getDestPath(),
        "//:binary_using_generated_source");
    String unusedGenruleInput = "unused.dat";
    BuildTarget genrule = BuildTargetFactory.newInstance("//:gensource");
    String sourceName = "bar.cpp";
    // Derive the preprocess/compile rule targets the build is expected to create
    // for the generated source.
    CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactoryHelper.of(
        workspace.getDestPath(),
        target,
        cxxPlatform);
    BuildTarget preprocessTarget =
        cxxSourceRuleFactory.createPreprocessBuildTarget(sourceName, AbstractCxxSource.Type.CXX);
    BuildTarget compileTarget = cxxSourceRuleFactory.createCompileBuildTarget(sourceName);
    // Run the build and verify that the C++ source was (preprocessed and) compiled.
    workspace.runBuckBuild(target.toString()).assertSuccess();
    // A distinct preprocess rule only exists in SEPARATE mode.
    if (mode == CxxPreprocessMode.SEPARATE) {
        assertThat(
            workspace.getBuildLog().getLogEntry(preprocessTarget).getSuccessType(),
            equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));
    }
    assertThat(
        workspace.getBuildLog().getLogEntry(compileTarget).getSuccessType(),
        equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));
    // Now modify the unused genrule input.
    workspace.writeContentsToPath(
        "SOMETHING ELSE",
        unusedGenruleInput);
    // Run the build again and verify that got a matching input-based rule key, and therefore
    // didn't recompile.
    workspace.runBuckBuild(target.toString()).assertSuccess();
    // Verify that the genrule actually re-ran.
    assertThat(
        workspace.getBuildLog().getLogEntry(genrule).getSuccessType(),
        equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));
    // Verify that the (preprocess and) compile rules aren't re-run.
    if (mode == CxxPreprocessMode.SEPARATE) {
        assertThat(
            workspace.getBuildLog().getLogEntry(preprocessTarget).getSuccessType(),
            equalTo(Optional.of(BuildRuleSuccessType.MATCHING_INPUT_BASED_RULE_KEY)));
    }
    assertThat(
        workspace.getBuildLog().getLogEntry(compileTarget).getSuccessType(),
        equalTo(Optional.of(BuildRuleSuccessType.MATCHING_INPUT_BASED_RULE_KEY)));
}
/**
 * Same as the generated-source variant, but for a generated header: the genheader genrule
 * re-runs after its unused input changes, while the (preprocess and) compile rules should get
 * matching input-based rule keys and be skipped.
 */
@Test
public void inputBasedRuleKeyAvoidsRerunningIfGeneratedHeaderDoesNotChange() throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_using_generated_header");
  String unusedGenruleInput = "unused.dat";
  BuildTarget genrule = BuildTargetFactory.newInstance("//:genheader");
  String sourceName = "foo.cpp";
  CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactoryHelper.of(
      workspace.getDestPath(),
      target,
      cxxPlatform);
  BuildTarget preprocessTarget =
      cxxSourceRuleFactory.createPreprocessBuildTarget(sourceName, AbstractCxxSource.Type.CXX);
  BuildTarget compileTarget =
      cxxSourceRuleFactory.createCompileBuildTarget(sourceName);

  // Run the build and verify that the C++ source was (preprocessed and) compiled.
  workspace.runBuckBuild(target.toString()).assertSuccess();
  // The standalone preprocess rule only exists in SEPARATE mode.
  if (mode == CxxPreprocessMode.SEPARATE) {
    assertThat(
        workspace.getBuildLog().getLogEntry(preprocessTarget).getSuccessType(),
        equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));
  }
  assertThat(
      workspace.getBuildLog().getLogEntry(compileTarget).getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Now modify the unused genrule input.
  workspace.writeContentsToPath(
      "SOMETHING ELSE",
      unusedGenruleInput);

  // Run the build again and verify that got a matching input-based rule key, and therefore
  // didn't recompile.
  workspace.runBuckBuild(target.toString()).assertSuccess();

  // Verify that the genrule actually re-ran.
  assertThat(
      workspace.getBuildLog().getLogEntry(genrule).getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Verify that the (preprocess and) compile rules aren't re-run.
  if (mode == CxxPreprocessMode.SEPARATE) {
    assertThat(
        workspace.getBuildLog().getLogEntry(preprocessTarget).getSuccessType(),
        equalTo(Optional.of(BuildRuleSuccessType.MATCHING_INPUT_BASED_RULE_KEY)));
  }
  assertThat(
      workspace.getBuildLog().getLogEntry(compileTarget).getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.MATCHING_INPUT_BASED_RULE_KEY)));
}
/**
 * Modifying a header that is tracked but never actually included must not cause a recompile:
 * the compile rule should get a matching input-based rule key even though the default rule
 * keys differ.
 */
@Test
public void inputBasedRuleKeyAvoidsRecompilingAfterChangeToUnusedHeader() throws Exception {
  // This test is only meant to check the separate flow, as we want to avoid recompiling if only
  // unused headers have changed.
  assumeTrue(
      "only tests \"separate\" preprocess mode",
      mode == CxxPreprocessMode.SEPARATE);

  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_unused_header");
  CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactoryHelper.of(
      workspace.getDestPath(),
      target,
      cxxPlatform);
  String unusedHeaderName = "unused_header.h";
  String sourceName = "source.cpp";
  BuildTarget compileTarget = cxxSourceRuleFactory.createCompileBuildTarget(sourceName);

  // Run the build and verify that the C++ source was compiled.
  // FIX: assert the build succeeded (the result was previously discarded, so a failed build
  // would have made the log-entry assertions below meaningless) — matches the sibling tests.
  workspace.runBuckBuild(target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry = workspace.getBuildLog().getLogEntry(compileTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Now modify the unused header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      unusedHeaderName);

  // Run the build again and verify that got a matching input-based rule key, and therefore
  // didn't recompile.
  workspace.runBuckBuild(target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry = workspace.getBuildLog().getLogEntry(compileTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.MATCHING_INPUT_BASED_RULE_KEY)));

  // Also, make sure the original rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * With build.depfiles=enabled, modifying a header that the source actually includes (via a
 * full path) must change the depfile rule key and force the preprocess rule to rebuild.
 */
@Test
public void depfileBasedRuleKeyRebuildsAfterChangeToUsedHeader() throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_used_full_header");
  String usedHeaderName = "source_full_header.h";
  String sourceName = "source_full_header.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Modify the used header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      usedHeaderName);

  // Run the build again and verify that we recompiled as the header caused the depfile rule key
  // to change.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Also, make sure all three rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * Same as {@code depfileBasedRuleKeyRebuildsAfterChangeToUsedHeader}, but the source includes
 * the header via a file-relative path; the depfile must still track it and force a rebuild.
 */
@Test
public void depfileBasedRuleKeyRebuildsAfterChangeToUsedHeaderUsingFileRelativeInclusion()
    throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_used_relative_header");
  String usedHeaderName = "source_relative_header.h";
  String sourceName = "source_relative_header.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Modify the used header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      usedHeaderName);

  // Run the build again and verify that we recompiled as the header caused the depfile rule key
  // to change.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Also, make sure all three rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * Variant where the source lives in a subdirectory and includes a header from its parent
 * directory via a relative path; the depfile must still track it and force a rebuild.
 */
@Test
public void depfileBasedRuleKeyRebuildsAfterChangeToUsedParentHeaderUsingFileRelativeInclusion()
    throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target =
      BuildTargetFactory.newInstance("//:binary_with_used_relative_parent_header");
  String usedHeaderName = "source_relative_parent_header.h";
  String sourceName = "source_relative_parent_header/source.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Modify the used header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      usedHeaderName);

  // Run the build again and verify that we recompiled as the header caused the depfile rule key
  // to change.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Also, make sure all three rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * With build.depfiles=enabled, modifying a header that is NOT listed in the depfile must yield
 * a matching depfile rule key and skip the preprocess rule, even though the default rule keys
 * differ.
 */
@Test
public void depfileBasedRuleKeyAvoidsRecompilingAfterChangeToUnusedHeader() throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_unused_header");
  String unusedHeaderName = "unused_header.h";
  String sourceName = "source.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Now modify the unused header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      unusedHeaderName);

  // Run the build again and verify that got a matching depfile rule key, and therefore
  // didn't recompile.
  workspace.runBuckBuild("--config", "build.depfiles=enabled", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.MATCHING_DEP_FILE_RULE_KEY)));

  // Also, make sure the original rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * With build.depfiles=cache and the dir cache enabled, changing a used header must miss the
 * manifest-based cache after a clean and rebuild locally.
 */
@Test
public void manifestCachingRebuildsAfterChangeToUsedHeader() throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_used_full_header");
  String usedHeaderName = "source_full_header.h";
  String sourceName = "source_full_header.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Enable caching for manifest-based caching.
  workspace.enableDirCache();

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=cache", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Modify the used header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      usedHeaderName);

  // Clean the build directory, so that we need to go to cache.
  workspace.runBuckCommand("clean");

  // Run the build again and verify that we recompiled as the header caused the depfile rule key
  // to change.
  workspace.runBuckBuild("--config", "build.depfiles=cache", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Also, make sure all three rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * Same as {@code manifestCachingRebuildsAfterChangeToUsedHeader}, but the header is included
 * via a file-relative path; a change must still miss the manifest cache and rebuild locally.
 */
@Test
public void manifestCachingRebuildsAfterChangeToUsedHeaderUsingFileRelativeInclusion()
    throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_used_relative_header");
  String usedHeaderName = "source_relative_header.h";
  String sourceName = "source_relative_header.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Enable caching for manifest-based caching.
  workspace.enableDirCache();

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=cache", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Modify the used header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      usedHeaderName);

  // Clean the build directory, so that we need to go to cache.
  workspace.runBuckCommand("clean");

  // Run the build again and verify that we recompiled as the header caused the depfile rule key
  // to change.
  workspace.runBuckBuild("--config", "build.depfiles=cache", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Also, make sure all three rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * With build.depfiles=cache, changing an UNUSED header after a clean must still hit the
 * manifest-based cache (FETCHED_FROM_CACHE_MANIFEST_BASED) even though default rule keys
 * differ.
 */
@Test
public void manifestCachingGetsHitAfterChangeToUnusedHeader() throws Exception {
  CxxPlatform cxxPlatform = DefaultCxxPlatforms.build(
      new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildTarget target = BuildTargetFactory.newInstance("//:binary_with_unused_header");
  String unusedHeaderName = "unused_header.h";
  String sourceName = "source.cpp";
  BuildTarget preprocessTarget =
      getPreprocessTarget(
          cxxPlatform,
          target,
          sourceName,
          AbstractCxxSource.Type.CXX);

  // Enable caching for manifest-based caching.
  workspace.enableDirCache();

  // Run the build and verify that the C++ source was preprocessed.
  workspace.runBuckBuild("--config", "build.depfiles=cache", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry firstRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      firstRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.BUILT_LOCALLY)));

  // Clean the build directory, so that we need to go to cache.
  workspace.runBuckCommand("clean");

  // Now modify the unused header.
  workspace.writeContentsToPath(
      "static inline int newFunction() { return 20; }",
      unusedHeaderName);

  // Run the build again and verify that got a matching depfile rule key, and therefore
  // didn't recompile.
  workspace.runBuckBuild("--config", "build.depfiles=cache", target.toString()).assertSuccess();
  BuckBuildLog.BuildLogEntry secondRunEntry =
      workspace.getBuildLog().getLogEntry(preprocessTarget);
  assertThat(
      secondRunEntry.getSuccessType(),
      equalTo(Optional.of(BuildRuleSuccessType.FETCHED_FROM_CACHE_MANIFEST_BASED)));

  // Also, make sure the original rule keys are actually different.
  assertThat(
      secondRunEntry.getRuleKey(),
      Matchers.not(equalTo(firstRunEntry.getRuleKey())));
}
/**
 * A source referencing files through a parent-directory path must build once package-boundary
 * checking is turned off in the project config.
 */
@Test
public void parentDirectoryReferenceInSource() throws IOException {
  String buckconfig = "\n[project]\n  check_package_boundary = false\n";
  workspace.writeContentsToPath(buckconfig, ".buckconfig");
  workspace.runBuckBuild("//parent_dir_ref:simple#default,static").assertSuccess();
}
/** Smoke test: a target using lang_compiler_flags must build successfully. */
@Test
public void langCompilerFlags() throws IOException {
  workspace.runBuckBuild("//:lang_compiler_flags#default,static").assertSuccess();
}
/**
 * Verifies the C/C++ toolchain entries (cc, cpp, cxx, cxxpp) can be supplied as build targets
 * (//:cc, //:cxx) instead of system paths, with their types forced to "default".
 */
@Test
public void binaryBuildRuleTools() throws IOException {
  workspace.runBuckBuild(
      "-c", "cxx.cc=//:cc",
      "-c", "cxx.cc_type=default",
      "-c", "cxx.cpp=//:cc",
      "-c", "cxx.cpp_type=default",
      "-c", "cxx.cxx=//:cxx",
      "-c", "cxx.cxx_type=default",
      "-c", "cxx.cxxpp=//:cxx",
      "-c", "cxx.cxxpp_type=default",
      "//:simple#default,static")
      .assertSuccess();
}
/** With cxx.untracked_headers=ignore, an untracked header must not fail the build. */
@Test
public void ignoreVerifyHeaders() throws IOException {
  workspace.runBuckBuild("-c", "cxx.untracked_headers=ignore", "//:untracked_header")
      .assertSuccess();
}
/**
 * With cxx.untracked_headers=error, including a header the build does not track must fail and
 * name the offending include on stderr.
 */
@Test
public void errorVerifyHeaders() throws IOException {
  ProjectWorkspace.ProcessResult buildResult =
      workspace.runBuckBuild(
          "-c", "cxx.untracked_headers=error",
          "-c", "cxx.untracked_headers_whitelist=/usr/include/stdc-predef\\.h",
          "//:untracked_header");
  buildResult.assertFailure();
  String expectedError =
      "untracked_header.cpp: included an untracked header \"untracked_header.h\"";
  assertThat(buildResult.getStderr(), Matchers.containsString(expectedError));
}
/**
 * With cxx.untracked_headers=error, a header matching an entry in the whitelist must not fail
 * the build.
 */
@Test
public void whitelistVerifyHeaders() throws IOException {
  String whitelistConfig =
      "cxx.untracked_headers_whitelist=" +
          "/usr/include/stdc-predef\\.h, /usr/local/.*, untracked_.*.h";
  ProjectWorkspace.ProcessResult buildResult =
      workspace.runBuckBuild(
          "-c", "cxx.untracked_headers=error",
          "-c", whitelistConfig,
          "//:untracked_header");
  buildResult.assertSuccess();
}
/**
 * Returns the build target whose log entry reflects preprocessing of {@code source}: the
 * dedicated preprocess target in SEPARATE mode, otherwise the combined compile target.
 */
private BuildTarget getPreprocessTarget(
    CxxPlatform cxxPlatform,
    BuildTarget target,
    String source,
    CxxSource.Type type) {
  CxxSourceRuleFactory factory =
      CxxSourceRuleFactoryHelper.of(workspace.getDestPath(), target, cxxPlatform);
  return mode == CxxPreprocessMode.SEPARATE
      ? factory.createPreprocessBuildTarget(source, type)
      : factory.createCompileBuildTarget(source);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.common.util;
import java.io.IOException;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Base64;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.solr.common.EnumFieldValue;
import org.apache.solr.common.IteratorWriter;
import org.apache.solr.common.MapSerializable;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.PushWriter;
/** Base interface for all text based writers (JSON, XML, and similar textual responses). */
public interface TextWriter extends PushWriter {

  /** Writes {@code val} under {@code name} with escaping enabled (never raw). */
  default void writeVal(String name, Object val) throws IOException {
    writeVal(name, val, false);
  }

  /**
   * Central type dispatch: inspects the runtime type of {@code val} and forwards to the matching
   * write* method. The instanceof chain is ordered roughly most-common-first, with very general
   * types (Map, Iterator, Iterable) kept near the end so they don't shadow more specific
   * handlers; do not reorder casually.
   *
   * @param raw when true, string-like values go through {@link #writeStrRaw} (no quoting or
   *     escaping); the flag is also propagated into nested iterators and arrays
   */
  default void writeVal(String name, Object val, boolean raw) throws IOException {
    // if there get to be enough types, perhaps hashing on the type
    // to get a handler might be faster (but types must be exact to do that...)
    // (see a patch on LUCENE-3041 for inspiration)

    // go in order of most common to least common, however some of the more general types like Map
    // belong towards the end
    if (val == null) {
      writeNull(name);
    } else if (val instanceof CharSequence) {
      if (raw) {
        writeStrRaw(name, val.toString());
      } else {
        writeStr(name, val.toString(), true);
        // micro-optimization... using toString() avoids a cast first
      }
    } else if (val instanceof Number) {
      writeNumber(name, (Number) val);
    } else if (val instanceof Boolean) {
      writeBool(name, (Boolean) val);
    } else if (val instanceof AtomicBoolean) {
      writeBool(name, ((AtomicBoolean) val).get());
    } else if (val instanceof Date) {
      writeDate(name, (Date) val);
    } else if (val instanceof NamedList) {
      writeNamedList(name, (NamedList) val);
    } else if (val instanceof Path) {
      // Paths are serialized in absolute form.
      final String pathStr = ((Path) val).toAbsolutePath().toString();
      if (raw) {
        writeStrRaw(name, pathStr);
      } else {
        writeStr(name, pathStr, true);
      }
    } else if (val instanceof IteratorWriter) {
      writeIterator(name, (IteratorWriter) val, raw);
    } else if (val instanceof MapWriter) {
      writeMap(name, (MapWriter) val);
    } else if (val instanceof MapSerializable) {
      // todo find a better way to reuse the map more efficiently
      writeMap(name, ((MapSerializable) val).toMap(new LinkedHashMap<>()), false, true);
    } else if (val instanceof Map) {
      writeMap(name, (Map) val, false, true);
    } else if (val instanceof Iterator) { // very generic; keep towards the end
      writeArray(name, (Iterator) val, raw);
    } else if (val instanceof Iterable) { // very generic; keep towards the end
      writeArray(name, ((Iterable) val).iterator(), raw);
    } else if (val instanceof Object[]) {
      writeArray(name, (Object[]) val, raw);
    } else if (val instanceof byte[]) {
      byte[] arr = (byte[]) val;
      writeByteArr(name, arr, 0, arr.length);
    } else if (val instanceof EnumFieldValue) {
      if (raw) {
        writeStrRaw(name, val.toString());
      } else {
        writeStr(name, val.toString(), true);
      }
    } else {
      // default... for debugging only. Would be nice to "assert false" ?
      writeStr(name, val.getClass().getName() + ':' + val.toString(), true);
    }
  }

  /**
   * Writes the specified val directly to the backing writer, without wrapping (e.g., in quotes) or
   * escaping of any kind.
   */
  default void writeStrRaw(String name, String val) throws IOException {
    throw new UnsupportedOperationException();
  }

  void writeStr(String name, String val, boolean needsEscaping) throws IOException;

  void writeMap(String name, Map<?, ?> val, boolean excludeOuter, boolean isFirstVal)
      throws IOException;

  void writeArray(String name, Iterator<?> val, boolean raw) throws IOException;

  void writeNull(String name) throws IOException;

  /** if this form of the method is called, val is the Java string form of an int */
  void writeInt(String name, String val) throws IOException;

  /** if this form of the method is called, val is the Java string form of a long */
  void writeLong(String name, String val) throws IOException;

  /** if this form of the method is called, val is the Java string form of a boolean */
  void writeBool(String name, String val) throws IOException;

  /** if this form of the method is called, val is the Java string form of a float */
  void writeFloat(String name, String val) throws IOException;

  /** if this form of the method is called, val is the Java string form of a double */
  void writeDouble(String name, String val) throws IOException;

  /** if this form of the method is called, val is the Solr ISO8601 based date format */
  void writeDate(String name, String val) throws IOException;

  void writeNamedList(String name, NamedList<?> val) throws IOException;

  /** Returns the backing character stream that output (e.g. indentation) is written to. */
  Writer getWriter();

  /**
   * Dispatches a Number to the matching primitive write method; unknown Number subclasses fall
   * back to a debugging string form.
   */
  default void writeNumber(String name, Number val) throws IOException {
    if (val instanceof Integer) {
      writeInt(name, val.toString());
    } else if (val instanceof Long) {
      writeLong(name, val.toString());
    } else if (val instanceof Float) {
      // we pass the float instead of using toString() because
      // it may need special formatting. same for double.
      writeFloat(name, val.floatValue());
    } else if (val instanceof Double) {
      writeDouble(name, val.doubleValue());
    } else if (val instanceof Short) {
      writeInt(name, val.toString());
    } else if (val instanceof Byte) {
      writeInt(name, val.toString());
    } else if (val instanceof AtomicInteger) {
      writeInt(name, ((AtomicInteger) val).get());
    } else if (val instanceof AtomicLong) {
      writeLong(name, ((AtomicLong) val).get());
    } else {
      // default... for debugging only
      writeStr(name, val.getClass().getName() + ':' + val.toString(), true);
    }
  }

  default void writeArray(String name, Object[] val, boolean raw) throws IOException {
    writeArray(name, Arrays.asList(val), raw);
  }

  default void writeArray(String name, List<?> l, boolean raw) throws IOException {
    writeArray(name, l.iterator(), raw);
  }

  /** Writes a Date in ISO-8601 instant form (via {@link Date#toInstant()}). */
  default void writeDate(String name, Date val) throws IOException {
    writeDate(name, val.toInstant().toString());
  }

  /** Writes {@code len} bytes of {@code buf} starting at {@code offset} as a Base64 string. */
  default void writeByteArr(String name, byte[] buf, int offset, int len) throws IOException {
    writeStr(
        name,
        new String(
            Base64.getEncoder().encode(ByteBuffer.wrap(buf, offset, len)).array(),
            StandardCharsets.ISO_8859_1),
        false);
  }

  default void writeInt(String name, int val) throws IOException {
    writeInt(name, Integer.toString(val));
  }

  default void writeLong(String name, long val) throws IOException {
    writeLong(name, Long.toString(val));
  }

  default void writeBool(String name, boolean val) throws IOException {
    writeBool(name, Boolean.toString(val));
  }

  /** Writes a float; NaN and +/-Infinity are emitted as strings rather than numbers. */
  default void writeFloat(String name, float val) throws IOException {
    String s = Float.toString(val);
    // If it's not a normal number, write the value as a string instead.
    // The following test also handles NaN since comparisons are always false.
    if (val > Float.NEGATIVE_INFINITY && val < Float.POSITIVE_INFINITY) {
      writeFloat(name, s);
    } else {
      writeStr(name, s, false);
    }
  }

  /** Writes a double; NaN and +/-Infinity are emitted as strings rather than numbers. */
  default void writeDouble(String name, double val) throws IOException {
    String s = Double.toString(val);
    // If it's not a normal number, write the value as a string instead.
    // The following test also handles NaN since comparisons are always false.
    if (val > Double.NEGATIVE_INFINITY && val < Double.POSITIVE_INFINITY) {
      writeDouble(name, s);
    } else {
      writeStr(name, s, false);
    }
  }

  default void writeBool(String name, Boolean val) throws IOException {
    writeBool(name, val.toString());
  }

  @Override
  default void writeMap(MapWriter mw) throws IOException {
    // todo
  }

  default void writeMap(String name, MapWriter mw) throws IOException {
    writeMap(mw);
  }

  @Override
  default void writeIterator(IteratorWriter iw) throws IOException {
    /*todo*/
  }

  default void writeIterator(String name, IteratorWriter iw, boolean raw) throws IOException {
    writeIterator(iw);
  }

  /** Emits indentation for the current nesting level, if indentation is enabled. */
  default void indent() throws IOException {
    if (doIndent()) indent(level());
  }

  int incLevel();

  int decLevel();

  TextWriter setIndent(boolean doIndent);

  int level();

  boolean doIndent();

  /** Writes up to {@code lev * 2 + 1} characters from the shared indent buffer. */
  default void indent(int lev) throws IOException {
    getWriter()
        .write(
            SolrJSONWriter.indentChars,
            0,
            Math.min((lev << 1) + 1, SolrJSONWriter.indentChars.length));
  }
}
| |
package org.apache.lucene.util;
import java.io.IOException;
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import org.apache.lucene.mockfile.DisableFsyncFS;
import org.apache.lucene.mockfile.HandleLimitFS;
import org.apache.lucene.mockfile.LeakFS;
import org.apache.lucene.mockfile.VerboseFS;
import org.apache.lucene.mockfile.WindowsFS;
import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Checks and cleans up temporary files.
*
* @see LuceneTestCase#createTempDir()
* @see LuceneTestCase#createTempFile()
*/
final class TestRuleTemporaryFilesCleanup extends TestRuleAdapter {
/**
 * Retry to create temporary file name this many times.
 */
private static final int TEMP_NAME_RETRY_THRESHOLD = 9999;

/**
 * Writeable temporary base folder.
 */
private Path javaTempDir;

/**
 * Per-test class temporary folder.
 */
private Path tempDirBase;

/**
 * Per-test filesystem (possibly a wrapped/mock filesystem; see initializeFileSystem()).
 */
private FileSystem fileSystem;

/**
 * Suite failure marker. Consulted in afterAlways() to decide whether temp files are deleted
 * (on success) or left on disk for inspection (on failure).
 */
private final TestRuleMarkFailure failureMarker;

/**
 * A queue of temporary resources to be removed after the
 * suite completes. Static (shared across rule instances); all access is
 * synchronized on the list itself.
 * @see #registerToRemoveAfterSuite(Path)
 */
private final static List<Path> cleanupQueue = new ArrayList<Path>();

public TestRuleTemporaryFilesCleanup(TestRuleMarkFailure failureMarker) {
  this.failureMarker = failureMarker;
}
/**
 * Queues the given path for deletion once the suite completes. When
 * {@link LuceneTestCase#LEAVE_TEMPORARY} is set, the path is reported on stderr and kept on
 * disk instead.
 */
void registerToRemoveAfterSuite(Path path) {
  assert path != null;

  if (!LuceneTestCase.LEAVE_TEMPORARY) {
    synchronized (cleanupQueue) {
      cleanupQueue.add(path);
    }
  } else {
    System.err.println("INFO: Will leave temporary file: " + path.toAbsolutePath());
  }
}
/**
 * Suite setup: installs the per-test filesystem, then resolves the base temp dir. The order
 * matters — {@link #initializeJavaTempDir()} resolves its path against {@link #fileSystem}.
 */
@Override
protected void before() throws Throwable {
  super.before();
  assert tempDirBase == null;
  fileSystem = initializeFileSystem();
  javaTempDir = initializeJavaTempDir();
}
// os/config-independent limit for too many open files
// TODO: can we make this lower?
private static final int MAX_OPEN_FILES = 2048;

/**
 * Builds the (possibly wrapped) filesystem used for this suite. With 9/10 probability the
 * default filesystem is wrapped in checking layers (fsync disabling, handle-leak tracking,
 * open-file limiting, and — for 1/10 of those, when not already on Windows — Windows-semantics
 * emulation); otherwise the bare filesystem is used. NOTE: this consumes values from the
 * suite's random, so reordering these calls changes what a given seed produces.
 */
private FileSystem initializeFileSystem() {
  FileSystem fs = FileSystems.getDefault();
  if (LuceneTestCase.VERBOSE) {
    fs = new VerboseFS(fs, new TestRuleSetupAndRestoreClassEnv.ThreadNameFixingPrintStreamInfoStream(System.out)).getFileSystem(null);
  }
  Random random = RandomizedContext.current().getRandom();
  // sometimes just use a bare filesystem
  if (random.nextInt(10) > 0) {
    fs = new DisableFsyncFS(fs).getFileSystem(null);
    fs = new LeakFS(fs).getFileSystem(null);
    fs = new HandleLimitFS(fs, MAX_OPEN_FILES).getFileSystem(null);
    // windows is currently slow
    if (random.nextInt(10) == 0) {
      // don't try to emulate windows on windows: they don't get along
      if (!Constants.WINDOWS) {
        fs = new WindowsFS(fs).getFileSystem(null);
      }
    }
  }
  if (LuceneTestCase.VERBOSE) {
    System.out.println("filesystem: " + fs.provider());
  }
  return fs.provider().getFileSystem(URI.create("file:///"));
}
/**
 * Resolves the writable base temp directory — the "tempDir" system property, falling back to
 * "java.io.tmpdir" — on the per-test filesystem, creating it if needed, and returns its real
 * (symlink-resolved) path.
 */
private Path initializeJavaTempDir() throws IOException {
  String configured = System.getProperty("tempDir", System.getProperty("java.io.tmpdir"));
  Path base = fileSystem.getPath(configured);
  Files.createDirectories(base);
  assert Files.isDirectory(base);
  assert Files.isWritable(base);
  return base.toRealPath();
}
/**
 * Suite teardown: drains the cleanup queue and deletes the registered temp files — but only if
 * the suite succeeded. On failure the files are left in place and their base path is reported.
 * Undeletable files fail the suite unless the class carries {@link SuppressTempFileChecks},
 * in which case only a warning is printed.
 */
@Override
protected void afterAlways(List<Throwable> errors) throws Throwable {
  // Drain cleanup queue and clear it.
  final Path [] everything;
  final String tempDirBasePath;

  synchronized (cleanupQueue) {
    tempDirBasePath = (tempDirBase != null ? tempDirBase.toAbsolutePath().toString() : null);
    tempDirBase = null;

    // Reverse registration order, so entries registered later (nested under the per-class
    // dir registered first) are deleted before their parents.
    Collections.reverse(cleanupQueue);
    everything = new Path [cleanupQueue.size()];
    cleanupQueue.toArray(everything);
    cleanupQueue.clear();
  }

  // Only check and throw an IOException on un-removable files if the test
  // was successful. Otherwise just report the path of temporary files
  // and leave them there.
  if (failureMarker.wasSuccessful()) {
    try {
      IOUtils.rm(everything);
    } catch (IOException e) {
      Class<?> suiteClass = RandomizedContext.current().getTargetClass();
      if (suiteClass.isAnnotationPresent(SuppressTempFileChecks.class)) {
        System.err.println("WARNING: Leftover undeleted temporary files (bugUrl: "
            + suiteClass.getAnnotation(SuppressTempFileChecks.class).bugUrl() + "): "
            + e.getMessage());
        return;
      }
      throw e;
    }
    // Close any non-default (wrapped/mock) filesystem installed for this suite.
    if (fileSystem != FileSystems.getDefault()) {
      fileSystem.close();
    }
  } else {
    if (tempDirBasePath != null) {
      System.err.println("NOTE: leaving temporary files on disk at: " + tempDirBasePath);
    }
  }
}
/**
 * Returns the per-suite base temp directory, creating it on first use.
 * The directory name embeds the (shortened) suite class name and the
 * randomized-runner seed; it is registered for removal after the suite.
 */
final Path getPerTestClassTempDir() {
    if (tempDirBase == null) {
        RandomizedContext ctx = RandomizedContext.current();
        // shorten well-known package prefixes to keep paths manageable
        String prefix = ctx.getTargetClass().getName()
            .replaceFirst("^org.apache.lucene.", "lucene.")
            .replaceFirst("^org.apache.solr.", "solr.");
        Path created = null;
        for (int attempt = 1; created == null; attempt++) {
            if (attempt > TEMP_NAME_RETRY_THRESHOLD) {
                throw new RuntimeException(
                    "Failed to get a temporary name too many times, check your temp directory and consider manually cleaning it: "
                    + javaTempDir.toAbsolutePath());
            }
            Path candidate = javaTempDir.resolve(prefix + " " + ctx.getRunnerSeedAsString()
                + "-" + String.format(Locale.ENGLISH, "%03d", attempt));
            try {
                Files.createDirectory(candidate);
                created = candidate;
            } catch (IOException retryWithNextSuffix) {}
        }
        tempDirBase = created;
        registerToRemoveAfterSuite(tempDirBase);
    }
    return tempDirBase;
}
/**
 * Creates a new uniquely-named directory under the per-suite temp dir and
 * registers it for removal after the suite.
 * @see LuceneTestCase#createTempDir()
 */
public Path createTempDir(String prefix) {
    Path base = getPerTestClassTempDir();
    for (int attempt = 1; ; attempt++) {
        if (attempt > TEMP_NAME_RETRY_THRESHOLD) {
            throw new RuntimeException(
                "Failed to get a temporary name too many times, check your temp directory and consider manually cleaning it: "
                + base.toAbsolutePath());
        }
        Path dir = base.resolve(prefix + "-" + String.format(Locale.ENGLISH, "%03d", attempt));
        try {
            // createDirectory is atomic: an IOException means the name is
            // taken (or otherwise unusable), so try the next suffix
            Files.createDirectory(dir);
            registerToRemoveAfterSuite(dir);
            return dir;
        } catch (IOException retryWithNextSuffix) {}
    }
}
/**
 * Creates a new uniquely-named file under the per-suite temp dir and
 * registers it for removal after the suite.
 * @see LuceneTestCase#createTempFile()
 */
public Path createTempFile(String prefix, String suffix) throws IOException {
    Path base = getPerTestClassTempDir();
    for (int attempt = 1; ; attempt++) {
        if (attempt > TEMP_NAME_RETRY_THRESHOLD) {
            throw new RuntimeException(
                "Failed to get a temporary name too many times, check your temp directory and consider manually cleaning it: "
                + base.toAbsolutePath());
        }
        Path file = base.resolve(prefix + "-" + String.format(Locale.ENGLISH, "%03d", attempt) + suffix);
        try {
            // createFile is atomic: an IOException means the name is taken
            // (or otherwise unusable), so try the next suffix
            Files.createFile(file);
            registerToRemoveAfterSuite(file);
            return file;
        } catch (IOException retryWithNextSuffix) {}
    }
}
}
| |
/**
*============================================================================
* Copyright The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC, and
* Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-core/LICENSE.txt for details.
*============================================================================
**/
package org.cagrid.cql.test;
import gov.nih.nci.cagrid.common.SchemaValidationException;
import gov.nih.nci.cagrid.common.SchemaValidator;
import gov.nih.nci.cagrid.common.Utils;
import gov.nih.nci.cagrid.common.XMLUtilities;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.text.ParseException;
import java.util.Date;
import junit.framework.TestCase;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.cagrid.cql.utilities.AnyNodeHelper;
import org.cagrid.cql.utilities.AttributeFactory;
import org.cagrid.cql.utilities.CQL2SerializationUtil;
import org.cagrid.cql.utilities.CQLConstants;
import org.cagrid.cql2.Aggregation;
import org.cagrid.cql2.AssociationPopulationSpecification;
import org.cagrid.cql2.AttributeValue;
import org.cagrid.cql2.BinaryPredicate;
import org.cagrid.cql2.CQLAssociatedObject;
import org.cagrid.cql2.CQLAttribute;
import org.cagrid.cql2.CQLGroup;
import org.cagrid.cql2.CQLQuery;
import org.cagrid.cql2.CQLQueryModifier;
import org.cagrid.cql2.CQLTargetObject;
import org.cagrid.cql2.DistinctAttribute;
import org.cagrid.cql2.GroupLogicalOperator;
import org.cagrid.cql2.NamedAssociation;
import org.cagrid.cql2.NamedAssociationList;
import org.cagrid.cql2.PopulationDepth;
import org.cagrid.cql2.UnaryPredicate;
import org.cagrid.cql2.results.CQLAggregateResult;
import org.cagrid.cql2.results.CQLAttributeResult;
import org.cagrid.cql2.results.CQLObjectResult;
import org.cagrid.cql2.results.CQLQueryResults;
import org.cagrid.cql2.results.TargetAttribute;
import org.exolab.castor.types.AnyNode;
import org.exolab.castor.types.Time;
/**
 * Tests serialization, XML-schema validation, and round-trip deserialization
 * of CQL 2 queries and query results.
 *
 * Fixes over the previous revision:
 * - setUp() now resolves BOTH schema paths canonically (the results schema
 *   previously used getAbsolutePath(), inconsistent with the query schema).
 * - testTimeValueAttribute() now fails the test when the round-trip throws,
 *   instead of silently printing the stack trace.
 * - testObjectResult() no longer leaks the two client-config InputStreams it
 *   opens (they are now closed in finally blocks).
 */
public class CQL2SerializationAndValidationTestCase extends TestCase {

    private SchemaValidator queryValidator = null;
    private SchemaValidator resultsValidator = null;
    // Axis client config used for (de)serialization; opened fresh per test in
    // setUp() and closed in tearDown().
    private InputStream wsddStream = null;

    public CQL2SerializationAndValidationTestCase(String name) {
        super(name);
    }

    /** Loads the CQL 2 schemas and the serialization client config. */
    public void setUp() {
        File cql2Xsd = new File("schema/cql2.0/CQLQueryComponents.xsd");
        File resultsXsd = new File("schema/cql2.0/CQLQueryResults.xsd");
        assertTrue(cql2Xsd.exists());
        assertTrue(resultsXsd.exists());
        try {
            String queryPath = cql2Xsd.getCanonicalPath();
            queryValidator = new SchemaValidator(queryPath);
            // use the canonical path for both schemas (was getAbsolutePath()
            // here only, inconsistent with the query schema above)
            String resultsPath = resultsXsd.getCanonicalPath();
            resultsValidator = new SchemaValidator(resultsPath);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error setting up schema validator: " + ex.getMessage());
        }
        wsddStream = getClass().getResourceAsStream("/org/cagrid/cql2/mapping/client-config.wsdd");
        assertNotNull("Could not load CQL 2 client config", wsddStream);
    }

    public void tearDown() {
        try {
            wsddStream.close();
        } catch (IOException e) {
            e.printStackTrace();
            System.err.println("Error closing client config input stream");
        }
    }

    // Closes a stream, ignoring close() failures (best-effort test cleanup).
    private static void closeQuietly(InputStream stream) {
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException ignored) {
                // best effort only
            }
        }
    }

    /**
     * Serializes the query, validates the XML against the CQL 2 query schema,
     * then deserializes and compares against the original.
     */
    private void validate(CQLQuery query) {
        // serialize
        StringWriter writer = new StringWriter();
        try {
            Utils.serializeObject(query, CQLConstants.CQL2_QUERY_QNAME, writer, wsddStream);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error serializing CQL 2 query: " + ex.getMessage());
        }
        String text = writer.getBuffer().toString();
        // validate
        try {
            queryValidator.validate(text);
        } catch (SchemaValidationException ex) {
            ex.printStackTrace();
            System.err.println(text);
            fail("Error validating serialized CQL 2 query: " + ex.getMessage());
        }
        // deserialize
        CQLQuery deserializedQuery = null;
        try {
            deserializedQuery = CQL2SerializationUtil.deserializeCql2Query(text);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error deserializing serialized CQL 2 query: " + ex.getMessage());
        }
        assertEquals("Deserialized query didn't match original", query, deserializedQuery);
    }

    /** Serializes the results and validates against the results schema. */
    private void validate(CQLQueryResults results) {
        // serialize
        StringWriter writer = new StringWriter();
        try {
            Utils.serializeObject(results, CQLConstants.CQL2_RESULTS_QNAME, writer, wsddStream);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error serializing CQL 2 results: " + ex.getMessage());
        }
        String text = writer.getBuffer().toString();
        // validate
        try {
            resultsValidator.validate(text);
        } catch (SchemaValidationException ex) {
            ex.printStackTrace();
            System.err.println(text);
            fail("Error validating serialized CQL 2 results: " + ex.getMessage());
        }
    }

    public void testTargetOnly() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testTargetWithBinaryPredicateAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLAttribute attribute = new CQLAttribute();
        attribute.setName("word");
        attribute.setBinaryPredicate(BinaryPredicate.EQUAL_TO);
        AttributeValue value = new AttributeValue();
        value.setStringValue("hello");
        attribute.setAttributeValue(value);
        target.setCQLAttribute(attribute);
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testStringValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, "hello"));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testIntegerValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, Integer.valueOf(0)));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testLongValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, Long.valueOf(0)));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testBooleanValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, Boolean.TRUE));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testDoubleValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, Double.valueOf(1.1)));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testFloatValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, Float.valueOf(1.2f)));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testDateValueAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        Date date = new Date(System.currentTimeMillis());
        target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, date));
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testTimeValueAttribute() {
        String timeString = "01:02:03.040";
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        try {
            Time t = Time.parseTime(timeString);
            target.setCQLAttribute(AttributeFactory.createAttribute("word", BinaryPredicate.EQUAL_TO, t));
        } catch (ParseException e1) {
            e1.printStackTrace();
            fail(e1.getMessage());
        }
        query.setCQLTargetObject(target);
        try {
            String text = CQL2SerializationUtil.serializeCql2Query(query);
            CQLQuery des = CQL2SerializationUtil.deserializeCql2Query(text);
            CQLTargetObject desTarget = des.getCQLTargetObject();
            assertNotNull("No target object!", desTarget);
            CQLAttribute desAttr = desTarget.getCQLAttribute();
            assertNotNull("No attribute!", desAttr);
            AttributeValue desVal = desAttr.getAttributeValue();
            assertNotNull("No value!", desVal);
            Time t = desVal.getTimeValue();
            assertNotNull("No time!", t);
            assertEquals("Time value not as expected", timeString, t.toString());
        } catch (Exception ex) {
            // previously this only printed the stack trace, silently passing
            // the test on a round-trip failure
            ex.printStackTrace();
            fail("Error round-tripping time value: " + ex.getMessage());
        }
        validate(query);
    }

    public void testGroupOfAttributes() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLAttribute a1 = new CQLAttribute();
        a1.setName("alpha");
        a1.setBinaryPredicate(BinaryPredicate.LESS_THAN_EQUAL_TO);
        AttributeValue value1 = new AttributeValue();
        value1.setStringValue("beta");
        a1.setAttributeValue(value1);
        CQLAttribute a2 = new CQLAttribute();
        a2.setName("bird");
        a2.setBinaryPredicate(BinaryPredicate.EQUAL_TO);
        AttributeValue value2 = new AttributeValue();
        value2.setStringValue("the word");
        a2.setAttributeValue(value2);
        CQLGroup group = new CQLGroup();
        group.setLogicalOperation(GroupLogicalOperator.AND);
        group.setCQLAttribute(new CQLAttribute[] {a1, a2});
        target.setCQLGroup(group);
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testAssociation() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLAssociatedObject assoc = new CQLAssociatedObject();
        assoc.setClassName("abc.def");
        CQLAttribute haveYouHeard = new CQLAttribute();
        haveYouHeard.setName("bird");
        haveYouHeard.setBinaryPredicate(BinaryPredicate.EQUAL_TO);
        AttributeValue value2 = new AttributeValue();
        value2.setStringValue("the word");
        haveYouHeard.setAttributeValue(value2);
        assoc.setCQLAttribute(haveYouHeard);
        target.setCQLAssociatedObject(assoc);
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testNestedAssociations() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLAssociatedObject assoc = new CQLAssociatedObject();
        assoc.setClassName("abc.def");
        CQLAssociatedObject nested = new CQLAssociatedObject();
        nested.setClassName("lol.idk");
        assoc.setCQLAssociatedObject(nested);
        target.setCQLAssociatedObject(assoc);
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testGroupedAssociations() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLAssociatedObject assoc1 = new CQLAssociatedObject();
        assoc1.setClassName("abc.def");
        CQLAttribute haveYouHeard = new CQLAttribute();
        haveYouHeard.setName("bird");
        haveYouHeard.setBinaryPredicate(BinaryPredicate.EQUAL_TO);
        AttributeValue value2 = new AttributeValue();
        value2.setStringValue("the word");
        haveYouHeard.setAttributeValue(value2);
        assoc1.setCQLAttribute(haveYouHeard);
        CQLAssociatedObject assoc2 = new CQLAssociatedObject();
        assoc2.setClassName("xyz.abc");
        CQLGroup group = new CQLGroup();
        group.setLogicalOperation(GroupLogicalOperator.AND);
        group.setCQLAssociatedObject(new CQLAssociatedObject[] {assoc1, assoc2});
        target.setCQLGroup(group);
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testNestedGroups() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLAssociatedObject assoc1 = new CQLAssociatedObject();
        assoc1.setClassName("abc.def");
        CQLAttribute haveYouHeard = new CQLAttribute();
        haveYouHeard.setName("bird");
        haveYouHeard.setBinaryPredicate(BinaryPredicate.EQUAL_TO);
        AttributeValue value2 = new AttributeValue();
        value2.setStringValue("the word");
        haveYouHeard.setAttributeValue(value2);
        assoc1.setCQLAttribute(haveYouHeard);
        CQLAssociatedObject assoc2 = new CQLAssociatedObject();
        assoc2.setClassName("xyz.abc");
        CQLGroup nestedGroup = new CQLGroup();
        nestedGroup.setLogicalOperation(GroupLogicalOperator.OR);
        CQLAttribute a1 = new CQLAttribute();
        a1.setName("nested1");
        a1.setUnaryPredicate(UnaryPredicate.IS_NOT_NULL);
        CQLAttribute a2 = new CQLAttribute();
        a2.setName("nested2");
        a2.setUnaryPredicate(UnaryPredicate.IS_NULL);
        nestedGroup.setCQLAttribute(new CQLAttribute[] {a1, a2});
        CQLGroup group = new CQLGroup();
        group.setLogicalOperation(GroupLogicalOperator.AND);
        group.setCQLAssociatedObject(new CQLAssociatedObject[] {assoc1, assoc2});
        group.setCQLGroup(new CQLGroup[] {nestedGroup});
        target.setCQLGroup(group);
        query.setCQLTargetObject(target);
        validate(query);
    }

    public void testQueryModifierCountOnly() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLQueryModifier mods = new CQLQueryModifier();
        mods.setCountOnly(Boolean.TRUE);
        query.setCQLTargetObject(target);
        query.setCQLQueryModifier(mods);
        validate(query);
    }

    public void testQueryModifierDistinctAttribute() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        CQLQueryModifier mods = new CQLQueryModifier();
        DistinctAttribute da = new DistinctAttribute();
        da.setAttributeName("id");
        da.setAggregation(Aggregation.MAX);
        mods.setDistinctAttribute(da);
        query.setCQLTargetObject(target);
        query.setCQLQueryModifier(mods);
        validate(query);
    }

    public void testAssociationPopulationNamedAssociations() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        query.setCQLTargetObject(target);
        AssociationPopulationSpecification spec = new AssociationPopulationSpecification();
        NamedAssociationList list = new NamedAssociationList();
        NamedAssociation na1 = new NamedAssociation();
        na1.setEndName("test");
        na1.set_instanceof("some.other.class");
        NamedAssociation na2 = new NamedAssociation();
        na2.setEndName("wow");
        na1.setNamedAssociationList(new NamedAssociationList(new NamedAssociation[] {na2}));
        list.setNamedAssociation(new NamedAssociation[] {na1});
        spec.setNamedAssociationList(list);
        query.setAssociationPopulationSpecification(spec);
        validate(query);
    }

    public void testAssociationPopulationDepth() {
        CQLQuery query = new CQLQuery();
        CQLTargetObject target = new CQLTargetObject();
        target.setClassName("foo.bar");
        target.set_instanceof("zor");
        query.setCQLTargetObject(target);
        AssociationPopulationSpecification spec = new AssociationPopulationSpecification();
        PopulationDepth depth = new PopulationDepth(2);
        spec.setPopulationDepth(depth);
        query.setAssociationPopulationSpecification(spec);
        validate(query);
    }

    public void testAggregationResult() {
        CQLQueryResults results = new CQLQueryResults();
        results.setTargetClassname("foo.bar");
        CQLAggregateResult agg = new CQLAggregateResult();
        agg.setAggregation(Aggregation.COUNT);
        agg.setAttributeName("id");
        agg.setValue("5");
        results.setAggregationResult(agg);
        validate(results);
    }

    public void testObjectResult() {
        CQLQueryResults results = new CQLQueryResults();
        results.setTargetClassname("foo.bar");
        CQLObjectResult obj = new CQLObjectResult();
        AnyNode node = null;
        try {
            String xml = "<id displayable=\"true\" extension=\"1\" reliability=\"ISS\" root=\"2.16.12.123.456.1\" scope=\"OBJ\" xsi:type=\"ns3:II\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:ns3=\"uri:iso.org:21090\"/>";
            node = AnyNodeHelper.convertStringToAnyNode(xml);
            // node = AnyNodeHelper.convertStringToAnyNode("<foo name=\"bar\" xsi:type=\"zor\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">text here</foo>");
        } catch (Exception e) {
            e.printStackTrace();
            fail("Error creating node: " + e.getMessage());
        }
        obj.set_any(node);
        results.setObjectResult(new CQLObjectResult[] {obj});
        // validate(results);
        // serialize (fresh config stream; closed below -- previously leaked)
        StringWriter writer = new StringWriter();
        InputStream serializeConfig = getClass().getResourceAsStream("/org/cagrid/cql2/mapping/client-config.wsdd");
        try {
            Utils.serializeObject(results, CQLConstants.CQL2_RESULTS_QNAME, writer, serializeConfig);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error serializing CQL 2 results: " + ex.getMessage());
        } finally {
            closeQuietly(serializeConfig);
        }
        String text = writer.getBuffer().toString();
        try {
            System.out.println("Here's what we serialized:");
            System.out.println(XMLUtilities.formatXML(text));
        } catch (Exception ex) {
            // meh
        }
        // deserialize (fresh config stream; closed below -- previously leaked)
        CQLQueryResults des = null;
        InputStream deserializeConfig = getClass().getResourceAsStream("/org/cagrid/cql2/mapping/client-config.wsdd");
        try {
            des = Utils.deserializeObject(new StringReader(text), CQLQueryResults.class, deserializeConfig);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        } finally {
            closeQuietly(deserializeConfig);
        }
        assertEquals(results, des);
    }

    public void testAttributeResults() {
        CQLQueryResults results = new CQLQueryResults();
        results.setTargetClassname("foo.bar");
        CQLAttributeResult[] attribResults = new CQLAttributeResult[5];
        for (int i = 0; i < attribResults.length; i++) {
            CQLAttributeResult attributeResult = new CQLAttributeResult();
            TargetAttribute[] tas = new TargetAttribute[4];
            for (int j = 0; j < tas.length; j++) {
                tas[j] = new TargetAttribute("Name " + i + ", " + j, "Name " + i + ", " + j);
            }
            attributeResult.setAttribute(tas);
            attribResults[i] = attributeResult;
        }
        results.setAttributeResult(attribResults);
        validate(results);
    }

    public static void main(String args[]) {
        TestRunner runner = new TestRunner();
        TestResult result = runner.doRun(
            new TestSuite(CQL2SerializationAndValidationTestCase.class));
        System.exit(result.errorCount() + result.failureCount());
    }
}
| |
/*
* TouchImageView.java
* By: Michael Ortiz
* Updated By: Patrick Lackemacher
* Updated By: Babay88
* Updated By: @ipsilondev
* Updated By: hank-cp
* Updated By: singpolyma
* -------------------
* Extends Android ImageView to include pinch zooming, panning, fling and double tap zoom.
*/
package com.hu.yang.baseskill_android.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.ImageView;
import android.widget.OverScroller;
import android.widget.Scroller;
public class TouchImageView extends ImageView {
private static final String DEBUG = "DEBUG";
//
// SuperMin and SuperMax multipliers. Determine how much the image can be
// zoomed below or above the zoom boundaries, before animating back to the
// min/max zoom boundary.
//
private static final float SUPER_MIN_MULTIPLIER = .75f;
private static final float SUPER_MAX_MULTIPLIER = 1.25f;
//
// Scale of image ranges from minScale to maxScale, where minScale == 1
// when the image is stretched to fit view.
//
private float normalizedScale;
//
// Matrix applied to image. MSCALE_X and MSCALE_Y should always be equal.
// MTRANS_X and MTRANS_Y are the other values used. prevMatrix is the matrix
// saved prior to the screen rotating.
//
private Matrix matrix, prevMatrix;
// Current gesture/animation state of the view.
private static enum State { NONE, DRAG, ZOOM, FLING, ANIMATE_ZOOM };
private State state;
// Zoom limits, plus the over-zoom limits derived from the multipliers above.
private float minScale;
private float maxScale;
private float superMinScale;
private float superMaxScale;
// Scratch buffer for Matrix.getValues()/setValues() (9 matrix entries).
private float[] m;
private Context context;
// In-progress fling runnable, if any.
private Fling fling;
// ScaleType requested by the caller; the underlying ImageView is always MATRIX.
private ScaleType mScaleType;
// True once onDraw has run for the current image content.
private boolean imageRenderedAtLeastOnce;
private boolean onDrawReady;
// Zoom request received before the view was measured; applied in onDraw.
private ZoomVariables delayedZoomVariables;
//
// Size of view and previous view size (ie before rotation)
//
private int viewWidth, viewHeight, prevViewWidth, prevViewHeight;
//
// Size of image when it is stretched to fit view. Before and After rotation.
//
private float matchViewWidth, matchViewHeight, prevMatchViewWidth, prevMatchViewHeight;
private ScaleGestureDetector mScaleDetector;
private GestureDetector mGestureDetector;
// Optional listeners supplied by the client of this view.
private GestureDetector.OnDoubleTapListener doubleTapListener = null;
private OnTouchListener userTouchListener = null;
private OnTouchImageViewListener touchImageViewListener = null;
public TouchImageView(Context context) {
super(context);
sharedConstructing(context);
}
public TouchImageView(Context context, AttributeSet attrs) {
super(context, attrs);
sharedConstructing(context);
}
public TouchImageView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
sharedConstructing(context);
}
// Common initialization for all constructors: installs gesture detectors,
// default zoom limits, and the internal touch listener.
private void sharedConstructing(Context context) {
super.setClickable(true);
this.context = context;
mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
mGestureDetector = new GestureDetector(context, new GestureListener());
matrix = new Matrix();
prevMatrix = new Matrix();
m = new float[9];
normalizedScale = 1;
if (mScaleType == null) {
mScaleType = ScaleType.FIT_CENTER;
}
minScale = 1;
maxScale = 3;
superMinScale = SUPER_MIN_MULTIPLIER * minScale;
superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
setImageMatrix(matrix);
// the base ImageView always runs in MATRIX mode; mScaleType only records
// the caller's requested fit behavior
setScaleType(ScaleType.MATRIX);
setState(State.NONE);
onDrawReady = false;
super.setOnTouchListener(new PrivateOnTouchListener());
}
@Override
public void setOnTouchListener(OnTouchListener l) {
// keep the internal listener installed; the user listener is chained to
// from PrivateOnTouchListener instead of replacing it
userTouchListener = l;
}
public void setOnTouchImageViewListener(OnTouchImageViewListener l) {
touchImageViewListener = l;
}
public void setOnDoubleTapListener(GestureDetector.OnDoubleTapListener l) {
doubleTapListener = l;
}
/**
 * {@inheritDoc}
 * Resets the rendered flag (new content invalidates any pending restore
 * against the old image -- previously only setImageBitmap did this) and
 * re-fits the new image to the view.
 */
@Override
public void setImageResource(int resId) {
    imageRenderedAtLeastOnce = false;
    super.setImageResource(resId);
    savePreviousImageValues();
    fitImageToView();
}

/** {@inheritDoc} Resets the rendered flag and re-fits the new image. */
@Override
public void setImageBitmap(Bitmap bm) {
    imageRenderedAtLeastOnce = false;
    super.setImageBitmap(bm);
    savePreviousImageValues();
    fitImageToView();
}

/** {@inheritDoc} Resets the rendered flag and re-fits the new image. */
@Override
public void setImageDrawable(Drawable drawable) {
    imageRenderedAtLeastOnce = false;
    super.setImageDrawable(drawable);
    savePreviousImageValues();
    fitImageToView();
}

/** {@inheritDoc} Resets the rendered flag and re-fits the new image. */
@Override
public void setImageURI(Uri uri) {
    imageRenderedAtLeastOnce = false;
    super.setImageURI(uri);
    savePreviousImageValues();
    fitImageToView();
}
@Override
public void setScaleType(ScaleType type) {
if (type == ScaleType.FIT_START || type == ScaleType.FIT_END) {
throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
}
if (type == ScaleType.MATRIX) {
// MATRIX is the internal mode of the base ImageView; pass it through
// without recording it as the user-requested scale type
super.setScaleType(ScaleType.MATRIX);
} else {
mScaleType = type;
if (onDrawReady) {
//
// If the image is already rendered, scaleType has been called programmatically
// and the TouchImageView should be updated with the new scaleType.
//
setZoom(this);
}
}
}
@Override
public ScaleType getScaleType() {
// report the caller's requested type, not the internal MATRIX mode
return mScaleType;
}
/**
 * Returns false if image is in initial, unzoomed state. True, otherwise.
 * @return true if image is zoomed
 */
public boolean isZoomed() {
return normalizedScale != 1;
}
/**
 * Returns the currently visible portion of the drawable, expressed as
 * fractions of the drawable's intrinsic size (0..1 on each axis).
 * @return rect representing zoomed image
 */
public RectF getZoomedRect() {
    if (mScaleType == ScaleType.FIT_XY) {
        throw new UnsupportedOperationException("getZoomedRect() not supported with FIT_XY");
    }
    // map the view's corners into bitmap coordinates, then normalize
    PointF visibleMin = transformCoordTouchToBitmap(0, 0, true);
    PointF visibleMax = transformCoordTouchToBitmap(viewWidth, viewHeight, true);
    float drawableW = getDrawable().getIntrinsicWidth();
    float drawableH = getDrawable().getIntrinsicHeight();
    return new RectF(
        visibleMin.x / drawableW, visibleMin.y / drawableH,
        visibleMax.x / drawableW, visibleMax.y / drawableH);
}
/**
 * Save the current matrix and view dimensions
 * in the prevMatrix and prevView variables.
 */
private void savePreviousImageValues() {
// skip until the view has been laid out at least once
if (matrix != null && viewHeight != 0 && viewWidth != 0) {
matrix.getValues(m);
prevMatrix.setValues(m);
prevMatchViewHeight = matchViewHeight;
prevMatchViewWidth = matchViewWidth;
prevViewHeight = viewHeight;
prevViewWidth = viewWidth;
}
}
// Serializes zoom state (scale, matrix, view/image dimensions) alongside the
// superclass state so it can be restored after e.g. rotation.
@Override
public Parcelable onSaveInstanceState() {
Bundle bundle = new Bundle();
bundle.putParcelable("instanceState", super.onSaveInstanceState());
bundle.putFloat("saveScale", normalizedScale);
bundle.putFloat("matchViewHeight", matchViewHeight);
bundle.putFloat("matchViewWidth", matchViewWidth);
bundle.putInt("viewWidth", viewWidth);
bundle.putInt("viewHeight", viewHeight);
matrix.getValues(m);
bundle.putFloatArray("matrix", m);
bundle.putBoolean("imageRendered", imageRenderedAtLeastOnce);
return bundle;
}
// Restores zoom state saved in onSaveInstanceState; the saved values are
// loaded into the prev* fields and re-applied to the new layout later.
@Override
public void onRestoreInstanceState(Parcelable state) {
if (state instanceof Bundle) {
Bundle bundle = (Bundle) state;
normalizedScale = bundle.getFloat("saveScale");
m = bundle.getFloatArray("matrix");
prevMatrix.setValues(m);
prevMatchViewHeight = bundle.getFloat("matchViewHeight");
prevMatchViewWidth = bundle.getFloat("matchViewWidth");
prevViewHeight = bundle.getInt("viewHeight");
prevViewWidth = bundle.getInt("viewWidth");
imageRenderedAtLeastOnce = bundle.getBoolean("imageRendered");
super.onRestoreInstanceState(bundle.getParcelable("instanceState"));
return;
}
super.onRestoreInstanceState(state);
}
@Override
protected void onDraw(Canvas canvas) {
onDrawReady = true;
imageRenderedAtLeastOnce = true;
// apply any setZoom() request that arrived before the view was measured
if (delayedZoomVariables != null) {
setZoom(delayedZoomVariables.scale, delayedZoomVariables.focusX, delayedZoomVariables.focusY, delayedZoomVariables.scaleType);
delayedZoomVariables = null;
}
super.onDraw(canvas);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
// snapshot current state so it can be re-fit after the rotation/layout change
savePreviousImageValues();
}
/**
 * Get the max zoom multiplier.
 * @return max zoom multiplier.
 */
public float getMaxZoom() {
return maxScale;
}
/**
 * Set the max zoom multiplier. Default value: 3.
 * @param max max zoom multiplier.
 */
public void setMaxZoom(float max) {
maxScale = max;
// keep the over-zoom bound in sync with the new limit
superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
}
/**
 * Get the min zoom multiplier.
 * @return min zoom multiplier.
 */
public float getMinZoom() {
return minScale;
}
/**
 * Get the current zoom. This is the zoom relative to the initial
 * scale, not the original resource.
 * @return current zoom multiplier.
 */
public float getCurrentZoom() {
return normalizedScale;
}
/**
 * Set the min zoom multiplier. Default value: 1.
 * @param min min zoom multiplier.
 */
public void setMinZoom(float min) {
minScale = min;
// keep the over-zoom bound in sync with the new limit
superMinScale = SUPER_MIN_MULTIPLIER * minScale;
}
/**
 * Reset zoom and translation to initial state.
 */
public void resetZoom() {
normalizedScale = 1;
fitImageToView();
}
/**
 * Set zoom to the specified scale. Image will be centered by default.
 * @param scale
 */
public void setZoom(float scale) {
setZoom(scale, 0.5f, 0.5f);
}
/**
 * Set zoom to the specified scale. Image will be centered around the point
 * (focusX, focusY). These floats range from 0 to 1 and denote the focus point
 * as a fraction from the left and top of the view. For example, the top left
 * corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
 * @param scale
 * @param focusX
 * @param focusY
 */
public void setZoom(float scale, float focusX, float focusY) {
setZoom(scale, focusX, focusY, mScaleType);
}
/**
 * Set zoom to the specified scale. Image will be centered around the point
 * (focusX, focusY). These floats range from 0 to 1 and denote the focus point
 * as a fraction from the left and top of the view. For example, the top left
 * corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
 * @param scale
 * @param focusX
 * @param focusY
 * @param scaleType
 */
public void setZoom(float scale, float focusX, float focusY, ScaleType scaleType) {
//
// setZoom can be called before the image is on the screen, but at this point,
// image and view sizes have not yet been calculated in onMeasure. Thus, we should
// delay calling setZoom until the view has been measured.
//
if (!onDrawReady) {
delayedZoomVariables = new ZoomVariables(scale, focusX, focusY, scaleType);
return;
}
if (scaleType != mScaleType) {
setScaleType(scaleType);
}
// start from the fitted state, scale about the view center, then translate
// so the requested focus fraction lands at the center of the view
resetZoom();
scaleImage(scale, viewWidth / 2, viewHeight / 2, true);
matrix.getValues(m);
m[Matrix.MTRANS_X] = -((focusX * getImageWidth()) - (viewWidth * 0.5f));
m[Matrix.MTRANS_Y] = -((focusY * getImageHeight()) - (viewHeight * 0.5f));
matrix.setValues(m);
fixTrans();
setImageMatrix(matrix);
}
/**
 * Set zoom parameters equal to another TouchImageView. Including scale, position,
 * and ScaleType.
 * @param img the view whose zoom, scroll position, and scale type to copy
 */
public void setZoom(TouchImageView img) {
PointF center = img.getScrollPosition();
setZoom(img.getCurrentZoom(), center.x, center.y, img.getScaleType());
}
/**
* Return the point at the center of the zoomed image. The PointF coordinates range
* in value between 0 and 1 and the focus point is denoted as a fraction from the left
* and top of the view. For example, the top left corner of the image would be (0, 0).
* And the bottom right corner would be (1, 1).
* @return PointF representing the scroll position of the zoomed image.
*/
public PointF getScrollPosition() {
Drawable drawable = getDrawable();
if (drawable == null) {
return null;
}
int drawableWidth = drawable.getIntrinsicWidth();
int drawableHeight = drawable.getIntrinsicHeight();
PointF point = transformCoordTouchToBitmap(viewWidth / 2, viewHeight / 2, true);
point.x /= drawableWidth;
point.y /= drawableHeight;
return point;
}
/**
* Set the focus point of the zoomed image. The focus points are denoted as a fraction from the
* left and top of the view. The focus points can range in value between 0 and 1.
* @param focusX
* @param focusY
*/
public void setScrollPosition(float focusX, float focusY) {
setZoom(normalizedScale, focusX, focusY);
}
/**
* Performs boundary checking and fixes the image matrix if it
* is out of bounds.
*/
private void fixTrans() {
matrix.getValues(m);
float transX = m[Matrix.MTRANS_X];
float transY = m[Matrix.MTRANS_Y];
float fixTransX = getFixTrans(transX, viewWidth, getImageWidth());
float fixTransY = getFixTrans(transY, viewHeight, getImageHeight());
if (fixTransX != 0 || fixTransY != 0) {
matrix.postTranslate(fixTransX, fixTransY);
}
}
/**
* When transitioning from zooming from focus to zoom from center (or vice versa)
* the image can become unaligned within the view. This is apparent when zooming
* quickly. When the content size is less than the view size, the content will often
* be centered incorrectly within the view. fixScaleTrans first calls fixTrans() and
* then makes sure the image is centered correctly within the view.
*/
private void fixScaleTrans() {
fixTrans();
matrix.getValues(m);
if (getImageWidth() < viewWidth) {
m[Matrix.MTRANS_X] = (viewWidth - getImageWidth()) / 2;
}
if (getImageHeight() < viewHeight) {
m[Matrix.MTRANS_Y] = (viewHeight - getImageHeight()) / 2;
}
matrix.setValues(m);
}
private float getFixTrans(float trans, float viewSize, float contentSize) {
float minTrans, maxTrans;
if (contentSize <= viewSize) {
minTrans = 0;
maxTrans = viewSize - contentSize;
} else {
minTrans = viewSize - contentSize;
maxTrans = 0;
}
if (trans < minTrans)
return -trans + minTrans;
if (trans > maxTrans)
return -trans + maxTrans;
return 0;
}
private float getFixDragTrans(float delta, float viewSize, float contentSize) {
if (contentSize <= viewSize) {
return 0;
}
return delta;
}
    // Current on-screen width of the content: the fitted width times the user zoom.
    private float getImageWidth() {
        return matchViewWidth * normalizedScale;
    }
    // Current on-screen height of the content: the fitted height times the user zoom.
    private float getImageHeight() {
        return matchViewHeight * normalizedScale;
    }
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
Drawable drawable = getDrawable();
if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
setMeasuredDimension(0, 0);
return;
}
int drawableWidth = drawable.getIntrinsicWidth();
int drawableHeight = drawable.getIntrinsicHeight();
int widthSize = MeasureSpec.getSize(widthMeasureSpec);
int widthMode = MeasureSpec.getMode(widthMeasureSpec);
int heightSize = MeasureSpec.getSize(heightMeasureSpec);
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
viewWidth = setViewSize(widthMode, widthSize, drawableWidth);
viewHeight = setViewSize(heightMode, heightSize, drawableHeight);
//
// Set view dimensions
//
setMeasuredDimension(viewWidth, viewHeight);
//
// Fit content within view
//
fitImageToView();
}
/**
* If the normalizedScale is equal to 1, then the image is made to fit the screen. Otherwise,
* it is made to fit the screen according to the dimensions of the previous image matrix. This
* allows the image to maintain its zoom after rotation.
*/
private void fitImageToView() {
Drawable drawable = getDrawable();
if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
return;
}
if (matrix == null || prevMatrix == null) {
return;
}
int drawableWidth = drawable.getIntrinsicWidth();
int drawableHeight = drawable.getIntrinsicHeight();
//
// Scale image for view
//
float scaleX = (float) viewWidth / drawableWidth;
float scaleY = (float) viewHeight / drawableHeight;
switch (mScaleType) {
case CENTER:
scaleX = scaleY = 1;
break;
case CENTER_CROP:
scaleX = scaleY = Math.max(scaleX, scaleY);
break;
case CENTER_INSIDE:
scaleX = scaleY = Math.min(1, Math.min(scaleX, scaleY));
case FIT_CENTER:
scaleX = scaleY = Math.min(scaleX, scaleY);
break;
case FIT_XY:
break;
default:
//
// FIT_START and FIT_END not supported
//
throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
}
//
// Center the image
//
float redundantXSpace = viewWidth - (scaleX * drawableWidth);
float redundantYSpace = viewHeight - (scaleY * drawableHeight);
matchViewWidth = viewWidth - redundantXSpace;
matchViewHeight = viewHeight - redundantYSpace;
if (!isZoomed() && !imageRenderedAtLeastOnce) {
//
// Stretch and center image to fit view
//
matrix.setScale(scaleX, scaleY);
matrix.postTranslate(redundantXSpace / 2, redundantYSpace / 2);
normalizedScale = 1;
} else {
//
// These values should never be 0 or we will set viewWidth and viewHeight
// to NaN in translateMatrixAfterRotate. To avoid this, call savePreviousImageValues
// to set them equal to the current values.
//
if (prevMatchViewWidth == 0 || prevMatchViewHeight == 0) {
savePreviousImageValues();
}
prevMatrix.getValues(m);
//
// Rescale Matrix after rotation
//
m[Matrix.MSCALE_X] = matchViewWidth / drawableWidth * normalizedScale;
m[Matrix.MSCALE_Y] = matchViewHeight / drawableHeight * normalizedScale;
//
// TransX and TransY from previous matrix
//
float transX = m[Matrix.MTRANS_X];
float transY = m[Matrix.MTRANS_Y];
//
// Width
//
float prevActualWidth = prevMatchViewWidth * normalizedScale;
float actualWidth = getImageWidth();
translateMatrixAfterRotate(Matrix.MTRANS_X, transX, prevActualWidth, actualWidth, prevViewWidth, viewWidth, drawableWidth);
//
// Height
//
float prevActualHeight = prevMatchViewHeight * normalizedScale;
float actualHeight = getImageHeight();
translateMatrixAfterRotate(Matrix.MTRANS_Y, transY, prevActualHeight, actualHeight, prevViewHeight, viewHeight, drawableHeight);
//
// Set the matrix to the adjusted scale and translate values.
//
matrix.setValues(m);
}
fixTrans();
setImageMatrix(matrix);
}
/**
* Set view dimensions based on layout params
*
     * @param mode the MeasureSpec mode for this axis (EXACTLY, AT_MOST or UNSPECIFIED)
     * @param size the MeasureSpec size for this axis
     * @param drawableWidth the drawable's intrinsic size on this axis
     * @return the resolved view size for this axis
*/
private int setViewSize(int mode, int size, int drawableWidth) {
int viewSize;
switch (mode) {
case MeasureSpec.EXACTLY:
viewSize = size;
break;
case MeasureSpec.AT_MOST:
viewSize = Math.min(drawableWidth, size);
break;
case MeasureSpec.UNSPECIFIED:
viewSize = drawableWidth;
break;
default:
viewSize = size;
break;
}
return viewSize;
}
/**
* After rotating, the matrix needs to be translated. This function finds the area of image
* which was previously centered and adjusts translations so that is again the center, post-rotation.
*
* @param axis Matrix.MTRANS_X or Matrix.MTRANS_Y
* @param trans the value of trans in that axis before the rotation
* @param prevImageSize the width/height of the image before the rotation
* @param imageSize width/height of the image after rotation
* @param prevViewSize width/height of view before rotation
* @param viewSize width/height of view after rotation
* @param drawableSize width/height of drawable
*/
private void translateMatrixAfterRotate(int axis, float trans, float prevImageSize, float imageSize, int prevViewSize, int viewSize, int drawableSize) {
if (imageSize < viewSize) {
//
// The width/height of image is less than the view's width/height. Center it.
//
m[axis] = (viewSize - (drawableSize * m[Matrix.MSCALE_X])) * 0.5f;
} else if (trans > 0) {
//
// The image is larger than the view, but was not before rotation. Center it.
//
m[axis] = -((imageSize - viewSize) * 0.5f);
} else {
//
// Find the area of the image which was previously centered in the view. Determine its distance
// from the left/top side of the view as a fraction of the entire image's width/height. Use that percentage
// to calculate the trans in the new view width/height.
//
float percentage = (Math.abs(trans) + (0.5f * prevViewSize)) / prevImageSize;
m[axis] = -((percentage * imageSize) - (viewSize * 0.5f));
}
}
    // Record the current interaction state (e.g. NONE, DRAG, ZOOM, FLING, ANIMATE_ZOOM).
    private void setState(State state) {
        this.state = state;
    }
    // Compatibility alias for platforms where View.canScrollHorizontally(int) is not
    // available as an override target; delegates to the same logic.
    public boolean canScrollHorizontallyFroyo(int direction) {
        return canScrollHorizontally(direction);
    }
@Override
public boolean canScrollHorizontally(int direction) {
matrix.getValues(m);
float x = m[Matrix.MTRANS_X];
if (getImageWidth() < viewWidth) {
return false;
} else if (x >= -1 && direction < 0) {
return false;
} else if (Math.abs(x) + viewWidth + 1 >= getImageWidth() && direction > 0) {
return false;
}
return true;
}
/**
* Gesture Listener detects a single click or long click and passes that on
* to the view's listener.
* @author Ortiz
*
*/
private class GestureListener extends GestureDetector.SimpleOnGestureListener {
@Override
public boolean onSingleTapConfirmed(MotionEvent e)
{
if(doubleTapListener != null) {
return doubleTapListener.onSingleTapConfirmed(e);
}
return performClick();
}
@Override
public void onLongPress(MotionEvent e)
{
performLongClick();
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY)
{
if (fling != null) {
//
// If a previous fling is still active, it should be cancelled so that two flings
// are not run simultaenously.
//
fling.cancelFling();
}
fling = new Fling((int) velocityX, (int) velocityY);
compatPostOnAnimation(fling);
return super.onFling(e1, e2, velocityX, velocityY);
}
@Override
public boolean onDoubleTap(MotionEvent e) {
boolean consumed = false;
if(doubleTapListener != null) {
consumed = doubleTapListener.onDoubleTap(e);
}
if (state == State.NONE) {
float targetZoom = (normalizedScale == minScale) ? maxScale : minScale;
DoubleTapZoom doubleTap = new DoubleTapZoom(targetZoom, e.getX(), e.getY(), false);
compatPostOnAnimation(doubleTap);
consumed = true;
}
return consumed;
}
@Override
public boolean onDoubleTapEvent(MotionEvent e) {
if(doubleTapListener != null) {
return doubleTapListener.onDoubleTapEvent(e);
}
return false;
}
}
public interface OnTouchImageViewListener {
public void onMove();
}
/**
* Responsible for all touch events. Handles the heavy lifting of drag and also sends
* touch events to Scale Detector and Gesture Detector.
* @author Ortiz
*
*/
private class PrivateOnTouchListener implements OnTouchListener {
//
// Remember last point position for dragging
//
private PointF last = new PointF();
@Override
public boolean onTouch(View v, MotionEvent event) {
mScaleDetector.onTouchEvent(event);
mGestureDetector.onTouchEvent(event);
PointF curr = new PointF(event.getX(), event.getY());
if (state == State.NONE || state == State.DRAG || state == State.FLING) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
last.set(curr);
if (fling != null)
fling.cancelFling();
setState(State.DRAG);
break;
case MotionEvent.ACTION_MOVE:
if (state == State.DRAG) {
float deltaX = curr.x - last.x;
float deltaY = curr.y - last.y;
float fixTransX = getFixDragTrans(deltaX, viewWidth, getImageWidth());
float fixTransY = getFixDragTrans(deltaY, viewHeight, getImageHeight());
matrix.postTranslate(fixTransX, fixTransY);
fixTrans();
last.set(curr.x, curr.y);
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_UP:
setState(State.NONE);
break;
}
}
setImageMatrix(matrix);
//
// User-defined OnTouchListener
//
if(userTouchListener != null) {
userTouchListener.onTouch(v, event);
}
//
// OnTouchImageViewListener is set: TouchImageView dragged by user.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
//
// indicate event was handled
//
return true;
}
}
/**
* ScaleListener detects user two finger scaling and scales image.
* @author Ortiz
*
*/
private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
setState(State.ZOOM);
return true;
}
@Override
public boolean onScale(ScaleGestureDetector detector) {
scaleImage(detector.getScaleFactor(), detector.getFocusX(), detector.getFocusY(), true);
//
// OnTouchImageViewListener is set: TouchImageView pinch zoomed by user.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
return true;
}
@Override
public void onScaleEnd(ScaleGestureDetector detector) {
super.onScaleEnd(detector);
setState(State.NONE);
boolean animateToZoomBoundary = false;
float targetZoom = normalizedScale;
if (normalizedScale > maxScale) {
targetZoom = maxScale;
animateToZoomBoundary = true;
} else if (normalizedScale < minScale) {
targetZoom = minScale;
animateToZoomBoundary = true;
}
if (animateToZoomBoundary) {
DoubleTapZoom doubleTap = new DoubleTapZoom(targetZoom, viewWidth / 2, viewHeight / 2, true);
compatPostOnAnimation(doubleTap);
}
}
}
private void scaleImage(double deltaScale, float focusX, float focusY, boolean stretchImageToSuper) {
float lowerScale, upperScale;
if (stretchImageToSuper) {
lowerScale = superMinScale;
upperScale = superMaxScale;
} else {
lowerScale = minScale;
upperScale = maxScale;
}
float origScale = normalizedScale;
normalizedScale *= deltaScale;
if (normalizedScale > upperScale) {
normalizedScale = upperScale;
deltaScale = upperScale / origScale;
} else if (normalizedScale < lowerScale) {
normalizedScale = lowerScale;
deltaScale = lowerScale / origScale;
}
matrix.postScale((float) deltaScale, (float) deltaScale, focusX, focusY);
fixScaleTrans();
}
/**
* DoubleTapZoom calls a series of runnables which apply
* an animated zoom in/out graphic to the image.
* @author Ortiz
*
*/
private class DoubleTapZoom implements Runnable {
private long startTime;
private static final float ZOOM_TIME = 500;
private float startZoom, targetZoom;
private float bitmapX, bitmapY;
private boolean stretchImageToSuper;
private AccelerateDecelerateInterpolator interpolator = new AccelerateDecelerateInterpolator();
private PointF startTouch;
private PointF endTouch;
DoubleTapZoom(float targetZoom, float focusX, float focusY, boolean stretchImageToSuper) {
setState(State.ANIMATE_ZOOM);
startTime = System.currentTimeMillis();
this.startZoom = normalizedScale;
this.targetZoom = targetZoom;
this.stretchImageToSuper = stretchImageToSuper;
PointF bitmapPoint = transformCoordTouchToBitmap(focusX, focusY, false);
this.bitmapX = bitmapPoint.x;
this.bitmapY = bitmapPoint.y;
//
// Used for translating image during scaling
//
startTouch = transformCoordBitmapToTouch(bitmapX, bitmapY);
endTouch = new PointF(viewWidth / 2, viewHeight / 2);
}
@Override
public void run() {
float t = interpolate();
double deltaScale = calculateDeltaScale(t);
scaleImage(deltaScale, bitmapX, bitmapY, stretchImageToSuper);
translateImageToCenterTouchPosition(t);
fixScaleTrans();
setImageMatrix(matrix);
//
// OnTouchImageViewListener is set: double tap runnable updates listener
// with every frame.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
if (t < 1f) {
//
// We haven't finished zooming
//
compatPostOnAnimation(this);
} else {
//
// Finished zooming
//
setState(State.NONE);
}
}
/**
* Interpolate between where the image should start and end in order to translate
* the image so that the point that is touched is what ends up centered at the end
* of the zoom.
* @param t
*/
private void translateImageToCenterTouchPosition(float t) {
float targetX = startTouch.x + t * (endTouch.x - startTouch.x);
float targetY = startTouch.y + t * (endTouch.y - startTouch.y);
PointF curr = transformCoordBitmapToTouch(bitmapX, bitmapY);
matrix.postTranslate(targetX - curr.x, targetY - curr.y);
}
/**
* Use interpolator to get t
* @return
*/
private float interpolate() {
long currTime = System.currentTimeMillis();
float elapsed = (currTime - startTime) / ZOOM_TIME;
elapsed = Math.min(1f, elapsed);
return interpolator.getInterpolation(elapsed);
}
/**
* Interpolate the current targeted zoom and get the delta
* from the current zoom.
* @param t
* @return
*/
private double calculateDeltaScale(float t) {
double zoom = startZoom + t * (targetZoom - startZoom);
return zoom / normalizedScale;
}
}
/**
* This function will transform the coordinates in the touch event to the coordinate
 * system of the drawable that the ImageView contains.
* @param x x-coordinate of touch event
* @param y y-coordinate of touch event
* @param clipToBitmap Touch event may occur within view, but outside image content. True, to clip return value
* to the bounds of the bitmap size.
* @return Coordinates of the point touched, in the coordinate system of the original drawable.
*/
private PointF transformCoordTouchToBitmap(float x, float y, boolean clipToBitmap) {
matrix.getValues(m);
float origW = getDrawable().getIntrinsicWidth();
float origH = getDrawable().getIntrinsicHeight();
float transX = m[Matrix.MTRANS_X];
float transY = m[Matrix.MTRANS_Y];
float finalX = ((x - transX) * origW) / getImageWidth();
float finalY = ((y - transY) * origH) / getImageHeight();
if (clipToBitmap) {
finalX = Math.min(Math.max(finalX, 0), origW);
finalY = Math.min(Math.max(finalY, 0), origH);
}
return new PointF(finalX , finalY);
}
/**
* Inverse of transformCoordTouchToBitmap. This function will transform the coordinates in the
* drawable's coordinate system to the view's coordinate system.
* @param bx x-coordinate in original bitmap coordinate system
* @param by y-coordinate in original bitmap coordinate system
* @return Coordinates of the point in the view's coordinate system.
*/
private PointF transformCoordBitmapToTouch(float bx, float by) {
matrix.getValues(m);
float origW = getDrawable().getIntrinsicWidth();
float origH = getDrawable().getIntrinsicHeight();
float px = bx / origW;
float py = by / origH;
float finalX = m[Matrix.MTRANS_X] + getImageWidth() * px;
float finalY = m[Matrix.MTRANS_Y] + getImageHeight() * py;
return new PointF(finalX , finalY);
}
/**
* Fling launches sequential runnables which apply
* the fling graphic to the image. The values for the translation
* are interpolated by the Scroller.
* @author Ortiz
*
*/
private class Fling implements Runnable {
CompatScroller scroller;
int currX, currY;
Fling(int velocityX, int velocityY) {
setState(State.FLING);
scroller = new CompatScroller(context);
matrix.getValues(m);
int startX = (int) m[Matrix.MTRANS_X];
int startY = (int) m[Matrix.MTRANS_Y];
int minX, maxX, minY, maxY;
if (getImageWidth() > viewWidth) {
minX = viewWidth - (int) getImageWidth();
maxX = 0;
} else {
minX = maxX = startX;
}
if (getImageHeight() > viewHeight) {
minY = viewHeight - (int) getImageHeight();
maxY = 0;
} else {
minY = maxY = startY;
}
scroller.fling(startX, startY, (int) velocityX, (int) velocityY, minX,
maxX, minY, maxY);
currX = startX;
currY = startY;
}
public void cancelFling() {
if (scroller != null) {
setState(State.NONE);
scroller.forceFinished(true);
}
}
@Override
public void run() {
//
// OnTouchImageViewListener is set: TouchImageView listener has been flung by user.
// Listener runnable updated with each frame of fling animation.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
if (scroller.isFinished()) {
scroller = null;
return;
}
if (scroller.computeScrollOffset()) {
int newX = scroller.getCurrX();
int newY = scroller.getCurrY();
int transX = newX - currX;
int transY = newY - currY;
currX = newX;
currY = newY;
matrix.postTranslate(transX, transY);
fixTrans();
setImageMatrix(matrix);
compatPostOnAnimation(this);
}
}
}
@TargetApi(VERSION_CODES.GINGERBREAD)
private class CompatScroller {
Scroller scroller;
OverScroller overScroller;
boolean isPreGingerbread;
public CompatScroller(Context context) {
if (VERSION.SDK_INT < VERSION_CODES.GINGERBREAD) {
isPreGingerbread = true;
scroller = new Scroller(context);
} else {
isPreGingerbread = false;
overScroller = new OverScroller(context);
}
}
public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY) {
if (isPreGingerbread) {
scroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY);
} else {
overScroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY);
}
}
public void forceFinished(boolean finished) {
if (isPreGingerbread) {
scroller.forceFinished(finished);
} else {
overScroller.forceFinished(finished);
}
}
public boolean isFinished() {
if (isPreGingerbread) {
return scroller.isFinished();
} else {
return overScroller.isFinished();
}
}
public boolean computeScrollOffset() {
if (isPreGingerbread) {
return scroller.computeScrollOffset();
} else {
overScroller.computeScrollOffset();
return overScroller.computeScrollOffset();
}
}
public int getCurrX() {
if (isPreGingerbread) {
return scroller.getCurrX();
} else {
return overScroller.getCurrX();
}
}
public int getCurrY() {
if (isPreGingerbread) {
return scroller.getCurrY();
} else {
return overScroller.getCurrY();
}
}
}
    // Schedule a runnable for the next animation frame. postOnAnimation is only
    // available from Jelly Bean; older platforms approximate 60fps with postDelayed.
    @TargetApi(VERSION_CODES.JELLY_BEAN)
    private void compatPostOnAnimation(Runnable runnable) {
        if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) {
            postOnAnimation(runnable);
        } else {
            postDelayed(runnable, 1000/60);
        }
    }
    // A setZoom request captured before the first draw (when view/image sizes are not
    // yet known), replayed once the view has been measured and drawn.
    private class ZoomVariables {
        public float scale;
        public float focusX;
        public float focusY;
        public ScaleType scaleType;

        public ZoomVariables(float scale, float focusX, float focusY, ScaleType scaleType) {
            this.scale = scale;
            this.focusX = focusX;
            this.focusY = focusY;
            this.scaleType = scaleType;
        }
    }
private void printMatrixInfo() {
float[] n = new float[9];
matrix.getValues(n);
Log.d(DEBUG, "Scale: " + n[Matrix.MSCALE_X] + " TransX: " + n[Matrix.MTRANS_X] + " TransY: " + n[Matrix.MTRANS_Y]);
}
}
| |
/*
* This file is part of DropletVanillaCommands.
*
* Copyright (c) 2012 Spout LLC <http://www.spout.org/>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.spout.droplet.vanillacommands.commands;
import org.spout.api.command.CommandArguments;
import org.spout.api.command.CommandSource;
import org.spout.api.command.annotated.Command;
import org.spout.api.command.annotated.Permissible;
import org.spout.api.entity.Player;
import org.spout.api.exception.CommandException;
import org.spout.api.geo.discrete.Point;
import org.spout.vanilla.component.entity.misc.Burn;
import org.spout.vanilla.component.entity.misc.Effects;
import org.spout.vanilla.component.entity.misc.Health;
import org.spout.vanilla.component.entity.misc.Hunger;
import org.spout.vanilla.data.effect.EntityEffect;
import org.spout.vanilla.data.effect.EntityEffectType;
import org.spout.vanilla.data.effect.ExplosionEffect;
import org.spout.vanilla.event.cause.HealthChangeCause;
public class VanillaCommands {
	/**
	 * Used for starving the player.
	 * @throws CommandException
	 */
	@Command(aliases = {"starve"}, usage = "<player> <hunger level>", desc = "Sets the hunger level of the player.")
	@Permissible("vanillacommands.starve")
	public void starve(CommandSource source, CommandArguments args) throws CommandException {
		if (args.length() == 0) {
			// No target given: apply to the command sender, which must be a player.
			if (source instanceof Player) {
				((Player)source).get(Hunger.class).setHunger(0);
				source.sendMessage("Succesfully set your hunger to zero");
			} else {
				source.sendMessage("You are not a player!");
			}
		} else {
			if (args.isPlayer(0)) {
				// Optional second argument: the hunger level (defaults to 0).
				int hungerLevel = 0;
				if (args.length() == 2 && args.isInteger(1)) {
					hungerLevel = args.getInteger(1);
				}
				args.getPlayer(0).get(Hunger.class).setHunger(hungerLevel);
				source.sendMessage("Succesfully set " + args.getString(0) + "'s hunger to " + hungerLevel);
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
	}

	/**
	 * Used for setting the health of the player.
	 * @throws CommandException
	 */
	@Command(aliases = {"hurt", "damage"}, usage = "<player> <health level>", desc = "Sets the health level of the player.")
	@Permissible("vanillacommands.hurt")
	public void hurt(CommandSource source, CommandArguments args) throws CommandException {
		if (args.length() == 0) {
			// No target given: apply to the command sender, which must be a player.
			if (source instanceof Player) {
				((Player)source).get(Health.class).setHealth(1, HealthChangeCause.COMMAND);
				source.sendMessage("Succesfully set your health to one");
			} else {
				source.sendMessage("You are not a player!");
			}
		} else {
			if (args.isPlayer(0)) {
				// Optional second argument: the health level (defaults to 0).
				int healthLevel = 0;
				if (args.length() == 2 && args.isInteger(1)) {
					healthLevel = args.getInteger(1);
				}
				args.getPlayer(0).get(Health.class).setHealth(healthLevel, HealthChangeCause.COMMAND);
				source.sendMessage("Succesfully set " + args.getString(0) + "'s health to " + healthLevel);
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
	}

	/**
	 * Used for setting the player on fire
	 * @throws CommandException
	 */
	@Command(aliases = {"ignite"}, usage = "<player>", desc = "Sets the player on fire.")
	@Permissible("vanillacommands.ignite")
	public void ignite(CommandSource source, CommandArguments args) throws CommandException {
		if (args.length() == 0) {
			if (source instanceof Player) {
				// 20 ticks of fire, with visible flames.
				((Player)source).get(Burn.class).setOnFire(20, true);
				source.sendMessage("Succesfully set you on fire!");
			} else {
				source.sendMessage("You are not a player!");
			}
		} else {
			if (args.isPlayer(0)) {
				args.getPlayer(0).get(Burn.class).setOnFire(20, true);
				source.sendMessage("Succesfully set " + args.getString(0) + " on fire!");
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
	}

	/**
	 * Used for teleporting the player high up
	 * @throws CommandException
	 */
	@Command(aliases = {"fall"}, usage = "<player>", desc = "Makes the player fall.")
	@Permissible("vanillacommands.fall")
	public void fall(CommandSource source, CommandArguments args) throws CommandException {
		if (args.length() == 0) {
			if (source instanceof Player) {
				// Teleport the sender 60 blocks straight up so they fall back down.
				Player player = (Player)source;
				Point point = player.getScene().getPosition();
				Point newPoint = new Point(point.getWorld(), point.getBlockX(), (point.getBlockY() + 60), point.getBlockZ());
				player.teleport(newPoint);
				source.sendMessage("Succesfully fell!");
			} else {
				source.sendMessage("You must be a player!");
				source.sendMessage("Or use /c fall <player>");
			}
		} else {
			if (args.isPlayer(0)) {
				// Optional second argument: the fall distance in blocks (defaults to 60).
				int distance = 60;
				if (args.length() == 2 && args.isInteger(1)) {
					distance = args.getInteger(1);
				}
				Player player = args.getPlayer(0);
				Point point = player.getScene().getPosition();
				Point newPoint = new Point(point.getWorld(), point.getBlockX(), (point.getBlockY() + distance), point.getBlockZ());
				source.sendMessage("Succesfully made " + args.getString(0) + " fall " + distance + " blocks!");
				player.teleport(newPoint);
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
	}

	/**
	 * Used for creating an explosion around the player
	 * @throws CommandException
	 */
	@Command(aliases = {"explode"}, usage = "<player>", desc = "Makes the player explode.")
	@Permissible("vanillacommands.explode")
	public void explode(CommandSource source, CommandArguments args) throws CommandException {
		if (args.length() == 0) {
			if (source instanceof Player) {
				// Play the explosion effect at the player's position, then kill them.
				Player player = ((Player)source);
				Point point = player.getScene().getPosition();
				ExplosionEffect explosion = new ExplosionEffect(10, 4);
				explosion.play(player, point);
				player.get(Health.class).setHealth(0, HealthChangeCause.COMMAND);
				source.sendMessage("Succesfully exploded!");
			} else {
				source.sendMessage("You are not a player!");
				source.sendMessage("Or use /c explode <player>");
			}
		} else {
			if (args.isPlayer(0)) {
				Player player = args.getPlayer(0);
				Point point = player.getScene().getPosition();
				ExplosionEffect explosion = new ExplosionEffect(10, 4);
				explosion.play(player, point);
				player.get(Health.class).setHealth(0, HealthChangeCause.COMMAND);
				source.sendMessage("Succesfully exploded!");
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
	}

	/**
	 * Used for blinding a player
	 * @throws CommandException
	 */
	@Command(aliases = {"blind"}, usage = "<player>", desc = "Makes the player blind.")
	@Permissible("vanillacommands.blind")
	public void blind(CommandSource source, CommandArguments args) throws CommandException {
		Player player = null;
		if (args.length() == 0) {
			// BUG FIX: previously cast source to Player unconditionally, which threw
			// ClassCastException when the command came from the console. Guard the cast;
			// a non-player sender with no arguments falls through to the syntax message.
			if (source instanceof Player) {
				player = (Player)source;
			}
		} else if (args.length() == 1) {
			if (args.isPlayer(0)) {
				player = args.getPlayer(0);
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
		if (player != null) {
			player.add(Effects.class).add(new EntityEffect(EntityEffectType.BLINDNESS, 3F));
			source.sendMessage("Succesfully made " + player.getName() + " blind!");
		} else {
			source.sendMessage("Syntax: /c blind <player>");
		}
	}

	/**
	 * Used for giving the player the Nausea effect
	 * @throws CommandException
	 */
	@Command(aliases = {"ill"}, usage = "<player>", desc = "Makes the player ill.")
	@Permissible("vanillacommands.ill")
	public void ill(CommandSource source, CommandArguments args) throws CommandException {
		Player player = null;
		if (args.length() == 0) {
			// BUG FIX: guard the cast (see blind); a console source is not a Player.
			if (source instanceof Player) {
				player = (Player)source;
			}
		} else if (args.length() == 1) {
			if (args.isPlayer(0)) {
				player = args.getPlayer(0);
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
		if (player != null) {
			player.add(Effects.class).add(new EntityEffect(EntityEffectType.NAUSEA, 3F));
			source.sendMessage("Succesfully made " + player.getName() + " ill!");
		} else {
			source.sendMessage("Syntax: /c ill <player>");
		}
	}

	/**
	 * Used for giving the player the hunger effect
	 * @throws CommandException
	 */
	@Command(aliases = {"hungry"}, usage = "<player>", desc = "Makes the player hungry.")
	@Permissible("vanillacommands.hungry")
	public void hungry(CommandSource source, CommandArguments args) throws CommandException {
		Player player = null;
		if (args.length() == 0) {
			// BUG FIX: guard the cast (see blind); a console source is not a Player.
			if (source instanceof Player) {
				player = (Player)source;
			}
		} else if (args.length() == 1) {
			if (args.isPlayer(0)) {
				player = args.getPlayer(0);
			} else {
				source.sendMessage(args.getString(0) + " is not a player!");
			}
		}
		if (player != null) {
			player.add(Effects.class).add(new EntityEffect(EntityEffectType.HUNGER, 3F));
			source.sendMessage("Succesfully made " + player.getName() + " hungry!");
		} else {
			source.sendMessage("Syntax: /c hungry <player>");
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.session;
import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath;
import static org.apache.jackrabbit.oak.jcr.session.SessionImpl.checkIndexOnName;
import static org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants.NODE_TYPES_PATH;
import java.io.IOException;
import java.io.InputStream;
import javax.jcr.InvalidSerializedDataException;
import javax.jcr.NamespaceRegistry;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.observation.ObservationManager;
import javax.jcr.query.QueryManager;
import javax.jcr.version.Version;
import javax.jcr.version.VersionManager;
import org.apache.jackrabbit.api.JackrabbitWorkspace;
import org.apache.jackrabbit.api.security.authorization.PrivilegeManager;
import org.apache.jackrabbit.commons.xml.ParsingContentHandler;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate;
import org.apache.jackrabbit.oak.jcr.delegate.WorkspaceDelegate;
import org.apache.jackrabbit.oak.jcr.lock.LockDeprecation;
import org.apache.jackrabbit.oak.jcr.lock.LockManagerImpl;
import org.apache.jackrabbit.oak.jcr.query.QueryManagerImpl;
import org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation;
import org.apache.jackrabbit.oak.jcr.version.VersionManagerImpl;
import org.apache.jackrabbit.oak.jcr.xml.ImportHandler;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.name.ReadWriteNamespaceRegistry;
import org.apache.jackrabbit.oak.plugins.nodetype.write.ReadWriteNodeTypeManager;
import org.jetbrains.annotations.NotNull;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
 * Oak implementation of {@link JackrabbitWorkspace}.
 * <p>
 * Thin facade that delegates most work to the {@link SessionDelegate} and
 * {@link WorkspaceDelegate} obtained from the owning {@link SessionContext}.
 * Several multi-workspace operations (workspace creation/deletion,
 * cross-workspace copy and clone) are not implemented yet; see OAK-118.
 */
public class WorkspaceImpl implements JackrabbitWorkspace {

    // Context of the owning session; also doubles as the NamePathMapper
    // handed to the node type manager below.
    private final SessionContext sessionContext;
    private final SessionDelegate sessionDelegate;
    private final WorkspaceDelegate workspaceDelegate;
    // The managers below are created once per workspace instance and reused.
    private final QueryManagerImpl queryManager;
    private final VersionManagerImpl versionManager;
    private final ReadWriteNodeTypeManager nodeTypeManager;

    /**
     * Creates a workspace facade bound to the given session context.
     *
     * @param sessionContext context of the session this workspace belongs to
     */
    public WorkspaceImpl(final SessionContext sessionContext) {
        this.sessionContext = sessionContext;
        this.sessionDelegate = sessionContext.getSessionDelegate();
        this.workspaceDelegate = new WorkspaceDelegate(sessionContext);
        this.queryManager = new QueryManagerImpl(sessionContext);
        this.versionManager = new VersionManagerImpl(sessionContext);
        // Node type manager reading the registered types from the session's
        // root and writing through a fresh root from the content session.
        this.nodeTypeManager = new ReadWriteNodeTypeManager() {
            @Override
            protected void refresh() throws RepositoryException {
                // true: keep pending session changes across the refresh
                getSession().refresh(true);
            }

            @Override
            protected Tree getTypes() {
                return sessionDelegate.getRoot().getTree(NODE_TYPES_PATH);
            }

            @NotNull
            @Override
            protected Root getWriteRoot() {
                return sessionDelegate.getContentSession().getLatestRoot();
            }

            @Override
            @NotNull
            protected ValueFactory getValueFactory() {
                return sessionContext.getValueFactory();
            }

            @NotNull
            @Override
            protected NamePathMapper getNamePathMapper() {
                // The session context itself implements NamePathMapper.
                return sessionContext;
            }
        };
    }

    //----------------------------------------------------------< Workspace >---

    @Override
    @NotNull
    public Session getSession() {
        return sessionContext.getSession();
    }

    @Override
    public String getName() {
        return sessionDelegate.getWorkspaceName();
    }

    /**
     * Same-workspace copy; delegates to {@link #copy(String, String, String)}
     * with the current workspace name.
     */
    @Override
    public void copy(String srcAbsPath, String destAbsPath) throws RepositoryException {
        copy(getName(), srcAbsPath, destAbsPath);
    }

    // Maps a JCR path to its Oak representation, throwing
    // PathNotFoundException for paths that cannot be resolved.
    private String getOakPathOrThrowNotFound(String srcAbsPath) throws PathNotFoundException {
        return sessionContext.getOakPathOrThrowNotFound(srcAbsPath);
    }

    /**
     * Copies the subtree at {@code srcAbsPath} to {@code destAbsPath}.
     * Only copies within the current workspace are supported.
     *
     * @throws UnsupportedRepositoryOperationException if {@code srcWorkspace}
     *         is not the current workspace
     * @throws PathNotFoundException if either path cannot be resolved
     */
    @Override
    public void copy(String srcWorkspace,
            String srcAbsPath,
            final String destAbsPath) throws RepositoryException {
        final String srcOakPath = getOakPathOrThrowNotFound(srcAbsPath);
        final String destOakPath = getOakPathOrThrowNotFound(destAbsPath);

        if (!getName().equals(srcWorkspace)) {
            // Cross-workspace copy is not supported (single-workspace setup).
            throw new UnsupportedRepositoryOperationException("Not implemented.");
        }

        sessionDelegate.performVoid(new SessionOperation<Void>("copy", true) {
            @Override
            public void checkPreconditions() throws RepositoryException {
                super.checkPreconditions();
                ensureIsAlive();
            }

            @Override
            public void performVoid() throws RepositoryException {
                // Destination parent must not be protected, and the target
                // name must not carry an index ("[n]").
                sessionDelegate.checkProtectedNode(getParentPath(destOakPath));
                checkIndexOnName(destAbsPath);
                workspaceDelegate.copy(srcOakPath, destOakPath);
            }
        });
    }

    /**
     * Not implemented: validates both paths and checks that neither parent is
     * protected, then always throws.
     *
     * @throws UnsupportedRepositoryOperationException always (after the
     *         precondition checks pass)
     */
    @Override
    public void clone(String srcWorkspace, String srcAbsPath, String destAbsPath, boolean removeExisting) throws RepositoryException {
        final String srcOakPath = getOakPathOrThrowNotFound(srcAbsPath);
        final String destOakPath = getOakPathOrThrowNotFound(destAbsPath);

        sessionDelegate.performVoid(new SessionOperation<Void>("clone", true) {
            @Override
            public void checkPreconditions() throws RepositoryException {
                super.checkPreconditions();
                ensureIsAlive();
            }

            @Override
            public void performVoid() throws RepositoryException {
                sessionDelegate.checkProtectedNode(getParentPath(srcOakPath));
                sessionDelegate.checkProtectedNode(getParentPath(destOakPath));
                throw new UnsupportedRepositoryOperationException("Not implemented.");
            }
        });
    }

    /**
     * Moves the subtree at {@code srcAbsPath} to {@code destAbsPath} within
     * this workspace.
     * <p>
     * NOTE(review): unlike {@link #copy}, the checks here run outside a
     * {@code SessionOperation}; the {@code false} flag passed to
     * {@code sessionDelegate.move} presumably selects a non-transient
     * (workspace-level) move — confirm against {@code SessionDelegate}.
     */
    @Override
    public void move(String srcAbsPath, final String destAbsPath) throws RepositoryException {
        final String srcOakPath = getOakPathOrThrowNotFound(srcAbsPath);
        final String destOakPath = getOakPathOrThrowNotFound(destAbsPath);

        ensureIsAlive();
        sessionDelegate.checkProtectedNode(getParentPath(srcOakPath));
        sessionDelegate.checkProtectedNode(getParentPath(destOakPath));
        checkIndexOnName(destAbsPath);
        sessionDelegate.move(srcOakPath, destOakPath, false);
    }

    /** Delegates batch version restore to the version manager. */
    @Override
    public void restore(Version[] versions, boolean removeExisting) throws RepositoryException {
        getVersionManager().restore(versions, removeExisting);
    }

    /**
     * Returns a lock manager. Locking is deprecated; each call is reported
     * through {@link LockDeprecation} and a new manager instance is returned.
     */
    @Override
    public LockManagerImpl getLockManager() throws UnsupportedRepositoryOperationException {
        LockDeprecation.handleCall("get LockManager");
        return new LockManagerImpl(sessionContext);
    }

    @Override
    public QueryManager getQueryManager() throws RepositoryException {
        ensureIsAlive();
        return queryManager;
    }

    /**
     * Returns a namespace registry that reads from the session's root and
     * writes through a fresh root obtained from the content session.
     */
    @Override
    public NamespaceRegistry getNamespaceRegistry() {
        return new ReadWriteNamespaceRegistry(sessionDelegate.getRoot()) {
            @Override
            protected Root getWriteRoot() {
                return sessionDelegate.getContentSession().getLatestRoot();
            }

            @Override
            protected void refresh() throws RepositoryException {
                // true: keep pending session changes across the refresh
                getSession().refresh(true);
            }
        };
    }

    @Override
    public ReadWriteNodeTypeManager getNodeTypeManager() {
        return nodeTypeManager;
    }

    @Override
    public ObservationManager getObservationManager() throws RepositoryException {
        ensureIsAlive();
        return sessionContext.getObservationManager();
    }

    @Override
    public VersionManager getVersionManager() throws RepositoryException {
        ensureIsAlive();
        return versionManager;
    }

    /** Only the current workspace is reported. */
    @Override
    public String[] getAccessibleWorkspaceNames() throws RepositoryException {
        ensureIsAlive();
        // FIXME: adjust implementation once OAK-118 is being addressed.
        return new String[]{getName()};
    }

    @Override
    public ContentHandler getImportContentHandler(String parentAbsPath, int uuidBehavior) throws RepositoryException {
        ensureIsAlive();
        return new ImportHandler(parentAbsPath, sessionContext, uuidBehavior, true);
    }

    /**
     * Imports serialized XML below {@code parentAbsPath}. SAX failures are
     * unwrapped so that repository and I/O causes resurface with their
     * original type; the input stream is always closed (best effort).
     *
     * @throws InvalidSerializedDataException for XML parse errors without a
     *         recognized wrapped cause
     */
    @Override
    public void importXML(String parentAbsPath, InputStream in, int uuidBehavior) throws IOException, RepositoryException {
        ensureIsAlive();
        try {
            ContentHandler handler = getImportContentHandler(parentAbsPath, uuidBehavior);
            new ParsingContentHandler(handler).parse(in);
        } catch (SAXException e) {
            Throwable exception = e.getException();
            if (exception instanceof RepositoryException) {
                throw (RepositoryException) exception;
            } else if (exception instanceof IOException) {
                throw (IOException) exception;
            } else if (exception instanceof CommitFailedException) {
                throw ((CommitFailedException) exception).asRepositoryException();
            } else {
                throw new InvalidSerializedDataException("XML parse error", e);
            }
        } finally {
            // JCR-2903: always close the caller's stream, ignoring close errors
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignore) {
                }
            }
        }
    }

    /** Not implemented; see OAK-118. */
    @Override
    public void createWorkspace(String name) throws RepositoryException {
        ensureIsAlive();
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.createWorkspace");
    }

    /** Not implemented; see OAK-118. */
    @Override
    public void createWorkspace(String name, String srcWorkspace) throws RepositoryException {
        ensureIsAlive();
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.createWorkspace");
    }

    /** Not implemented; see OAK-118. */
    @Override
    public void deleteWorkspace(String name) throws RepositoryException {
        ensureIsAlive();
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.deleteWorkspace");
    }

    //------------------------------------------------< JackrabbitWorkspace >---

    /** Not implemented; see OAK-118. */
    @Override
    public void createWorkspace(String workspaceName, InputSource workspaceTemplate) throws RepositoryException {
        ensureIsAlive();
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.createWorkspace");
    }

    /**
     * @see org.apache.jackrabbit.api.JackrabbitWorkspace#getPrivilegeManager()
     */
    @Override
    public PrivilegeManager getPrivilegeManager() throws RepositoryException {
        return sessionContext.getPrivilegeManager();
    }

    //------------------------------------------------------------< private >---

    /**
     * Fails fast when the underlying session has been logged out.
     *
     * @throws RepositoryException if the session is no longer alive
     */
    private void ensureIsAlive() throws RepositoryException {
        // check session status
        if (!sessionDelegate.isAlive()) {
            throw new RepositoryException("This session has been closed.");
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.