gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import javax.annotation.concurrent.GuardedBy;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.instrumentation.ChannelCounter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
/**
 * In-memory Flume channel: events are buffered in a bounded
 * {@link LinkedBlockingDeque} shared between sources and sinks, with
 * per-transaction staging handled by {@link MemoryTransaction}. Queue
 * capacity, per-transaction capacity and the keep-alive wait are read from
 * the context in {@link #configure(Context)}.
 *
 * Thread-safety: the shared queue is guarded by {@code queueLock}; the two
 * semaphores ({@code queueRemaining}, {@code queueStored}) let threads block
 * for space or data without holding that lock.
 */
public class MemoryChannel extends BasicChannelSemantics {
  private static Logger LOGGER = LoggerFactory.getLogger(MemoryChannel.class);
  // Defaults used when the corresponding context property is missing or
  // unparseable.
  private static final Integer defaultCapacity = 100;
  private static final Integer defaultTransCapacity = 100;
  private static final Integer defaultKeepAlive = 3;

  /**
   * Per-transaction staging state: {@code putList} holds events put but not
   * yet committed, {@code takeList} holds events taken but not yet committed.
   * Commit publishes the puts to the shared queue; rollback returns the takes
   * to the head of the queue.
   */
  public class MemoryTransaction extends BasicTransactionSemantics {
    private LinkedBlockingDeque<Event> takeList;
    private LinkedBlockingDeque<Event> putList;
    private final ChannelCounter channelCounter;

    public MemoryTransaction(int transCapacity, ChannelCounter counter) {
      putList = new LinkedBlockingDeque<Event>(transCapacity);
      takeList = new LinkedBlockingDeque<Event>(transCapacity);
      channelCounter = counter;
    }

    @Override
    protected void doPut(Event event) {
      channelCounter.incrementEventPutAttemptCount();
      // offer() is non-blocking: a full putList means the transaction has
      // outgrown its configured capacity, which is reported as a caller error.
      if(!putList.offer(event)) {
        throw new ChannelException("Put queue for MemoryTransaction of capacity " +
            putList.size() + " full, consider committing more frequently, " +
            "increasing capacity or increasing thread count");
      }
    }

    @Override
    protected Event doTake() throws InterruptedException {
      channelCounter.incrementEventTakeAttemptCount();
      if(takeList.remainingCapacity() == 0) {
        throw new ChannelException("Take list for MemoryTransaction, capacity " +
            takeList.size() + " full, consider committing more frequently, " +
            "increasing capacity, or increasing thread count");
      }
      // Wait up to keepAlive seconds for a stored event; this blocks OUTSIDE
      // queueLock so committers are not starved while we wait.
      if(!queueStored.tryAcquire(keepAlive, TimeUnit.SECONDS)) {
        return null;
      }
      Event event;
      synchronized(queueLock) {
        event = queue.poll();
      }
      // The acquired permit guarantees an element was present in the queue.
      Preconditions.checkNotNull(event, "Queue.poll returned NULL despite semaphore " +
          "signalling existence of entry");
      takeList.put(event);
      return event;
    }

    @Override
    protected void doCommit() throws InterruptedException {
      // Net change in queue occupancy: negative means this commit adds more
      // events than it removed, so space must be reserved first.
      int remainingChange = takeList.size() - putList.size();
      if(remainingChange < 0) {
        if(!queueRemaining.tryAcquire(-remainingChange, keepAlive, TimeUnit.SECONDS)) {
          throw new ChannelException("Space for commit to queue couldn't be acquired" +
              " Sinks are likely not keeping up with sources, or the buffer size is too tight");
        }
      }
      int puts = putList.size();
      int takes = takeList.size();
      synchronized(queueLock) {
        if(puts > 0 ) {
          while(!putList.isEmpty()) {
            // Space was reserved via queueRemaining above, so offer() should
            // never fail here; a failure indicates a broken invariant.
            if(!queue.offer(putList.removeFirst())) {
              throw new RuntimeException("Queue add failed, this shouldn't be able to happen");
            }
          }
        }
        putList.clear();
        takeList.clear();
      }
      // Publish the committed puts to waiting takers...
      queueStored.release(puts);
      // ...and return any net space freed by the committed takes.
      if(remainingChange > 0) {
        queueRemaining.release(remainingChange);
      }
      if (puts > 0) {
        channelCounter.addToEventPutSuccessCount(puts);
      }
      if (takes > 0) {
        channelCounter.addToEventTakeSuccessCount(takes);
      }
      channelCounter.setChannelSize(queue.size());
    }

    @Override
    protected void doRollback() {
      int takes = takeList.size();
      synchronized(queueLock) {
        Preconditions.checkState(queue.remainingCapacity() >= takeList.size(), "Not enough space in memory channel " +
            "queue to rollback takes. This should never happen, please report");
        // Return taken events to the HEAD of the queue, preserving their
        // original order (removeLast + addFirst reverses the reversal).
        while(!takeList.isEmpty()) {
          queue.addFirst(takeList.removeLast());
        }
        // Uncommitted puts are simply discarded on rollback.
        putList.clear();
      }
      // The returned events are available to takers again.
      queueStored.release(takes);
      channelCounter.setChannelSize(queue.size());
    }
  }

  // lock to guard queue, mainly needed to keep it locked down during resizes
  // it should never be held through a blocking operation
  // NOTE(review): this is a boxed Integer used purely as a monitor object;
  // small Integer values are cached by the JVM, so this monitor could in
  // principle be shared with unrelated code synchronizing on the same boxed
  // value — consider a dedicated `new Object()` instead. TODO confirm.
  private Integer queueLock;
  @GuardedBy(value = "queueLock")
  private LinkedBlockingDeque<Event> queue;
  // invariant that tracks the amount of space remaining in the queue(with all uncommitted takeLists deducted)
  // we maintain the remaining permits = queue.remaining - takeList.size()
  // this allows local threads waiting for space in the queue to commit without denying access to the
  // shared lock to threads that would make more space on the queue
  private Semaphore queueRemaining;
  // used to make "reservations" to grab data from the queue.
  // by using this we can block for a while to get data without locking all other threads out
  // like we would if we tried to use a blocking call on queue
  private Semaphore queueStored;
  // maximum items in a transaction queue
  private volatile Integer transCapacity;
  // seconds to wait when acquiring space (commit) or data (take) before
  // giving up
  private volatile int keepAlive;
  private ChannelCounter channelCounter;

  public MemoryChannel() {
    super();
    queueLock = 0;
  }

  /**
   * Reads "capacity", "transactionCapacity" and "keep-alive" from the
   * context, falling back to the defaults on missing or malformed values,
   * then creates the backing queue on first call or resizes it on
   * reconfiguration.
   */
  @Override
  public void configure(Context context) {
    String strCapacity = context.getString("capacity");
    Integer capacity = null;
    if(strCapacity == null) {
      capacity = defaultCapacity;
    } else {
      try {
        capacity = Integer.parseInt(strCapacity);
      } catch(NumberFormatException e) {
        // Malformed capacity silently falls back to the default.
        capacity = defaultCapacity;
      }
    }
    String strTransCapacity = context.getString("transactionCapacity");
    if(strTransCapacity == null) {
      transCapacity = defaultTransCapacity;
    } else {
      try {
        transCapacity = Integer.parseInt(strTransCapacity);
      } catch(NumberFormatException e) {
        transCapacity = defaultTransCapacity;
      }
    }
    // A transaction can never stage more events than the channel can hold.
    Preconditions.checkState(transCapacity <= capacity);
    String strKeepAlive = context.getString("keep-alive");
    if (strKeepAlive == null) {
      keepAlive = defaultKeepAlive;
    } else {
      keepAlive = Integer.parseInt(strKeepAlive);
    }
    if(queue != null) {
      // Reconfiguration of a live channel: attempt an in-place resize.
      try {
        resizeQueue(capacity);
      } catch (InterruptedException e) {
        // Preserve the interrupt for the caller; resize is abandoned.
        Thread.currentThread().interrupt();
      }
    } else {
      // First configuration: create the queue and its bookkeeping semaphores.
      synchronized(queueLock) {
        queue = new LinkedBlockingDeque<Event>(capacity);
        queueRemaining = new Semaphore(capacity);
        queueStored = new Semaphore(0);
      }
    }
    if (channelCounter == null) {
      channelCounter = new ChannelCounter(getName());
    }
  }

  /**
   * Resizes the backing queue to {@code capacity}. Downsizing first acquires
   * the surplus space permits (so no events are lost); if they cannot be
   * acquired within the keep-alive window the resize is aborted with a
   * warning. Upsizing copies the queue and then releases the extra permits.
   */
  private void resizeQueue(int capacity) throws InterruptedException {
    int oldCapacity;
    synchronized(queueLock) {
      // Total capacity = occupied + free slots.
      oldCapacity = queue.size() + queue.remainingCapacity();
    }
    if(oldCapacity == capacity) {
      return;
    } else if (oldCapacity > capacity) {
      if(!queueRemaining.tryAcquire(oldCapacity - capacity, keepAlive, TimeUnit.SECONDS)) {
        LOGGER.warn("Couldn't acquire permits to downsize the queue, resizing has been aborted");
      } else {
        synchronized(queueLock) {
          LinkedBlockingDeque<Event> newQueue = new LinkedBlockingDeque<Event>(capacity);
          newQueue.addAll(queue);
          queue = newQueue;
        }
      }
    } else {
      synchronized(queueLock) {
        LinkedBlockingDeque<Event> newQueue = new LinkedBlockingDeque<Event>(capacity);
        newQueue.addAll(queue);
        queue = newQueue;
      }
      // Make the additional space visible to committers.
      queueRemaining.release(capacity - oldCapacity);
    }
  }

  @Override
  public synchronized void start() {
    channelCounter.start();
    channelCounter.setChannelSize(queue.size());
    super.start();
  }

  @Override
  public synchronized void stop() {
    channelCounter.setChannelSize(queue.size());
    channelCounter.stop();
    super.stop();
  }

  @Override
  protected BasicTransactionSemantics createTransaction() {
    return new MemoryTransaction(transCapacity, channelCounter);
  }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.volley.toolbox;
import android.os.SystemClock;
import android.util.Log;
import com.android.volley.Cache;
import com.android.volley.VolleyLog;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Cache implementation that caches files directly onto the hard disk in the specified
* directory. The default disk usage size is 5MB, but is configurable.
*/
/**
 * Cache implementation that caches files directly onto the hard disk in the specified
 * directory. The default disk usage size is 5MB, but is configurable.
 *
 * Each entry is stored as a single file: a serialized {@link CacheHeader}
 * followed by the raw body bytes. An in-memory map of headers mirrors what is
 * on disk; {@link #initialize()} rebuilds that map by scanning the directory.
 * All public methods are synchronized on this instance.
 */
public class DiskBasedCache implements Cache {
    /** Map of the Key, CacheHeader pairs */
    // Access-ordered LinkedHashMap (third ctor arg = true): iteration order is
    // least-recently-used first, which is what pruneIfNeeded relies on.
    private final Map<String, CacheHeader> mEntries =
            new LinkedHashMap<String, CacheHeader>(16, .75f, true);
    /** Total amount of space currently used by the cache in bytes. */
    private long mTotalSize = 0;
    /** The root directory to use for the cache. */
    private final File mRootDirectory;
    /** The maximum size of the cache in bytes. */
    private final int mMaxCacheSizeInBytes;
    /** Default maximum disk usage in bytes. */
    private static final int DEFAULT_DISK_USAGE_BYTES = 5 * 1024 * 1024;
    /** High water mark percentage for the cache */
    private static final float HYSTERESIS_FACTOR = 0.9f;
    /** Current cache version */
    // Bumping this invalidates every on-disk entry via the readHeader check.
    private static final int CACHE_VERSION = 2;
    /**
     * Constructs an instance of the DiskBasedCache at the specified directory.
     * @param rootDirectory The root directory of the cache.
     * @param maxCacheSizeInBytes The maximum size of the cache in bytes.
     */
    public DiskBasedCache(File rootDirectory, int maxCacheSizeInBytes) {
        mRootDirectory = rootDirectory;
        mMaxCacheSizeInBytes = maxCacheSizeInBytes;
    }
    /**
     * Constructs an instance of the DiskBasedCache at the specified directory using
     * the default maximum cache size of 5MB.
     * @param rootDirectory The root directory of the cache.
     */
    public DiskBasedCache(File rootDirectory) {
        this(rootDirectory, DEFAULT_DISK_USAGE_BYTES);
    }
    /**
     * Clears the cache. Deletes all cached files from disk.
     */
    @Override
    public synchronized void clear() {
        File[] files = mRootDirectory.listFiles();
        if (files != null) {
            for (File file : files) {
                file.delete();
            }
        }
        mEntries.clear();
        mTotalSize = 0;
        VolleyLog.d("Cache cleared.");
    }
    /**
     * Returns the cache entry with the specified key if it exists, null otherwise.
     * On any read error the on-disk entry is removed and null is returned.
     */
    @Override
    public synchronized Entry get(String key) {
        CacheHeader entry = mEntries.get(key);
        // if the entry does not exist, return.
        if (entry == null) {
            return null;
        }
        File file = getFileForKey(key);
        CountingInputStream cis = null;
        try {
            cis = new CountingInputStream(new FileInputStream(file));
            CacheHeader.readHeader(cis); // eat header
            // Body length = file size minus the header bytes just consumed,
            // as counted by the CountingInputStream.
            byte[] data = streamToBytes(cis, (int) (file.length() - cis.bytesRead));
            return entry.toCacheEntry(data);
        } catch (IOException e) {
            VolleyLog.d("%s: %s", file.getAbsolutePath(), e.toString());
            remove(key);
            return null;
        } finally {
            if (cis != null) {
                try {
                    cis.close();
                } catch (IOException ioe) {
                    // A failed close is treated as a failed read: the entry
                    // obtained above is discarded and null is returned.
                    return null;
                }
            }
        }
    }
    /**
     * Initializes the DiskBasedCache by scanning for all files currently in the
     * specified root directory. Creates the root directory if necessary.
     * Files whose headers cannot be read (corrupt or stale version) are
     * deleted.
     */
    @Override
    public synchronized void initialize() {
        if (!mRootDirectory.exists()) {
            if (!mRootDirectory.mkdirs()) {
                VolleyLog.e("Unable to create cache dir %s", mRootDirectory.getAbsolutePath());
            }
            return;
        }
        File[] files = mRootDirectory.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(file);
                CacheHeader entry = CacheHeader.readHeader(fis);
                // size is not serialized in the header; recover it from disk.
                entry.size = file.length();
                putEntry(entry.key, entry);
            } catch (IOException e) {
                // NOTE(review): file can never be null inside this loop, so
                // this guard is redundant — harmless but could be dropped.
                if (file != null) {
                    file.delete();
                }
            } finally {
                try {
                    if (fis != null) {
                        fis.close();
                    }
                } catch (IOException ignored) { }
            }
        }
    }
    /**
     * Invalidates an entry in the cache.
     * @param key Cache key
     * @param fullExpire True to fully expire the entry, false to soft expire
     */
    @Override
    public synchronized void invalidate(String key, boolean fullExpire) {
        Entry entry = get(key);
        if (entry != null) {
            // Zeroing the TTLs marks the entry expired; re-put persists the
            // updated header to disk.
            entry.softTtl = 0;
            if (fullExpire) {
                entry.ttl = 0;
            }
            put(key, entry);
        }
    }
    /**
     * Puts the entry with the specified key into the cache.
     * Prunes first so the new entry fits; on a write failure the partially
     * written file is deleted.
     */
    @Override
    public synchronized void put(String key, Entry entry) {
        pruneIfNeeded(entry.data.length);
        File file = getFileForKey(key);
        try {
            FileOutputStream fos = new FileOutputStream(file);
            CacheHeader e = new CacheHeader(key, entry);
            e.writeHeader(fos);
            fos.write(entry.data);
            fos.close();
            putEntry(key, e);
            return;
        } catch (IOException e) {
        }
        // Only reached when the write above failed: clean up the stub file.
        boolean deleted = file.delete();
        if (!deleted) {
            VolleyLog.d("Could not clean up file %s", file.getAbsolutePath());
        }
    }
    /**
     * Removes the specified key from the cache if it exists.
     */
    @Override
    public synchronized void remove(String key) {
        boolean deleted = getFileForKey(key).delete();
        removeEntry(key);
        if (!deleted) {
            VolleyLog.d("Could not delete cache entry for key=%s, filename=%s",
                    key, getFilenameForKey(key));
        }
    }
    /**
     * Creates a pseudo-unique filename for the specified cache key.
     * The key is split in half and each half's hashCode is concatenated,
     * reducing (but not eliminating) collision risk versus a single hash.
     * @param key The key to generate a file name for.
     * @return A pseudo-unique filename.
     */
    public static String getFilenameForKey(String key) {
        int firstHalfLength = key.length() / 2;
        String localFilename = String.valueOf(key.substring(0, firstHalfLength).hashCode());
        localFilename += String.valueOf(key.substring(firstHalfLength).hashCode());
        //Log.d("DiskBaseCache", "key=" + key + " filename=" + localFilename);
        return localFilename;
    }
    /**
     * Returns a file object for the given cache key.
     */
    public File getFileForKey(String key) {
        return new File(mRootDirectory, getFilenameForKey(key));
    }
    /**
     * Prunes the cache to fit the amount of bytes specified.
     * Evicts least-recently-used entries (map iteration order) until usage
     * plus the needed space drops below HYSTERESIS_FACTOR of the maximum.
     * @param neededSpace The amount of bytes we are trying to fit into the cache.
     */
    private void pruneIfNeeded(int neededSpace) {
        if ((mTotalSize + neededSpace) < mMaxCacheSizeInBytes) {
            return;
        }
        if (VolleyLog.DEBUG) {
            VolleyLog.v("Pruning old cache entries.");
        }
        long before = mTotalSize;
        int prunedFiles = 0;
        long startTime = SystemClock.elapsedRealtime();
        Iterator<Map.Entry<String, CacheHeader>> iterator = mEntries.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<String, CacheHeader> entry = iterator.next();
            CacheHeader e = entry.getValue();
            boolean deleted = getFileForKey(e.key).delete();
            if (deleted) {
                mTotalSize -= e.size;
            } else {
                VolleyLog.d("Could not delete cache entry for key=%s, filename=%s",
                        e.key, getFilenameForKey(e.key));
            }
            // Removed from the map regardless of whether the disk delete
            // succeeded, so the bookkeeping never references a missing entry.
            iterator.remove();
            prunedFiles++;
            if ((mTotalSize + neededSpace) < mMaxCacheSizeInBytes * HYSTERESIS_FACTOR) {
                break;
            }
        }
        if (VolleyLog.DEBUG) {
            VolleyLog.v("pruned %d files, %d bytes, %d ms",
                    prunedFiles, (mTotalSize - before), SystemClock.elapsedRealtime() - startTime);
        }
    }
    /**
     * Puts the entry with the specified key into the cache.
     * Keeps mTotalSize consistent whether the key is new or replaces an
     * existing entry.
     * @param key The key to identify the entry by.
     * @param entry The entry to cache.
     */
    private void putEntry(String key, CacheHeader entry) {
        if (!mEntries.containsKey(key)) {
            mTotalSize += entry.size;
        } else {
            CacheHeader oldEntry = mEntries.get(key);
            mTotalSize += (entry.size - oldEntry.size);
        }
        mEntries.put(key, entry);
    }
    /**
     * Removes the entry identified by 'key' from the cache.
     */
    private void removeEntry(String key) {
        CacheHeader entry = mEntries.get(key);
        if (entry != null) {
            mTotalSize -= entry.size;
            mEntries.remove(key);
        }
    }
    /**
     * Reads the contents of an InputStream into a byte[].
     * Throws if the stream ends before 'length' bytes are read.
     * */
    private static byte[] streamToBytes(InputStream in, int length) throws IOException {
        byte[] bytes = new byte[length];
        int count;
        int pos = 0;
        while (pos < length && ((count = in.read(bytes, pos, length - pos)) != -1)) {
            pos += count;
        }
        if (pos != length) {
            throw new IOException("Expected " + length + " bytes, read " + pos + " bytes");
        }
        return bytes;
    }
    /**
     * Handles holding onto the cache headers for an entry.
     */
    private static class CacheHeader {
        /** The size of the data identified by this CacheHeader. (This is not
         * serialized to disk. */
        public long size;
        /** The key that identifies the cache entry. */
        public String key;
        /** ETag for cache coherence. */
        public String etag;
        /** Date of this response as reported by the server. */
        public long serverDate;
        /** TTL for this record. */
        public long ttl;
        /** Soft TTL for this record. */
        public long softTtl;
        /** Headers from the response resulting in this cache entry. */
        public Map<String, String> responseHeaders;
        private CacheHeader() { }
        /**
         * Instantiates a new CacheHeader object
         * @param key The key that identifies the cache entry
         * @param entry The cache entry.
         */
        public CacheHeader(String key, Entry entry) {
            this.key = key;
            this.size = entry.data.length;
            this.etag = entry.etag;
            this.serverDate = entry.serverDate;
            this.ttl = entry.ttl;
            this.softTtl = entry.softTtl;
            this.responseHeaders = entry.responseHeaders;
        }
        /**
         * Reads the header off of an InputStream and returns a CacheHeader object.
         * Throws IOException when the on-disk version does not match
         * CACHE_VERSION, which causes callers to discard the entry.
         * @param is The InputStream to read from.
         * @throws IOException
         */
        public static CacheHeader readHeader(InputStream is) throws IOException {
            CacheHeader entry = new CacheHeader();
            ObjectInputStream ois = new ObjectInputStream(is);
            int version = ois.readByte();
            if (version != CACHE_VERSION) {
                // don't bother deleting, it'll get pruned eventually
                throw new IOException();
            }
            entry.key = ois.readUTF();
            entry.etag = ois.readUTF();
            // Empty string is the on-disk encoding of a null etag
            // (see writeHeader).
            if (entry.etag.equals("")) {
                entry.etag = null;
            }
            entry.serverDate = ois.readLong();
            entry.ttl = ois.readLong();
            entry.softTtl = ois.readLong();
            entry.responseHeaders = readStringStringMap(ois);
            return entry;
        }
        /**
         * Creates a cache entry for the specified data.
         */
        public Entry toCacheEntry(byte[] data) {
            Entry e = new Entry();
            e.data = data;
            e.etag = etag;
            e.serverDate = serverDate;
            e.ttl = ttl;
            e.softTtl = softTtl;
            e.responseHeaders = responseHeaders;
            return e;
        }
        /**
         * Writes the contents of this CacheHeader to the specified OutputStream.
         * @return true on success, false if an IOException occurred (logged).
         */
        public boolean writeHeader(OutputStream os) {
            try {
                ObjectOutputStream oos = new ObjectOutputStream(os);
                oos.writeByte(CACHE_VERSION);
                oos.writeUTF(key);
                // writeUTF cannot encode null: store null etag as "".
                oos.writeUTF(etag == null ? "" : etag);
                oos.writeLong(serverDate);
                oos.writeLong(ttl);
                oos.writeLong(softTtl);
                writeStringStringMap(responseHeaders, oos);
                oos.flush();
                return true;
            } catch (IOException e) {
                VolleyLog.d("%s", e.toString());
                return false;
            }
        }
        /**
         * Writes all entries of {@code map} into {@code oos}.
         * A null map is written as size 0, so readStringStringMap always
         * round-trips to a (possibly empty) non-null map.
         */
        private static void writeStringStringMap(Map<String, String> map, ObjectOutputStream oos)
                throws IOException {
            if (map != null) {
                oos.writeInt(map.size());
                for (Map.Entry<String, String> entry : map.entrySet()) {
                    oos.writeUTF(entry.getKey());
                    oos.writeUTF(entry.getValue());
                }
            } else {
                oos.writeInt(0);
            }
        }
        /**
         * @return a string to string map which contains the entries read from {@code ois}
         * previously written by {@link #writeStringStringMap}
         */
        private static Map<String, String> readStringStringMap(ObjectInputStream ois)
                throws IOException {
            int size = ois.readInt();
            Map<String, String> result = (size == 0)
                    ? Collections.<String, String>emptyMap()
                    : new HashMap<String, String>(size);
            for (int i = 0; i < size; i++) {
                // intern() deduplicates the many repeated header names/values
                // across cache entries.
                String key = ois.readUTF().intern();
                String value = ois.readUTF().intern();
                result.put(key, value);
            }
            return result;
        }
    }
    /**
     * Wraps an InputStream and counts how many bytes have been read, so the
     * header size can be subtracted from the file length in get().
     */
    private static class CountingInputStream extends FilterInputStream {
        private int bytesRead = 0;
        private CountingInputStream(InputStream in) {
            super(in);
        }
        @Override
        public int read() throws IOException {
            int result = super.read();
            if (result != -1) {
                bytesRead++;
            }
            return result;
        }
        @Override
        public int read(byte[] buffer, int offset, int count) throws IOException {
            int result = super.read(buffer, offset, count);
            if (result != -1) {
                bytesRead += result;
            }
            return result;
        }
    }
}
| |
/*
* Copyright 2016 HuntBugs contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package one.util.huntbugs.detect;
import com.strobel.assembler.metadata.JvmType;
import com.strobel.assembler.metadata.MethodDefinition;
import com.strobel.assembler.metadata.TypeReference;
import com.strobel.decompiler.ast.AstCode;
import com.strobel.decompiler.ast.Expression;
import one.util.huntbugs.flow.Inf;
import one.util.huntbugs.registry.MethodContext;
import one.util.huntbugs.registry.anno.AstNodes;
import one.util.huntbugs.registry.anno.AstVisitor;
import one.util.huntbugs.registry.anno.WarningDefinition;
import one.util.huntbugs.util.Exprs;
import one.util.huntbugs.util.Methods;
import one.util.huntbugs.util.NodeChain;
import one.util.huntbugs.util.Nodes;
import one.util.huntbugs.warning.Role.NumberRole;
import one.util.huntbugs.warning.Roles;
/**
* @author Tagir Valeev
*
*/
@WarningDefinition(category = "Correctness", name = "RemOne", maxScore = 80)
@WarningDefinition(category = "Correctness", name = "CompareBitAndIncompatible", maxScore = 75)
@WarningDefinition(category = "Correctness", name = "CompareBitOrIncompatible", maxScore = 75)
@WarningDefinition(category = "RedundantCode", name = "UselessOrWithZero", maxScore = 60)
@WarningDefinition(category = "RedundantCode", name = "UselessAndWithMinusOne", maxScore = 60)
@WarningDefinition(category = "RedundantCode", name = "UselessAndWithZero", maxScore = 70)
@WarningDefinition(category = "Correctness", name = "BitCheckGreaterNegative", maxScore = 80)
@WarningDefinition(category = "Correctness", name = "BitShiftInvalidAmount", maxScore = 75)
@WarningDefinition(category = "Correctness", name = "BitShiftWrongPriority", maxScore = 70)
@WarningDefinition(category = "BadPractice", name = "BitCheckGreater", maxScore = 35)
@WarningDefinition(category = "Correctness", name = "BitOrSignedByte", maxScore = 50)
@WarningDefinition(category = "Correctness", name = "BitAddSignedByte", maxScore = 35)
/**
 * Detector for suspicious bit-manipulation and arithmetic expressions:
 * useless and/or with identity constants, bit tests compared against
 * incompatible constants, invalid or mis-parenthesized shifts, and
 * sign-extension hazards when composing bytes with | or +.
 */
public class BadMath {
    // Warning roles used to attach the relevant constants to reports.
    private static final NumberRole COMPARED_TO = NumberRole.forName("COMPARED_TO");
    private static final NumberRole AND_OPERAND = NumberRole.forName("AND_OPERAND");
    private static final NumberRole OR_OPERAND = NumberRole.forName("OR_OPERAND");

    /**
     * Flags shifts of the form {@code x << C + y}, which — due to operator
     * precedence — shift by {@code C + y} although {@code (x << C) + y} was
     * likely intended. Priority is lowered for hashCode-style patterns where
     * the idiom is common and usually intentional.
     */
    @AstVisitor(nodes = AstNodes.EXPRESSIONS)
    public void checkWrongPriority(Expression expr, MethodContext mc, MethodDefinition md) {
        if(expr.getCode() == AstCode.Shl) {
            Expression leftOp = expr.getArguments().get(0);
            Expression rightOp = expr.getArguments().get(1);
            if(rightOp.getCode() == AstCode.Add) {
                Expression leftAddend = rightOp.getArguments().get(0);
                Object leftConst = Nodes.getConstant(leftAddend);
                Expression rightAddend = rightOp.getArguments().get(1);
                // 1 << C + y is a known idiom (e.g. power-of-two tables), so
                // a constant left operand of 1 is excluded.
                if(leftConst instanceof Integer && !Integer.valueOf(1).equals(Nodes.getConstant(leftOp))) {
                    int priority = 0;
                    int c = (Integer)leftConst;
                    // Only report when the shift amount is still in range for
                    // the operand width (otherwise BitShiftInvalidAmount applies).
                    if(c < 32 || (c < 64 && leftOp.getExpectedType().getSimpleType() == JvmType.Long)) {
                        // Deprioritize hashCode methods whose result flows
                        // straight to return — a common intentional pattern.
                        if(!Methods.isHashCodeMethod(md) || !Inf.BACKLINK.findTransitiveUsages(expr, false).allMatch(e -> e.getCode() == AstCode.Return)) {
                            priority += 10;
                            // Shifts by 8/16 look like deliberate byte packing.
                            if(c == 16) {
                                priority += 5;
                            } else if(c != 8) {
                                priority += 10;
                            }
                            if(rightAddend.getCode() != AstCode.And) {
                                priority += 10;
                            }
                        }
                        mc.report("BitShiftWrongPriority", priority, expr, Roles.NUMBER.create(c));
                    }
                }
            }
        }
    }

    /**
     * Main expression visitor dispatching on the AST op-code to the
     * individual bit/arithmetic checks.
     */
    @AstVisitor(nodes = AstNodes.EXPRESSIONS)
    public void visit(Expression expr, NodeChain nc, MethodContext mc) {
        TypeReference inferredType = expr.getInferredType();
        if (inferredType == null)
            return;
        JvmType exprType = inferredType.getSimpleType();
        switch (expr.getCode()) {
        case Rem:
            // x % 1 is always 0.
            if (isConst(expr.getArguments().get(1), 1)) {
                mc.report("RemOne", 0, expr.getArguments().get(0));
            }
            break;
        case Add:
            checkSignedByte(expr, mc);
            break;
        case Or:
            checkSignedByte(expr, mc);
            // passthru
        case Xor:
            // x | 0 and x ^ 0 are no-ops (unless part of a compound
            // assignment like x |= ..., which is excluded).
            if (exprType == JvmType.Long || exprType == JvmType.Integer) {
                Nodes.ifBinaryWithConst(expr, (child, constant) -> {
                    if (constant instanceof Number && ((Number) constant).longValue() == 0
                            && !Nodes.isCompoundAssignment(nc.getNode())
                            && (nc.getParent() == null || !Nodes.isCompoundAssignment(nc.getParent().getNode()))) {
                        mc.report("UselessOrWithZero", 0, child, Roles.OPERATION.create(expr));
                    }
                });
            }
            break;
        case And:
            // x & -1 is a no-op; x & 0 is always 0.
            Nodes.ifBinaryWithConst(expr, (child, constant) -> {
                if (constant instanceof Number) {
                    long val = ((Number) constant).longValue();
                    if (val == -1 && !Nodes.isCompoundAssignment(nc.getNode()))
                        mc.report("UselessAndWithMinusOne", 0, child, Roles.NUMBER.create((Number) constant));
                    else if (val == 0)
                        mc.report("UselessAndWithZero", 0, child);
                }
            });
            break;
        case CmpGt:
        case CmpLt: {
            // (x & MASK) > 0 — with a negative mask the sign bit makes the
            // test wrong (BitCheckGreaterNegative); otherwise != 0 is the
            // clearer form (BitCheckGreater).
            Expression bitAnd = Exprs.getChild(expr, expr.getCode() == AstCode.CmpGt ? 0 : 1);
            Object zero = Nodes.getConstant(expr.getArguments().get(expr.getCode() == AstCode.CmpGt ? 1 : 0));
            if (isIntegral(zero) && ((Number) zero).longValue() == 0 && bitAnd.getCode() == AstCode.And) {
                Nodes.ifBinaryWithConst(bitAnd, (flags, mask) -> {
                    if (isIntegral(mask)) {
                        if (mask instanceof Integer && ((Integer) mask) < 0 || mask instanceof Long
                                && ((Long) mask) < 0) {
                            mc.report("BitCheckGreaterNegative", 0, expr, Roles.NUMBER.create((Number) mask));
                        } else {
                            mc.report("BitCheckGreater", 0, expr, Roles.NUMBER.create((Number) mask));
                        }
                    }
                });
            }
            break;
        }
        case CmpEq:
        case CmpNe:
            // (x & A) == B where B has bits outside A, or (x | A) == B where
            // B is missing bits of A, can never hold.
            Nodes.ifBinaryWithConst(expr, (child, outerConst) -> {
                if (!isIntegral(outerConst))
                    return;
                if (child.getCode() != AstCode.And && child.getCode() != AstCode.Or)
                    return;
                long outerVal = ((Number) outerConst).longValue();
                Nodes.ifBinaryWithConst(child, (grandChild, innerConst) -> {
                    if (!isIntegral(innerConst))
                        return;
                    long innerVal = ((Number) innerConst).longValue();
                    if (child.getCode() == AstCode.And) {
                        if ((outerVal & ~innerVal) != 0) {
                            mc.report("CompareBitAndIncompatible", 0, expr, AND_OPERAND.create(innerVal),
                                    COMPARED_TO.create(outerVal));
                        }
                    } else {
                        if ((~outerVal & innerVal) != 0) {
                            mc.report("CompareBitOrIncompatible", 0, expr, OR_OPERAND.create(innerVal),
                                    COMPARED_TO.create(outerVal));
                        }
                    }
                });
            });
            break;
        case Shl:
        case Shr:
        case UShr: {
            // Shift amounts outside [0, width-1] are masked by the JVM, so a
            // constant outside that range is almost certainly a bug.
            Object constant = Nodes.getConstant(expr.getArguments().get(1));
            if (constant instanceof Integer) {
                int bits = (int) constant;
                if (bits < 0 || bits > 63 || (bits > 31 && exprType == JvmType.Integer)) {
                    mc.report("BitShiftInvalidAmount", 0, expr, Roles.NUMBER.create(bits), Roles.OPERATION.create(expr),
                            Roles.MAX_VALUE.create(exprType == JvmType.Integer ? 31 : 63));
                }
            }
        }
        default:
        }
    }

    /**
     * Reports composing a (sign-extended) byte with high bits via | or +:
     * a negative byte value sets all upper bits, corrupting the result.
     * Suppressed when every usage of the expression is truncated back to a
     * byte (I2B).
     */
    private void checkSignedByte(Expression expr, MethodContext mc) {
        JvmType type = expr.getInferredType().getSimpleType();
        if (type != JvmType.Integer && type != JvmType.Long)
            return;
        if (Inf.BACKLINK.findUsages(expr).stream().allMatch(e -> e.getCode() == AstCode.I2B))
            return;
        if (Exprs.bothChildrenMatch(expr, BadMath::isByte, BadMath::isLow8BitsClear)) {
            mc.report(expr.getCode() == AstCode.Add ? "BitAddSignedByte" : "BitOrSignedByte", 0, expr);
        }
    }

    /** True when the expression's inferred type is byte (looking through I2L). */
    private static boolean isByte(Expression expr) {
        if (expr.getCode() == AstCode.I2L)
            return isByte(Exprs.getChild(expr, 0));
        TypeReference type = expr.getInferredType();
        return type != null && type.getInternalName().equals("B");
    }

    /**
     * True when the argument provably has its low 8 bits clear: a constant
     * multiple of 0x100 (excluding 0x100 itself, a common flag value), a
     * shift-left by >= 8, or an AND with a mask clearing the low byte.
     */
    private static boolean isLow8BitsClear(Expression arg) {
        Object value = Nodes.getConstant(arg);
        if (value instanceof Number) {
            long num = ((Number) value).longValue();
            return num != 0x100 && (num & 0xFF) == 0;
        }
        if (arg.getCode() == AstCode.Shl) {
            Object shiftAmount = Nodes.getConstant(arg.getArguments().get(1));
            // & 0x1F mirrors the JVM's masking of int shift amounts.
            return shiftAmount instanceof Number && (((Number) shiftAmount).intValue() & 0x1F) >= 8;
        }
        if (arg.getCode() == AstCode.And) {
            Object leftOp = Nodes.getConstant(arg.getArguments().get(0));
            Object rightOp = Nodes.getConstant(arg.getArguments().get(1));
            if (leftOp instanceof Number && (((Number) leftOp).longValue() & 0xFF) == 0)
                return true;
            if (rightOp instanceof Number && (((Number) rightOp).longValue() & 0xFF) == 0)
                return true;
        }
        return false;
    }

    /** True when expr folds to an Integer/Long constant equal to wantedValue. */
    private static boolean isConst(Expression expr, long wantedValue) {
        Object constant = Nodes.getConstant(expr);
        return isIntegral(constant) && ((Number) constant).longValue() == wantedValue;
    }

    /** True for boxed Integer or Long constants only. */
    private static boolean isIntegral(Object constant) {
        return constant instanceof Integer || constant instanceof Long;
    }
}
| |
package org.jfree.data.time;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import org.jfree.date.SerialDate;
public class Month extends RegularTimePeriod implements Serializable {
private static final long serialVersionUID = -5090216912548722570L;
private long firstMillisecond;
private long lastMillisecond;
private int month;
private int year;
public Month() {
this(new Date());
}
public Month(int month, int year) {
if (month < 1 || month > 12) {
throw new IllegalArgumentException("Month outside valid range.");
}
this.month = month;
this.year = year;
peg(Calendar.getInstance());
}
public Month(int month, Year year) {
if (month < 1 || month > 12) {
throw new IllegalArgumentException("Month outside valid range.");
}
this.month = month;
this.year = year.getYear();
peg(Calendar.getInstance());
}
public Month(Date time) {
this(time, TimeZone.getDefault());
}
public Month(Date time, TimeZone zone) {
this(time, zone, Locale.getDefault());
}
public Month(Date time, TimeZone zone, Locale locale) {
Calendar calendar = Calendar.getInstance(zone, locale);
calendar.setTime(time);
this.month = calendar.get(2) + 1;
this.year = calendar.get(1);
peg(calendar);
}
public Year getYear() {
return new Year(this.year);
}
public int getYearValue() {
return this.year;
}
public int getMonth() {
return this.month;
}
public long getFirstMillisecond() {
return this.firstMillisecond;
}
public long getLastMillisecond() {
return this.lastMillisecond;
}
public void peg(Calendar calendar) {
this.firstMillisecond = getFirstMillisecond(calendar);
this.lastMillisecond = getLastMillisecond(calendar);
}
public RegularTimePeriod previous() {
if (this.month != 1) {
return new Month(this.month - 1, this.year);
}
if (this.year > SerialDate.MINIMUM_YEAR_SUPPORTED) {
return new Month(12, this.year - 1);
}
return null;
}
public RegularTimePeriod next() {
if (this.month != 12) {
return new Month(this.month + 1, this.year);
}
if (this.year < SerialDate.MAXIMUM_YEAR_SUPPORTED) {
return new Month(1, this.year + 1);
}
return null;
}
public long getSerialIndex() {
return (((long) this.year) * 12) + ((long) this.month);
}
public String toString() {
return SerialDate.monthCodeToString(this.month) + " " + this.year;
}
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof Month)) {
return false;
}
Month that = (Month) obj;
if (this.month != that.month) {
return false;
}
if (this.year != that.year) {
return false;
}
return true;
}
public int hashCode() {
return ((this.month + 629) * 37) + this.year;
}
public int compareTo(Object o1) {
if (o1 instanceof Month) {
Month m = (Month) o1;
int result = this.year - m.getYearValue();
return result == 0 ? this.month - m.getMonth() : result;
} else if (o1 instanceof RegularTimePeriod) {
return 0;
} else {
return 1;
}
}
public long getFirstMillisecond(Calendar calendar) {
calendar.set(this.year, this.month - 1, 1, 0, 0, 0);
calendar.set(14, 0);
return calendar.getTimeInMillis();
}
/**
 * Returns the last millisecond of the month, evaluated with the supplied
 * calendar (which supplies the time zone).
 *
 * @param calendar the calendar to use ({@code null} not permitted).
 * @return the last millisecond of the month.
 */
public long getLastMillisecond(Calendar calendar) {
    int eom = SerialDate.lastDayOfMonth(month, year);
    // 23:59:59 on the last day of the month (Calendar months are zero-based).
    calendar.set(year, month - 1, eom, 23, 59, 59);
    // Named constant replaces the magic field number 14.
    calendar.set(Calendar.MILLISECOND, Millisecond.LAST_MILLISECOND_IN_SECOND);
    return calendar.getTimeInMillis();
}
/**
 * Parses a string such as "2005-12", "December, 2005" or "Dec 2005" into
 * a {@code Month}. The year and month tokens may appear in either order,
 * separated by '-', ',', ' ' or '.' (see {@code findSeparator}); when no
 * separator is found, the first five characters are assumed to hold the
 * year.
 *
 * @param s the string to parse ({@code null} returns {@code null}).
 * @return the parsed month, or {@code null} when {@code s} is {@code null}.
 * @throws TimePeriodFormatException if the month or year token cannot be
 *         evaluated.
 */
public static Month parseMonth(String s) {
if (s == null) {
return null;
}
boolean yearIsFirst;
String s1;
String s2;
Year year;
int month;
s = s.trim();
int i = findSeparator(s);
if (i == -1) {
// No separator: assume a fixed "YYYY?" prefix holding the year.
// NOTE(review): inputs shorter than 5 characters throw
// StringIndexOutOfBoundsException here rather than
// TimePeriodFormatException -- confirm whether callers rely on this.
yearIsFirst = true;
s1 = s.substring(0, 5);
s2 = s.substring(5);
} else {
s1 = s.substring(0, i).trim();
s2 = s.substring(i + 1, s.length()).trim();
// Decide which token is the year: whichever side parses as a year;
// when both (or neither) parse, the longer token is taken as the year.
if (evaluateAsYear(s1) == null) {
yearIsFirst = false;
} else if (evaluateAsYear(s2) == null) {
yearIsFirst = true;
} else {
yearIsFirst = s1.length() > s2.length();
}
}
if (yearIsFirst) {
year = evaluateAsYear(s1);
month = SerialDate.stringToMonthCode(s2);
} else {
year = evaluateAsYear(s2);
month = SerialDate.stringToMonthCode(s1);
}
// stringToMonthCode() signals failure with -1; evaluateAsYear() with null.
if (month == -1) {
throw new TimePeriodFormatException("Can't evaluate the month.");
} else if (year != null) {
return new Month(month, year);
} else {
throw new TimePeriodFormatException("Can't evaluate the year.");
}
}
/**
 * Finds the index of the first recognized year/month separator in the
 * string, trying '-', then ',', then ' ', then '.'.
 *
 * @param s the string to scan (not {@code null}).
 * @return the index of the separator, or -1 if none is present.
 */
private static int findSeparator(String s) {
    // Character literals replace the magic code points 45/44/32/46, and the
    // final branch is made uniform with the others (same result as before).
    int result = s.indexOf('-');
    if (result == -1) {
        result = s.indexOf(',');
    }
    if (result == -1) {
        result = s.indexOf(' ');
    }
    if (result == -1) {
        result = s.indexOf('.');
    }
    return result;
}
/**
 * Attempts to parse the string as a {@code Year}.
 *
 * @param s the candidate year token.
 * @return the parsed year, or {@code null} when the token is not a year.
 */
private static Year evaluateAsYear(String s) {
Year result = null;
try {
result = Year.parseYear(s);
} catch (TimePeriodFormatException e) {
// Deliberately ignored: a parse failure simply means the token is not
// a year, and null is the "not a year" signal to the caller.
}
return result;
}
}
| |
/**
* Most of the code in the Qalingo project is copyrighted Hoteia and licensed
* under the Apache License Version 2.0 (release version 0.8.0)
* http://www.apache.org/licenses/LICENSE-2.0
*
* Copyright (c) Hoteia, 2012-2014
* http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com
*
*/
package org.hoteia.qalingo.core.web.mvc.controller.oauth;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.hoteia.qalingo.core.domain.AttributeDefinition;
import org.hoteia.qalingo.core.domain.Customer;
import org.hoteia.qalingo.core.domain.CustomerAttribute;
import org.hoteia.qalingo.core.domain.EngineSetting;
import org.hoteia.qalingo.core.domain.EngineSettingValue;
import org.hoteia.qalingo.core.domain.Market;
import org.hoteia.qalingo.core.domain.MarketArea;
import org.hoteia.qalingo.core.domain.enumtype.CustomerNetworkOrigin;
import org.hoteia.qalingo.core.domain.enumtype.FoUrls;
import org.hoteia.qalingo.core.domain.enumtype.OAuthType;
import org.hoteia.qalingo.core.mapper.JsonMapper;
import org.hoteia.qalingo.core.security.helper.SecurityUtil;
import org.hoteia.qalingo.core.security.util.SecurityRequestUtil;
import org.hoteia.qalingo.core.service.AttributeService;
import org.hoteia.qalingo.core.web.resolver.RequestData;
import org.hoteia.tools.scribe.mapping.oauth.windowslive.json.pojo.UserPojo;
import org.scribe.builder.ServiceBuilder;
import org.scribe.builder.api.LiveApi;
import org.scribe.model.OAuthRequest;
import org.scribe.model.Response;
import org.scribe.model.Token;
import org.scribe.model.Verb;
import org.scribe.model.Verifier;
import org.scribe.oauth.OAuthService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
/**
 * Front-office controller handling the OAuth callback from Windows Live
 * (Microsoft Live Connect). It exchanges the "code" request parameter for an
 * access token, fetches the user's profile as JSON, creates (or reuses) the
 * matching customer account, authenticates it, and redirects.
 */
@Controller("callBackOAuthWindowsLiveController")
public class CallBackOAuthWindowsLiveController extends AbstractOAuthFrontofficeController {

    protected final Logger logger = LoggerFactory.getLogger(getClass());

    @Autowired
    protected AttributeService attributeService;

    @Autowired
    protected SecurityRequestUtil securityRequestUtil;

    @Autowired
    protected SecurityUtil securityUtil;

    @Autowired
    protected JsonMapper jsonMapper;

    /**
     * OAuth callback endpoint for Windows Live. Only processed for anonymous
     * visitors; always falls back to a redirect to the login page when no
     * other redirect has been committed.
     *
     * @param request the incoming callback request (carries the "code" parameter).
     * @param response the HTTP response used for the final redirect.
     * @return always {@code null}: the response is completed via redirect.
     * @throws Exception propagated from request/URL utilities.
     */
    @RequestMapping("/callback-oauth-windows-live.html*")
    public ModelAndView callBackWindowsLive(final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        final RequestData requestData = requestUtil.getRequestData(request);

        // SANITY CHECK: ignore the callback when a customer is already logged in.
        if (!requestUtil.hasKnownCustomerLogged(request)) {
            try {
                // CLIENT ID
                EngineSetting clientIdEngineSetting = engineSettingService.getSettingOAuthAppKeyOrId();
                EngineSettingValue clientIdEngineSettingValue = clientIdEngineSetting.getEngineSettingValue(OAuthType.WINDOWS_LIVE.name());

                // CLIENT SECRET
                EngineSetting clientSecretEngineSetting = engineSettingService.getSettingOAuthAppSecret();
                EngineSettingValue clientSecretEngineSettingValue = clientSecretEngineSetting.getEngineSettingValue(OAuthType.WINDOWS_LIVE.name());

                // CLIENT PERMISSIONS
                EngineSetting permissionsEngineSetting = engineSettingService.getSettingOAuthAppPermissions();
                EngineSettingValue permissionsEngineSettingValue = permissionsEngineSetting.getEngineSettingValue(OAuthType.WINDOWS_LIVE.name());

                // BUGFIX: test the secret *value* for null (the original tested
                // clientSecretEngineSetting, which is dereferenced above and thus
                // never null here, allowing an NPE on getValue() below).
                if (clientIdEngineSettingValue != null
                        && clientSecretEngineSettingValue != null
                        && permissionsEngineSettingValue != null) {
                    final String clientId = clientIdEngineSettingValue.getValue();
                    final String clientSecret = clientSecretEngineSettingValue.getValue();
                    final String permissions = permissionsEngineSettingValue.getValue();
                    final String windowsLiveCallBackURL = urlService.buildAbsoluteUrl(requestData, urlService.buildOAuthCallBackUrl(requestData, OAuthType.WINDOWS_LIVE.getPropertyKey().toLowerCase()));
                    OAuthService service = new ServiceBuilder()
                            .provider(LiveApi.class)
                            .apiKey(clientId)
                            .apiSecret(clientSecret)
                            .scope(permissions)
                            .callback(windowsLiveCallBackURL)
                            .build();
                    final String code = request.getParameter("code");
                    if (StringUtils.isNotEmpty(code)) {
                        // Exchange the authorization code for an access token and
                        // fetch the profile document.
                        Verifier verifier = new Verifier(code);
                        Token accessToken = service.getAccessToken(EMPTY_TOKEN, verifier);
                        OAuthRequest oauthRequest = new OAuthRequest(Verb.GET, LIVE_ME_URL);
                        service.signRequest(accessToken, oauthRequest);
                        Response oauthResponse = oauthRequest.send();
                        int responseCode = oauthResponse.getCode();
                        String responseBody = oauthResponse.getBody();
                        if (responseCode == 200) {
                            handleAuthenticationData(request, response, requestData, OAuthType.WINDOWS_LIVE, responseBody);
                        } else {
                            logger.error("Callback With " + OAuthType.WINDOWS_LIVE.name() + " failed!");
                        }
                    } else {
                        logger.error("Callback With " + OAuthType.WINDOWS_LIVE.name() + " failed!");
                    }
                }
            } catch (Exception e) {
                // Log the throwable itself so the root cause is not lost.
                logger.error("Callback With " + OAuthType.WINDOWS_LIVE.name() + " failed!", e);
            }
        }

        // DEFAULT FALLBACK VALUE: send the visitor back to the login page.
        if (!response.isCommitted()) {
            response.sendRedirect(urlService.generateUrl(FoUrls.LOGIN, requestData));
        }
        return null;
    }

    /**
     * Parses the Windows Live profile JSON and either creates a new customer
     * (generated password, screen-name attribute, optional gender/locale) or
     * reuses the existing one, then authenticates the customer and redirects
     * to the account edit page -- or back into the checkout when the previous
     * URL was part of the cart/checkout funnel.
     *
     * @param request the current request.
     * @param response the response used for the redirect.
     * @param requestData resolved request context (market, market area).
     * @param type the OAuth provider type (Windows Live here).
     * @param jsonData raw profile JSON returned by the provider.
     * @throws Exception propagated from persistence/redirect operations.
     */
    protected void handleAuthenticationData(HttpServletRequest request, HttpServletResponse response, RequestData requestData, OAuthType type, String jsonData) throws Exception {
        UserPojo userPojo = null;
        try {
            userPojo = jsonMapper.getJsonMapper().readValue(jsonData, UserPojo.class);
        } catch (JsonGenerationException e) {
            // Log with the throwable so the stack trace is preserved.
            logger.error(e.getMessage(), e);
        } catch (JsonMappingException e) {
            logger.error(e.getMessage(), e);
        }
        if (userPojo != null) {
            final String email = userPojo.getEmails().getPreferred();
            final String firstName = userPojo.getFirstName();
            final String lastName = userPojo.getLastName();
            final String gender = userPojo.getGender();
            final String locale = userPojo.getLocale();
            if (StringUtils.isNotEmpty(email)) {
                Customer customer = customerService.getCustomerByLoginOrEmail(email);
                if (customer == null) {
                    final Market currentMarket = requestData.getMarket();
                    final MarketArea currentMarketArea = requestData.getMarketArea();

                    // CREATE A NEW CUSTOMER
                    customer = new Customer();
                    customer.setLogin(email);
                    customer.setPassword(securityUtil.generateAndEncodePassword());
                    customer.setEmail(email);
                    customer.setFirstname(firstName);
                    customer.setLastname(lastName);
                    if (StringUtils.isNotEmpty(gender)) {
                        customer.setGender(gender);
                    }
                    customer.setNetworkOrigin(CustomerNetworkOrigin.WINDOWS_LIVE.getPropertyKey());

                    // Screen name attribute: abbreviated last-name initial plus the
                    // first name, e.g. "D. John". (BUGFIX: removed a duplicated
                    // nested isNotEmpty(lastName) check -- behavior unchanged.)
                    CustomerAttribute attribute = new CustomerAttribute();
                    AttributeDefinition attributeDefinition = attributeService.getAttributeDefinitionByCode(CustomerAttribute.CUSTOMER_ATTRIBUTE_SCREENAME);
                    attribute.setAttributeDefinition(attributeDefinition);
                    String screenName = "";
                    if (StringUtils.isNotEmpty(lastName)) {
                        screenName = lastName;
                        if (screenName.length() > 1) {
                            screenName = screenName.substring(0, 1);
                        }
                        if (!screenName.endsWith(".")) {
                            screenName = screenName + ". ";
                        }
                    }
                    screenName = screenName + firstName;
                    attribute.setShortStringValue(screenName);
                    customer.getAttributes().add(attribute);

                    if (StringUtils.isNotEmpty(locale)) {
                        customer.setDefaultLocale(locale);
                    }

                    // Save the new customer
                    customer = webManagementService.buildAndSaveNewCustomer(requestData, currentMarket, currentMarketArea, customer);

                    // Save the email confirmation
                    webManagementService.buildAndSaveCustomerNewAccountMail(requestData, customer);
                }

                // Redirect to the edit page
                if (StringUtils.isNotEmpty(customer.getEmail())) {
                    // Login the new customer
                    securityRequestUtil.authenticationCustomer(request, customer);

                    // Update the customer session
                    requestUtil.updateCurrentCustomer(request, customer);

                    String targetUrl = urlService.generateRedirectUrl(FoUrls.PERSONAL_EDIT, requestUtil.getRequestData(request));
                    String lastUrl = requestUtil.getCurrentRequestUrlNotSecurity(request);
                    // SANITY CHECK: keep the customer inside the checkout funnel.
                    if (lastUrl.contains("cart-") || lastUrl.contains("checkout-")) {
                        // STAY ON THE CHECKOUT - REDIRECT ON THE ADDRESSES PAGES
                        targetUrl = urlService.generateRedirectUrl(FoUrls.CART_AUTH, requestUtil.getRequestData(request));
                    }
                    response.sendRedirect(targetUrl);
                }
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.utils;
import org.apache.commons.io.FileUtils;
import org.apache.storm.generated.LSApprovedWorkers;
import org.apache.storm.generated.LSSupervisorAssignments;
import org.apache.storm.generated.LSSupervisorId;
import org.apache.storm.generated.LSTopoHistory;
import org.apache.storm.generated.LSTopoHistoryList;
import org.apache.storm.generated.LSWorkerHeartbeat;
import org.apache.storm.generated.LocalAssignment;
import org.apache.storm.generated.LocalStateData;
import org.apache.storm.generated.ThriftSerializedObject;
import org.apache.thrift.TBase;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A simple, durable, atomic K/V database. *Very inefficient*, should only be used for occasional reads/writes.
* Every read/write hits disk.
*/
public class LocalState {
    public static final Logger LOG = LoggerFactory.getLogger(LocalState.class);

    // Well-known keys stored in the local state.
    public static final String LS_WORKER_HEARTBEAT = "worker-heartbeat";
    public static final String LS_ID = "supervisor-id";
    public static final String LS_LOCAL_ASSIGNMENTS = "local-assignments";
    public static final String LS_APPROVED_WORKERS = "approved-workers";
    public static final String LS_TOPO_HISTORY = "topo-hist";

    // Versioned backing store; every successful write creates a new on-disk version.
    private VersionedStore _vs;

    public LocalState(String backingDir) throws IOException {
        LOG.debug("New Local State for {}", backingDir);
        _vs = new VersionedStore(backingDir);
    }

    /**
     * Returns a fully deserialized snapshot of the current state. Retries up
     * to 10 times (a concurrent writer may be replacing the latest version)
     * before giving up.
     */
    public synchronized Map<String, TBase> snapshot() {
        int attempts = 0;
        while (true) {
            try {
                return deserializeLatestVersion();
            } catch (Exception e) {
                attempts++;
                if (attempts >= 10) {
                    throw new RuntimeException(e);
                }
            }
        }
    }

    // Reads the latest on-disk version and thrift-deserializes every entry.
    private Map<String, TBase> deserializeLatestVersion() throws IOException {
        Map<String, TBase> result = new HashMap<>();
        TDeserializer td = new TDeserializer();
        for (Map.Entry<String, ThriftSerializedObject> ent: partialDeserializeLatestVersion(td).entrySet()) {
            result.put(ent.getKey(), deserialize(ent.getValue(), td));
        }
        return result;
    }

    // Reconstructs a single thrift object from its recorded class name and bytes.
    private TBase deserialize(ThriftSerializedObject obj, TDeserializer td) {
        try {
            Class<?> clazz = Class.forName(obj.get_name());
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which silently propagates checked exceptions
            // thrown by the constructor.
            TBase instance = (TBase) clazz.getDeclaredConstructor().newInstance();
            td.deserialize(instance, obj.get_bits());
            return instance;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Reads the latest version but leaves the individual values serialized.
    // An empty file is treated as "no data" and yields an empty map.
    private Map<String, ThriftSerializedObject> partialDeserializeLatestVersion(TDeserializer td) {
        try {
            String latestPath = _vs.mostRecentVersionPath();
            Map<String, ThriftSerializedObject> result = new HashMap<>();
            if (latestPath != null) {
                byte[] serialized = FileUtils.readFileToByteArray(new File(latestPath));
                if (serialized.length == 0) {
                    LOG.warn("LocalState file '{}' contained no data, resetting state", latestPath);
                } else {
                    if (td == null) {
                        td = new TDeserializer();
                    }
                    LocalStateData data = new LocalStateData();
                    td.deserialize(data, serialized);
                    result = data.get_serialized_parts();
                }
            }
            return result;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Same retry discipline as snapshot(), but without deserializing values.
    private synchronized Map<String, ThriftSerializedObject> partialSnapshot(TDeserializer td) {
        int attempts = 0;
        while (true) {
            try {
                return partialDeserializeLatestVersion(td);
            } catch (Exception e) {
                attempts++;
                if (attempts >= 10) {
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /**
     * Returns the value stored under {@code key}, or {@code null} when absent.
     */
    public TBase get(String key) {
        TDeserializer td = new TDeserializer();
        Map<String, ThriftSerializedObject> partial = partialSnapshot(td);
        ThriftSerializedObject tso = partial.get(key);
        TBase ret = null;
        if (tso != null) {
            ret = deserialize(tso, td);
        }
        return ret;
    }

    /**
     * Stores {@code val} under {@code key}, cleaning up old versions.
     */
    public void put(String key, TBase val) {
        put(key, val, true);
    }

    /**
     * Stores {@code val} under {@code key}; {@code cleanup} controls whether
     * old on-disk versions are pruned after the write.
     */
    public synchronized void put(String key, TBase val, boolean cleanup) {
        Map<String, ThriftSerializedObject> curr = partialSnapshot(null);
        TSerializer ser = new TSerializer();
        curr.put(key, serialize(val, ser));
        persistInternal(curr, ser, cleanup);
    }

    /**
     * Removes {@code key}, cleaning up old versions.
     */
    public void remove(String key) {
        remove(key, true);
    }

    /**
     * Removes {@code key}; {@code cleanup} controls old-version pruning.
     */
    public synchronized void remove(String key, boolean cleanup) {
        Map<String, ThriftSerializedObject> curr = partialSnapshot(null);
        curr.remove(key);
        persistInternal(curr, null, cleanup);
    }

    /**
     * Prunes old on-disk versions, keeping the most recent {@code keepVersions}.
     */
    public synchronized void cleanup(int keepVersions) throws IOException {
        _vs.cleanup(keepVersions);
    }

    /**
     * Returns the stored topology history, or {@code null} when none exists.
     */
    public List<LSTopoHistory> getTopoHistoryList() {
        LSTopoHistoryList lsTopoHistoryListWrapper = (LSTopoHistoryList) get(LS_TOPO_HISTORY);
        if (null != lsTopoHistoryListWrapper) {
            return lsTopoHistoryListWrapper.get_topo_history();
        }
        return null;
    }

    /**
     * Remove topologies from local state which are older than cutOffAge.
     * @param cutOffAge entries with a timestamp at or below this age are dropped.
     */
    public void filterOldTopologies(long cutOffAge) {
        LSTopoHistoryList lsTopoHistoryListWrapper = (LSTopoHistoryList) get(LS_TOPO_HISTORY);
        List<LSTopoHistory> filteredTopoHistoryList = new ArrayList<>();
        if (null != lsTopoHistoryListWrapper) {
            for (LSTopoHistory topoHistory : lsTopoHistoryListWrapper.get_topo_history()) {
                if (topoHistory.get_time_stamp() > cutOffAge) {
                    filteredTopoHistoryList.add(topoHistory);
                }
            }
        }
        put(LS_TOPO_HISTORY, new LSTopoHistoryList(filteredTopoHistoryList));
    }

    /**
     * Appends one entry to the stored topology history.
     */
    public void addTopologyHistory(LSTopoHistory lsTopoHistory) {
        LSTopoHistoryList lsTopoHistoryListWrapper = (LSTopoHistoryList) get(LS_TOPO_HISTORY);
        List<LSTopoHistory> currentTopoHistoryList = new ArrayList<>();
        if (null != lsTopoHistoryListWrapper) {
            currentTopoHistoryList.addAll(lsTopoHistoryListWrapper.get_topo_history());
        }
        currentTopoHistoryList.add(lsTopoHistory);
        put(LS_TOPO_HISTORY, new LSTopoHistoryList(currentTopoHistoryList));
    }

    /**
     * Returns the stored supervisor id, or {@code null} when not set.
     */
    public String getSupervisorId() {
        LSSupervisorId lsSupervisorId = (LSSupervisorId) get(LS_ID);
        if (null != lsSupervisorId) {
            return lsSupervisorId.get_supervisor_id();
        }
        return null;
    }

    public void setSupervisorId(String supervisorId) {
        put(LS_ID, new LSSupervisorId(supervisorId));
    }

    /**
     * Returns the approved-workers map, or {@code null} when not set.
     */
    public Map<String, Integer> getApprovedWorkers() {
        LSApprovedWorkers lsApprovedWorkers = (LSApprovedWorkers) get(LS_APPROVED_WORKERS);
        if (null != lsApprovedWorkers) {
            return lsApprovedWorkers.get_approved_workers();
        }
        return null;
    }

    public void setApprovedWorkers(Map<String, Integer> approvedWorkers) {
        put(LS_APPROVED_WORKERS, new LSApprovedWorkers(approvedWorkers));
    }

    public LSWorkerHeartbeat getWorkerHeartBeat() {
        return (LSWorkerHeartbeat) get(LS_WORKER_HEARTBEAT);
    }

    public void setWorkerHeartBeat(LSWorkerHeartbeat workerHeartBeat) {
        // Heartbeats are written very frequently; skip cleanup on this path.
        put(LS_WORKER_HEARTBEAT, workerHeartBeat, false);
    }

    /**
     * Returns the local assignments map, or {@code null} when not set.
     */
    public Map<Integer, LocalAssignment> getLocalAssignmentsMap() {
        LSSupervisorAssignments assignments = (LSSupervisorAssignments) get(LS_LOCAL_ASSIGNMENTS);
        if (null != assignments) {
            return assignments.get_assignments();
        }
        return null;
    }

    public void setLocalAssignmentsMap(Map<Integer, LocalAssignment> localAssignmentMap) {
        put(LS_LOCAL_ASSIGNMENTS, new LSSupervisorAssignments(localAssignmentMap));
    }

    // Writes the full state as a new version, verifying the byte count on disk
    // before marking the version as successful.
    private void persistInternal(Map<String, ThriftSerializedObject> serialized, TSerializer ser, boolean cleanup) {
        try {
            if (ser == null) {
                ser = new TSerializer();
            }
            byte[] toWrite = ser.serialize(new LocalStateData(serialized));
            String newPath = _vs.createVersion();
            File file = new File(newPath);
            FileUtils.writeByteArrayToFile(file, toWrite);
            if (toWrite.length != file.length()) {
                throw new IOException("Tried to serialize " + toWrite.length +
                        " bytes to " + file.getCanonicalPath() + ", but " +
                        file.length() + " bytes were written.");
            }
            _vs.succeedVersion(newPath);
            if (cleanup) {
                _vs.cleanup(4);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Wraps a thrift object with its class name so it can be reconstructed later.
    private ThriftSerializedObject serialize(TBase o, TSerializer ser) {
        try {
            return new ThriftSerializedObject(o.getClass().getName(), ByteBuffer.wrap(ser.serialize(o)));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.launchscript;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.command.DockerCmd;
import com.github.dockerjava.api.model.Frame;
import com.github.dockerjava.core.CompressArchiveUtil;
import com.github.dockerjava.core.DockerClientBuilder;
import com.github.dockerjava.core.DockerClientConfig;
import com.github.dockerjava.core.command.AttachContainerResultCallback;
import com.github.dockerjava.core.command.BuildImageResultCallback;
import com.github.dockerjava.jaxrs.AbstrSyncDockerCmdExec;
import com.github.dockerjava.jaxrs.DockerCmdExecFactoryImpl;
import org.assertj.core.api.Condition;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.springframework.boot.ansi.AnsiColor;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assume.assumeThat;
/**
* Integration tests for Spring Boot's launch script on OSs that use SysVinit.
*
* @author Andy Wilkinson
*/
@RunWith(Parameterized.class)
public class SysVinitLaunchScriptIT {

    private final SpringBootDockerCmdExecFactory commandExecFactory = new SpringBootDockerCmdExecFactory();

    // ANSI escape character used to build expected colored output.
    private static final char ESC = 27;

    private final String os;

    private final String version;

    /**
     * One parameter set per OS/version directory under src/test/resources/conf.
     */
    @Parameters(name = "{0} {1}")
    public static List<Object[]> parameters() {
        List<Object[]> parameters = new ArrayList<Object[]>();
        for (File os : new File("src/test/resources/conf").listFiles()) {
            for (File version : os.listFiles()) {
                parameters.add(new Object[] { os.getName(), version.getName() });
            }
        }
        return parameters;
    }

    public SysVinitLaunchScriptIT(String os, String version) {
        this.os = os;
        this.version = version;
    }

    @Test
    public void statusWhenStopped() throws Exception {
        String output = doTest("status-when-stopped.sh");
        assertThat(output).contains("Status: 3");
        assertThat(output).has(coloredString(AnsiColor.RED, "Not running"));
    }

    @Test
    public void statusWhenStarted() throws Exception {
        String output = doTest("status-when-started.sh");
        assertThat(output).contains("Status: 0");
        assertThat(output).has(
                coloredString(AnsiColor.GREEN, "Started [" + extractPid(output) + "]"));
    }

    @Test
    public void statusWhenKilled() throws Exception {
        String output = doTest("status-when-killed.sh");
        assertThat(output).contains("Status: 1");
        assertThat(output).has(coloredString(AnsiColor.RED,
                "Not running (process " + extractPid(output) + " not found)"));
    }

    @Test
    public void stopWhenStopped() throws Exception {
        String output = doTest("stop-when-stopped.sh");
        assertThat(output).contains("Status: 0");
        assertThat(output)
                .has(coloredString(AnsiColor.YELLOW, "Not running (pidfile not found)"));
    }

    @Test
    public void startWhenStarted() throws Exception {
        String output = doTest("start-when-started.sh");
        assertThat(output).contains("Status: 0");
        assertThat(output).has(coloredString(AnsiColor.YELLOW,
                "Already running [" + extractPid(output) + "]"));
    }

    @Test
    public void restartWhenStopped() throws Exception {
        String output = doTest("restart-when-stopped.sh");
        assertThat(output).contains("Status: 0");
        assertThat(output)
                .has(coloredString(AnsiColor.YELLOW, "Not running (pidfile not found)"));
        assertThat(output).has(
                coloredString(AnsiColor.GREEN, "Started [" + extractPid(output) + "]"));
    }

    @Test
    public void restartWhenStarted() throws Exception {
        String output = doTest("restart-when-started.sh");
        assertThat(output).contains("Status: 0");
        assertThat(output).has(coloredString(AnsiColor.GREEN,
                "Started [" + extract("PID1", output) + "]"));
        assertThat(output).has(coloredString(AnsiColor.GREEN,
                "Stopped [" + extract("PID1", output) + "]"));
        assertThat(output).has(coloredString(AnsiColor.GREEN,
                "Started [" + extract("PID2", output) + "]"));
    }

    @Test
    public void startWhenStopped() throws Exception {
        String output = doTest("start-when-stopped.sh");
        assertThat(output).contains("Status: 0");
        assertThat(output).has(
                coloredString(AnsiColor.GREEN, "Started [" + extractPid(output) + "]"));
    }

    @Test
    public void basicLaunch() throws Exception {
        doLaunch("basic-launch.sh");
    }

    @Test
    public void launchWithSingleCommandLineArgument() throws Exception {
        doLaunch("launch-with-single-command-line-argument.sh");
    }

    @Test
    public void launchWithMultipleCommandLineArguments() throws Exception {
        doLaunch("launch-with-multiple-command-line-arguments.sh");
    }

    @Test
    public void launchWithSingleRunArg() throws Exception {
        doLaunch("launch-with-single-run-arg.sh");
    }

    @Test
    public void launchWithMultipleRunArgs() throws Exception {
        doLaunch("launch-with-multiple-run-args.sh");
    }

    @Test
    public void launchWithSingleJavaOpt() throws Exception {
        doLaunch("launch-with-single-java-opt.sh");
    }

    @Test
    public void launchWithMultipleJavaOpts() throws Exception {
        doLaunch("launch-with-multiple-java-opts.sh");
    }

    @Test
    public void launchWithUseOfStartStopDaemonDisabled() throws Exception {
        // CentOS doesn't have start-stop-daemon
        assumeThat(this.os, is(not("CentOS")));
        doLaunch("launch-with-use-of-start-stop-daemon-disabled.sh");
    }

    private void doLaunch(String script) throws Exception {
        assertThat(doTest(script)).contains("Launched");
    }

    /**
     * Builds the Docker image for the current OS/version, runs the given test
     * script inside a container, and returns the container's combined
     * stdout/stderr output. The container is always removed afterwards.
     */
    private String doTest(String script) throws Exception {
        DockerClient docker = createClient();
        String imageId = buildImage(docker);
        String container = createContainer(docker, imageId, script);
        try {
            copyFilesToContainer(docker, container, script);
            docker.startContainerCmd(container).exec();
            StringBuilder output = new StringBuilder();
            AttachContainerResultCallback resultCallback = docker
                    .attachContainerCmd(container).withStdOut(true).withStdErr(true)
                    .withFollowStream(true).withLogs(true)
                    .exec(new AttachContainerResultCallback() {
                        @Override
                        public void onNext(Frame item) {
                            // BUGFIX: decode explicitly as UTF-8 -- new String(byte[])
                            // uses the platform default charset, which is
                            // environment-dependent and could corrupt the output.
                            output.append(new String(item.getPayload(),
                                    StandardCharsets.UTF_8));
                            super.onNext(item);
                        }
                    });
            resultCallback.awaitCompletion(60, TimeUnit.SECONDS).close();
            docker.waitContainerCmd(container).exec();
            return output.toString();
        }
        finally {
            docker.removeContainerCmd(container).exec();
        }
    }

    private DockerClient createClient() {
        DockerClientConfig config = DockerClientConfig.createDefaultConfigBuilder()
                .withVersion("1.19").build();
        DockerClient docker = DockerClientBuilder.getInstance(config)
                .withDockerCmdExecFactory(this.commandExecFactory).build();
        return docker;
    }

    private String buildImage(DockerClient docker) {
        BuildImageResultCallback resultCallback = new BuildImageResultCallback();
        String dockerfile = "src/test/resources/conf/" + this.os + "/" + this.version
                + "/Dockerfile";
        String tag = "spring-boot-it/" + this.os.toLowerCase() + ":" + this.version;
        docker.buildImageCmd(new File(dockerfile)).withTag(tag).exec(resultCallback);
        String imageId = resultCallback.awaitImageId();
        return imageId;
    }

    private String createContainer(DockerClient docker, String imageId,
            String testScript) {
        return docker.createContainerCmd(imageId).withTty(false).withCmd("/bin/bash",
                "-c", "chmod +x " + testScript + " && ./" + testScript).exec().getId();
    }

    private void copyFilesToContainer(DockerClient docker, final String container,
            String script) {
        copyToContainer(docker, container, findApplication());
        copyToContainer(docker, container,
                new File("src/test/resources/scripts/test-functions.sh"));
        copyToContainer(docker, container,
                new File("src/test/resources/scripts/" + script));
    }

    private void copyToContainer(DockerClient docker, final String container,
            final File file) {
        this.commandExecFactory.createCopyToContainerCmdExec()
                .exec(new CopyToContainerCmd(container, file));
    }

    // Locates the built test-application jar in the target directory.
    private File findApplication() {
        File targetDir = new File("target");
        for (File file : targetDir.listFiles()) {
            if (file.getName().startsWith("spring-boot-launch-script-tests")
                    && file.getName().endsWith(".jar")
                    && !file.getName().endsWith("-sources.jar")) {
                return file;
            }
        }
        throw new IllegalStateException(
                "Could not find test application in target directory. Have you built it (mvn package)?");
    }

    // Matches output that contains the given string wrapped in the ANSI codes
    // for the given color.
    private Condition<String> coloredString(AnsiColor color, String string) {
        String colorString = ESC + "[0;" + color + "m" + string + ESC + "[0m";
        return new Condition<String>() {
            @Override
            public boolean matches(String value) {
                return containsString(colorString).matches(value);
            }
        };
    }

    private String extractPid(String output) {
        return extract("PID", output);
    }

    // Extracts the numeric value following "<label>: " from the output.
    private String extract(String label, String output) {
        Pattern pattern = Pattern.compile(".*" + label + ": ([0-9]+).*", Pattern.DOTALL);
        java.util.regex.Matcher matcher = pattern.matcher(output);
        if (matcher.matches()) {
            return matcher.group(1);
        }
        throw new IllegalArgumentException(
                "Failed to extract " + label + " from output: " + output);
    }

    private static final class CopyToContainerCmdExec
            extends AbstrSyncDockerCmdExec<CopyToContainerCmd, Void> {

        private CopyToContainerCmdExec(WebTarget baseResource,
                DockerClientConfig dockerClientConfig) {
            super(baseResource, dockerClientConfig);
        }

        @Override
        protected Void execute(CopyToContainerCmd command) {
            // BUGFIX: close the tar stream when the upload completes (the
            // FileInputStream was previously never closed).
            try (InputStream streamToUpload = new FileInputStream(CompressArchiveUtil
                    .archiveTARFiles(command.getFile().getParentFile(),
                            Arrays.asList(command.getFile()),
                            command.getFile().getName()))) {
                WebTarget webResource = getBaseResource().path("/containers/{id}/archive")
                        .resolveTemplate("id", command.getContainer());
                webResource.queryParam("path", ".")
                        .queryParam("noOverwriteDirNonDir", false).request()
                        .put(Entity.entity(streamToUpload, "application/x-tar")).close();
                return null;
            }
            catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }

    }

    private static final class CopyToContainerCmd implements DockerCmd<Void> {

        private final String container;

        private final File file;

        private CopyToContainerCmd(String container, File file) {
            this.container = container;
            this.file = file;
        }

        public String getContainer() {
            return this.container;
        }

        public File getFile() {
            return this.file;
        }

        @Override
        public void close() {
        }

    }

    private static final class SpringBootDockerCmdExecFactory
            extends DockerCmdExecFactoryImpl {

        private SpringBootDockerCmdExecFactory() {
            withClientRequestFilters(new ClientRequestFilter() {
                @Override
                public void filter(ClientRequestContext requestContext)
                        throws IOException {
                    // Workaround for https://go-review.googlesource.com/#/c/3821/
                    requestContext.getHeaders().add("Connection", "close");
                }
            });
        }

        private CopyToContainerCmdExec createCopyToContainerCmdExec() {
            return new CopyToContainerCmdExec(getBaseResource(), getDockerClientConfig());
        }

    }

}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.metastore;
import com.facebook.presto.hive.HiveCluster;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ListeningExecutorService;
import io.airlift.units.Duration;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.hive.metastore.MockHiveMetastoreClient.BAD_DATABASE;
import static com.facebook.presto.hive.metastore.MockHiveMetastoreClient.TEST_DATABASE;
import static com.facebook.presto.hive.metastore.MockHiveMetastoreClient.TEST_PARTITION1;
import static com.facebook.presto.hive.metastore.MockHiveMetastoreClient.TEST_PARTITION2;
import static com.facebook.presto.hive.metastore.MockHiveMetastoreClient.TEST_TABLE;
import static com.google.common.util.concurrent.MoreExecutors.listeningDecorator;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
@Test(singleThreaded = true)
public class TestCachingHiveMetastore
{
private MockHiveMetastoreClient mockClient;
private CachingHiveMetastore metastore;
private ThriftHiveMetastoreStats stats;
/**
 * Builds a fresh mock client and a cache-backed metastore before every
 * test method (5 minute expiry, 1 minute refresh, 1000 entry maximum).
 */
@BeforeMethod
public void setUp()
        throws Exception
{
    mockClient = new MockHiveMetastoreClient();
    MockHiveCluster cluster = new MockHiveCluster(mockClient);
    ListeningExecutorService refreshExecutor = listeningDecorator(newCachedThreadPool(daemonThreadsNamed("test-%s")));
    ThriftHiveMetastore delegate = new ThriftHiveMetastore(cluster);
    metastore = new CachingHiveMetastore(
            new BridgingHiveMetastore(delegate),
            refreshExecutor,
            new Duration(5, TimeUnit.MINUTES),
            new Duration(1, TimeUnit.MINUTES),
            1000);
    stats = delegate.getStats();
}
@Test
public void testGetAllDatabases()
throws Exception
{
assertEquals(mockClient.getAccessCount(), 0);
assertEquals(metastore.getAllDatabases(), ImmutableList.of(TEST_DATABASE));
assertEquals(mockClient.getAccessCount(), 1);
assertEquals(metastore.getAllDatabases(), ImmutableList.of(TEST_DATABASE));
assertEquals(mockClient.getAccessCount(), 1);
metastore.flushCache();
assertEquals(metastore.getAllDatabases(), ImmutableList.of(TEST_DATABASE));
assertEquals(mockClient.getAccessCount(), 2);
}
@Test
public void testGetAllTable()
throws Exception
{
assertEquals(mockClient.getAccessCount(), 0);
assertEquals(metastore.getAllTables(TEST_DATABASE).get(), ImmutableList.of(TEST_TABLE));
assertEquals(mockClient.getAccessCount(), 1);
assertEquals(metastore.getAllTables(TEST_DATABASE).get(), ImmutableList.of(TEST_TABLE));
assertEquals(mockClient.getAccessCount(), 1);
metastore.flushCache();
assertEquals(metastore.getAllTables(TEST_DATABASE).get(), ImmutableList.of(TEST_TABLE));
assertEquals(mockClient.getAccessCount(), 2);
}
public void testInvalidDbGetAllTAbles()
throws Exception
{
assertFalse(metastore.getAllTables(BAD_DATABASE).isPresent());
}
@Test
public void testGetTable()
throws Exception
{
assertEquals(mockClient.getAccessCount(), 0);
assertNotNull(metastore.getTable(TEST_DATABASE, TEST_TABLE));
assertEquals(mockClient.getAccessCount(), 1);
assertNotNull(metastore.getTable(TEST_DATABASE, TEST_TABLE));
assertEquals(mockClient.getAccessCount(), 1);
metastore.flushCache();
assertNotNull(metastore.getTable(TEST_DATABASE, TEST_TABLE));
assertEquals(mockClient.getAccessCount(), 2);
}
public void testInvalidDbGetTable()
throws Exception
{
assertFalse(metastore.getTable(BAD_DATABASE, TEST_TABLE).isPresent());
assertEquals(stats.getGetTable().getThriftExceptions().getTotalCount(), 0);
assertEquals(stats.getGetTable().getTotalFailures().getTotalCount(), 0);
}
@Test
public void testGetPartitionNames()
throws Exception
{
ImmutableList<String> expectedPartitions = ImmutableList.of(TEST_PARTITION1, TEST_PARTITION2);
assertEquals(mockClient.getAccessCount(), 0);
assertEquals(metastore.getPartitionNames(TEST_DATABASE, TEST_TABLE).get(), expectedPartitions);
assertEquals(mockClient.getAccessCount(), 1);
assertEquals(metastore.getPartitionNames(TEST_DATABASE, TEST_TABLE).get(), expectedPartitions);
assertEquals(mockClient.getAccessCount(), 1);
metastore.flushCache();
assertEquals(metastore.getPartitionNames(TEST_DATABASE, TEST_TABLE).get(), expectedPartitions);
assertEquals(mockClient.getAccessCount(), 2);
}
@Test
public void testInvalidGetPartitionNames()
throws Exception
{
assertEquals(metastore.getPartitionNames(BAD_DATABASE, TEST_TABLE).get(), ImmutableList.of());
}
@Test
public void testGetPartitionNamesByParts()
throws Exception
{
ImmutableList<String> parts = ImmutableList.of();
ImmutableList<String> expectedPartitions = ImmutableList.of(TEST_PARTITION1, TEST_PARTITION2);
assertEquals(mockClient.getAccessCount(), 0);
assertEquals(metastore.getPartitionNamesByParts(TEST_DATABASE, TEST_TABLE, parts).get(), expectedPartitions);
assertEquals(mockClient.getAccessCount(), 1);
assertEquals(metastore.getPartitionNamesByParts(TEST_DATABASE, TEST_TABLE, parts).get(), expectedPartitions);
assertEquals(mockClient.getAccessCount(), 1);
metastore.flushCache();
assertEquals(metastore.getPartitionNamesByParts(TEST_DATABASE, TEST_TABLE, parts).get(), expectedPartitions);
assertEquals(mockClient.getAccessCount(), 2);
}
public void testInvalidGetPartitionNamesByParts()
throws Exception
{
ImmutableList<String> parts = ImmutableList.of();
assertFalse(metastore.getPartitionNamesByParts(BAD_DATABASE, TEST_TABLE, parts).isPresent());
}
@Test
public void testGetPartitionsByNames()
throws Exception
{
assertEquals(mockClient.getAccessCount(), 0);
metastore.getTable(TEST_DATABASE, TEST_TABLE);
assertEquals(mockClient.getAccessCount(), 1);
// Select half of the available partitions and load them into the cache
assertEquals(metastore.getPartitionsByNames(TEST_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1)).size(), 1);
assertEquals(mockClient.getAccessCount(), 2);
// Now select all of the partitions
assertEquals(metastore.getPartitionsByNames(TEST_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1, TEST_PARTITION2)).size(), 2);
// There should be one more access to fetch the remaining partition
assertEquals(mockClient.getAccessCount(), 3);
// Now if we fetch any or both of them, they should not hit the client
assertEquals(metastore.getPartitionsByNames(TEST_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1)).size(), 1);
assertEquals(metastore.getPartitionsByNames(TEST_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION2)).size(), 1);
assertEquals(metastore.getPartitionsByNames(TEST_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1, TEST_PARTITION2)).size(), 2);
assertEquals(mockClient.getAccessCount(), 3);
metastore.flushCache();
// Fetching both should only result in one batched access
assertEquals(metastore.getPartitionsByNames(TEST_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1, TEST_PARTITION2)).size(), 2);
assertEquals(mockClient.getAccessCount(), 4);
}
public void testInvalidGetPartitionsByNames()
throws Exception
{
Map<String, Optional<Partition>> partitionsByNames = metastore.getPartitionsByNames(BAD_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1));
assertEquals(partitionsByNames.size(), 1);
Optional<Partition> onlyElement = Iterables.getOnlyElement(partitionsByNames.values());
assertFalse(onlyElement.isPresent());
}
@Test
public void testNoCacheExceptions()
throws Exception
{
// Throw exceptions on usage
mockClient.setThrowException(true);
try {
metastore.getAllDatabases();
}
catch (RuntimeException ignored) {
}
assertEquals(mockClient.getAccessCount(), 1);
// Second try should hit the client again
try {
metastore.getAllDatabases();
}
catch (RuntimeException ignored) {
}
assertEquals(mockClient.getAccessCount(), 2);
}
private static class MockHiveCluster
implements HiveCluster
{
private final HiveMetastoreClient client;
private MockHiveCluster(HiveMetastoreClient client)
{
this.client = client;
}
@Override
public HiveMetastoreClient createMetastoreClient()
{
return client;
}
}
}
| |
package edu.emich.honors.emuhonorscollege.activities;
import android.app.ActionBar;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.ActionBarDrawerToggle;
import android.view.Gravity;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.Space;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.LinkedList;
import edu.emich.honors.emuhonorscollege.HonorsApplication;
import edu.emich.honors.emuhonorscollege.R;
import edu.emich.honors.emuhonorscollege.datatypes.Requirement;
import edu.emich.honors.emuhonorscollege.datatypes.RequirementsList;
import edu.emich.honors.emuhonorscollege.datatypes.User;
import edu.emich.honors.emuhonorscollege.datatypes.enums.HonorsType;
/**
 * Displays the requirements of an honors-program checklist as a nested, collapsible
 * list of checkboxes, with a navigation drawer for moving to other screens.
 * Requirement titles are color-coded by state (green = completed, yellow = in
 * progress, black = not started), and checking a box walks the user through the
 * requirement's "coaching steps" via a chain of dialogs.
 */
public class ChecklistActivity extends ActionBarActivity {
    // Title tint colors for completed / in-progress requirements.
    private final String COMPLETED_GREEN = "#669900";
    private final String IN_PROGRESS_YELLOW = "#FFBB33";
    // Rotation (degrees) of the drop-down arrow in its two states; also used as
    // the expanded/collapsed flag when toggling component visibility.
    private final int COLLAPSED = -90;
    private final int EXPANDED = 0;
    // Navigation drawer widgets and state.
    private ListView mDrawerList;
    private DrawerLayout mDrawerLayout;
    private ArrayAdapter<String> mAdapter;
    private ActionBarDrawerToggle mDrawerToggle;
    private String mActivityTitle;
    // Pixel width of one indentation step per hierarchy level.
    private int indentationSize = 100;
    private User user;
    private RequirementsList currentRequirementsList;
    private HonorsType requirementsListToShow = HonorsType.DEPARTMENTAL; // Should be set by checklist selection

    /**
     * Restores the user from saved state (or pulls the current user from the
     * application), wires up the navigation drawer, then builds the checklist UI
     * for the selected requirements list.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.checklist);
        if (savedInstanceState != null) {
            user = ((User) savedInstanceState.getSerializable("User"));
        } else {
            user = ((HonorsApplication) this.getApplication()).getCurrentUser();
        }
        currentRequirementsList = user.getHandbook().getRequirementsList(requirementsListToShow);
        // Menu Setup
        mDrawerList = (ListView) findViewById(R.id.navList);
        mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        mActivityTitle = getTitle().toString();
        addDrawerItems();
        setupDrawer();
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setHomeButtonEnabled(true);
        // Checklist Setup
        LinearLayout parentLayout = (LinearLayout) findViewById(R.id.checklist_linear_layout);
        TextView honorsTypeTitle = new TextView(this);
        honorsTypeTitle.setText(currentRequirementsList.getHonorsType().toString());
        honorsTypeTitle.setTextSize(40);
        honorsTypeTitle.setGravity(Gravity.CENTER_HORIZONTAL);
        honorsTypeTitle.setTextColor(Color.BLACK);
        parentLayout.addView(honorsTypeTitle);
        buildCheckList(currentRequirementsList.getRequirements(), parentLayout);
    }

    // Persists the (Serializable) user so progress survives rotation/recreation.
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        outState.putSerializable("User", user);
        super.onSaveInstanceState(outState);
    }

    /** Builds one row (plus nested component rows) for every top-level requirement. */
    public void buildCheckList(ArrayList<Requirement> listOfRequirements, LinearLayout parentLayout) {
        for (final Requirement requirement : listOfRequirements) {
            buildRequirementRow(requirement, parentLayout);
        }
    }

    /**
     * Builds the row for a single requirement: a checkbox, an expand/collapse
     * arrow (visible only when the requirement has components), and the title.
     * Recurses into components, nesting their rows inside a vertical block so
     * the whole group can be shown/hidden together.
     */
    private void buildRequirementRow(final Requirement requirement, LinearLayout parentLayout) {
        if (requirement.hasComponent()) {
            // Wrap this requirement and its components in their own vertical block
            // so toggleComponentDropDown can hide/show the child rows as a unit.
            LinearLayout requirementBlock = new LinearLayout(this);
            requirementBlock.setOrientation(LinearLayout.VERTICAL);
            requirementBlock.setLayoutParams(new ActionBar.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
            parentLayout.addView(requirementBlock);
            parentLayout = requirementBlock;
        }
        LinearLayout requirementRow = new LinearLayout(this);
        requirementRow.setOrientation(LinearLayout.HORIZONTAL);
        requirementRow.setLayoutParams(new ActionBar.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        CheckBox requirementCheckbox = new CheckBox(this);
        requirementCheckbox.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onCheck((CheckBox) v, requirement);
            }
        });
        requirementCheckbox.setChecked(requirement.isCompleted());
        requirementCheckbox.setLayoutParams(new ActionBar.LayoutParams(75, ViewGroup.LayoutParams.MATCH_PARENT, Gravity.CENTER));
        requirementCheckbox.setHeight(ActionBar.LayoutParams.MATCH_PARENT);
        if (requirement.getHierarchyLevel() == 0) {
            // Top-level ("heading") checkboxes reflect aggregate state only.
            requirementCheckbox.setClickable(false);
        } else {
            // Component rows start hidden (collapsed) and are indented one
            // spacer per hierarchy level.
            requirementRow.setVisibility(View.GONE);
            for (int numberOfPaddingBlocks = 0; numberOfPaddingBlocks < requirement.getHierarchyLevel(); numberOfPaddingBlocks++) {
                Space space = new Space(this);
                space.setMinimumWidth(indentationSize);
                requirementRow.addView(space);
            }
        }
        requirementRow.addView(requirementCheckbox);
        final ImageView dropDownArrow = new ImageView(this);
        dropDownArrow.setImageResource(R.drawable.arrow_dropdown);
        dropDownArrow.setLayoutParams(new ActionBar.LayoutParams(125, ViewGroup.LayoutParams.MATCH_PARENT, Gravity.CENTER));
        dropDownArrow.setPadding(25, 0, 25, 0);
        dropDownArrow.setRotation(COLLAPSED);
        dropDownArrow.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Flip the arrow, then show/hide the component rows accordingly.
                if (dropDownArrow.getRotation() == COLLAPSED) {
                    dropDownArrow.setRotation(EXPANDED);
                } else {
                    dropDownArrow.setRotation(COLLAPSED);
                }
                toggleComponentDropDown(v);
            }
        });
        if (requirement.hasComponent()) {
            dropDownArrow.setVisibility(View.VISIBLE);
        } else {
            // INVISIBLE (not GONE) keeps titles aligned across rows.
            dropDownArrow.setVisibility(View.INVISIBLE);
        }
        requirementRow.addView(dropDownArrow);
        TextView requirementTitle = new TextView(this);
        requirementTitle.setText(requirement.getName());
        if (requirement.getHierarchyLevel() == 0) // Heading Requirement
        {
            requirementTitle.setTextSize(26);
        } else { // Component
            requirementTitle.setTextSize(18);
        }
        if (requirement.isCompleted()) {
            requirementTitle.setTextColor(Color.parseColor(COMPLETED_GREEN));
        } else if (requirement.isInProgress()) {
            requirementTitle.setTextColor(Color.parseColor(IN_PROGRESS_YELLOW));
        } else {
            requirementTitle.setTextColor(Color.BLACK);
        }
        // Long-pressing the title shows the requirement's description.
        final AlertDialog.Builder descriptionDialogBuilder = new AlertDialog.Builder(this);
        descriptionDialogBuilder.setTitle(requirement.getName());
        descriptionDialogBuilder.setMessage(requirement.getDescription());
        descriptionDialogBuilder.setPositiveButton("Okay", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
            }
        });
        requirementTitle.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                descriptionDialogBuilder.show();
                return false;
            }
        });
        requirementRow.addView(requirementTitle);
        parentLayout.addView(requirementRow);
        for (Requirement component : requirement.getComponents()) {
            buildRequirementRow(component, parentLayout);
        }
    }

    /**
     * Shows or hides every row after the heading row inside this requirement's
     * block, based on the arrow's current rotation (COLLAPSED hides, else shows).
     * Index 0 is the heading row itself and is left visible.
     */
    private void toggleComponentDropDown(View view) {
        ViewParent requirementRow = view.getParent();
        ViewGroup requirementBlock = (ViewGroup) requirementRow.getParent();
        for (int rowIndex = 1; rowIndex < requirementBlock.getChildCount(); rowIndex++) {
            View componentRow = requirementBlock.getChildAt(rowIndex);
            if (view.getRotation() == COLLAPSED)
                componentRow.setVisibility(View.GONE);
            else
                componentRow.setVisibility(View.VISIBLE);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        // Activate the navigation drawer toggle
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Click handler for a requirement checkbox. Checking starts the coaching-step
     * dialog flow; unchecking asks for confirmation before resetting progress.
     */
    public void onCheck(final CheckBox checkbox, final Requirement requirement) {
        if (checkbox.isChecked()) {
            displayCoachingStep(requirement, checkbox);
        } else // The checkbox was already checked before!
        {
            AlertDialog.Builder markRequirementIncompleteAlert = new AlertDialog.Builder(this);
            markRequirementIncompleteAlert.setMessage("Are you sure you'd like to mark this requirement as incomplete? You will lose all recorded progress for this requirement.");
            markRequirementIncompleteAlert.setPositiveButton("Yes", new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    requirement.resetProgress();
                    checkbox.setChecked(false);
                    // Reset the row's text back to black before recomputing colors.
                    ViewGroup row = (ViewGroup) checkbox.getParent();
                    for (int viewIndex = 0; viewIndex < row.getChildCount(); viewIndex++) {
                        View view = row.getChildAt(viewIndex);
                        if (view instanceof TextView) {
                            ((TextView) view).setTextColor(Color.BLACK);
                        }
                    }
                    updateRequirementTextColor(checkbox, requirement);
                    // Write to Database Here
                }
            });
            markRequirementIncompleteAlert.setNegativeButton("No", new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    // Declined: restore the checked state the user started from.
                    checkbox.setChecked(true);
                }
            });
            markRequirementIncompleteAlert.show();
        }
        currentRequirementsList.updateRequirement(requirement);
    }

    /**
     * Recolors this requirement's title to match the checkbox state, then walks up
     * to the enclosing block to recolor/recheck the parent ("heading") requirement
     * based on its aggregate completed/in-progress state.
     * NOTE(review): assumes the parent heading row is the first LinearLayout child
    * of the enclosing block and that requirement.getParentRequirement() is non-null
     * here — confirm for top-level requirements.
     */
    private void updateRequirementTextColor(CheckBox checkbox, Requirement requirement) {
        ViewGroup row = (ViewGroup) checkbox.getParent();
        TextView requirementTextView = null;
        // Find the title TextView (CheckBox is excluded since it is a TextView subclass).
        for (int viewIndex = 0; viewIndex < row.getChildCount(); viewIndex++) {
            View view = row.getChildAt(viewIndex);
            if (view instanceof TextView && !(view instanceof CheckBox)) {
                requirementTextView = (TextView) view;
                break;
            }
        }
        if (checkbox.isChecked()) {
            requirementTextView.setTextColor(Color.parseColor(COMPLETED_GREEN));
        } else {
            requirementTextView.setTextColor(Color.BLACK);
        }
        // Locate the parent requirement's checkbox and title in the enclosing block.
        ViewGroup block = (ViewGroup) row.getParent();
        TextView parentRequirementTextView = null;
        CheckBox parentRequirementCheckBox = null;
        for (int viewIndex = 0; viewIndex < block.getChildCount(); viewIndex++) {
            View view = block.getChildAt(viewIndex);
            if (view instanceof LinearLayout) {
                ViewGroup parentRow = (ViewGroup) view;
                for (int viewIndex2 = 0; viewIndex2 < parentRow.getChildCount(); viewIndex2++) {
                    View view2 = parentRow.getChildAt(viewIndex2);
                    if (view2 instanceof CheckBox) {
                        parentRequirementCheckBox = (CheckBox) view2;
                    } else if (view2 instanceof TextView) {
                        parentRequirementTextView = (TextView) view2;
                    }
                }
                break;
            }
        }
        if (requirement.getParentRequirement().isCompleted()) {
            parentRequirementTextView.setTextColor(Color.parseColor(COMPLETED_GREEN));
            parentRequirementCheckBox.setChecked(true);
        } else if (requirement.getParentRequirement().isInProgress()) {
            parentRequirementTextView.setTextColor(Color.parseColor(IN_PROGRESS_YELLOW));
            parentRequirementCheckBox.setChecked(false);
        } else {
            parentRequirementTextView.setTextColor(Color.BLACK);
            parentRequirementCheckBox.setChecked(false);
        }
    }

    /**
     * Shows the next coaching step for this requirement in a Yes/No dialog.
     * "Yes" advances (or completes on the last step); "No" unchecks the box.
     */
    private void displayCoachingStep(final Requirement requirement, final CheckBox currentCheckBox) {
        LinkedList<String> coachingSteps = requirement.getCoachingSteps();
        String coachingStep = coachingSteps.peek();
        final boolean isLastCoachingStep;
        if (coachingSteps.peekLast().equals(coachingStep)) {
            isLastCoachingStep = true;
        } else {
            isLastCoachingStep = false;
        }
        AlertDialog.Builder coachingAlert = new AlertDialog.Builder(this);
        CoachingStepPositiveListener coachingStepPositiveListener = new CoachingStepPositiveListener(isLastCoachingStep, requirement, currentCheckBox);
        CoachingStepNegativeListener coachingStepNegativeListener = new CoachingStepNegativeListener(currentCheckBox);
        coachingAlert.setMessage(coachingStep);
        coachingAlert.setPositiveButton("Yes", coachingStepPositiveListener);
        coachingAlert.setNegativeButton("No", coachingStepNegativeListener);
        coachingAlert.show();
    }

    /** Populates the navigation drawer and routes item clicks to other activities. */
    private void addDrawerItems() {
        String[] osArray = {"Settings", "In Progress Requirements"};
        mAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, osArray);
        mDrawerList.setAdapter(mAdapter);
        mDrawerList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                switch (position) {
                    case 0:
                        Intent a = new Intent(ChecklistActivity.this, SettingsActivity.class);
                        startActivity(a);
                        break;
                    case 1:
                        Intent b = new Intent(ChecklistActivity.this, InProgressActivity.class);
                        startActivity(b);
                        break;
                    default:
                }
            }
        });
    }

    /** Creates the drawer toggle and swaps the action-bar title as the drawer opens/closes. */
    private void setupDrawer() {
        mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, R.string.drawer_open, R.string.drawer_close) {
            /** Called when a drawer has settled in a completely open state. */
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                getSupportActionBar().setTitle("Navigation");
                invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu()
            }

            /** Called when a drawer has settled in a completely closed state. */
            public void onDrawerClosed(View view) {
                super.onDrawerClosed(view);
                getSupportActionBar().setTitle(mActivityTitle);
                invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu()
            }
        };
        mDrawerToggle.setDrawerIndicatorEnabled(true);
        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Sync the toggle state after onRestoreInstanceState has occurred.
        mDrawerToggle.syncState();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    /**
     * "Yes" handler for a coaching-step dialog: completes the requirement on the
     * last step, otherwise consumes the step, marks it in progress, and shows the next one.
     */
    private class CoachingStepPositiveListener implements DialogInterface.OnClickListener {
        private boolean isLastCoachingStep;
        private CheckBox checkbox;
        private Requirement requirement;
        private LinkedList<String> coachingSteps;

        public CoachingStepPositiveListener(boolean isLastCoachingStep, Requirement requirement, CheckBox checkbox) {
            this.isLastCoachingStep = isLastCoachingStep;
            this.checkbox = checkbox;
            this.requirement = requirement;
            this.coachingSteps = requirement.getCoachingSteps();
        }

        @Override
        public void onClick(DialogInterface dialogInterface, int which) {
            if (isLastCoachingStep) {
                checkbox.setChecked(true);
                requirement.setInProgress(false);
                requirement.setCompleted(true);
                coachingSteps.removeFirst();
                updateRequirementTextColor(checkbox, requirement);
                // Write to Database Here
            } else {
                coachingSteps.removeFirst();
                requirement.setInProgress(true);
                displayCoachingStep(requirement, checkbox);
            }
        }
    }

    /** "No" handler for a coaching-step dialog: simply unchecks the box. */
    private class CoachingStepNegativeListener implements DialogInterface.OnClickListener {
        private CheckBox checkbox;

        public CoachingStepNegativeListener(CheckBox checkbox) {
            this.checkbox = checkbox;
        }

        @Override
        public void onClick(DialogInterface dialogInterface, int which) {
            checkbox.setChecked(false);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets.command;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import org.apache.geode.cache.DynamicRegionFactory;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.ResourceException;
import org.apache.geode.cache.client.internal.PutAllOp;
import org.apache.geode.cache.operations.RemoveAllOperationContext;
import org.apache.geode.distributed.internal.DistributionStats;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.PutAllPartialResultException;
import org.apache.geode.internal.cache.ha.ThreadIdentifier;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.internal.logging.log4j.LocalizedMessage;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.internal.util.Breadcrumbs;
public class RemoveAll extends BaseCommand {
// Single shared command instance; the command object itself holds no per-request state.
private final static RemoveAll singleton = new RemoveAll();

/** Returns the shared {@code RemoveAll} command instance. */
public static Command getCommand() {
    return singleton;
}

/** Protected to allow subclassing; obtain the instance via {@link #getCommand()}. */
protected RemoveAll() {}
/**
 * Handles a client removeAll request: parses the region name, event id, flags,
 * callback argument and key list from the message parts, authorizes the write,
 * performs the bulk remove on the region, and sends back a (possibly chunked)
 * response with version information when the client needs it.
 *
 * Message layout read here: part 0 = region name, part 1 = event id bytes,
 * part 2 = flags, part 3 = callback argument, part 4 = key count, parts 5..5+n-1
 * = keys, optional part 5+n = request-specific timeout.
 */
@Override
public void cmdExecute(final Message clientMessage, final ServerConnection serverConnection,
    final SecurityService securityService, long startp) throws IOException, InterruptedException {
  long start = startp; // copy this since we need to modify it
  Part regionNamePart = null, numberOfKeysPart = null, keyPart = null;
  String regionName = null;
  int numberOfKeys = 0;
  Object key = null;
  Part eventPart = null;
  boolean replyWithMetaData = false;
  VersionedObjectList response = null;
  StringBuilder errMessage = new StringBuilder();
  CachedRegionHelper crHelper = serverConnection.getCachedRegionHelper();
  CacheServerStats stats = serverConnection.getCacheServerStats();
  serverConnection.setAsTrue(REQUIRES_RESPONSE);
  serverConnection.setAsTrue(REQUIRES_CHUNKED_RESPONSE);
  {
    // Account the time spent reading the request before processing begins.
    long oldStart = start;
    start = DistributionStats.getStatTime();
    stats.incReadRemoveAllRequestTime(start - oldStart);
  }
  try {
    // Retrieve the data from the message parts
    // part 0: region name
    regionNamePart = clientMessage.getPart(0);
    regionName = regionNamePart.getString();
    if (regionName == null) {
      String txt =
          LocalizedStrings.RemoveAll_THE_INPUT_REGION_NAME_FOR_THE_REMOVEALL_REQUEST_IS_NULL
              .toLocalizedString();
      logger.warn(LocalizedMessage.create(LocalizedStrings.TWO_ARG_COLON,
          new Object[] {serverConnection.getName(), txt}));
      errMessage.append(txt);
      writeChunkedErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR, errMessage.toString(),
          serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    LocalRegion region = (LocalRegion) serverConnection.getCache().getRegion(regionName);
    if (region == null) {
      String reason = " was not found during removeAll request";
      writeRegionDestroyedEx(clientMessage, regionName, reason, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    // part 1: eventID
    eventPart = clientMessage.getPart(1);
    ByteBuffer eventIdPartsBuffer = ByteBuffer.wrap(eventPart.getSerializedForm());
    long threadId = EventID.readEventIdPartsFromOptmizedByteArray(eventIdPartsBuffer);
    long sequenceId = EventID.readEventIdPartsFromOptmizedByteArray(eventIdPartsBuffer);
    EventID eventId =
        new EventID(serverConnection.getEventMemberIDByteArray(), threadId, sequenceId);
    Breadcrumbs.setEventId(eventId);
    // part 2: flags
    int flags = clientMessage.getPart(2).getInt();
    boolean clientIsEmpty = (flags & PutAllOp.FLAG_EMPTY) != 0;
    boolean clientHasCCEnabled = (flags & PutAllOp.FLAG_CONCURRENCY_CHECKS) != 0;
    // part 3: callbackArg
    Object callbackArg = clientMessage.getPart(3).getObject();
    // part 4: number of keys
    numberOfKeysPart = clientMessage.getPart(4);
    numberOfKeys = numberOfKeysPart.getInt();
    if (logger.isDebugEnabled()) {
      StringBuilder buffer = new StringBuilder();
      buffer.append(serverConnection.getName()).append(": Received removeAll request from ")
          .append(serverConnection.getSocketString()).append(" for region ").append(regionName)
          .append(callbackArg != null ? (" callbackArg " + callbackArg) : "").append(" with ")
          .append(numberOfKeys).append(" keys.");
      logger.debug(buffer);
    }
    ArrayList<Object> keys = new ArrayList<Object>(numberOfKeys);
    // retryVersions is parallel to keys: non-null entries carry the version tag
    // recovered for a retried operation, null entries mean "not a retry".
    ArrayList<VersionTag> retryVersions = new ArrayList<VersionTag>(numberOfKeys);
    for (int i = 0; i < numberOfKeys; i++) {
      keyPart = clientMessage.getPart(5 + i);
      key = keyPart.getStringOrObject();
      if (key == null) {
        String txt =
            LocalizedStrings.RemoveAll_ONE_OF_THE_INPUT_KEYS_FOR_THE_REMOVEALL_REQUEST_IS_NULL
                .toLocalizedString();
        logger.warn(LocalizedMessage.create(LocalizedStrings.TWO_ARG_COLON,
            new Object[] {serverConnection.getName(), txt}));
        errMessage.append(txt);
        writeChunkedErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR,
            errMessage.toString(), serverConnection);
        serverConnection.setAsTrue(RESPONDED);
        return;
      }
      if (clientMessage.isRetry()) {
        // Constuct the thread id/sequence id information for this element of the bulk op
        // The sequence id is constructed from the base sequence id and the offset
        EventID entryEventId = new EventID(eventId, i);
        // For PRs, the thread id assigned as a fake thread id.
        if (region instanceof PartitionedRegion) {
          PartitionedRegion pr = (PartitionedRegion) region;
          int bucketId = pr.getKeyInfo(key).getBucketId();
          long entryThreadId =
              ThreadIdentifier.createFakeThreadIDForBulkOp(bucketId, entryEventId.getThreadID());
          entryEventId = new EventID(entryEventId.getMembershipID(), entryThreadId,
              entryEventId.getSequenceID());
        }
        VersionTag tag = findVersionTagsForRetriedBulkOp(region, entryEventId);
        retryVersions.add(tag);
        // FIND THE VERSION TAG FOR THIS KEY - but how? all we have is the
        // removeAll eventId, not individual eventIds for entries, right?
      } else {
        retryVersions.add(null);
      }
      keys.add(key);
    } // for
    if (clientMessage.getNumberOfParts() == (5 + numberOfKeys + 1)) {// it means optional timeout
      // has been
      // added
      int timeout = clientMessage.getPart(5 + numberOfKeys).getInt();
      serverConnection.setRequestSpecificTimeout(timeout);
    }
    securityService.authorizeRegionWrite(regionName);
    AuthorizeRequest authzRequest = serverConnection.getAuthzRequest();
    if (authzRequest != null) {
      if (DynamicRegionFactory.regionIsDynamicRegionList(regionName)) {
        authzRequest.createRegionAuthorize(regionName);
      } else {
        // The authorization callback may substitute a different callback argument.
        RemoveAllOperationContext removeAllContext =
            authzRequest.removeAllAuthorize(regionName, keys, callbackArg);
        callbackArg = removeAllContext.getCallbackArg();
      }
    }
    response = region.basicBridgeRemoveAll(keys, retryVersions, serverConnection.getProxyID(),
        eventId, callbackArg);
    if (!region.getConcurrencyChecksEnabled() || clientIsEmpty || !clientHasCCEnabled) {
      // the client only needs this if versioning is being used and the client
      // has storage
      if (logger.isTraceEnabled()) {
        logger.trace(
            "setting removeAll response to null. region-cc-enabled={}; clientIsEmpty={}; client-cc-enabled={}",
            region.getConcurrencyChecksEnabled(), clientIsEmpty, clientHasCCEnabled);
      }
      response = null;
    }
    if (region instanceof PartitionedRegion) {
      PartitionedRegion pr = (PartitionedRegion) region;
      if (pr.getNetworkHopType() != PartitionedRegion.NETWORK_HOP_NONE) {
        // A network hop occurred: reply with refreshed metadata so the client
        // can update its bucket routing, and skip the normal reply below.
        writeReplyWithRefreshMetadata(clientMessage, response, serverConnection, pr,
            pr.getNetworkHopType());
        pr.clearNetworkHopData();
        replyWithMetaData = true;
      }
    }
  } catch (RegionDestroyedException rde) {
    writeChunkedException(clientMessage, rde, serverConnection);
    serverConnection.setAsTrue(RESPONDED);
    return;
  } catch (ResourceException re) {
    writeChunkedException(clientMessage, re, serverConnection);
    serverConnection.setAsTrue(RESPONDED);
    return;
  } catch (PutAllPartialResultException pre) {
    writeChunkedException(clientMessage, pre, serverConnection);
    serverConnection.setAsTrue(RESPONDED);
    return;
  } catch (Exception ce) {
    // If an interrupted exception is thrown , rethrow it
    checkForInterrupt(serverConnection, ce);
    // If an exception occurs during the op, preserve the connection
    writeChunkedException(clientMessage, ce, serverConnection);
    serverConnection.setAsTrue(RESPONDED);
    // if (logger.fineEnabled()) {
    logger.warn(LocalizedMessage.create(LocalizedStrings.Generic_0_UNEXPECTED_EXCEPTION,
        serverConnection.getName()), ce);
    // }
    return;
  } finally {
    long oldStart = start;
    start = DistributionStats.getStatTime();
    stats.incProcessRemoveAllTime(start - oldStart);
  }
  if (logger.isDebugEnabled()) {
    logger.debug("{}: Sending removeAll response back to {} for region {}{}",
        serverConnection.getName(), serverConnection.getSocketString(), regionName,
        (logger.isTraceEnabled() ? ": " + response : ""));
  }
  // Increment statistics and write the reply
  if (!replyWithMetaData) {
    writeReply(clientMessage, response, serverConnection);
  }
  serverConnection.setAsTrue(RESPONDED);
  stats.incWriteRemoveAllResponseTime(DistributionStats.getStatTime() - start);
}
/**
 * Not supported for removeAll: callers must use the overload that takes a
 * {@code VersionedObjectList} response (chunked reply).
 */
@Override
protected void writeReply(Message origMsg, ServerConnection serverConnection) throws IOException {
  throw new UnsupportedOperationException();
}
/**
 * Sends the removeAll result back to the client as a chunked RESPONSE message.
 *
 * <p>The reply consists of a header followed by zero or more chunks. When the version list is
 * non-empty, a single {@link VersionedObjectList.Chunker} object part is added per chunk and the
 * Chunker streams the appropriate slice of the list during serialization (its toData method).</p>
 *
 * @param origMsg the client's removeAll request; its transaction id is echoed on every chunk
 * @param response the versioned results for the operation; may be null (header-only reply)
 * @param servConn connection to the requesting client
 * @throws IOException if writing any chunk to the socket fails
 */
protected void writeReply(Message origMsg, VersionedObjectList response,
    ServerConnection servConn) throws IOException {
  // Bail out early if the cache is shutting down rather than writing a partial reply.
  servConn.getCache().getCancelCriterion().checkCancelInProgress(null);
  ChunkedMessage replyMsg = servConn.getChunkedResponseMessage();
  replyMsg.setMessageType(MessageType.RESPONSE);
  replyMsg.setTransactionId(origMsg.getTransactionId());
  int listSize = (response == null) ? 0 : response.size();
  if (response != null) {
    // Keys are dropped from the reply; presumably the client can correlate versions by the
    // order of the keys it sent — TODO(review) confirm against the client-side decoder.
    response.setKeys(null);
  }
  if (logger.isDebugEnabled()) {
    logger.debug("sending chunked response header. version list size={}{}", listSize,
        (logger.isTraceEnabled() ? " list=" + response : ""));
  }
  replyMsg.sendHeader();
  if (listSize > 0) {
    int chunkSize = 2 * MAXIMUM_CHUNK_SIZE;
    // Chunker will stream over the list in its toData method
    VersionedObjectList.Chunker chunk =
        new VersionedObjectList.Chunker(response, chunkSize, false, false);
    for (int i = 0; i < listSize; i += chunkSize) {
      boolean lastChunk = (i + chunkSize >= listSize);
      // setNumberOfParts(1) resets the message so each chunk holds exactly one object part.
      replyMsg.setNumberOfParts(1);
      replyMsg.setMessageType(MessageType.RESPONSE);
      replyMsg.setLastChunk(lastChunk);
      replyMsg.setTransactionId(origMsg.getTransactionId());
      replyMsg.addObjPart(chunk);
      if (logger.isDebugEnabled()) {
        logger.debug("sending chunk at index {} last chunk={} numParts={}", i, lastChunk,
            replyMsg.getNumberOfParts());
      }
      replyMsg.sendChunk(servConn);
    }
  } else {
    // Empty result: still send one terminating chunk with a null part so the client's
    // chunked-read loop completes.
    if (logger.isDebugEnabled()) {
      logger.debug("sending only header");
    }
    replyMsg.addObjPart(null);
    replyMsg.setLastChunk(true);
    replyMsg.sendChunk(servConn);
  }
  servConn.setAsTrue(RESPONDED);
  if (logger.isTraceEnabled()) {
    logger.trace("{}: rpl tx: {}", servConn.getName(), origMsg.getTransactionId());
  }
}
@Override
protected void writeReplyWithRefreshMetadata(Message origMsg, ServerConnection serverConnection,
    PartitionedRegion pr, byte nwHop) throws IOException {
  // The metadata-refresh reply for removeAll must also carry the VersionedObjectList result,
  // so the base class's overload without a response list is deliberately unsupported; the
  // private writeReplyWithRefreshMetadata(Message, VersionedObjectList, ...) is used instead.
  throw new UnsupportedOperationException();
}
/**
 * Sends the removeAll result as a chunked RESPONSE whose first chunk additionally carries the
 * partitioned-region metadata version and the network-hop indicator, prompting the client to
 * refresh its bucket/server metadata.
 *
 * <p>The first chunk contains a two-byte part {metadataVersion, nwHop}; subsequent chunks (when
 * the version list is non-empty) each carry one {@link VersionedObjectList.Chunker} part that
 * streams a slice of the list during serialization.</p>
 *
 * @param origMsg the client's removeAll request; its transaction id is echoed on every chunk
 * @param response the versioned results for the operation; may be null (metadata-only reply)
 * @param servConn connection to the requesting client
 * @param pr the partitioned region whose metadata version is sent back
 * @param nwHop the network-hop value observed while executing the operation
 * @throws IOException if writing any chunk to the socket fails
 */
private void writeReplyWithRefreshMetadata(Message origMsg, VersionedObjectList response,
    ServerConnection servConn, PartitionedRegion pr, byte nwHop) throws IOException {
  // Bail out early if the cache is shutting down rather than writing a partial reply.
  servConn.getCache().getCancelCriterion().checkCancelInProgress(null);
  ChunkedMessage replyMsg = servConn.getChunkedResponseMessage();
  replyMsg.setMessageType(MessageType.RESPONSE);
  replyMsg.setTransactionId(origMsg.getTransactionId());
  replyMsg.sendHeader();
  int listSize = (response == null) ? 0 : response.size();
  if (logger.isDebugEnabled()) {
    logger.debug(
        "sending chunked response header with metadata refresh status. Version list size = {}{}",
        listSize, (logger.isTraceEnabled() ? "; list=" + response : ""));
  }
  if (response != null) {
    // Keys are dropped from the reply; presumably the client correlates versions by the order
    // of the keys it sent — TODO(review) confirm against the client-side decoder.
    response.setKeys(null);
  }
  // First chunk: metadata version + network-hop byte.
  replyMsg.setNumberOfParts(1);
  replyMsg.setTransactionId(origMsg.getTransactionId());
  replyMsg.addBytesPart(new byte[] {pr.getMetadataVersion(), nwHop});
  if (listSize > 0) {
    replyMsg.setLastChunk(false);
    replyMsg.sendChunk(servConn);
    int chunkSize = 2 * MAXIMUM_CHUNK_SIZE; // double the transport chunk size per list chunk
    // Chunker will stream over the list in its toData method
    VersionedObjectList.Chunker chunk =
        new VersionedObjectList.Chunker(response, chunkSize, false, false);
    for (int i = 0; i < listSize; i += chunkSize) {
      boolean lastChunk = (i + chunkSize >= listSize);
      replyMsg.setNumberOfParts(1); // resets the message
      replyMsg.setMessageType(MessageType.RESPONSE);
      replyMsg.setLastChunk(lastChunk);
      replyMsg.setTransactionId(origMsg.getTransactionId());
      replyMsg.addObjPart(chunk);
      if (logger.isDebugEnabled()) {
        logger.debug("sending chunk at index {} last chunk={} numParts={}", i, lastChunk,
            replyMsg.getNumberOfParts());
      }
      replyMsg.sendChunk(servConn);
    }
  } else {
    // No list to send: the metadata chunk is also the terminating chunk.
    replyMsg.setLastChunk(true);
    if (logger.isDebugEnabled()) {
      logger.debug("sending first and only part of chunked message");
    }
    replyMsg.sendChunk(servConn);
  }
  pr.getPrStats().incPRMetaDataSentCount();
  if (logger.isTraceEnabled()) {
    logger.trace("{}: rpl with REFRESH_METADATA tx: {}", servConn.getName(),
        origMsg.getTransactionId());
  }
}
}
| |
/* ==========================================================================
* Copyright 2006 Mevenide Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================================================================
*/
package org.jetbrains.idea.maven.execution;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.RunManager;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.impl.EditConfigurationsDialog;
import com.intellij.execution.impl.RunManagerImpl;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.impl.JavaAwareProjectJdkTableImpl;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.encoding.EncodingProjectManager;
import com.intellij.util.EnvironmentUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.io.ZipUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.artifactResolver.MavenArtifactResolvedM31RtMarker;
import org.jetbrains.idea.maven.artifactResolver.MavenArtifactResolvedM3RtMarker;
import org.jetbrains.idea.maven.artifactResolver.common.MavenModuleMap;
import org.jetbrains.idea.maven.model.MavenConstants;
import org.jetbrains.idea.maven.project.MavenGeneralSettings;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import org.jetbrains.idea.maven.server.MavenServerUtil;
import org.jetbrains.idea.maven.utils.MavenSettings;
import org.jetbrains.idea.maven.utils.MavenUtil;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.io.*;
import java.util.*;
import java.util.zip.ZipOutputStream;
import static org.jetbrains.idea.maven.server.MavenServerManager.verifyMavenSdkRequirements;
/**
 * Assembles the {@link JavaParameters} (JDK, boot classpath, VM options and program arguments)
 * used to launch Maven as an external process, based on the project's Maven general and runner
 * settings.
 *
 * @author Ralf Quebbemann
 */
public class MavenExternalParameters {
  private static final Logger LOG = Logger.getInstance(MavenExternalParameters.class);

  /** The classworlds launcher class that bootstraps Maven. */
  public static final String MAVEN_LAUNCHER_CLASS = "org.codehaus.classworlds.Launcher";
  @NonNls private static final String MAVEN_OPTS = "MAVEN_OPTS";

  /**
   * @deprecated use
   * {@link #createJavaParameters(Project, MavenRunnerParameters, MavenGeneralSettings, MavenRunnerSettings, MavenRunConfiguration)}
   */
  @Deprecated
  public static JavaParameters createJavaParameters(@Nullable final Project project,
                                                    @NotNull final MavenRunnerParameters parameters,
                                                    @Nullable MavenGeneralSettings coreSettings,
                                                    @Nullable MavenRunnerSettings runnerSettings) throws ExecutionException {
    return createJavaParameters(project, parameters, coreSettings, runnerSettings, null);
  }

  public static JavaParameters createJavaParameters(@Nullable final Project project,
                                                    @NotNull final MavenRunnerParameters parameters) throws ExecutionException {
    return createJavaParameters(project, parameters, null, null, null);
  }

  /**
   * Builds the full command line for an external Maven run. Must be called under a read action.
   *
   * @param project          project context; when null, default settings are used
   * @param parameters       goals, working directory and profile selection for the run
   * @param coreSettings     Maven general settings; defaults to the project's (or fresh) settings
   * @param runnerSettings   Maven runner settings; defaults to the project's (or fresh) settings
   * @param runConfiguration used to create a quick fix if the Maven home is not found
   * @return fully populated launch parameters
   * @throws ExecutionException if no usable JDK or Maven installation can be resolved
   */
  public static JavaParameters createJavaParameters(@Nullable final Project project,
                                                    @NotNull final MavenRunnerParameters parameters,
                                                    @Nullable MavenGeneralSettings coreSettings,
                                                    @Nullable MavenRunnerSettings runnerSettings,
                                                    @Nullable MavenRunConfiguration runConfiguration) throws ExecutionException {
    final JavaParameters params = new JavaParameters();

    ApplicationManager.getApplication().assertReadAccessAllowed();

    if (coreSettings == null) {
      coreSettings = project == null ? new MavenGeneralSettings() : MavenProjectsManager.getInstance(project).getGeneralSettings();
    }
    if (runnerSettings == null) {
      runnerSettings = project == null ? new MavenRunnerSettings() : MavenRunner.getInstance(project).getState();
    }

    params.setWorkingDirectory(parameters.getWorkingDirFile());

    // "Global" means the caller handed us the project-wide runner settings instance, which
    // changes the error message shown when the configured JRE cannot be found.
    Sdk jdk = getJdk(project, runnerSettings, project != null && MavenRunner.getInstance(project).getState() == runnerSettings);
    params.setJdk(jdk);

    final String mavenHome = resolveMavenHome(coreSettings, project, runConfiguration);
    final String mavenVersion = MavenUtil.getMavenVersion(mavenHome);

    String sdkConfigLocation = "Settings | Build, Execution, Deployment | Build Tools | Maven | Runner | JRE";
    verifyMavenSdkRequirements(jdk, mavenVersion, sdkConfigLocation);

    params.getProgramParametersList().add("-Didea.version=" + MavenUtil.getIdeaVersionToPassToMavenProcess());

    // Maven 3.3+ needs the multi-module project directory to locate .mvn configuration.
    if (StringUtil.compareVersionNumbers(mavenVersion, "3.3") >= 0) {
      params.getVMParametersList().addProperty("maven.multiModuleProjectDirectory",
                                               MavenServerUtil.findMavenBasedir(parameters.getWorkingDirFile()).getPath());
    }

    addVMParameters(params.getVMParametersList(), mavenHome, runnerSettings);

    File confFile = MavenUtil.getMavenConfFile(new File(mavenHome));
    if (!confFile.isFile()) {
      throw new ExecutionException("Configuration file does not exist in maven home: " + confFile.getAbsolutePath());
    }

    if (project != null && parameters.isResolveToWorkspace()) {
      try {
        // Patch classworlds.conf to load the artifact-resolver jar and hand it a map of the
        // workspace modules so inter-module dependencies resolve to IDE output directories.
        String resolverJar = getArtifactResolverJar(mavenVersion);
        confFile = patchConfFile(confFile, resolverJar);

        File modulesPathsFile = dumpModulesPaths(project);
        params.getVMParametersList().addProperty(MavenModuleMap.PATHS_FILE_PROPERTY, modulesPathsFile.getAbsolutePath());
      }
      catch (IOException e) {
        LOG.error(e);
        throw new ExecutionException("Failed to run maven configuration", e);
      }
    }

    params.getVMParametersList().addProperty("classworlds.conf", confFile.getPath());

    for (String path : getMavenClasspathEntries(mavenHome)) {
      params.getClassPath().add(path);
    }

    params.setEnv(new HashMap<>(runnerSettings.getEnvironmentProperties()));
    params.setPassParentEnvs(runnerSettings.isPassParentEnv());

    params.setMainClass(MAVEN_LAUNCHER_CLASS);
    EncodingManager encodingManager = project == null
                                      ? EncodingManager.getInstance()
                                      : EncodingProjectManager.getInstance(project);
    params.setCharset(encodingManager.getDefaultCharset());

    addMavenParameters(params.getProgramParametersList(), mavenHome, coreSettings, runnerSettings, parameters);

    return params;
  }

  /**
   * Returns a temporary copy of {@code conf} patched to load {@code library}; the copy is
   * deleted on JVM exit.
   */
  private static File patchConfFile(File conf, String library) throws IOException {
    File tmpConf = FileUtil.createTempFile("idea-", "-mvn.conf");
    tmpConf.deleteOnExit();
    patchConfFile(conf, tmpConf, library);
    return tmpConf;
  }

  /**
   * Copies {@code originalConf} to {@code dest} line by line, inserting a "load <library>"
   * directive right after the first "[plexus.core]" section header so that classworlds puts the
   * resolver jar on Maven's core classpath.
   */
  private static void patchConfFile(File originalConf, File dest, String library) throws IOException {
    // Reader and writer both use the platform default charset, mirroring how the original conf
    // file is consumed — NOTE(review): confirm classworlds expects the platform charset.
    try (Scanner sc = new Scanner(originalConf);
         BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dest)))) {
      boolean patched = false;
      while (sc.hasNextLine()) {
        String line = sc.nextLine();
        out.append(line);
        out.newLine();
        if (!patched && "[plexus.core]".equals(line)) {
          out.append("load ").append(library);
          out.newLine();
          patched = true;
        }
      }
    }
  }

  /**
   * Returns the path of the artifact-resolver jar matching the given Maven version. In
   * development mode (classes directory instead of a jar) the classes are zipped into a
   * temporary jar, together with the shared {@link MavenModuleMap} classes.
   */
  private static String getArtifactResolverJar(@Nullable String mavenVersion) throws IOException {
    Class<?> marker;

    // NOTE(review): plain String comparison is used for the version cut-offs here, unlike the
    // StringUtil.compareVersionNumbers call above — kept as-is to preserve behavior.
    if (mavenVersion != null && mavenVersion.compareTo("3.1.0") >= 0) {
      marker = MavenArtifactResolvedM31RtMarker.class;
    }
    else if (mavenVersion != null && mavenVersion.compareTo("3.0.0") >= 0) {
      marker = MavenArtifactResolvedM3RtMarker.class;
    }
    else {
      try {
        // The Maven2 marker lives in an optional plugin, hence the reflective lookup.
        marker = Class.forName("org.jetbrains.idea.maven.artifactResolver.MavenArtifactResolvedM2RtMarker");
      }
      catch (ClassNotFoundException e) {
        LOG.error("Cannot find Maven2 artifact resolver, falling back to Maven3", e);
        marker = MavenArtifactResolvedM3RtMarker.class;
      }
    }

    File classDirOrJar = new File(PathUtil.getJarPathForClass(marker));

    if (!classDirOrJar.isDirectory()) {
      return classDirOrJar.getAbsolutePath(); // it's a jar in IDEA installation.
    }

    // it's a classes directory, we are in development mode.
    File tempFile = FileUtil.createTempFile("idea-", "-artifactResolver.jar");
    tempFile.deleteOnExit();

    try (ZipOutputStream zipOutput = new ZipOutputStream(new FileOutputStream(tempFile))) {
      ZipUtil.addDirToZipRecursively(zipOutput, null, classDirOrJar, "", null, null);

      File m2Module = new File(PathUtil.getJarPathForClass(MavenModuleMap.class));
      String commonClassesPath = MavenModuleMap.class.getPackage().getName().replace('.', '/');
      ZipUtil.addDirToZipRecursively(zipOutput, null, new File(m2Module, commonClassesPath), commonClassesPath, null, null);
    }

    return tempFile.getAbsolutePath();
  }

  /**
   * Writes a properties file mapping "groupId:artifactId:packaging:version" coordinates of each
   * non-ignored mavenized module to the corresponding file/output directory, for use by the
   * artifact resolver. Must be called under a read action.
   */
  private static File dumpModulesPaths(@NotNull Project project) throws IOException {
    ApplicationManager.getApplication().assertReadAccessAllowed();

    Properties res = new Properties();

    MavenProjectsManager manager = MavenProjectsManager.getInstance(project);

    for (Module module : ModuleManager.getInstance(project).getModules()) {
      if (manager.isMavenizedModule(module)) {
        MavenProject mavenProject = manager.findProject(module);
        if (mavenProject != null && !manager.isIgnored(mavenProject)) {
          // pom coordinate -> pom.xml file
          res.setProperty(mavenProject.getMavenId().getGroupId()
                          + ':' + mavenProject.getMavenId().getArtifactId()
                          + ":pom"
                          + ':' + mavenProject.getMavenId().getVersion(),
                          mavenProject.getFile().getPath());

          // main artifact coordinate -> compiled output directory
          res.setProperty(mavenProject.getMavenId().getGroupId()
                          + ':' + mavenProject.getMavenId().getArtifactId()
                          + ':' + mavenProject.getPackaging()
                          + ':' + mavenProject.getMavenId().getVersion(),
                          mavenProject.getOutputDirectory());

          // test-jar coordinate -> test output directory
          res.setProperty(mavenProject.getMavenId().getGroupId()
                          + ':' + mavenProject.getMavenId().getArtifactId()
                          + ":test-jar"
                          + ':' + mavenProject.getMavenId().getVersion(),
                          mavenProject.getTestOutputDirectory());

          addArtifactFileMapping(res, mavenProject, "sources");
          addArtifactFileMapping(res, mavenProject, "test-sources");
          addArtifactFileMapping(res, mavenProject, "javadoc");
          addArtifactFileMapping(res, mavenProject, "test-javadoc");
        }
      }
    }

    File file = new File(PathManager.getSystemPath(), "Maven/idea-projects-state-" + project.getLocationHash() + ".properties");
    FileUtil.ensureExists(file.getParentFile());

    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
      res.store(out, null);
    }

    return file;
  }

  /**
   * Registers the classified artifact (e.g. "-sources.jar") in {@code res} if it exists in the
   * project's build directory.
   */
  private static void addArtifactFileMapping(@NotNull Properties res, @NotNull MavenProject mavenProject, @NotNull String classifier) {
    File file = new File(mavenProject.getBuildDirectory(), mavenProject.getFinalName() + '-' + classifier + ".jar");
    if (file.exists()) {
      res.setProperty(mavenProject.getMavenId().getGroupId()
                      + ':' + mavenProject.getMavenId().getArtifactId()
                      + ':' + classifier
                      + ':' + mavenProject.getMavenId().getVersion(),
                      file.getPath());
    }
  }

  /**
   * Resolves the JDK to run Maven with, honoring the runner settings' JRE name: the bundled
   * JDK, the project JDK (falling back to a module JDK), JAVA_HOME, or a named table entry.
   *
   * @throws ExecutionException if the configured JRE cannot be resolved; the exception may carry
   *                            a hyperlink quick fix that opens the relevant settings dialog
   */
  @NotNull
  private static Sdk getJdk(@Nullable Project project, MavenRunnerSettings runnerSettings, boolean isGlobalRunnerSettings)
    throws ExecutionException {
    String name = runnerSettings.getJreName();
    if (name.equals(MavenRunnerSettings.USE_INTERNAL_JAVA)) {
      return JavaAwareProjectJdkTableImpl.getInstanceEx().getInternalJdk();
    }

    if (name.equals(MavenRunnerSettings.USE_PROJECT_JDK)) {
      if (project != null) {
        Sdk res = ProjectRootManager.getInstance(project).getProjectSdk();
        if (res != null) {
          return res;
        }
        // No project-level SDK: fall back to the first module with a Java SDK.
        Module[] modules = ModuleManager.getInstance(project).getModules();
        for (Module module : modules) {
          Sdk sdk = ModuleRootManager.getInstance(module).getSdk();
          if (sdk != null && sdk.getSdkType() instanceof JavaSdkType) {
            return sdk;
          }
        }
      }

      if (project == null) {
        Sdk recent = ProjectJdkTable.getInstance().findMostRecentSdkOfType(JavaSdk.getInstance());
        if (recent != null) return recent;
        return JavaAwareProjectJdkTableImpl.getInstanceEx().getInternalJdk();
      }

      throw new ProjectJdkSettingsOpenerExecutionException("Project JDK is not specified. <a href=''>Configure</a>", project);
    }

    if (name.equals(MavenRunnerSettings.USE_JAVA_HOME)) {
      final String javaHome = EnvironmentUtil.getEnvironmentMap().get("JAVA_HOME");
      if (StringUtil.isEmptyOrSpaces(javaHome)) {
        throw new ExecutionException(RunnerBundle.message("maven.java.home.undefined"));
      }
      return JavaSdk.getInstance().createJdk("", javaHome);
    }

    for (Sdk projectJdk : ProjectJdkTable.getInstance().getAllJdks()) {
      if (projectJdk.getName().equals(name)) {
        return projectJdk;
      }
    }

    if (isGlobalRunnerSettings) {
      throw new ExecutionException(RunnerBundle.message("maven.java.not.found.default.config", name));
    }
    else {
      throw new ExecutionException(RunnerBundle.message("maven.java.not.found", name));
    }
  }

  /** Adds MAVEN_OPTS, the user's VM options, and -Dmaven.home to the VM parameter list. */
  public static void addVMParameters(ParametersList parametersList, String mavenHome, MavenRunnerSettings runnerSettings) {
    parametersList.addParametersString(System.getenv(MAVEN_OPTS));

    parametersList.addParametersString(runnerSettings.getVmOptions());

    parametersList.addProperty("maven.home", mavenHome);
  }

  /**
   * Appends Maven command-line options derived from the settings, followed by the goals, an
   * optional -f for a non-default POM file name, and the -P profile list.
   */
  private static void addMavenParameters(ParametersList parametersList,
                                         String mavenHome,
                                         MavenGeneralSettings coreSettings,
                                         MavenRunnerSettings runnerSettings,
                                         MavenRunnerParameters parameters) {
    encodeCoreAndRunnerSettings(coreSettings, mavenHome, parametersList);

    if (runnerSettings.isSkipTests()) {
      parametersList.addProperty("skipTests", "true");
    }

    for (Map.Entry<String, String> entry : runnerSettings.getMavenProperties().entrySet()) {
      if (entry.getKey().length() > 0) {
        parametersList.addProperty(entry.getKey(), entry.getValue());
      }
    }

    for (String goal : parameters.getGoals()) {
      parametersList.add(goal);
    }

    if (parameters.getPomFileName() != null && !FileUtil.namesEqual(MavenConstants.POM_XML, parameters.getPomFileName())) {
      parametersList.add("-f");
      parametersList.add(parameters.getPomFileName());
    }

    addOption(parametersList, "P", encodeProfiles(parameters.getProfilesMap()));
  }

  /** Adds "-key value" to the command line if {@code value} is non-blank. */
  private static void addOption(ParametersList cmdList, @NonNls String key, @NonNls String value) {
    if (!StringUtil.isEmptyOrSpaces(value)) {
      cmdList.add("-" + key);
      cmdList.add(value);
    }
  }

  @NotNull
  public static String resolveMavenHome(@NotNull MavenGeneralSettings coreSettings) throws ExecutionException {
    return resolveMavenHome(coreSettings, null, null);
  }

  /**
   * Resolves and validates the Maven home directory configured in the settings.
   *
   * @param project          used to create a quick fix if the Maven home is not found
   * @param runConfiguration used to create a quick fix if the Maven home is not found
   * @return the canonical path of the Maven installation
   * @throws ExecutionException if the home is unset, missing or not a valid Maven installation
   */
  @NotNull
  public static String resolveMavenHome(@NotNull MavenGeneralSettings coreSettings,
                                        @Nullable Project project,
                                        @Nullable MavenRunConfiguration runConfiguration) throws ExecutionException {
    final File file = MavenUtil.resolveMavenHomeDirectory(coreSettings.getMavenHome());

    if (file == null) {
      throw createExecutionException(RunnerBundle.message("external.maven.home.no.default"),
                                     RunnerBundle.message("external.maven.home.no.default.with.fix"),
                                     coreSettings, project, runConfiguration);
    }

    if (!file.exists()) {
      throw createExecutionException(RunnerBundle.message("external.maven.home.does.not.exist", file.getPath()),
                                     RunnerBundle.message("external.maven.home.does.not.exist.with.fix", file.getPath()),
                                     coreSettings, project, runConfiguration);
    }

    if (!MavenUtil.isValidMavenHome(file)) {
      throw createExecutionException(RunnerBundle.message("external.maven.home.invalid", file.getPath()),
                                     RunnerBundle.message("external.maven.home.invalid.with.fix", file.getPath()),
                                     coreSettings, project, runConfiguration);
    }

    try {
      return file.getCanonicalPath();
    }
    catch (IOException e) {
      throw new ExecutionException(e.getMessage(), e);
    }
  }

  /**
   * Builds an ExecutionException for a Maven-home problem, attaching a settings-opening
   * hyperlink fix when a project or run configuration is available to anchor it.
   */
  private static ExecutionException createExecutionException(String text,
                                                             String textWithFix,
                                                             @NotNull MavenGeneralSettings coreSettings,
                                                             @Nullable Project project,
                                                             @Nullable MavenRunConfiguration runConfiguration) {
    Project notNullProject = project;
    if (notNullProject == null) {
      if (runConfiguration == null) return new ExecutionException(text);
      notNullProject = runConfiguration.getProject();
      if (notNullProject == null) return new ExecutionException(text);
    }

    if (coreSettings == MavenProjectsManager.getInstance(notNullProject).getGeneralSettings()) {
      // The project-wide settings are at fault: link to the Maven settings page.
      return new ProjectSettingsOpenerExecutionException(textWithFix, notNullProject);
    }

    if (runConfiguration != null) {
      Project runCfgProject = runConfiguration.getProject();
      if (runCfgProject != null) {
        if (((RunManagerImpl)RunManager.getInstance(runCfgProject)).getSettings(runConfiguration) != null) {
          // The run configuration overrides the settings: link to its edit dialog.
          return new RunConfigurationOpenerExecutionException(textWithFix, runConfiguration);
        }
      }
    }

    return new ExecutionException(text);
  }

  /** Collects the classworlds jar(s) from the Maven installation's boot directory. */
  @SuppressWarnings({"HardCodedStringLiteral"})
  private static List<String> getMavenClasspathEntries(final String mavenHome) {
    File mavenHomeBootAsFile = new File(new File(mavenHome, "core"), "boot");
    // if the dir "core/boot" does not exist we are using a Maven version > 2.0.5
    // in this case the classpath must be constructed from the dir "boot"
    if (!mavenHomeBootAsFile.exists()) {
      mavenHomeBootAsFile = new File(mavenHome, "boot");
    }

    List<String> classpathEntries = new ArrayList<>();

    File[] files = mavenHomeBootAsFile.listFiles();
    if (files != null) {
      for (File file : files) {
        if (file.getName().contains("classworlds")) {
          classpathEntries.add(file.getAbsolutePath());
        }
      }
    }

    return classpathEntries;
  }

  /** Translates the Maven general settings into the corresponding command-line flags. */
  private static void encodeCoreAndRunnerSettings(MavenGeneralSettings coreSettings, String mavenHome,
                                                  ParametersList cmdList) {
    if (coreSettings.isWorkOffline()) {
      cmdList.add("--offline");
    }

    boolean atLeastMaven3 = MavenUtil.isMaven3(mavenHome);

    if (!atLeastMaven3) {
      // These options were removed in Maven 3.
      addIfNotEmpty(cmdList, coreSettings.getPluginUpdatePolicy().getCommandLineOption());

      if (!coreSettings.isUsePluginRegistry()) {
        cmdList.add("--no-plugin-registry");
      }
    }

    if (coreSettings.getOutputLevel() == MavenExecutionOptions.LoggingLevel.DEBUG) {
      cmdList.add("--debug");
    }
    if (coreSettings.isNonRecursive()) {
      cmdList.add("--non-recursive");
    }
    if (coreSettings.isPrintErrorStackTraces()) {
      cmdList.add("--errors");
    }

    if (coreSettings.isAlwaysUpdateSnapshots()) {
      cmdList.add("--update-snapshots");
    }

    if (StringUtil.isNotEmpty(coreSettings.getThreads())) {
      cmdList.add("-T", coreSettings.getThreads());
    }

    addIfNotEmpty(cmdList, coreSettings.getFailureBehavior().getCommandLineOption());
    addIfNotEmpty(cmdList, coreSettings.getChecksumPolicy().getCommandLineOption());

    addOption(cmdList, "s", coreSettings.getUserSettingsFile());
    if (!StringUtil.isEmptyOrSpaces(coreSettings.getLocalRepository())) {
      cmdList.addProperty("maven.repo.local", coreSettings.getLocalRepository());
    }
  }

  private static void addIfNotEmpty(ParametersList parametersList, @Nullable String value) {
    if (!StringUtil.isEmptyOrSpaces(value)) {
      parametersList.add(value);
    }
  }

  /**
   * Encodes the profile activation map into Maven's -P syntax: comma-separated names, with
   * disabled profiles prefixed by '!'.
   */
  private static String encodeProfiles(Map<String, Boolean> profiles) {
    StringBuilder stringBuilder = new StringBuilder();
    for (Map.Entry<String, Boolean> entry : profiles.entrySet()) {
      if (stringBuilder.length() != 0) {
        stringBuilder.append(",");
      }
      if (!entry.getValue()) {
        stringBuilder.append("!");
      }
      stringBuilder.append(entry.getKey());
    }
    return stringBuilder.toString();
  }

  /** Opens the Maven settings page when the error hyperlink is clicked. */
  private static class ProjectSettingsOpenerExecutionException extends WithHyperlinkExecutionException {
    private final Project myProject;

    ProjectSettingsOpenerExecutionException(final String s, Project project) {
      super(s);
      myProject = project;
    }

    @Override
    protected void hyperlinkClicked() {
      ShowSettingsUtil.getInstance().showSettingsDialog(myProject, MavenSettings.DISPLAY_NAME);
    }
  }

  /** Opens the project JDK settings when the error hyperlink is clicked. */
  private static class ProjectJdkSettingsOpenerExecutionException extends WithHyperlinkExecutionException {
    private final Project myProject;

    ProjectJdkSettingsOpenerExecutionException(final String s, Project project) {
      super(s);
      myProject = project;
    }

    @Override
    protected void hyperlinkClicked() {
      ProjectSettingsService.getInstance(myProject).openProjectSettings();
    }
  }

  /** Opens the run-configuration edit dialog when the error hyperlink is clicked. */
  private static class RunConfigurationOpenerExecutionException extends WithHyperlinkExecutionException {
    private final MavenRunConfiguration myRunConfiguration;

    RunConfigurationOpenerExecutionException(final String s, MavenRunConfiguration runConfiguration) {
      super(s);
      myRunConfiguration = runConfiguration;
    }

    @Override
    protected void hyperlinkClicked() {
      Project project = myRunConfiguration.getProject();
      EditConfigurationsDialog dialog = new EditConfigurationsDialog(project);
      dialog.show();
    }
  }

  /**
   * ExecutionException whose message contains a hyperlink; subclasses define what clicking the
   * link does. Usable both as a Swing HyperlinkListener and as a NotificationListener.
   */
  private static abstract class WithHyperlinkExecutionException extends ExecutionException
    implements HyperlinkListener, NotificationListener {

    WithHyperlinkExecutionException(String s) {
      super(s);
    }

    protected abstract void hyperlinkClicked();

    @Override
    public final void hyperlinkUpdate(HyperlinkEvent e) {
      if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
        hyperlinkClicked();
      }
    }

    @Override
    public final void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
      hyperlinkUpdate(event);
    }
  }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* See {@link VkExportMemoryAllocateInfo}.
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkExportMemoryAllocateInfoKHR {
* VkStructureType sType;
* void const * pNext;
* VkExternalMemoryHandleTypeFlags handleTypes;
* }</code></pre>
*/
public class VkExportMemoryAllocateInfoKHR extends VkExportMemoryAllocateInfo {
/**
 * Creates a {@code VkExportMemoryAllocateInfoKHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
 * visible to the struct instance and vice versa.
 *
 * <p>The created instance holds a strong reference to the container object.</p>
 */
public VkExportMemoryAllocateInfoKHR(ByteBuffer container) {
    // NOTE: machine-generated KHR alias; all layout/field logic is inherited from
    // VkExportMemoryAllocateInfo — changes here would be lost on regeneration.
    super(container);
}
// The following setters override the parent only to narrow the return type to this class so
// that call chains stay typed as VkExportMemoryAllocateInfoKHR; each delegates to the parent's
// static n* accessor.
/** Sets the specified value to the {@code sType} field. */
@Override
public VkExportMemoryAllocateInfoKHR sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
/** Sets the {@link VK11#VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO} value to the {@code sType} field. */
@Override
public VkExportMemoryAllocateInfoKHR sType$Default() { return sType(VK11.VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO); }
/** Sets the specified value to the {@code pNext} field. */
@Override
public VkExportMemoryAllocateInfoKHR pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
/** Sets the specified value to the {@code handleTypes} field. */
@Override
public VkExportMemoryAllocateInfoKHR handleTypes(@NativeType("VkExternalMemoryHandleTypeFlags") int value) { nhandleTypes(address(), value); return this; }
/** Initializes this struct with the specified values. */
@Override
public VkExportMemoryAllocateInfoKHR set(
    int sType,
    long pNext,
    int handleTypes
) {
    sType(sType);
    pNext(pNext);
    handleTypes(handleTypes);

    return this;
}

/**
 * Copies the specified struct data to this struct.
 *
 * @param src the source struct
 *
 * @return this struct
 */
public VkExportMemoryAllocateInfoKHR set(VkExportMemoryAllocateInfoKHR src) {
    // Raw memory copy of SIZEOF bytes between the two struct addresses.
    memCopy(src.address(), address(), SIZEOF);

    return this;
}
// ----------------------------------- Heap / explicit allocation factories -----------------------------------

/** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static VkExportMemoryAllocateInfoKHR malloc() {
    return wrap(VkExportMemoryAllocateInfoKHR.class, nmemAllocChecked(SIZEOF));
}

/** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static VkExportMemoryAllocateInfoKHR calloc() {
    return wrap(VkExportMemoryAllocateInfoKHR.class, nmemCallocChecked(1, SIZEOF));
}

/** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated with {@link BufferUtils}. */
public static VkExportMemoryAllocateInfoKHR create() {
    // Backed by a GC-managed direct ByteBuffer: no explicit free required.
    ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
    return wrap(VkExportMemoryAllocateInfoKHR.class, memAddress(container), container);
}

/** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance for the specified memory address. */
public static VkExportMemoryAllocateInfoKHR create(long address) {
    return wrap(VkExportMemoryAllocateInfoKHR.class, address);
}

/** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static VkExportMemoryAllocateInfoKHR createSafe(long address) {
    return address == NULL ? null : wrap(VkExportMemoryAllocateInfoKHR.class, address);
}
// Buffer-valued variants of the factories above: each allocates capacity * SIZEOF bytes.

/**
 * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
 *
 * @param capacity the buffer capacity
 */
public static VkExportMemoryAllocateInfoKHR.Buffer malloc(int capacity) {
    return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
}

/**
 * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
 *
 * @param capacity the buffer capacity
 */
public static VkExportMemoryAllocateInfoKHR.Buffer calloc(int capacity) {
    return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
}

/**
 * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated with {@link BufferUtils}.
 *
 * @param capacity the buffer capacity
 */
public static VkExportMemoryAllocateInfoKHR.Buffer create(int capacity) {
    // Backed by a GC-managed direct ByteBuffer: no explicit free required.
    ByteBuffer container = __create(capacity, SIZEOF);
    return wrap(Buffer.class, memAddress(container), capacity, container);
}

/**
 * Create a {@link VkExportMemoryAllocateInfoKHR.Buffer} instance at the specified memory.
 *
 * @param address the memory address
 * @param capacity the buffer capacity
 */
public static VkExportMemoryAllocateInfoKHR.Buffer create(long address, int capacity) {
    return wrap(Buffer.class, address, capacity);
}

/** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static VkExportMemoryAllocateInfoKHR.Buffer createSafe(long address, int capacity) {
    return address == NULL ? null : wrap(Buffer.class, address, capacity);
}
// ----------------------------------- Deprecated stack-allocation aliases -----------------------------------
// Kept only for binary/source compatibility; each simply forwards to the matching
// malloc/calloc(MemoryStack) overload.

/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR mallocStack() { return malloc(stackGet()); }

/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR callocStack() { return calloc(stackGet()); }

/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR mallocStack(MemoryStack stack) { return malloc(stack); }

/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR callocStack(MemoryStack stack) { return calloc(stack); }

/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }

/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }

/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }

/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
/**
* Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
*/
public static VkExportMemoryAllocateInfoKHR malloc(MemoryStack stack) {
return wrap(VkExportMemoryAllocateInfoKHR.class, stack.nmalloc(ALIGNOF, SIZEOF));
}
/**
* Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
*/
public static VkExportMemoryAllocateInfoKHR calloc(MemoryStack stack) {
return wrap(VkExportMemoryAllocateInfoKHR.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}
/**
* Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static VkExportMemoryAllocateInfoKHR.Buffer malloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}
/**
* Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static VkExportMemoryAllocateInfoKHR.Buffer calloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// -----------------------------------
    /** An array of {@link VkExportMemoryAllocateInfoKHR} structs. */
    public static class Buffer extends VkExportMemoryAllocateInfo.Buffer {

        // Single factory instance shared by all buffers to create element views.
        private static final VkExportMemoryAllocateInfoKHR ELEMENT_FACTORY = VkExportMemoryAllocateInfoKHR.create(-1L);

        /**
         * Creates a new {@code VkExportMemoryAllocateInfoKHR.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkExportMemoryAllocateInfoKHR#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container);
        }

        /** Creates a buffer view over raw memory at {@code address} with no backing container. */
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            // Returning the concrete subtype keeps fluent chaining type-safe.
            return this;
        }

        @Override
        protected VkExportMemoryAllocateInfoKHR getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** Sets the specified value to the {@code sType} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer sType(@NativeType("VkStructureType") int value) { VkExportMemoryAllocateInfoKHR.nsType(address(), value); return this; }
        /** Sets the {@link VK11#VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO} value to the {@code sType} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer sType$Default() { return sType(VK11.VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO); }
        /** Sets the specified value to the {@code pNext} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer pNext(@NativeType("void const *") long value) { VkExportMemoryAllocateInfoKHR.npNext(address(), value); return this; }
        /** Sets the specified value to the {@code handleTypes} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer handleTypes(@NativeType("VkExternalMemoryHandleTypeFlags") int value) { VkExportMemoryAllocateInfoKHR.nhandleTypes(address(), value); return this; }

    }
}
| |
package com.palmelf.core.web.servlet;
import com.palmelf.core.util.AppUtil;
import com.palmelf.core.util.ContextUtil;
import com.palmelf.core.util.FileUtil;
import com.palmelf.eoffice.model.system.AppUser;
import com.palmelf.eoffice.model.system.FileAttach;
import com.palmelf.eoffice.service.system.FileAttachService;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.tools.zip.ZipEntry;
import org.apache.tools.zip.ZipFile;
public class JasperUploadServlet extends HttpServlet {

    /** Logger for this servlet (was mistakenly bound to FileUploadServlet.class). */
    private static final Log logger = LogFactory.getLog(JasperUploadServlet.class);

    private ServletConfig servletConfig = null;

    private FileAttachService fileAttachService = (FileAttachService) AppUtil
            .getBean("fileAttachService");

    /** Absolute filesystem root for stored attachments; resolved in {@link #init()}. */
    private String uploadPath = "";
    /** Temp directory handed to commons-fileupload for buffering large items. */
    private String tempPath = "";
    /** Attachment category; overridable per request via the "file_cat" form field. */
    private String fileCat = "others";
    /** Explicit relative target path; overridable via the "file_path" form field. */
    private String filePath = "";

    /**
     * Handles a multipart upload of report files. An "application/zip" upload
     * is extracted into the target directory (a contained .jasper entry, if
     * present, becomes the registered attachment path) and the archive itself
     * is deleted afterwards. For each stored file a FileAttach record is saved
     * unless one already exists for the requested path, and a JSON-style
     * status line is written to the response.
     *
     * NOTE(review): per-request state (fileCat/filePath) lives in instance
     * fields, which is unsafe under concurrent requests. Preserved here for
     * compatibility, but worth refactoring to method locals.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        req.setCharacterEncoding("UTF-8");
        resp.setCharacterEncoding("UTF-8");
        try {
            DiskFileItemFactory factory = new DiskFileItemFactory();
            factory.setSizeThreshold(4096);
            factory.setRepository(new File(this.tempPath));
            ServletFileUpload fu = new ServletFileUpload(factory);
            List<FileItem> fileItems = fu.parseRequest(req);
            // Pass 1: read the plain form fields that control where files go.
            for (FileItem fi : fileItems) {
                if ("file_cat".equals(fi.getFieldName())) {
                    this.fileCat = fi.getString();
                }
                if ("file_path".equals(fi.getFieldName())) {
                    this.filePath = fi.getString();
                }
            }
            // Pass 2: persist every real file item. Plain form fields report a
            // null content type, so a single null check is sufficient (the
            // original code tested getContentType() twice).
            for (FileItem fi : fileItems) {
                String fileContentType = fi.getContentType();
                if (fileContentType == null) {
                    continue;
                }
                String path = fi.getName();
                // Some browsers send the full client-side path; keep only the
                // last segment after a backslash.
                String fileName = path.substring(path.lastIndexOf("\\") + 1);
                String generName = FileUtil.generateFilename(fileName);
                int sindex = generName.lastIndexOf("/");
                int eindex = generName.lastIndexOf(".");
                // Insert a per-file sub-directory named after the generated base name.
                String generDir = generName.substring(sindex + 1, eindex);
                generName = generName.substring(0, sindex) + "/" + generDir + "/"
                        + generName.substring(sindex + 1);
                String relativeFullPath;
                if (!"".equals(this.filePath)) {
                    relativeFullPath = this.filePath;
                } else {
                    relativeFullPath = this.fileCat + "/" + generName;
                }
                int index = relativeFullPath.lastIndexOf("/");
                File dirPath = new File(this.uploadPath + "/"
                        + relativeFullPath.substring(0, index + 1));
                if (!dirPath.exists()) {
                    dirPath.mkdirs();
                }
                File temFile = new File(this.uploadPath + "/" + relativeFullPath);
                fi.write(temFile);
                if (fileContentType.equals("application/zip")) {
                    relativeFullPath = extractZipArchive(temFile, dirPath, relativeFullPath);
                }
                FileAttach file = null;
                if (!"".equals(this.filePath)) {
                    file = this.fileAttachService.getByPath(this.filePath);
                }
                if (file == null) {
                    logger.debug("relativeFullPath=" + relativeFullPath);
                    file = new FileAttach();
                    file.setCreatetime(new Date());
                    AppUser curUser = ContextUtil.getCurrentUser();
                    if (curUser != null) {
                        file.setCreator(curUser.getFullname());
                    } else {
                        // (sic) legacy spelling preserved for data compatibility
                        file.setCreator("UNKown");
                    }
                    int dotIndex = fileName.lastIndexOf(".");
                    file.setExt(fileName.substring(dotIndex + 1));
                    file.setFileName(fileName);
                    file.setFilePath(relativeFullPath);
                    file.setFileType(this.fileCat);
                    file.setNote(fi.getSize() + " bytes");
                    file.setTotalBytes(Double.valueOf(fi.getSize()));
                    this.fileAttachService.save(file);
                }
                StringBuilder sb = new StringBuilder("{success:true");
                sb.append(",fileId:")
                        .append(file.getFileId())
                        .append(",fileName:'")
                        .append(file.getFileName())
                        .append("',filePath:'")
                        .append(file.getFilePath())
                        .append("',message:'upload file success.("
                                + fi.getSize() + " bytes)'");
                sb.append("}");
                resp.setContentType("text/html;charset=UTF-8");
                PrintWriter writer = resp.getWriter();
                writer.println(sb.toString());
            }
        } catch (Exception e) {
            logger.error("upload failed", e);
            resp.getWriter().write(
                    "{'success':false,'message':'error..." + e.getMessage()
                            + "'}");
        }
    }

    /**
     * Extracts {@code archive} into {@code dirPath} and deletes the archive.
     * If the archive contains a .jasper entry, the returned relative path
     * points at that entry instead of the archive itself.
     *
     * @param archive          the uploaded zip file on disk
     * @param dirPath          directory the entries are extracted into
     * @param relativeFullPath relative attachment path for the archive
     * @return the (possibly rewritten) relative attachment path
     * @throws IOException if an entry would escape the target directory
     *                     ("zip slip") or on any copy failure
     */
    private String extractZipArchive(File archive, File dirPath,
            String relativeFullPath) throws IOException {
        byte[] buf = new byte[1024];
        ZipFile zipFile = new ZipFile(archive);
        try {
            String canonicalRoot = dirPath.getCanonicalPath() + File.separator;
            Enumeration enumeration = zipFile.getEntries();
            while (enumeration.hasMoreElements()) {
                ZipEntry zipEntry = (ZipEntry) enumeration.nextElement();
                if (zipEntry.getName().endsWith(".jasper")) {
                    // Register the extracted .jasper file instead of the zip.
                    int indx = relativeFullPath.lastIndexOf("/");
                    relativeFullPath = relativeFullPath.substring(0, indx)
                            + "/" + zipEntry.getName();
                }
                File loadFile = new File(dirPath, zipEntry.getName());
                // Security: entry names are untrusted input. Reject any entry
                // that resolves outside the target directory ("zip slip").
                if (!loadFile.getCanonicalPath().startsWith(canonicalRoot)) {
                    throw new IOException("zip entry escapes target directory: "
                            + zipEntry.getName());
                }
                if (zipEntry.isDirectory()) {
                    loadFile.mkdirs();
                    continue;
                }
                if (!loadFile.getParentFile().exists()) {
                    loadFile.getParentFile().mkdirs();
                }
                // Copy with guaranteed close even when the copy fails
                // (original code leaked both streams on IOException).
                OutputStream outputStream = new FileOutputStream(loadFile);
                try {
                    InputStream inputStream = zipFile.getInputStream(zipEntry);
                    try {
                        int length;
                        while ((length = inputStream.read(buf)) > 0) {
                            outputStream.write(buf, 0, length);
                        }
                    } finally {
                        inputStream.close();
                    }
                } finally {
                    outputStream.close();
                }
            }
        } finally {
            zipFile.close();
        }
        archive.delete();
        return relativeFullPath;
    }

    /**
     * Re-decodes a string that was read as ISO-8859-1 into GB2312.
     * Returns the input unchanged if either charset is unsupported.
     */
    public static String make8859toGB(String str) {
        try {
            return new String(str.getBytes("8859_1"), "GB2312");
        } catch (UnsupportedEncodingException e) {
            logger.error("charset conversion failed", e);
            return str;
        }
    }

    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        this.servletConfig = config;
    }

    /** Resolves and creates the attachment and temp directories under the webapp root. */
    @Override
    public void init() throws ServletException {
        this.uploadPath = getServletContext().getRealPath("/attachFiles/");
        File uploadPathFile = new File(this.uploadPath);
        if (!uploadPathFile.exists()) {
            uploadPathFile.mkdirs();
        }
        this.tempPath = this.uploadPath + "/temp";
        File tempPathFile = new File(this.tempPath);
        if (!tempPathFile.exists()) {
            tempPathFile.mkdirs();
        }
    }

    /**
     * NOTE(review): {@code officeFileItem} is always null here, so the write
     * branch is dead code and this method effectively always returns true.
     * Kept byte-compatible in behavior for existing callers.
     */
    public boolean saveFileToDisk(String officefileNameDisk) {
        FileItem officeFileItem = null;
        boolean result = true;
        try {
            if (!"".equalsIgnoreCase(officefileNameDisk)
                    && officeFileItem != null) {
                File officeFileUpload = new File(this.uploadPath
                        + officefileNameDisk);
                officeFileItem.write(officeFileUpload);
            }
        } catch (FileNotFoundException ignored) {
            // legacy behavior: a missing target is silently treated as success
        } catch (Exception e) {
            logger.error("saveFileToDisk failed", e);
            result = false;
        }
        return result;
    }
}
| |
/*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.cache.CacheEvent;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.InterestPolicy;
import com.gemstone.gemfire.cache.RegionDestroyedException;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.SubscriptionAttributes;
import com.gemstone.gemfire.distributed.Role;
import com.gemstone.gemfire.distributed.internal.DistributionAdvisor;
import com.gemstone.gemfire.distributed.internal.DistributionManager;
import com.gemstone.gemfire.distributed.internal.MembershipListener;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.DSCODE;
import com.gemstone.gemfire.internal.InternalDataSerializer;
import com.gemstone.gemfire.internal.cache.partitioned.PRLocallyDestroyedException;
import com.gemstone.gemfire.internal.cache.persistence.DiskStoreID;
import com.gemstone.gemfire.internal.cache.persistence.PersistentMemberID;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.log4j.LogMarker;
/**
* Adds bookkeeping info and cache-specific behavior to DistributionAdvisor.
* Adds bit-encoded flags in addition to object info.
* @author Eric Zoerner
*
*/
@SuppressWarnings("deprecation")
public class CacheDistributionAdvisor extends DistributionAdvisor {
  private static final Logger logger = LogService.getLogger();

  // moved ROLLOVER_* constants to DistributionAdvisor

  /** bit masks used to pack a remote member's region attributes into one int
   *  (see CacheProfile.getIntInfo/setIntInfo) */
  private static final int INTEREST_MASK = 0x01;
  private static final int REPLICATE_MASK = 0x02;
  private static final int LOADER_MASK = 0x04;
  private static final int WRITER_MASK = 0x08;
  private static final int LISTENER_MASK = 0x10;
  private static final int DIST_ACK_MASK = 0x20;
  private static final int GLOBAL_MASK = 0x40;
  private static final int IN_RECOVERY_MASK = 0x80;
  private static final int PERSISTENT_MASK = 0x100;
  private static final int PROXY_MASK = 0x200;
  private static final int PRELOADED_MASK = 0x400;
  private static final int IS_PARTITIONED_MASK = 0x800;
  private static final int REGION_INITIALIZED_MASK = 0x1000;
  private static final int IS_GATEWAY_ENABLED_MASK = 0x2000;
  //provider is no longer a supported attribute.
  // private static final int IS_GII_PROVIDER_MASK = 0x4000;
  // NOTE: 0x4000 is recycled from the retired IS_GII_PROVIDER_MASK above.
  private static final int PERSISTENT_ID_MASK = 0x4000;
  /** does this member require operation notification (PartitionedRegions) */
  protected static final int REQUIRES_NOTIFICATION_MASK = 0x8000;
  private static final int HAS_CACHE_SERVER_MASK = 0x10000;
  private static final int REQUIRES_OLD_VALUE_MASK = 0x20000;
  private static final int MEMBER_UNINITIALIZED_MASK = 0x40000;
  private static final int PERSISTENCE_INITIALIZED_MASK = 0x80000;

  //Important below mentioned bit masks are not available
  /**
   * Using following masks for gatewaysender queue startup policy informations.
   */
  // private static final int HUB_STARTUP_POLICY_MASK = 0x07<<20;
  private static final int GATEWAY_SENDER_IDS_MASK = 0x200000;
  private static final int ASYNC_EVENT_QUEUE_IDS_MASK = 0x400000;
  /** Creates a new instance of CacheDistributionAdvisor.
   *  Protected: use {@code createCacheDistributionAdvisor} so the advisor is initialized. */
  protected CacheDistributionAdvisor(CacheDistributionAdvisee region) {
    super(region);
  }
public static CacheDistributionAdvisor createCacheDistributionAdvisor(CacheDistributionAdvisee region) {
CacheDistributionAdvisor advisor = new CacheDistributionAdvisor(region);
advisor.initialize();
return advisor;
}
@Override
public String toString() {
return "CacheDistributionAdvisor for region " + getAdvisee().getFullPath();
}
// moved toStringWithProfiles to DistributionAdvisor
// moved initializationGate to DistributionAdvisor
// moved isInitialized to DistributionAdvisor
// moved addMembershipListenerAndAdviseGeneric to DistributionAdvisor
  /**
   * Returns the set of members that either want all events or are caching data.
   * @param excludeInRecovery if true then members in recovery are excluded
   * @throws IllegalStateException propagated from the cancellation check
   */
  private Set adviseAllEventsOrCached(final boolean excludeInRecovery) throws IllegalStateException {
    // Fail fast if the cache/region is shutting down.
    getAdvisee().getCancelCriterion().checkCancelInProgress(null);
    return adviseFilter(new Filter() {
      public boolean include(Profile profile) {
        assert profile instanceof CacheProfile;
        CacheProfile cp = (CacheProfile)profile;
        if (excludeInRecovery && cp.inRecovery) {
          return false;
        }
        return cp.cachedOrAllEventsWithListener();
      }
    });
  }
  /**
   * Provide recipient information for an update or create operation.
   *
   * @param event the entry event being distributed
   * @return the members that should receive the operation
   */
  Set adviseUpdate(final EntryEventImpl event) throws IllegalStateException {
    if (event.hasNewValue() || event.getOperation().isPutAll()) {
      // only need to distribute it to guys that want all events or cache data
      return adviseAllEventsOrCached(true/*fixes 41147*/);
    } else {
      // The new value is null so this is a create with a null value,
      // in which case we only need to distribute this message to replicates
      // or all events that are not a proxy or if a proxy has a listener
      return adviseFilter(new Filter() {
        public boolean include(Profile profile) {
          assert profile instanceof CacheProfile;
          CacheProfile cp = (CacheProfile)profile;
          DataPolicy dp = cp.dataPolicy;
          return dp.withReplication() ||
            (cp.allEvents() && (dp.withStorage() || cp.hasCacheListener));
        }
      });
    }
  }
/**
* Provide recipient information for TX lock and commit.
* @return Set of Serializable members that the current transaction
* will be distributed to.
* Currently this is any other member who has this region defined.
* No reference to Set kept by advisor so caller is free to modify it
*/
public Set<InternalDistributedMember> adviseTX() throws IllegalStateException {
boolean isMetaDataWithTransactions = getAdvisee() instanceof LocalRegion &&
((LocalRegion) getAdvisee()).isMetaRegionWithTransactions();
Set<InternalDistributedMember> badList = Collections.emptySet();
if(!TXManagerImpl.ALLOW_PERSISTENT_TRANSACTIONS && !isMetaDataWithTransactions) {
badList = adviseFilter(new Filter() {
public boolean include (Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile prof = (CacheProfile)profile;
return (prof.isPersistent());
}
});
}
if (badList.isEmpty()) {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
return cp.cachedOrAllEvents();
}
});
} else {
StringBuffer badIds = new StringBuffer();
Iterator biI = badList.iterator();
while(biI.hasNext()) {
badIds.append(biI.next().toString());
if (biI.hasNext()) badIds.append(", ");
}
throw new IllegalStateException(LocalizedStrings.CacheDistributionAdvisor_ILLEGAL_REGION_CONFIGURATION_FOR_MEMBERS_0.toLocalizedString(badIds.toString()));
}
}
  /**
   * Provide recipient information for netLoad.
   * @return Set of Serializable members that have a CacheLoader installed;
   * no reference to Set kept by advisor so caller is free to modify it
   */
  public Set adviseNetLoad() {
    return adviseFilter(new Filter() {
      public boolean include(Profile profile) {
        assert profile instanceof CacheProfile;
        CacheProfile prof = (CacheProfile)profile;

        // if region in cache is not yet initialized, exclude
        if (!prof.regionInitialized // fix for bug 41102
            || prof.memberUnInitialized) {
          return false;
        }

        return prof.hasCacheLoader;
      }
    });
  }
public FilterRoutingInfo adviseFilterRouting(CacheEvent event, Set cacheOpRecipients){
FilterProfile fp = ((LocalRegion)event.getRegion()).getFilterProfile();
if (fp != null) {
return fp.getFilterRoutingInfoPart1(event, this.profiles, cacheOpRecipients);
}
return null;
}
  /**
   * Same as adviseGeneric except members in recovery are excluded.
   * @return members that want all events or cache data, excluding recovering ones
   */
  public Set adviseCacheOp() {
    return adviseAllEventsOrCached(true);
  }
  /**
   * Same as adviseCacheOp but only includes members that are playing the
   * specified role.
   * @param role the membership role the recipient must carry
   * @since 5.0
   */
  public Set adviseCacheOpRole(final Role role) {
    return adviseFilter(new Filter() {
      public boolean include(Profile profile) {
        assert profile instanceof CacheProfile;
        CacheProfile cp = (CacheProfile)profile;
        // if region in cache is not yet initialized, exclude
        if (!cp.regionInitialized) {
          return false;
        }
        // if member is not yet initialized, exclude
        if (cp.memberUnInitialized) {
          return false;
        }
        if (!cp.cachedOrAllEventsWithListener()) {
          return false;
        }
        return cp.getDistributedMember().getRoles().contains(role);
      }
    });
  }
/* *
* Same as adviseGeneric but excludes guys in recover
*/
public Set adviseInvalidateRegion() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
return !cp.inRecovery;
}
});
}
  /** Same as adviseGeneric: a region destroy goes to every member with this region defined.
   */
  public Set adviseDestroyRegion() {
    return adviseGeneric();
  }
/**
* Provide recipient information for netWrite
* @return Set of Serializable member ids that have a CacheWriter installed;
* no reference to Set kept by advisor so caller is free to modify it
*/
public Set adviseNetWrite() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile prof = (CacheProfile)profile;
// if region in cache is in recovery, exclude
if (prof.inRecovery) {
return false;
}
return prof.hasCacheWriter;
}
});
}
public Set<InternalDistributedMember> adviseInitializedReplicates() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
if (cp.dataPolicy.withReplication() && cp.regionInitialized
&& !cp.memberUnInitialized) {
return true;
}
return false;
}
});
}
/**
* Provide recipient information for netSearch
* @return Set of Serializable member ids that have the region and
* are have storage (no need to search an empty cache)
*/
public Set adviseNetSearch() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
// if region in cache is not yet initialized, exclude
if (!cp.regionInitialized) {
return false;
}
// if member is not yet initialized, exclude
if (cp.memberUnInitialized) {
return false;
}
DataPolicy dp = cp.dataPolicy;
return dp.withStorage();
}
});
}
// moved dumpProfiles to DistributionAdvisor
  /** Convenience overload of {@link #adviseInitialImage(InitialImageAdvice, boolean)}
   *  for non-persistent GII requests. */
  public InitialImageAdvice adviseInitialImage(InitialImageAdvice previousAdvice) {
    return adviseInitialImage(previousAdvice, false);
  }
  /**
   * Classifies the current profiles into the buckets used to choose GII
   * (get-initial-image) providers: initialized replicates, preloaded members,
   * other members with storage, empties (proxies), uninitialized members, and
   * non-persistent replicates (only relevant when {@code persistent} is true).
   *
   * @param previousAdvice advice from a prior attempt; members whose profile
   *        is unchanged since then are skipped so they are not retried
   * @param persistent when true, only persistent replicates count as
   *        replicates; non-persistent ones are reported separately
   * @return the classification, never null
   */
  @SuppressWarnings("synthetic-access")
  public InitialImageAdvice adviseInitialImage(InitialImageAdvice previousAdvice, boolean persistent) {
    initializationGate();
    if (logger.isTraceEnabled(LogMarker.DA)) {
      dumpProfiles("AdviseInitialImage");
    }

    Profile[] allProfiles = this.profiles; // volatile read
    if (allProfiles.length == 0) {
      // No peers at all: empty advice.
      return new InitialImageAdvice();
    }

    Set<InternalDistributedMember> replicates = new HashSet<InternalDistributedMember>();
    Set<InternalDistributedMember> others = new HashSet<InternalDistributedMember>();
    Set<InternalDistributedMember> preloaded = new HashSet<InternalDistributedMember>();
    Set<InternalDistributedMember> empties = new HashSet<InternalDistributedMember>();
    Set<InternalDistributedMember> uninitialized = new HashSet<InternalDistributedMember>();
    Set<InternalDistributedMember> nonPersistent = new HashSet<InternalDistributedMember>();
    Map<InternalDistributedMember, CacheProfile> memberProfiles = new HashMap<InternalDistributedMember, CacheProfile>();

    for (int i = 0; i < allProfiles.length; i++) {
      CacheProfile profile = (CacheProfile)allProfiles[i];

      //Make sure that we don't return a member that was in the previous initial image advice.
      //Unless that member has changed it's profile since the last time we checked.
      if(previousAdvice != null) {
        CacheProfile previousProfile = previousAdvice.memberProfiles.get(profile.getDistributedMember());
        if (previousProfile != null
            && previousProfile.getSerialNumber() == profile.getSerialNumber()
            && previousProfile.getVersion() == profile.getVersion()) {
          continue;
        }
      }

      // if region in cache is in recovery, exclude
      if (profile.inRecovery) {
        uninitialized.add(profile.getDistributedMember());
        continue;
      }

      // No need to do a GII from uninitialized member.
      if(!profile.regionInitialized) {
        uninitialized.add(profile.getDistributedMember());
        continue;
      }

      if (profile.dataPolicy.withReplication()) {
        if(!persistent || profile.dataPolicy.withPersistence()) {
          //If the local member is persistent, we only want
          //to include persistent replicas in the set of replicates.
          replicates.add(profile.getDistributedMember());
        } else {
          nonPersistent.add(profile.getDistributedMember());
        }
        memberProfiles.put(profile.getDistributedMember(), profile);
      }
      else
        if (profile.dataPolicy.isPreloaded()) {
          preloaded.add(profile.getDistributedMember());
          memberProfiles.put(profile.getDistributedMember(), profile);
        }
      else
        if (profile.dataPolicy.withStorage()) {
        // don't bother asking proxy members for initial image
        others.add(profile.getDistributedMember());
        memberProfiles.put(profile.getDistributedMember(), profile);
      } else {
        empties.add(profile.getDistributedMember());
      }
    }

    InitialImageAdvice advice = new InitialImageAdvice(replicates, others,
        preloaded, empties, uninitialized, nonPersistent, memberProfiles);

    if (logger.isDebugEnabled()) {
      logger.debug(advice);
    }
    return advice;
  }
  /**
   * Returns the set of all the members in the system which require old values
   * and are not yet finished with initialization (including GII).
   * @since 5.5
   */
  public Set adviseRequiresOldValueInCacheOp( ) {
    return adviseFilter(new Filter() {
      public boolean include(Profile profile) {
        assert profile instanceof CacheProfile;
        CacheProfile cp = (CacheProfile)profile;
        // Only members that asked for old values AND have not completed region init.
        return cp.requiresOldValueInEvents && !cp.regionInitialized;
      }
    });
  }
// moved adviseProfileExchange to DistributionAdvisor
// moved getProfile to DistributionAdvisor
// moved exchangeProfiles to DistributionAdvisor
// moved getDistributionManager to DistributionAdvisor
  /** Instantiate new distribution profile for this member.
   *  Overridden so this advisor exchanges {@link CacheProfile}s rather than plain profiles. */
  @Override
  protected Profile instantiateProfile(
      InternalDistributedMember memberId, int version) {
    return new CacheProfile(memberId, version);
  }
  /**
   * Extends the superclass check: when a profile transitions to
   * regionInitialized, notifies the advisee's remote-region-initialized hook.
   */
  @Override
  protected boolean evaluateProfiles(Profile newProfile, Profile oldProfile) {

    boolean result = super.evaluateProfiles(newProfile, oldProfile);
    if (result) {
      CacheProfile newCP = (CacheProfile)newProfile;
      CacheProfile oldCP = (CacheProfile)oldProfile;
      // Fire only on the uninitialized -> initialized edge (or first sighting).
      if ((oldCP == null || !oldCP.regionInitialized) && newCP.regionInitialized) {
        // invoke membership listeners, if any
        CacheDistributionAdvisee advisee = (CacheDistributionAdvisee)getAdvisee();
        advisee.remoteRegionInitialized(newCP);
      }
    }
    return result;
  }
/**
* Profile information for a remote counterpart.
*/
public static class CacheProfile extends DistributionAdvisor.Profile {
    // Data policy of the remote region (default overwritten by setIntInfo).
    public DataPolicy dataPolicy = DataPolicy.REPLICATE;
    // Subscription interest policy of the remote region.
    public InterestPolicy interestPolicy = InterestPolicy.DEFAULT;
    // True if the remote region has a CacheLoader installed.
    public boolean hasCacheLoader = false;
    // True if the remote region has a CacheWriter installed.
    public boolean hasCacheWriter = false;
    // True if the remote region has a CacheListener installed.
    public boolean hasCacheListener = false;
    // Scope of the remote region.
    public Scope scope = Scope.DISTRIBUTED_NO_ACK;
    // True while the remote region is in recovery.
    public boolean inRecovery = false;
    // Ids of gateway senders / async event queues attached to the remote region.
    public Set<String> gatewaySenderIds = Collections.emptySet();
    public Set<String> asyncEventQueueIds = Collections.emptySet();
    /**
     * Will be null if the profile doesn't need to have the attributes
     */
    public SubscriptionAttributes subscriptionAttributes = null;

    public boolean isPartitioned = false;
    public boolean isGatewayEnabled = false;
    public boolean isPersistent = false;

    // moved initialMembershipVersion to DistributionAdvisor.Profile
    // moved serialNumber to DistributionAdvisor.Profile

    /**
     * this member's client interest / continuous query profile. This is used
     * for event processing to reduce the number of times CQs are executed and
     * to have the originating member for an event pay the cpu cost of executing
     * filters on the event.
     */
    public FilterProfile filterProfile;

    /**
     * Some cache listeners require old values in cache operation messages,
     * at least during GII
     */
    public boolean requiresOldValueInEvents;

    /**
     * Whether the region has completed initialization, including GII.
     * This information may be incorrect for a PartitionedRegion, but
     * may be relied upon for DistributedRegions (including BucketRegions)
     *
     * @since prpersist this field is now overloaded for partitioned regions with persistence.
     * In the case of pr persistence, this field indicates that the region has finished
     * recovery from disk.
     */
    public boolean regionInitialized;

    /**
     * True when member is still not ready to receive cache operations. Note
     * that {@link #regionInitialized} may be still true so other members can
     * proceed with GII etc. Currently used by SQLFabric to indicate that DDL
     * replay is in progress and so cache operations/functions should not be
     * routed to that node.
     */
    public boolean memberUnInitialized = false;

    /**
     * True when a members persistent store is initialized. Note that
     * regionInitialized may be true when this is false in the case of createBucketAtomically.
     * With createBucketAtomically, the peristent store is not created until
     * the EndBucketCreationMessage is sent.
     */
    public boolean persistenceInitialized;

    // Persistent identity of the remote member's disk store, if any.
    public PersistentMemberID persistentID;

    /**
     * This member has any cache servers. This is not actively maintained for
     * local profiles (i.e., a profile representing this vm)
     */
    public boolean hasCacheServer = false;
    /** for internal use, required for DataSerializer.readObject */
    public CacheProfile() {
    }

    /** used for routing computation: wraps only the local filter profile */
    public CacheProfile(FilterProfile localProfile) {
      this.filterProfile = localProfile;
    }

    /** standard constructor used when exchanging profiles with a member */
    public CacheProfile(InternalDistributedMember memberId, int version) {
      super(memberId, version);
    }

    /** copy constructor: clones the bit-encoded attributes of {@code toCopy} */
    public CacheProfile(CacheProfile toCopy) {
      super(toCopy.getDistributedMember(), toCopy.version);
      setIntInfo(toCopy.getIntInfo());
    }
    /** Return the profile data information that can be stored in an int.
     *  Inverse of {@code setIntInfo}; each attribute maps to one of the
     *  *_MASK bit constants declared on the enclosing advisor. */
    protected int getIntInfo() {
      int s = 0;
      if (this.dataPolicy.withReplication()) {
        s |= REPLICATE_MASK;
        if (this.dataPolicy.isPersistentReplicate()) {
          s |= PERSISTENT_MASK;
        }
      } else {
        // Non-replicated: encode empty (proxy) or preloaded variants.
        if (this.dataPolicy.isEmpty()) s |= PROXY_MASK;
        if (this.dataPolicy.isPreloaded()) s |= PRELOADED_MASK;
      }
      if (this.subscriptionAttributes != null
          && this.subscriptionAttributes.getInterestPolicy().isAll()) {
        s |= INTEREST_MASK;
      }
      if (this.hasCacheLoader) s |= LOADER_MASK;
      if (this.hasCacheWriter) s |= WRITER_MASK;
      if (this.hasCacheListener) s |= LISTENER_MASK;
      if (this.scope.isDistributedAck()) s |= DIST_ACK_MASK;
      if (this.scope.isGlobal()) s |= GLOBAL_MASK;
      if (this.inRecovery) s |= IN_RECOVERY_MASK;
      if (this.isPartitioned) s |= IS_PARTITIONED_MASK;
      if (this.isGatewayEnabled) s |= IS_GATEWAY_ENABLED_MASK;
      if (this.isPersistent) s |= PERSISTENT_MASK;
      if (this.regionInitialized) s|= REGION_INITIALIZED_MASK;
      if (this.memberUnInitialized) s |= MEMBER_UNINITIALIZED_MASK;
      // Presence flags: tell the reader whether optional payloads follow.
      if (this.persistentID != null) s|= PERSISTENT_ID_MASK;
      if (this.hasCacheServer) s|= HAS_CACHE_SERVER_MASK;
      if (this.requiresOldValueInEvents) s|= REQUIRES_OLD_VALUE_MASK;
      if (this.persistenceInitialized) s|= PERSISTENCE_INITIALIZED_MASK;
      if (!this.gatewaySenderIds.isEmpty()) s |= GATEWAY_SENDER_IDS_MASK;
      if (!this.asyncEventQueueIds.isEmpty()) s |= ASYNC_EVENT_QUEUE_IDS_MASK;
      // Local-scope regions are never distributed, so they must not be encoded.
      Assert.assertTrue(!this.scope.isLocal());
      return s;
    }
private boolean hasGatewaySenderIds(int bits) {
return (bits & GATEWAY_SENDER_IDS_MASK) != 0;
}
private boolean hasAsyncEventQueueIds(int bits) {
return (bits & ASYNC_EVENT_QUEUE_IDS_MASK) != 0;
}
/**
* @param bits
* @return true if the serialized message has a persistentID
*/
private boolean hasPersistentID(int bits) {
return (bits & PERSISTENT_ID_MASK) != 0;
}
public boolean isPersistent() {
return this.dataPolicy.withPersistence();
}
    /**
     * Set the profile data information that is stored in an int; the inverse
     * of {@link #getIntInfo()} and MUST be kept in sync with it.
     */
    protected void setIntInfo(int s) {
      // Decode the data policy.  Replicate/persistent-replicate first; the
      // partitioned bits below may overwrite this choice.
      if ((s & REPLICATE_MASK) != 0) {
        if ((s & PERSISTENT_MASK) != 0) {
          this.dataPolicy = DataPolicy.PERSISTENT_REPLICATE;
        }
        else {
          this.dataPolicy = DataPolicy.REPLICATE;
        }
      }
      else
      if ((s & PROXY_MASK) != 0) {
        this.dataPolicy = DataPolicy.EMPTY;
      }
      else
      if ((s & PRELOADED_MASK) != 0) {
        this.dataPolicy = DataPolicy.PRELOADED;
      }
      else { // CACHED
        this.dataPolicy = DataPolicy.NORMAL;
      }
      // Partitioned profiles override the policy decoded above.
      if((s & IS_PARTITIONED_MASK) != 0) {
        if((s & PERSISTENT_MASK) != 0) {
          this.dataPolicy = DataPolicy.PERSISTENT_PARTITION;
        } else {
          this.dataPolicy = DataPolicy.PARTITION;
        }
      }
      if ((s & INTEREST_MASK) != 0) {
        this.subscriptionAttributes = new SubscriptionAttributes(InterestPolicy.ALL);
      } else {
        this.subscriptionAttributes = new SubscriptionAttributes(InterestPolicy.CACHE_CONTENT);
      }
      this.hasCacheLoader = (s & LOADER_MASK) != 0;
      this.hasCacheWriter = (s & WRITER_MASK) != 0;
      this.hasCacheListener = (s & LISTENER_MASK) != 0;
      this.scope = (s & DIST_ACK_MASK) != 0 ? Scope.DISTRIBUTED_ACK :
          ((s & GLOBAL_MASK) != 0 ? Scope.GLOBAL : Scope.DISTRIBUTED_NO_ACK);
      this.inRecovery = (s & IN_RECOVERY_MASK) != 0;
      this.isPartitioned = (s & IS_PARTITIONED_MASK) != 0;
      this.isGatewayEnabled = (s & IS_GATEWAY_ENABLED_MASK) != 0;
      this.isPersistent = (s & PERSISTENT_MASK) != 0;
      this.regionInitialized = ( (s & REGION_INITIALIZED_MASK) != 0 );
      this.memberUnInitialized = (s & MEMBER_UNINITIALIZED_MASK) != 0;
      this.hasCacheServer = ( (s & HAS_CACHE_SERVER_MASK) != 0 );
      this.requiresOldValueInEvents = ((s & REQUIRES_OLD_VALUE_MASK) != 0);
      this.persistenceInitialized = (s & PERSISTENCE_INITIALIZED_MASK) != 0;
      // Note: persistentID / gatewaySenderIds / asyncEventQueueIds payloads are
      // read separately in fromData based on their presence flag bits.
    }
    /**
     * Sets the SubscriptionAttributes for the region that this profile is on
     * @param sa the new subscription attributes (affects allEvents())
     * @since 5.0
     */
    public void setSubscriptionAttributes(SubscriptionAttributes sa) {
      this.subscriptionAttributes = sa;
    }
/**
* Return true if cached or allEvents and a listener
*/
public boolean cachedOrAllEventsWithListener() {
// to fix bug 36804 to ignore hasCacheListener
// return this.dataPolicy.withStorage() ||
// (allEvents() && this.hasCacheListener);
return cachedOrAllEvents();
}
/**
* Return true if cached or allEvents
*/
public boolean cachedOrAllEvents() {
return this.dataPolicy.withStorage() || allEvents();
}
/**
* Return true if subscribed to all events
*/
public boolean allEvents() {
return this.subscriptionAttributes.getInterestPolicy().isAll();
}
    /**
     * Used to process an incoming cache profile: resolves the region named by
     * {@code adviseePath} and applies/exchanges this profile with it.
     */
    @Override
    public void processIncoming(DistributionManager dm, String adviseePath,
        boolean removeProfile, boolean exchangeProfiles,
        final List<Profile> replyProfiles) {
      try {
        Assert.assertTrue(adviseePath != null, "adviseePath was null");
        LocalRegion lclRgn;
        // Temporarily allow lookup of regions at any initialization level,
        // then restore the caller's requirement.
        int oldLevel = LocalRegion
            .setThreadInitLevelRequirement(LocalRegion.ANY_INIT);
        try {
          lclRgn = LocalRegion.getRegionFromPath(dm.getSystem(), adviseePath);
        } finally {
          LocalRegion.setThreadInitLevelRequirement(oldLevel);
        }
        if (lclRgn instanceof CacheDistributionAdvisee) {
          // ignore bucket regions whose partitioned region is not open
          if (lclRgn.isUsedForPartitionedRegionBucket()) {
            if (!((BucketRegion)lclRgn).isPartitionedRegionOpen()) {
              return;
            }
          }
          handleCacheDistributionAdvisee((CacheDistributionAdvisee)lclRgn,
              adviseePath, removeProfile, exchangeProfiles, true,
              replyProfiles);
        }
        else {
          if (lclRgn == null) {
            // no real region by that path: the path may name a proxy bucket
            // region instead (not a real region, so no profile exchange)
            handleCacheDistributionAdvisee(PartitionedRegionHelper
                .getProxyBucketRegion(GemFireCacheImpl.getInstance(), adviseePath,
                    false), adviseePath, removeProfile, exchangeProfiles,
                false, replyProfiles);
          }
          else {
            if (logger.isDebugEnabled()) {
              logger.debug("While processing UpdateAttributes message, region has local scope: {}", adviseePath);
            }
          }
        }
      } catch (PRLocallyDestroyedException fre) {
        // the region went away while we were working; nothing to do
        if (logger.isDebugEnabled()) {
          logger.debug("<Region Locally destroyed> /// {}", this);
        }
      } catch (RegionDestroyedException e) {
        if (logger.isDebugEnabled()) {
          logger.debug("<region destroyed> /// {}", this);
        }
      }
    }
@Override
public void cleanUp() {
if(this.filterProfile != null) {
this.filterProfile.cleanUp();
}
}
/**
* Attempts to process this message with the specified
* <code>CacheDistributionAdvisee</code>.
*
* @param cda
* the CacheDistributionAdvisee to apply this profile to
* @param isRealRegion
* true if CacheDistributionAdvisee is a real region
*/
private void handleCacheDistributionAdvisee(CacheDistributionAdvisee cda,
String adviseePath, boolean removeProfile, boolean exchangeProfiles,
boolean isRealRegion, final List<Profile> replyProfiles) {
if (cda != null) {
handleDistributionAdvisee(cda, removeProfile, isRealRegion
&& exchangeProfiles, replyProfiles);
if (logger.isDebugEnabled()) {
logger.debug("While processing UpdateAttributes message, handled advisee: {}", cda);
}
}
else {
if (logger.isDebugEnabled()) {
logger.debug("While processing UpdateAttributes message, region not found: {}", adviseePath);
}
}
}
    /** Returns the fixed DataSerializable id identifying this profile class. */
    @Override
    public int getDSFID() {
      return CACHE_PROFILE;
    }
    /**
     * Serializes this profile.  Wire format: super-class data, the int-info
     * flag word, then — only when the corresponding flag bit is set — the
     * persistentID, the gateway-sender-id set, and the async-event-queue-id
     * set, followed unconditionally by the filterProfile.  Must be kept in
     * sync with {@link #fromData}.
     */
    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeInt(getIntInfo());
      if(persistentID != null) {
        InternalDataSerializer.invokeToData(persistentID, out);
      }
      if (!gatewaySenderIds.isEmpty()) {
        writeSet(gatewaySenderIds, out);
      }
      if (!asyncEventQueueIds.isEmpty()) {
        writeSet(asyncEventQueueIds, out);
      }
      DataSerializer.writeObject(this.filterProfile, out);
    }
    /**
     * Writes {@code set} with an explicit HASH_SET type byte so the receiver
     * always deserializes the same concrete type, regardless of the sender's
     * Set implementation.
     */
    private void writeSet(Set<String> set, DataOutput out) throws IOException {
      // to fix bug 47205 always serialize the Set as a HashSet.
      out.writeByte(DSCODE.HASH_SET);
      InternalDataSerializer.writeSet(set, out);
    }
    /**
     * Deserializes this profile; the exact mirror of {@link #toData}.  The
     * optional payloads are only read when their presence bits are set in the
     * int-info flag word.
     */
    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      int bits = in.readInt();
      setIntInfo(bits);
      if(hasPersistentID(bits)) {
        persistentID = new PersistentMemberID();
        InternalDataSerializer.invokeFromData(persistentID, in);
      }
      if (hasGatewaySenderIds(bits)) {
        gatewaySenderIds = DataSerializer.readObject(in);
      }
      if (hasAsyncEventQueueIds(bits)) {
        asyncEventQueueIds = DataSerializer.readObject(in);
      }
      this.filterProfile = DataSerializer.readObject(in);
    }
@Override
public StringBuilder getToStringHeader() {
return new StringBuilder("CacheProfile");
}
@Override
public void fillInToString(StringBuilder sb) {
super.fillInToString(sb);
sb.append("; dataPolicy=" + this.dataPolicy);
sb.append("; hasCacheLoader=" + this.hasCacheLoader);
sb.append("; hasCacheWriter=" + this.hasCacheWriter);
sb.append("; hasCacheListener=" + this.hasCacheListener);
sb.append("; hasCacheServer=").append(this.hasCacheServer);
sb.append("; scope=" + this.scope);
sb.append("; regionInitialized=").append(
String.valueOf(this.regionInitialized));
sb.append("; memberUnInitialized=").append(
String.valueOf(this.memberUnInitialized));
sb.append("; inRecovery=" + this.inRecovery);
sb.append("; subcription=" + this.subscriptionAttributes);
sb.append("; isPartitioned=" + this.isPartitioned);
sb.append("; isGatewayEnabled=" + this.isGatewayEnabled);
sb.append("; isPersistent=" + this.isPersistent);
sb.append("; persistentID=" + this.persistentID);
if (this.filterProfile != null) {
sb.append("; ").append(this.filterProfile);
}
sb.append("; gatewaySenderIds =" + this.gatewaySenderIds);
sb.append("; asyncEventQueueIds =" + this.asyncEventQueueIds);
}
}
/** Recipient information used for getInitialImage operation */
public static class InitialImageAdvice {
public Set<InternalDistributedMember> getOthers() {
return this.others;
}
public void setOthers(Set<InternalDistributedMember> others) {
this.others = others;
}
public Set<InternalDistributedMember> getReplicates() {
return this.replicates;
}
public Set<InternalDistributedMember> getNonPersistent() {
return this.nonPersistent;
}
public Set<InternalDistributedMember> getPreloaded() {
return this.preloaded;
}
public Set<InternalDistributedMember> getEmpties() {
return this.empties;
}
public Set<InternalDistributedMember> getUninitialized() {
return this.uninitialized;
}
/** Set of replicate recipients */
protected final Set<InternalDistributedMember> replicates;
/** Set of peers that are preloaded */
protected final Set<InternalDistributedMember> preloaded;
/** Set of tertiary recipients which are not replicates, in which case
* they should all be queried and a superset taken of their images.
* To be used only if the image cannot be obtained from the replicates set.
*/
protected Set<InternalDistributedMember> others;
/** Set of members that might be data feeds and have EMPTY data policy */
protected final Set<InternalDistributedMember> empties;
/** Set of members that may not have finished initializing their caches */
protected final Set<InternalDistributedMember> uninitialized;
/** Set of members that are replicates but not persistent*/
protected final Set<InternalDistributedMember> nonPersistent;
private final Map<InternalDistributedMember, CacheProfile> memberProfiles;
protected InitialImageAdvice(Set<InternalDistributedMember> replicates,
Set<InternalDistributedMember> others,
Set<InternalDistributedMember> preloaded,
Set<InternalDistributedMember> empties,
Set<InternalDistributedMember> uninitialized,
Set<InternalDistributedMember> nonPersistent,
Map<InternalDistributedMember, CacheProfile> memberProfiles) {
this.replicates = replicates;
this.others = others;
this.preloaded = preloaded;
this.empties = empties;
this.uninitialized = uninitialized;
this.nonPersistent = nonPersistent;
this.memberProfiles = memberProfiles;
}
public InitialImageAdvice() {
this(Collections.EMPTY_SET,
Collections.EMPTY_SET,
Collections.EMPTY_SET,
Collections.EMPTY_SET,
Collections.EMPTY_SET,
Collections.EMPTY_SET,
Collections.<InternalDistributedMember, CacheProfile>emptyMap());
}
@Override
public String toString() {
return "InitialImageAdvice("
+ "replicates=" + this.replicates
+ "; others=" + this.others
+ "; preloaded=" + this.preloaded
+ "; empty=" + this.empties
+ "; initializing=" + this.uninitialized
+ ")";
}
}
// moved putProfile, doPutProfile, and putProfile to DistributionAdvisor
// moved isNewerProfile to DistributionAdvisor
// moved isNewerSerialNumber to DistributionAdvisor
// moved forceNewMembershipVersion to DistributionAdvisor
// moved startOperation to DistributionAdvisor
// moved endOperation to DistributionAdvisor
/**
* Provide only the new replicates given a set of existing
* memberIds
* @param oldRecipients the <code>Set</code> of memberIds that have received the message
* @return the set of new replicate's memberIds
* @since 5.1
*/
public Set adviseNewReplicates(final Set oldRecipients)
{
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
if (cp.dataPolicy.withReplication()
&& !oldRecipients.contains(cp.getDistributedMember())) {
return true;
}
return false;
}
});
}
// moved waitForCurrentOperations to DistributionAdvisor
// moved removeId, doRemoveId, removeIdWithSerial, and updateRemovedProfiles to DistributionAdvisor
/**
* Provide all the replicates including persistent replicates.
*
* @return the set of replicate's memberIds
* @since 5.8
*/
public Set<InternalDistributedMember> adviseReplicates() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
if (cp.dataPolicy.withReplication()) {
return true;
}
return false;
}
});
}
/**
* Provide only the preloadeds given a set of existing memberIds
*
* @return the set of preloaded's memberIds
* @since prPersistSprint1
*/
public Set advisePreloadeds() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
if (cp.dataPolicy.withPreloaded()) {
return true;
}
return false;
}
});
}
/**
* Provide only the empty's (having DataPolicy.EMPTY) given a set of existing
* memberIds
*
* @return the set of replicate's memberIds
* @since 5.8
*/
public Set adviseEmptys() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
if (cp.dataPolicy.isEmpty()) {
return true;
}
return false;
}
});
}
/**
* Provide only the normals (having DataPolicy.NORMAL) given a set of existing memberIds
*
* @return the set of normal's memberIds
* @since 5.8
*/
public Set adviseNormals() {
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
if (cp.dataPolicy.isNormal()) {
return true;
}
return false;
}
});
}
@Override
protected void profileRemoved(Profile profile) {
if (logger.isDebugEnabled()) {
logger.debug("CDA: removing profile {}", profile);
}
if (getAdvisee() instanceof LocalRegion && profile != null) {
((LocalRegion)getAdvisee()).removeMemberFromCriticalList(profile.getDistributedMember());
}
}
/**
* Returns the list of all persistent members.
* For most cases, adviseInitializedPersistentMembers is more appropriate. These
* method includes members that are still in the process of GII.
*/
public Map<InternalDistributedMember, PersistentMemberID> advisePersistentMembers() {
initializationGate();
Map<InternalDistributedMember, PersistentMemberID> result = new HashMap<InternalDistributedMember, PersistentMemberID>();
Profile[] snapshot = this.profiles;
for(Profile profile : snapshot) {
CacheProfile cp = (CacheProfile) profile;
if(cp.persistentID != null) {
result.put(cp.getDistributedMember(), cp.persistentID);
}
}
return result;
}
public Map<InternalDistributedMember, PersistentMemberID> adviseInitializedPersistentMembers() {
initializationGate();
Map<InternalDistributedMember, PersistentMemberID> result = new HashMap<InternalDistributedMember, PersistentMemberID>();
Profile[] snapshot = this.profiles;
for(Profile profile : snapshot) {
CacheProfile cp = (CacheProfile) profile;
if(cp.persistentID != null && cp.persistenceInitialized) {
result.put(cp.getDistributedMember(), cp.persistentID);
}
}
return result;
}
public Set adviseCacheServers() {
getAdvisee().getCancelCriterion().checkCancelInProgress(null);
return adviseFilter(new Filter() {
public boolean include(Profile profile) {
assert profile instanceof CacheProfile;
CacheProfile cp = (CacheProfile)profile;
return cp.hasCacheServer;
}
});
}
  // Overridden for bucket regions. This listener also receives events
  // about PR joins and leaves.
  public void addMembershipAndProxyListener(MembershipListener listener) {
    addMembershipListener(listener);
  }
  /** Unregisters a listener previously added via addMembershipAndProxyListener. */
  public void removeMembershipAndProxyListener(MembershipListener listener) {
    removeMembershipListener(listener);
  }
@Override
public boolean removeId(ProfileId memberId, boolean crashed,
boolean destroyed, boolean fromMembershipListener) {
boolean isPersistent = false;
DiskStoreID persistentId = null;
CacheDistributionAdvisee advisee = (CacheDistributionAdvisee)getAdvisee();
if (advisee.getAttributes().getDataPolicy().withPersistence()) {
isPersistent = true;
CacheProfile profile = (CacheProfile)getProfile(memberId);
if (profile != null && profile.persistentID != null) {
persistentId = ((CacheProfile)getProfile(memberId)).persistentID.diskStoreId;
}
}
boolean result = super.removeId(memberId, crashed, destroyed, fromMembershipListener);
// bug #48962 - record members that leave during GII so IIOp knows about them
if (advisee instanceof DistributedRegion) {
DistributedRegion r = (DistributedRegion)advisee;
if (!r.isInitialized() && !r.isUsedForPartitionedRegionBucket()) {
if (logger.isDebugEnabled()) {
logger.debug("recording that {} has left during initialization of {}", memberId, r.getName());
}
ImageState state = r.getImageState();
if (isPersistent) {
if (persistentId != null) {
state.addLeftMember(persistentId);
}
} else {
state.addLeftMember((InternalDistributedMember)memberId);
}
}
}
return result;
}
public List<Set<String>> adviseSameGatewaySenderIds(
final Set<String> allGatewaySenderIds) {
final List<Set<String>> differSenderIds = new ArrayList<Set<String>>();
fetchProfiles(new Filter() {
public boolean include(final Profile profile) {
if (profile instanceof CacheProfile) {
final CacheProfile cp = (CacheProfile)profile;
if (allGatewaySenderIds.equals(cp.gatewaySenderIds)) {
return true;
}else{
differSenderIds.add(allGatewaySenderIds);
differSenderIds.add(cp.gatewaySenderIds);
return false;
}
}
return false;
}
});
return differSenderIds;
}
public List<Set<String>> adviseSameAsyncEventQueueIds(
final Set<String> allAsyncEventIds) {
final List<Set<String>> differAsycnQueueIds = new ArrayList<Set<String>>();
List l = fetchProfiles(new Filter() {
public boolean include(final Profile profile) {
if (profile instanceof CacheProfile) {
final CacheProfile cp = (CacheProfile)profile;
if (allAsyncEventIds.equals(cp.asyncEventQueueIds)) {
return true;
}else{
differAsycnQueueIds.add(allAsyncEventIds);
differAsycnQueueIds.add(cp.asyncEventQueueIds);
return false;
}
}
return false;
}
});
return differAsycnQueueIds;
}
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
package com.google.devtools.build.lib.rules.android;
import static com.google.devtools.build.lib.analysis.config.CompilationMode.OPT;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Whitelist;
import com.google.devtools.build.lib.analysis.actions.ActionConstructionContext;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SafeImplicitOutputsFunction;
import com.google.devtools.build.lib.packages.RuleErrorConsumer;
import com.google.devtools.build.lib.packages.TriState;
import com.google.devtools.build.lib.skylarkbuildapi.android.AndroidDataContextApi;
import com.google.devtools.build.lib.vfs.PathFragment;
/**
* Wraps common tools and settings used for working with Android assets, resources, and manifests.
*
* <p>Do not create implementation classes directly - instead, get the appropriate one from {@link
* com.google.devtools.build.lib.rules.android.AndroidSemantics}.
*
* <p>The {@link Label}, {@link ActionConstructionContext}, and BusyBox {@link FilesToRunProvider}
* are needed to create virtually all actions for working with Android data, so it makes sense to
* bundle them together. Additionally, this class includes some common tools (such as an SDK) that
* are used in BusyBox actions.
*/
public class AndroidDataContext implements AndroidDataContextApi {
  // Feature which would cause AndroidCompiledResourceMerger actions to pass a flag with the same
  // name to ResourceProcessorBusyBox.
  private static final String ANNOTATE_R_FIELDS_FROM_TRANSITIVE_DEPS =
      "annotate_r_fields_from_transitive_deps";
  // If specified, omit resources from transitive dependencies when generating Android R classes.
  private static final String OMIT_TRANSITIVE_RESOURCES_FROM_ANDROID_R_CLASSES =
      "android_resources_strict_deps";
  private final RuleContext ruleContext;
  private final FilesToRunProvider busybox;
  private final AndroidSdkProvider sdk;
  private final boolean persistentBusyboxToolsEnabled;
  // The "compatibleFor..." flags are true when the rule is NOT exempted by the
  // corresponding allowlist (see lacksAllowlistExemptions below).
  private final boolean compatibleForResourcePathShortening;
  private final boolean compatibleForResourceNameObfuscation;
  private final boolean compatibleForResourceShrinking;
  // The "throwOn..." flags are true when the rule is NOT on the allowlist
  // permitting the corresponding proguard/resource feature.
  private final boolean throwOnProguardApplyDictionary;
  private final boolean throwOnProguardApplyMapping;
  private final boolean throwOnResourceConflict;
  private final boolean useDataBindingV2;
  /** Creates a context for a native (non-Starlark) Android rule. */
  public static AndroidDataContext forNative(RuleContext ruleContext) {
    return makeContext(ruleContext);
  }
  /**
   * Builds a context from the rule's Android configuration fragment, its
   * BusyBox executable prerequisite, and its allowlist memberships.
   */
  public static AndroidDataContext makeContext(RuleContext ruleContext) {
    AndroidConfiguration androidConfig =
        ruleContext.getConfiguration().getFragment(AndroidConfiguration.class);
    return new AndroidDataContext(
        ruleContext,
        ruleContext.getExecutablePrerequisite("$android_resources_busybox", Mode.HOST),
        androidConfig.persistentBusyboxTools(),
        AndroidSdkProvider.fromRuleContext(ruleContext),
        lacksAllowlistExemptions(ruleContext, "allow_raw_access_to_resource_paths", true),
        lacksAllowlistExemptions(ruleContext, "allow_resource_name_obfuscation_opt_out", true),
        lacksAllowlistExemptions(ruleContext, "allow_resource_shrinking_opt_out", true),
        lacksAllowlistExemptions(ruleContext, "allow_proguard_apply_dictionary", false),
        lacksAllowlistExemptions(ruleContext, "allow_proguard_apply_mapping", false),
        lacksAllowlistExemptions(ruleContext, "allow_resource_conflicts", false),
        androidConfig.useDataBindingV2());
  }
  /**
   * Returns true when the named allowlist exists and this rule is NOT on it;
   * when the allowlist is not defined at all, returns
   * {@code valueIfNoWhitelist}.
   */
  private static boolean lacksAllowlistExemptions(
      RuleContext ruleContext, String whitelistName, boolean valueIfNoWhitelist) {
    return Whitelist.hasWhitelist(ruleContext, whitelistName)
        ? !Whitelist.isAvailable(ruleContext, whitelistName)
        : valueIfNoWhitelist;
  }
  protected AndroidDataContext(
      RuleContext ruleContext,
      FilesToRunProvider busybox,
      boolean persistentBusyboxToolsEnabled,
      AndroidSdkProvider sdk,
      boolean compatibleForResourcePathShortening,
      boolean compatibleForResourceNameObfuscation,
      boolean compatibleForResourceShrinking,
      boolean throwOnProguardApplyDictionary,
      boolean throwOnProguardApplyMapping,
      boolean throwOnResourceConflict,
      boolean useDataBindingV2) {
    this.persistentBusyboxToolsEnabled = persistentBusyboxToolsEnabled;
    this.ruleContext = ruleContext;
    this.busybox = busybox;
    this.sdk = sdk;
    this.compatibleForResourcePathShortening = compatibleForResourcePathShortening;
    this.compatibleForResourceNameObfuscation = compatibleForResourceNameObfuscation;
    this.compatibleForResourceShrinking = compatibleForResourceShrinking;
    this.throwOnProguardApplyDictionary = throwOnProguardApplyDictionary;
    this.throwOnProguardApplyMapping = throwOnProguardApplyMapping;
    this.throwOnResourceConflict = throwOnResourceConflict;
    this.useDataBindingV2 = useDataBindingV2;
  }
  public Label getLabel() {
    return ruleContext.getLabel();
  }
  public ActionConstructionContext getActionConstructionContext() {
    return ruleContext;
  }
  public RuleErrorConsumer getRuleErrorConsumer() {
    return ruleContext;
  }
  public FilesToRunProvider getBusybox() {
    return busybox;
  }
  public AndroidSdkProvider getSdk() {
    return sdk;
  }
  /*
   * Convenience methods. These are just slightly cleaner ways of doing common tasks.
   */
  /** Builds and registers a {@link SpawnAction.Builder}. */
  public void registerAction(SpawnAction.Builder spawnActionBuilder) {
    registerAction(spawnActionBuilder.build(ruleContext));
  }
  /** Registers one or more actions. */
  public void registerAction(ActionAnalysisMetadata... actions) {
    ruleContext.registerAction(actions);
  }
  /** Creates the rule's implicit output artifact for the given function. */
  public Artifact createOutputArtifact(SafeImplicitOutputsFunction function)
      throws InterruptedException {
    return ruleContext.getImplicitOutputArtifact(function);
  }
  public Artifact getUniqueDirectoryArtifact(String uniqueDirectorySuffix, String relative) {
    return ruleContext.getUniqueDirectoryArtifact(uniqueDirectorySuffix, relative);
  }
  public Artifact getUniqueDirectoryArtifact(String uniqueDirectorySuffix, PathFragment relative) {
    return ruleContext.getUniqueDirectoryArtifact(uniqueDirectorySuffix, relative);
  }
  public PathFragment getUniqueDirectory(PathFragment fragment) {
    return ruleContext.getUniqueDirectory(fragment);
  }
  public ArtifactRoot getBinOrGenfilesDirectory() {
    return ruleContext.getBinOrGenfilesDirectory();
  }
  public PathFragment getPackageDirectory() {
    return ruleContext.getPackageDirectory();
  }
  public AndroidConfiguration getAndroidConfig() {
    return ruleContext.getConfiguration().getFragment(AndroidConfiguration.class);
  }
  /** Indicates whether Busybox actions should be passed the "--debug" flag */
  public boolean useDebug() {
    return getActionConstructionContext().getConfiguration().getCompilationMode() != OPT;
  }
  public boolean isPersistentBusyboxToolsEnabled() {
    return persistentBusyboxToolsEnabled;
  }
  public boolean compatibleForResourcePathShortening() {
    return compatibleForResourcePathShortening;
  }
  public boolean compatibleForResourceNameObfuscation() {
    return compatibleForResourceNameObfuscation;
  }
  public boolean compatibleForResourceShrinking() {
    return compatibleForResourceShrinking;
  }
  public boolean throwOnProguardApplyDictionary() {
    return throwOnProguardApplyDictionary;
  }
  public boolean throwOnProguardApplyMapping() {
    return throwOnProguardApplyMapping;
  }
  public boolean throwOnResourceConflict() {
    return throwOnResourceConflict;
  }
  public boolean useDataBindingV2() {
    return useDataBindingV2;
  }
  /** True when the rule enables the transitive-deps R-field annotation feature. */
  public boolean annotateRFieldsFromTransitiveDeps() {
    return ruleContext.getFeatures().contains(ANNOTATE_R_FIELDS_FROM_TRANSITIVE_DEPS);
  }
  /** True when the rule enables strict (non-transitive) Android R classes. */
  boolean omitTransitiveResourcesFromAndroidRClasses() {
    return ruleContext.getFeatures().contains(OMIT_TRANSITIVE_RESOURCES_FROM_ANDROID_R_CLASSES);
  }
  /** Returns true if the context dictates that resource shrinking should be performed. */
  boolean useResourceShrinking(boolean hasProguardSpecs) {
    return isResourceShrinkingEnabled() && hasProguardSpecs;
  }
  /**
   * Returns true if the context dictates that resource shrinking is enabled. This doesn't
   * necessarily mean that shrinking should be performed - for that, use {@link
   * #useResourceShrinking(boolean)}, which calls this.
   */
  boolean isResourceShrinkingEnabled() {
    if (!ruleContext.attributes().has("shrink_resources")) {
      return false;
    }
    TriState state = ruleContext.attributes().get("shrink_resources", BuildType.TRISTATE);
    if (state == TriState.AUTO) {
      // AUTO defers to the global --use_android_resource_shrinking setting
      state = getAndroidConfig().useAndroidResourceShrinking() ? TriState.YES : TriState.NO;
    }
    return state == TriState.YES && compatibleForResourceShrinking;
  }
  boolean useResourcePathShortening() {
    // Use resource path shortening iff:
    // 1) --experimental_android_resource_path_shortening
    // 2) -c opt
    // 3) Not on allowlist exempting from compatibleForResourcePathShortening
    return getAndroidConfig().useAndroidResourcePathShortening()
        && getActionConstructionContext().getConfiguration().getCompilationMode() == OPT
        && compatibleForResourcePathShortening;
  }
  boolean useResourceNameObfuscation(boolean hasProguardSpecs) {
    // Use resource name obfuscation iff:
    // 1) --experimental_android_resource_name_obfuscation
    // 2) resource shrinking is on (implying proguard specs are present)
    // 3) Not on allowlist exempting from compatibleForResourceNameObfuscation
    return getAndroidConfig().useAndroidResourceNameObfuscation()
        && useResourceShrinking(hasProguardSpecs)
        && compatibleForResourceNameObfuscation;
  }
}
| |
/*
* Copyright (C) 2011 readyState Software Ltd, 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.vothuat.viet.databases;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.ZipInputStream;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteDatabase.CursorFactory;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
/**
* A helper class to manage database creation and version management using
* an application's raw asset files.
*
* This class provides developers with a simple way to ship their Android app
* with an existing SQLite database (which may be pre-populated with data) and
* to manage its initial creation and any upgrades required with subsequent
* version releases.
*
* <p>This class makes it easy for {@link android.content.ContentProvider}
* implementations to defer opening and upgrading the database until first use,
* to avoid blocking application startup with long-running database upgrades.
*
* <p>For examples see <a href="https://github.com/jgilfelt/android-sqlite-asset-helper">
* https://github.com/jgilfelt/android-sqlite-asset-helper</a>
*
* <p class="note"><strong>Note:</strong> this class assumes
* monotonically increasing version numbers for upgrades. Also, there
* is no concept of a database downgrade; installing a new version of
* your app which uses a lower version number than a
* previously-installed version will result in undefined behavior.</p>
*/
public class SQLiteAssetHelper extends SQLiteOpenHelper {
    // Log tag for all helper diagnostics.
    private static final String TAG = SQLiteAssetHelper.class.getSimpleName();
    // Directory under assets/ where the shipped database and upgrade scripts live.
    private static final String ASSET_DB_PATH = "databases";
    private final Context mContext;
    private final String mName;
    private final CursorFactory mFactory;
    // Target schema version handed to SQLiteOpenHelper; upgrades run toward it.
    private final int mNewVersion;
    // Cached open handle, reused while open (see getWritableDatabase).
    private SQLiteDatabase mDatabase = null;
    // Re-entrancy guard for getWritableDatabase/getReadableDatabase.
    private boolean mIsInitializing = false;
    // Absolute directory in which the database file is created.
    private String mDatabasePath;
    // Asset path of the shipped database file (ASSET_DB_PATH + "/" + name).
    private String mAssetPath;
    // Format for per-version upgrade scripts in assets: name_upgrade_<from>-<to>.sql
    private String mUpgradePathFormat;
    private int mForcedUpgradeVersion = 0;
/**
* Create a helper object to create, open, and/or manage a database in
* a specified location.
* This method always returns very quickly. The database is not actually
* created or opened until one of {@link #getWritableDatabase} or
* {@link #getReadableDatabase} is called.
*
* @param context to use to open or create the database
* @param name of the database file
* @param storageDirectory to store the database file upon creation; caller must
* ensure that the specified absolute path is available and can be written to
* @param factory to use for creating cursor objects, or null for the default
* @param version number of the database (starting at 1); if the database is older,
* SQL file(s) contained within the application assets folder will be used to
* upgrade the database
*/
public SQLiteAssetHelper(Context context, String name, String storageDirectory, CursorFactory factory, int version) {
super(context, name, factory, version);
if (version < 1) throw new IllegalArgumentException("Version must be >= 1, was " + version);
if (name == null) throw new IllegalArgumentException("Database name cannot be null");
mContext = context;
mName = name;
mFactory = factory;
mNewVersion = version;
mAssetPath = ASSET_DB_PATH + "/" + name;
if (storageDirectory != null) {
mDatabasePath = storageDirectory;
} else {
mDatabasePath = context.getApplicationInfo().dataDir + "/databases";
}
mUpgradePathFormat = ASSET_DB_PATH + "/" + name + "_upgrade_%s-%s.sql";
}
/**
* Create a helper object to create, open, and/or manage a database in
* the application's default private data directory.
* This method always returns very quickly. The database is not actually
* created or opened until one of {@link #getWritableDatabase} or
* {@link #getReadableDatabase} is called.
*
* @param context to use to open or create the database
* @param name of the database file
* @param factory to use for creating cursor objects, or null for the default
* @param version number of the database (starting at 1); if the database is older,
* SQL file(s) contained within the application assets folder will be used to
* upgrade the database
*/
public SQLiteAssetHelper(Context context, String name, CursorFactory factory, int version) {
    // Convenience overload: null storageDirectory selects the app's default
    // private "databases" directory in the 5-arg constructor.
    this(context, name, null, factory, version);
}
/**
* Create and/or open a database that will be used for reading and writing.
* The first time this is called, the database will be extracted and copied
* from the application's assets folder.
*
* <p>Once opened successfully, the database is cached, so you can
* call this method every time you need to write to the database.
* (Make sure to call {@link #close} when you no longer need the database.)
* Errors such as bad permissions or a full disk may cause this method
* to fail, but future attempts may succeed if the problem is fixed.</p>
*
* <p class="caution">Database upgrade may take a long time, you
* should not call this method from the application main thread, including
* from {@link android.content.ContentProvider#onCreate ContentProvider.onCreate()}.
*
* @throws SQLiteException if the database cannot be opened for writing
* @return a read/write database object valid until {@link #close} is called
*/
@Override
public synchronized SQLiteDatabase getWritableDatabase() {
    // Fast path: reuse the cached handle when it is already open read/write.
    if (mDatabase != null && mDatabase.isOpen() && !mDatabase.isReadOnly()) {
        return mDatabase; // The database is already open for business
    }
    // Guard against re-entrant calls from onCreate/onUpgrade/onOpen callbacks.
    if (mIsInitializing) {
        throw new IllegalStateException("getWritableDatabase called recursively");
    }
    // If we have a read-only database open, someone could be using it
    // (though they shouldn't), which would cause a lock to be held on
    // the file, and our attempts to open the database read-write would
    // fail waiting for the file lock. To prevent that, we acquire the
    // lock on the read-only database, which shuts out other users.
    boolean success = false;
    SQLiteDatabase db = null;
    //if (mDatabase != null) mDatabase.lock();
    try {
        mIsInitializing = true;
        //if (mName == null) {
        // db = SQLiteDatabase.create(null);
        //} else {
        // db = mContext.openOrCreateDatabase(mName, 0, mFactory);
        //}
        // Copies the database out of assets on first use, then opens it read/write.
        db = createOrOpenDatabase(false);
        int version = db.getVersion();
        // do force upgrade: below the forced-upgrade threshold, overwrite the
        // on-disk database with the asset copy instead of running upgrade scripts.
        if (version != 0 && version < mForcedUpgradeVersion) {
            db = createOrOpenDatabase(true);
            db.setVersion(mNewVersion);
            version = db.getVersion();
        }
        if (version != mNewVersion) {
            db.beginTransaction();
            try {
                if (version == 0) {
                    // Fresh database (asset copy carries no version yet).
                    onCreate(db);
                } else {
                    if (version > mNewVersion) {
                        // NOTE(review): only warns; onUpgrade is still invoked for a
                        // downgrade and will throw when no matching script exists.
                        Log.w(TAG, "Can't downgrade read-only database from version " +
                                version + " to " + mNewVersion + ": " + db.getPath());
                    }
                    onUpgrade(db, version, mNewVersion);
                }
                db.setVersion(mNewVersion);
                db.setTransactionSuccessful();
            } finally {
                db.endTransaction();
            }
        }
        onOpen(db);
        success = true;
        return db;
    } finally {
        mIsInitializing = false;
        if (success) {
            // Swap the cached handle; close any previously cached (e.g. read-only) one.
            if (mDatabase != null) {
                try { mDatabase.close(); } catch (Exception e) { }
                //mDatabase.unlock();
            }
            mDatabase = db;
        } else {
            //if (mDatabase != null) mDatabase.unlock();
            // Initialization failed: do not leak the half-opened handle.
            if (db != null) db.close();
        }
    }
}
/**
* Create and/or open a database. This will be the same object returned by
* {@link #getWritableDatabase} unless some problem, such as a full disk,
* requires the database to be opened read-only. In that case, a read-only
* database object will be returned. If the problem is fixed, a future call
* to {@link #getWritableDatabase} may succeed, in which case the read-only
* database object will be closed and the read/write object will be returned
* in the future.
*
* <p class="caution">Like {@link #getWritableDatabase}, this method may
* take a long time to return, so you should not call it from the
* application main thread, including from
* {@link android.content.ContentProvider#onCreate ContentProvider.onCreate()}.
*
* @throws SQLiteException if the database cannot be opened
* @return a database object valid until {@link #getWritableDatabase}
* or {@link #close} is called.
*/
@Override
public synchronized SQLiteDatabase getReadableDatabase() {
    // Fast path: any open cached handle (read/write or read-only) is acceptable here.
    if (mDatabase != null && mDatabase.isOpen()) {
        return mDatabase; // The database is already open for business
    }
    // Guard against re-entrant calls from initialization callbacks.
    if (mIsInitializing) {
        throw new IllegalStateException("getReadableDatabase called recursively");
    }
    // Prefer a writable handle; fall back to read-only only on failure
    // (e.g. full disk or bad permissions).
    try {
        return getWritableDatabase();
    } catch (SQLiteException e) {
        if (mName == null) throw e; // Can't open a temp database read-only!
        Log.e(TAG, "Couldn't open " + mName + " for writing (will try read-only):", e);
    }
    SQLiteDatabase db = null;
    try {
        mIsInitializing = true;
        String path = mContext.getDatabasePath(mName).getPath();
        db = SQLiteDatabase.openDatabase(path, mFactory, SQLiteDatabase.OPEN_READONLY);
        // A read-only handle cannot be upgraded; refuse a stale schema outright.
        if (db.getVersion() != mNewVersion) {
            throw new SQLiteException("Can't upgrade read-only database from version " +
                    db.getVersion() + " to " + mNewVersion + ": " + path);
        }
        onOpen(db);
        Log.w(TAG, "Opened " + mName + " in read-only mode");
        mDatabase = db;
        return mDatabase;
    } finally {
        mIsInitializing = false;
        // Close the local handle unless it was promoted to the cached one above.
        if (db != null && db != mDatabase) db.close();
    }
}
/**
* Close any open database object.
*/
@Override
public synchronized void close() {
    // Closing while getWritableDatabase/getReadableDatabase is mid-flight is a bug.
    if (mIsInitializing) throw new IllegalStateException("Closed during initialization");

    // Guard clause: nothing cached, or already closed — nothing to do.
    if (mDatabase == null || !mDatabase.isOpen()) {
        return;
    }
    mDatabase.close();
    mDatabase = null;
}
@Override
public final void onConfigure(SQLiteDatabase db) {
    // not supported! Declared final so subclasses cannot rely on a hook
    // this helper never honors (the asset copy drives configuration).
}
@Override
public final void onCreate(SQLiteDatabase db) {
    // do nothing - createOrOpenDatabase() is called in
    // getWritableDatabase() to handle database creation.
    // Declared final: creation is always the asset copy, never subclass SQL.
}
/**
 * Upgrades the database by locating and executing the chain of
 * {@code <name>_upgrade_<from>-<to>.sql} scripts from the assets folder,
 * in version order, inside the transaction opened by getWritableDatabase().
 *
 * @param db the database being upgraded
 * @param oldVersion the version currently stored on disk
 * @param newVersion the target version
 * @throws SQLiteAssetException if no upgrade script path exists at all
 */
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    Log.w(TAG, "Upgrading database " + mName + " from version " + oldVersion + " to " + newVersion + "...");
    ArrayList<String> paths = new ArrayList<String>();
    // Recursively discover a script chain ending at newVersion.
    getUpgradeFilePaths(oldVersion, newVersion-1, newVersion, paths);
    if (paths.isEmpty()) {
        Log.e(TAG, "no upgrade script path from " + oldVersion + " to " + newVersion);
        throw new SQLiteAssetException("no upgrade script path from " + oldVersion + " to " + newVersion);
    }
    // Execute scripts in ascending version order.
    Collections.sort(paths, new VersionComparator());
    for (String path : paths) {
        InputStream is = null;
        try {
            Log.w(TAG, "processing upgrade: " + path);
            is = mContext.getAssets().open(path);
            String sql = Utils.convertStreamToString(is);
            if (sql != null) {
                List<String> cmds = Utils.splitSqlScript(sql, ';');
                for (String cmd : cmds) {
                    if (cmd.trim().length() > 0) {
                        db.execSQL(cmd);
                    }
                }
            }
        } catch (IOException e) {
            // Best-effort as before: log (was printStackTrace) and continue
            // with the remaining scripts rather than aborting the upgrade.
            Log.e(TAG, "error processing upgrade script: " + path, e);
        } finally {
            // FIX: the asset stream was previously never closed (leak).
            if (is != null) {
                try { is.close(); } catch (IOException ignored) { }
            }
        }
    }
    Log.w(TAG, "Successfully upgraded database " + mName + " from version " + oldVersion + " to " + newVersion);
}
@Override
public final void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    // not supported! Downgrades are rejected earlier: getReadableDatabase
    // throws when the on-disk version differs from the requested one.
}
/**
* Bypass the upgrade process (for each increment up to a given version) and simply
* overwrite the existing database with the supplied asset file.
*
* @param version bypass upgrade up to this version number - should never be greater than the
* latest database version.
*
* @deprecated use {@link #setForcedUpgrade} instead.
*/
@Deprecated
public void setForcedUpgradeVersion(int version) {
    // Kept for backward compatibility; simply forwards to the replacement API.
    setForcedUpgrade(version);
}
/**
* Bypass the upgrade process (for each increment up to a given version) and simply
* overwrite the existing database with the supplied asset file.
*
* @param version bypass upgrade up to this version number - should never be greater than the
* latest database version.
*/
public void setForcedUpgrade(int version) {
    // Databases with an on-disk version below this threshold are overwritten
    // from assets in getWritableDatabase() instead of running upgrade scripts.
    mForcedUpgradeVersion = version;
}
/**
* Bypass the upgrade process for every version increment and simply overwrite the existing
* database with the supplied asset file.
*/
public void setForcedUpgrade() {
    // Force-overwrite for every version below the current target version.
    setForcedUpgrade(mNewVersion);
}
/**
 * Opens the on-disk database, copying it out of assets first when it is
 * missing, and re-copying it when {@code force} is set (forced upgrade).
 *
 * @param force overwrite an existing database file with the asset copy
 * @return an open read/write handle, or null if the copy exists but cannot be opened
 * @throws SQLiteAssetException if the asset is missing or the target dir is not writable
 */
private SQLiteDatabase createOrOpenDatabase(boolean force) throws SQLiteAssetException {
    // Probe for the file first; skipping the open avoids the error trace
    // SQLite logs on API 14+ when the file is absent.
    SQLiteDatabase db = null;
    File dbFile = new File (mDatabasePath + "/" + mName);
    if (dbFile.exists()) {
        db = returnDatabase();
    }

    if (db == null) {
        // Missing (or unopenable) database: extract it from assets and open it.
        copyDatabaseFromAssets();
        return returnDatabase();
    }

    // Database already exists on disk.
    if (force) {
        Log.w(TAG, "forcing database upgrade!");
        copyDatabaseFromAssets();
        db = returnDatabase();
    }
    return db;
}
// Attempts to open the on-disk database read/write.
// Returns the open handle, or null when the open fails (caller treats null
// as "needs to be copied from assets"); the failure is logged, not thrown.
private SQLiteDatabase returnDatabase(){
    try {
        SQLiteDatabase db = SQLiteDatabase.openDatabase(mDatabasePath + "/" + mName, mFactory, SQLiteDatabase.OPEN_READWRITE);
        Log.i(TAG, "successfully opened database " + mName);
        return db;
    } catch (SQLiteException e) {
        Log.w(TAG, "could not open database " + mName + " - " + e.getMessage());
        return null;
    }
}
/**
 * Extracts the packaged database from assets to the target directory.
 * Tries the plain file first, then {@code .zip}, then {@code .gz}.
 *
 * @throws SQLiteAssetException if no asset variant exists or the target
 *         directory/file cannot be written
 */
private void copyDatabaseFromAssets() throws SQLiteAssetException {
    Log.w(TAG, "copying database from assets...");
    String path = mAssetPath;
    String dest = mDatabasePath + "/" + mName;
    InputStream is;
    boolean isZip = false;
    try {
        // try uncompressed
        is = mContext.getAssets().open(path);
    } catch (IOException e) {
        // try zip
        try {
            is = mContext.getAssets().open(path + ".zip");
            isZip = true;
        } catch (IOException e2) {
            // try gzip
            try {
                is = mContext.getAssets().open(path + ".gz");
            } catch (IOException e3) {
                SQLiteAssetException se = new SQLiteAssetException("Missing " + mAssetPath + " file (or .zip, .gz archive) in assets, or target folder not writable");
                se.setStackTrace(e3.getStackTrace());
                throw se;
            }
        }
    }
    try {
        File f = new File(mDatabasePath + "/");
        // FIX: use mkdirs() so a caller-supplied storageDirectory with missing
        // parent directories is created too (mkdir() only creates the leaf).
        if (!f.exists()) { f.mkdirs(); }
        if (isZip) {
            ZipInputStream zis = Utils.getFileFromZip(is);
            if (zis == null) {
                throw new SQLiteAssetException("Archive is missing a SQLite database file");
            }
            Utils.writeExtractedFileToDisk(zis, new FileOutputStream(dest));
        } else {
            Utils.writeExtractedFileToDisk(is, new FileOutputStream(dest));
        }
        Log.w(TAG, "database copy complete");
    } catch (IOException e) {
        SQLiteAssetException se = new SQLiteAssetException("Unable to write " + dest + " to data directory");
        se.setStackTrace(e.getStackTrace());
        throw se;
    } finally {
        // FIX: close the asset stream; previously it was leaked on every call.
        // Safe even if Utils.writeExtractedFileToDisk already closed it
        // (Closeable.close is specified to be a no-op when already closed).
        try { is.close(); } catch (IOException ignored) { }
    }
}
// Opens the upgrade script asset for the given version transition.
// Returns the open stream (caller is responsible for closing it),
// or null when no such script exists in assets.
private InputStream getUpgradeSQLStream(int oldVersion, int newVersion) {
    String path = String.format(mUpgradePathFormat, oldVersion, newVersion);
    try {
        return mContext.getAssets().open(path);
    } catch (IOException e) {
        // Missing script is expected while probing for a chain; just log it.
        Log.w(TAG, "missing database upgrade script: " + path);
        return null;
    }
}
/**
 * Recursively discovers the chain of upgrade-script asset paths leading from
 * {@code baseVersion} up to the target version, appending each found path to
 * {@code paths}. Tries the single-step script {@code start->end} first; on a
 * hit it backtracks from {@code start}, otherwise it widens the search window.
 *
 * @param baseVersion the on-disk version the chain must reach back to
 * @param start lower version of the transition currently probed
 * @param end upper version of the transition currently probed
 * @param paths accumulator for the discovered script paths (unsorted)
 */
private void getUpgradeFilePaths(int baseVersion, int start, int end, ArrayList<String> paths) {
    int a;
    int b;
    InputStream is = getUpgradeSQLStream(start, end);
    if (is != null) {
        // FIX: the stream was opened only to probe existence and was never
        // closed before (asset file-descriptor leak); close it right away.
        try { is.close(); } catch (IOException ignored) { }
        String path = String.format(mUpgradePathFormat, start, end);
        paths.add(path);
        // Script found: continue the chain below 'start'.
        a = start - 1;
        b = start;
    } else {
        // No script for this step: widen the window downwards.
        a = start - 1;
        b = end;
    }
    if (a < baseVersion) {
        return;
    }
    getUpgradeFilePaths(baseVersion, a, b, paths); // recursive call
}
/**
* An exception that indicates there was an error with SQLite asset retrieval or parsing.
*/
@SuppressWarnings("serial")
public static class SQLiteAssetException extends SQLiteException {

    // No-detail constructor, mirrors SQLiteException's default form.
    public SQLiteAssetException() {}

    // @param error human-readable description of the asset/copy failure
    public SQLiteAssetException(String error) {
        super(error);
    }
}
}
| |
package org.gearvrf.script;
import java.io.IOException;
import java.util.HashMap;
import org.gearvrf.GVRAndroidResource;
import org.gearvrf.GVRBehavior;
import org.gearvrf.GVRContext;
import org.gearvrf.GVRResourceVolume;
import org.gearvrf.GVRScene;
import org.gearvrf.GVRSceneObject;
import org.gearvrf.IErrorEvents;
import org.gearvrf.IPickEvents;
import org.gearvrf.ISceneEvents;
import org.gearvrf.utility.FileNameUtils;
import org.gearvrf.GVRPicker;
import org.gearvrf.IPickEvents;
import org.gearvrf.ISensorEvents;
import org.gearvrf.SensorEvent;
/**
* Attaches a Java or Lua script to a scene object.
*
* These script callbacks are invoked if they are present:
* onEarlyInit(GVRContext) called after script is loaded
* onAfterInit() called when the script becomes active
* (this component is attached to a scene object and enabled)
* onStep() called every frame if this component is enabled
* and attached to a scene object
* onPickEnter(GVRSceneObject, GVRPicker.GVRPickedObject)
* called when picking ray enters an object
* onPickExit(GVRSceneObject)
* called when picking ray exits an object
* onPickInside(GVRSceneObject, GVRPicker.GVRPickedObject)
* called when picking ray is inside an object
* onPick(GVRPicker) called when picking selection changes
* onNoPick(GVRPicker) called when nothing is picked
*
*/
public class GVRScriptBehavior extends GVRBehavior implements IPickEvents, ISensorEvents, ISceneEvents
{
    static private long TYPE_SCRIPT_BEHAVIOR = newComponentType(GVRScriptBehavior.class);
    static private Object[] noargs = new Object[0];

    protected GVRScriptFile mScriptFile = null;   // loaded script, null until set
    protected boolean mIsAttached = false;        // true while the script is attached to the owner
    protected int mPickEvents = 0xF;              // bit mask of pick callbacks the script still defines
    protected String mLanguage = GVRScriptManager.LANG_JAVASCRIPT;
    private String mLastError;                    // last script error message, null if none
    private GVRScene mScene = null;

    // Bit flags for mPickEvents; a bit is cleared once the script is seen
    // not to define the corresponding callback. When all bits clear,
    // picking listeners are removed.
    private final int ON_ENTER = 1;
    private final int ON_EXIT = 2;
    private final int ON_PICK = 4;
    private final int ON_NOPICK = 8;
    private final int PICK_EVENTS = (ON_ENTER | ON_EXIT | ON_PICK | ON_NOPICK);

    /**
     * Constructor for a script behavior component.
     * @param gvrContext The current GVRF context
     */
    public GVRScriptBehavior(GVRContext gvrContext)
    {
        super(gvrContext);
        mHasFrameCallback = false;
        mType = TYPE_SCRIPT_BEHAVIOR;
        mIsAttached = false;
        mLanguage = GVRScriptManager.LANG_JAVASCRIPT;
        gvrContext.getEventReceiver().addListener(this);
    }

    /**
     * Constructor for a script behavior component that loads a script file.
     * @param gvrContext The current GVRF context
     * @param scriptFile Path to the script file.
     * @throws IOException if script file cannot be read.
     * @throws GVRScriptException if script processing error occurs.
     */
    public GVRScriptBehavior(GVRContext gvrContext, String scriptFile) throws IOException, GVRScriptException
    {
        super(gvrContext);
        mHasFrameCallback = false;
        mType = TYPE_SCRIPT_BEHAVIOR;
        mIsAttached = false;
        mLanguage = GVRScriptManager.LANG_JAVASCRIPT;
        gvrContext.getEventReceiver().addListener(this);
        setFilePath(scriptFile);
    }

    /** @return the currently loaded script file, or null if none is set. */
    public GVRScriptFile getScriptFile()
    {
        return mScriptFile;
    }

    public static long getComponentType() { return TYPE_SCRIPT_BEHAVIOR; }

    /**
     * Sets the path to the script file to load and loads the script.
     *
     * The language is inferred from the file extension; "sd:" and
     * "http(s):" prefixes select SD-card and network volumes respectively.
     *
     * @param filePath path to script file
     * @throws IOException if the script cannot be read.
     * @throws GVRScriptException if a script processing error occurs.
     */
    public void setFilePath(String filePath) throws IOException, GVRScriptException
    {
        GVRResourceVolume.VolumeType volumeType = GVRResourceVolume.VolumeType.ANDROID_ASSETS;
        String fname = filePath.toLowerCase();
        mLanguage = FileNameUtils.getExtension(fname);

        if (fname.startsWith("sd:"))
        {
            volumeType = GVRResourceVolume.VolumeType.ANDROID_SDCARD;
        }
        else if (fname.startsWith("http:") || fname.startsWith("https:"))
        {
            volumeType = GVRResourceVolume.VolumeType.NETWORK;
        }
        GVRResourceVolume volume = new GVRResourceVolume(getGVRContext(), volumeType,
                FileNameUtils.getParentDirectory(filePath));
        GVRAndroidResource resource = volume.openResource(filePath);
        setScriptFile(getGVRContext().getScriptManager().loadScript(resource, mLanguage));
    }

    /**
     * Loads the script from a text string.
     * @param scriptText text string containing script to execute.
     * @param language language ("js" or "lua"); anything other than Lua
     *                 is treated as JavaScript.
     */
    public void setScriptText(String scriptText, String language)
    {
        GVRScriptFile newScript;
        if (language.equals(GVRScriptManager.LANG_LUA))
        {
            newScript = new GVRLuaScriptFile(getGVRContext(), scriptText);
            mLanguage = language;
        }
        else
        {
            newScript = new GVRJavascriptScriptFile(getGVRContext(), scriptText);
            mLanguage = GVRScriptManager.LANG_JAVASCRIPT;
        }
        setScriptFile(newScript);
    }

    /**
     * Set the GVRScriptFile to execute.
     * @param scriptFile GVRScriptFile with script already loaded.
     * If the script contains a function called "onEarlyInit"
     * it is called if the script file is valid.
     */
    public void setScriptFile(GVRScriptFile scriptFile)
    {
        if (mScriptFile != scriptFile)
        {
            // Detach any previously attached script before swapping.
            detachScript();
            mScriptFile = scriptFile;
        }
    }

    /**
     * Invokes the script associated with this component.
     * This function invokes the script even if the
     * component is not enabled and not attached to
     * a scene object.
     * @see GVRScriptFile#invoke() invoke
     */
    public void invoke()
    {
        if (mScriptFile != null)
        {
            mScriptFile.invoke();
        }
    }

    public void onInit(GVRContext context, GVRScene scene)
    {
        mScene = scene;
        startPicking();
    }

    public void onAfterInit() { }

    public void onStep() { }

    public void onAttach(GVRSceneObject owner)
    {
        super.onAttach(owner);
        attachScript(owner);
    }

    public void onEnable()
    {
        super.onEnable();
        attachScript(null);
    }

    public void onDetach(GVRSceneObject owner)
    {
        detachScript();
        super.onDetach(owner);
    }

    public void onDrawFrame(float frameTime)
    {
        invokeFunction("onStep", noargs);
    }

    public void onEnter(GVRSceneObject sceneObj, GVRPicker.GVRPickedObject hit)
    {
        // Only forward events for our own scene object; clear the bit (and
        // eventually stop picking) when the script defines no handler.
        if ((sceneObj == getOwnerObject()) && !invokeFunction("onPickEnter", new Object[] { sceneObj, hit }))
        {
            mPickEvents &= ~ON_ENTER;
            if (mPickEvents == 0)
            {
                stopPicking();
            }
        }
    }

    public void onExit(GVRSceneObject sceneObj)
    {
        if ((sceneObj == getOwnerObject()) && !invokeFunction("onPickExit", new Object[] { sceneObj }))
        {
            mPickEvents &= ~ON_EXIT;
            if (mPickEvents == 0)
            {
                stopPicking();
            }
        }
    }

    public void onPick(GVRPicker picker)
    {
        if (!invokeFunction("onPick", new Object[] { picker }))
        {
            mPickEvents &= ~ON_PICK;
            if (mPickEvents == 0)
            {
                stopPicking();
            }
        }
    }

    public void onNoPick(GVRPicker picker)
    {
        if (!invokeFunction("onNoPick", new Object[] { picker }))
        {
            mPickEvents &= ~ON_NOPICK;
            if (mPickEvents == 0)
            {
                stopPicking();
            }
        }
    }

    public void onSensorEvent(SensorEvent event)
    {
        invokeFunction("onSensorEvent", new Object[] { event });
    }

    public void onInside(GVRSceneObject sceneObj, GVRPicker.GVRPickedObject hit) { }

    // Attaches the loaded script to the owner object (or the supplied one)
    // and registers for frame/pick events when the script defines handlers.
    protected void attachScript(GVRSceneObject owner)
    {
        if (owner == null)
        {
            owner = getOwnerObject();
        }
        if (!mIsAttached && (mScriptFile != null) && isEnabled() && (owner != null) && owner.isEnabled())
        {
            getGVRContext().getScriptManager().attachScriptFile(owner, mScriptFile);
            mIsAttached = true;
            owner.getEventReceiver().addListener(this);
            if (invokeFunction("onStep", noargs))
            {
                mHasFrameCallback = true;
                startListening();
            }
            startPicking();
        }
    }

    protected void startPicking()
    {
        GVRScene scene = mScene;
        mPickEvents = PICK_EVENTS;
        if (mScene == null)
        {
            scene = getGVRContext().getMainScene();
        }
        scene.getEventReceiver().addListener(this);
    }

    protected void stopPicking()
    {
        GVRScene scene = mScene;
        if (mScene == null)
        {
            scene = getGVRContext().getMainScene();
        }
        scene.getEventReceiver().removeListener(this);
    }

    protected void detachScript()
    {
        GVRSceneObject owner = getOwnerObject();

        if (mIsAttached && (owner != null))
        {
            getGVRContext().getScriptManager().detachScriptFile(owner);
            owner.getEventReceiver().removeListener(this);
            mIsAttached = false;
            // NOTE(review): setting mHasFrameCallback to true on detach looks
            // inverted (attachScript sets it true when a frame callback exists)
            // but is preserved here — confirm against GVRBehavior.stopListening
            // before changing.
            mHasFrameCallback = true;
            stopPicking();
            stopListening();
        }
    }

    /**
     * Calls a function script associated with this component.
     * The function is called even if the component
     * is not enabled and not attached to a scene object.
     * @param funcName name of script function to call.
     * @param args function parameters as an array of objects.
     * @return true if function was called, false if no such function
     *         or no script file is loaded.
     * @see org.gearvrf.script.GVRScriptFile#invokeFunction(String, Object[]) invokeFunction
     */
    public boolean invokeFunction(String funcName, Object[] args)
    {
        mLastError = null;
        // FIX: previously mScriptFile.getLastError() was dereferenced outside
        // the null guard, throwing NullPointerException whenever this was
        // called with no script loaded (e.g. onDrawFrame before setScriptFile).
        if (mScriptFile == null)
        {
            return false;
        }
        if (mScriptFile.invokeFunction(funcName, args))
        {
            return true;
        }
        mLastError = mScriptFile.getLastError();
        // "is not defined" simply means the script omits this callback;
        // only report genuine script errors.
        if ((mLastError != null) && !mLastError.contains("is not defined"))
        {
            getGVRContext().logError(mLastError, this);
        }
        return false;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.apache.calcite.util.Pair;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
/**
* Takes a Filter operator on top of a TableScan and removes dynamic pruning conditions
* if static partition pruning has been triggered already.
*
* This transformation is executed when CBO is on and hence we can guarantee that the filtering
* conditions on the partition columns will be immediately on top of the TableScan operator.
*
*/
public class RedundantDynamicPruningConditionsRemoval extends Transform {

  private static final Logger LOG = LoggerFactory.getLogger(RedundantDynamicPruningConditionsRemoval.class);

  /**
   * Transform the query tree.
   *
   * @param pctx the current parse context
   */
  @Override
  public ParseContext transform(ParseContext pctx) throws SemanticException {
    // Make sure semijoin is not enabled. If it is, then do not remove the dynamic partition pruning predicates.
    if (!pctx.getConf().getBoolVar(HiveConf.ConfVars.TEZ_DYNAMIC_SEMIJOIN_REDUCTION)) {
      Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
      // Fire only on a Filter directly on top of a TableScan (CBO guarantees
      // partition-column filters sit immediately above the TS).
      opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%" +
          FilterOperator.getOperatorName() + "%"), new FilterTransformer());
      SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
      SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
      List<Node> topNodes = new ArrayList<Node>();
      topNodes.addAll(pctx.getTopOps().values());
      ogw.startWalking(topNodes, null);
    }
    return pctx;
  }

  /**
   * Replaces redundant dynamic-pruning IN conditions (both in the Filter
   * predicate and in the filter expression pushed to the TableScan) with
   * the constant TRUE.
   */
  private class FilterTransformer implements SemanticNodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException {
      FilterOperator filter = (FilterOperator) nd;
      FilterDesc desc = filter.getConf();
      // Rule "TS%FIL%" guarantees the TableScan is the stack element below the Filter.
      TableScanOperator ts = (TableScanOperator) stack.get(stack.size() - 2);
      // collect: gather dynamic-list IN clauses and static column comparisons
      // from both the Filter predicate and the pushed-down TS filter.
      CollectContext removalContext = new CollectContext();
      collect(desc.getPredicate(), removalContext);
      CollectContext tsRemovalContext = new CollectContext();
      collect(ts.getConf().getFilterExpr(), tsRemovalContext);
      // Each pair is (IN clause with a dynamic list, its AND parent or null when root).
      for (Pair<ExprNodeDesc,ExprNodeDesc> pair : removalContext.dynamicListNodes) {
        ExprNodeDesc child = pair.left;
        ExprNodeDesc columnDesc = child.getChildren().get(0);
        assert child.getChildren().get(1) instanceof ExprNodeDynamicListDesc;
        ExprNodeDesc parent = pair.right;
        String column = ExprNodeDescUtils.extractColName(columnDesc);
        if (column != null) {
          Table table = ts.getConf().getTableMetadata();
          // Keep the dynamic condition only when the column is a partition key
          // AND no static comparison on the same column exists (i.e. static
          // partition pruning was not already triggered).
          boolean generate = false;
          if (table != null && table.isPartitionKey(column)) {
            generate = true;
            for (ExprNodeDesc filterColumnDesc : removalContext.comparatorNodes) {
              if (columnDesc.isSame(filterColumnDesc)) {
                generate = false;
                break;
              }
            }
          }
          if (!generate) {
            // We can safely remove the condition by replacing it with "true"
            ExprNodeDesc constNode = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);
            if (parent == null) {
              // The IN clause is the whole predicate.
              desc.setPredicate(constNode);
            } else {
              // Replace the child in place, preserving operand order in the AND.
              int i = parent.getChildren().indexOf(child);
              parent.getChildren().remove(i);
              parent.getChildren().add(i, constNode);
            }
            // We remove it from the TS too if it was pushed
            for (Pair<ExprNodeDesc,ExprNodeDesc> tsPair : tsRemovalContext.dynamicListNodes) {
              ExprNodeDesc tsChild = tsPair.left;
              ExprNodeDesc tsParent = tsPair.right;
              if (tsChild.isSame(child)) {
                if (tsParent == null) {
                  ts.getConf().setFilterExpr(null);
                } else {
                  int i = tsParent.getChildren().indexOf(tsChild);
                  if (i != -1) {
                    tsParent.getChildren().remove(i);
                    tsParent.getChildren().add(i, constNode);
                  }
                }
                break;
              }
            }
            if (LOG.isInfoEnabled()) {
              LOG.info("Dynamic pruning condition removed: " + child);
            }
          }
        }
      }
      return false;
    }
  }

  // Entry point: walk the predicate tree rooted at pred (parent == null).
  private static void collect(ExprNodeDesc pred, CollectContext listContext) {
    collect(null, pred, listContext);
  }

  // Recursive walk over AND-connected conjuncts only. Records:
  //  - IN clauses whose second child is a dynamic list (with their parent), and
  //  - columns compared against constants (or expressions foldable to constants).
  private static void collect(ExprNodeDesc parent, ExprNodeDesc child, CollectContext listContext) {
    if (child instanceof ExprNodeGenericFuncDesc &&
        ((ExprNodeGenericFuncDesc)child).getGenericUDF() instanceof GenericUDFIn) {
      if (child.getChildren().get(1) instanceof ExprNodeDynamicListDesc) {
        listContext.dynamicListNodes.add(new Pair<ExprNodeDesc,ExprNodeDesc>(child, parent));
      }
      return;
    }
    if (child instanceof ExprNodeGenericFuncDesc &&
        ((ExprNodeGenericFuncDesc)child).getGenericUDF() instanceof GenericUDFBaseCompare &&
        child.getChildren().size() == 2) {
      ExprNodeDesc leftCol = child.getChildren().get(0);
      ExprNodeDesc rightCol = child.getChildren().get(1);
      ExprNodeColumnDesc leftColDesc = ExprNodeDescUtils.getColumnExpr(leftCol);
      if (leftColDesc != null) {
        // column <op> constant (or constant-foldable expression)
        boolean rightConstant = false;
        if (rightCol instanceof ExprNodeConstantDesc) {
          rightConstant = true;
        } else if (rightCol instanceof ExprNodeGenericFuncDesc) {
          ExprNodeDesc foldedExpr = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc)rightCol);
          rightConstant = foldedExpr != null;
        }
        if (rightConstant) {
          listContext.comparatorNodes.add(leftColDesc);
        }
      } else {
        // constant <op> column (mirror of the branch above)
        ExprNodeColumnDesc rightColDesc = ExprNodeDescUtils.getColumnExpr(rightCol);
        if (rightColDesc != null) {
          boolean leftConstant = false;
          if (leftCol instanceof ExprNodeConstantDesc) {
            leftConstant = true;
          } else if (leftCol instanceof ExprNodeGenericFuncDesc) {
            ExprNodeDesc foldedExpr = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc)leftCol);
            leftConstant = foldedExpr != null;
          }
          if (leftConstant) {
            listContext.comparatorNodes.add(rightColDesc);
          }
        }
      }
      return;
    }
    // Only descend through AND; other boolean structure (OR, NOT) is left intact.
    if (FunctionRegistry.isOpAnd(child)) {
      for (ExprNodeDesc newChild : child.getChildren()) {
        collect(child, newChild, listContext);
      }
    }
  }

  // Holds what a single walk over a predicate tree found.
  private class CollectContext implements NodeProcessorCtx {

    // (IN-with-dynamic-list node, its AND parent or null if root)
    private final List<Pair<ExprNodeDesc,ExprNodeDesc>> dynamicListNodes;
    // columns statically compared against constants
    private final List<ExprNodeDesc> comparatorNodes;

    public CollectContext() {
      this.dynamicListNodes = Lists.<Pair<ExprNodeDesc,ExprNodeDesc>>newArrayList();
      this.comparatorNodes = Lists.<ExprNodeDesc>newArrayList();
    }
  }
}
| |
package mil.nga.giat.geowave.datastore.accumulo.minicluster;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.impl.VFSClassLoader;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.util.VersionUtil;
import org.apache.log4j.Logger;
public class MiniAccumuloClusterFactory
{
private static final Logger LOGGER = Logger.getLogger(MiniAccumuloClusterFactory.class);
protected static final String HADOOP_WINDOWS_UTIL = "winutils.exe";
protected static boolean isYarn() {
return VersionUtil.compareVersions(
VersionInfo.getVersion(),
"2.2.0") >= 0;
}
public static MiniAccumuloClusterImpl newAccumuloCluster(
final MiniAccumuloConfigImpl config,
final Class context )
throws IOException {
final String jarPath = setupPathingJarClassPath(
config.getDir(),
context);
if (jarPath == null) {
// Jar was not successfully created
return null;
}
config.setClasspathItems(jarPath);
MiniAccumuloClusterImpl retVal = new GeoWaveMiniAccumuloClusterImpl(
config);
if (SystemUtils.IS_OS_WINDOWS && isYarn()) {
// this must happen after instantiating Mini
// Accumulo Cluster because it ensures the accumulo
// directory is empty or it will fail, but must
// happen before the cluster is started because yarn
// expects winutils.exe to exist within a bin
// directory in the mini accumulo cluster directory
// (mini accumulo cluster will always set this
// directory as hadoop_home)
LOGGER.info("Running YARN on windows requires a local installation of Hadoop");
LOGGER.info("'HADOOP_HOME' must be set and 'PATH' must contain %HADOOP_HOME%/bin");
final Map<String, String> env = System.getenv();
// HP Fortify "Path Manipulation" false positive
// What Fortify considers "user input" comes only
// from users with OS-level access anyway
String hadoopHome = System.getProperty("hadoop.home.dir");
if (hadoopHome == null) {
hadoopHome = env.get("HADOOP_HOME");
}
boolean success = false;
if (hadoopHome != null) {
final File hadoopDir = new File(
hadoopHome);
if (hadoopDir.exists()) {
final File binDir = new File(
config.getDir(),
"bin");
if (binDir.mkdir()) {
FileUtils.copyFile(
new File(
hadoopDir + File.separator + "bin",
HADOOP_WINDOWS_UTIL),
new File(
binDir,
HADOOP_WINDOWS_UTIL));
success = true;
}
}
}
if (!success) {
LOGGER
.error("'HADOOP_HOME' environment variable is not set or <HADOOP_HOME>/bin/winutils.exe does not exist");
// return mini accumulo cluster anyways
return retVal;
}
}
return retVal;
}
private static String setupPathingJarClassPath(
final File dir,
final Class context )
throws IOException {
final String classpath = getClasspath(context);
final File jarDir = new File(
dir.getParentFile().getAbsolutePath() + File.separator + "pathing");
if (!jarDir.exists()) {
try {
jarDir.mkdirs();
}
catch (final Exception e) {
LOGGER.error("Failed to create pathing jar directory: " + e);
return null;
}
}
final File jarFile = new File(
jarDir,
"pathing.jar");
if (jarFile.exists()) {
try {
jarFile.delete();
}
catch (final Exception e) {
LOGGER.error("Failed to delete old pathing jar: " + e);
return null;
}
}
// build jar
final Manifest manifest = new Manifest();
manifest.getMainAttributes().put(
Attributes.Name.MANIFEST_VERSION,
"1.0");
manifest.getMainAttributes().put(
Attributes.Name.CLASS_PATH,
classpath);
try (final JarOutputStream target = new JarOutputStream(
new FileOutputStream(
jarFile),
manifest)) {
target.close();
}
return jarFile.getAbsolutePath();
}
/**
 * Computes a space-separated classpath string (suitable for a jar manifest
 * {@code Class-Path} attribute) from the classloader hierarchy of the given
 * class.
 *
 * @param context class whose classloaders are inspected
 * @return the classpath entries joined by single spaces (empty string if no
 *         entries were found)
 * @throws IOException if a URL cannot be converted to a file path
 * @throws IllegalArgumentException if a classloader in the chain is neither
 *         a {@link URLClassLoader} nor a {@code VFSClassLoader}
 */
private static String getClasspath(
        final Class context )
        throws IOException {
    try {
        // walk from the class's own loader up to the bootstrap loader, then
        // reverse so parent loaders contribute their entries first
        final ArrayList<ClassLoader> classloaders = new ArrayList<ClassLoader>();
        ClassLoader cl = context.getClassLoader();
        while (cl != null) {
            classloaders.add(cl);
            cl = cl.getParent();
        }
        Collections.reverse(classloaders);
        final StringBuilder classpathBuilder = new StringBuilder();
        for (int i = 0; i < classloaders.size(); i++) {
            final ClassLoader classLoader = classloaders.get(i);
            if (classLoader instanceof URLClassLoader) {
                for (final URL u : ((URLClassLoader) classLoader).getURLs()) {
                    append(
                            classpathBuilder,
                            u);
                }
            }
            else if (classLoader instanceof VFSClassLoader) {
                final VFSClassLoader vcl = (VFSClassLoader) classLoader;
                for (final FileObject f : vcl.getFileObjects()) {
                    append(
                            classpathBuilder,
                            f.getURL());
                }
            }
            else {
                throw new IllegalArgumentException(
                        "Unknown classloader type : " + classLoader.getClass().getName());
            }
        }
        // append() prefixes every entry with a space; drop the leading one.
        // Guard against an empty builder (no entries at all) which would make
        // deleteCharAt(0) throw StringIndexOutOfBoundsException.
        if (classpathBuilder.length() > 0) {
            classpathBuilder.deleteCharAt(0);
        }
        return classpathBuilder.toString();
    }
    catch (final URISyntaxException e) {
        throw new IOException(
                e);
    }
}
/**
 * Returns {@code true} when the given file is a directory that directly
 * contains at least one file whose name ends with {@code "site.xml"} (i.e.
 * a hadoop/accumulo configuration directory).
 *
 * @param f candidate file or directory
 * @return whether {@code f} is a directory holding a *site.xml file
 */
private static boolean containsSiteFile(
        final File f ) {
    if (!f.isDirectory()) {
        return false;
    }
    final File[] matches = f.listFiles(new FileFilter() {
        @Override
        public boolean accept(
                final File candidate ) {
            return candidate.getName().endsWith(
                    "site.xml");
        }
    });
    // listFiles() yields null on I/O error; treat that as "no site files"
    return (matches != null) && (matches.length > 0);
}
/**
 * Appends the filesystem path behind the given URL to the classpath being
 * built, preceded by a single space separator. Entries whose directory holds
 * hadoop/accumulo *site.xml configuration files are skipped so their config
 * does not leak onto the classpath. Directory entries get a trailing slash.
 *
 * @param classpathBuilder accumulator for the space-separated classpath
 * @param url classpath entry to add
 * @throws URISyntaxException if the URL cannot be converted to a URI
 */
private static void append(
        final StringBuilder classpathBuilder,
        final URL url )
        throws URISyntaxException {
    final File file = new File(
            url.toURI());
    // do not include dirs containing hadoop or accumulo site files
    if (containsSiteFile(file)) {
        return;
    }
    // rewrite Windows paths into a forward-slash file: form usable in a
    // manifest Class-Path entry
    final String normalized = file.getAbsolutePath().replace(
            "C:\\",
            "file:/C:/").replace(
            "\\",
            "/");
    classpathBuilder.append(
            " ").append(
            normalized);
    if (file.isDirectory()) {
        classpathBuilder.append("/");
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
@ESIntegTestCase.SuiteScopeTestCase
public class RangeIT extends ESIntegTestCase {
private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
private static final String MULTI_VALUED_FIELD_NAME = "l_values";
static int numDocs;
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
    // Install the custom script plugin on every test node so the inline
    // mock scripts used by these tests can be resolved.
    return Collections.singleton(CustomScriptPlugin.class);
}
/**
 * Test plugin registering the two mock scripts these range tests execute:
 * one that returns the single-valued field's long value, and one that
 * returns the multi-valued field's list of longs.
 */
public static class CustomScriptPlugin extends AggregationTestScriptsPlugin {
    @Override
    @SuppressWarnings("unchecked")
    protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
        final Map<String, Function<Map<String, Object>, Object>> registry = super.pluginScripts();
        // "doc['l_value'].value" -> the document's single long value
        registry.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", params -> {
            final Map<?, ?> docLookup = (Map) params.get("doc");
            final ScriptDocValues.Longs longs = (ScriptDocValues.Longs) docLookup.get(SINGLE_VALUED_FIELD_NAME);
            return longs.getValue();
        });
        // "doc['l_values'].values" -> all of the document's long values
        registry.put("doc['" + MULTI_VALUED_FIELD_NAME + "'].values", params -> {
            final Map<?, ?> docLookup = (Map) params.get("doc");
            final ScriptDocValues.Longs longs = (ScriptDocValues.Longs) docLookup.get(MULTI_VALUED_FIELD_NAME);
            return longs.getValues();
        });
        return registry;
    }
}
@Override
public void setupSuiteScopeCluster() throws Exception {
    // "idx": numDocs (random 10..20) docs where doc i has
    //   l_value  = i+1            (values 1..numDocs)
    //   l_values = [i+1, i+2]     (values 1..numDocs+1, interior ones twice)
    createIndex("idx");
    numDocs = randomIntBetween(10, 20);
    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (int i = 0; i < numDocs; i++) {
        builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
                .startObject()
                .field(SINGLE_VALUED_FIELD_NAME, i+1)
                .startArray(MULTI_VALUED_FIELD_NAME).value(i+1).value(i+2).endArray()
                .endObject()));
    }
    // "idx_unmapped": no docs and no mapping for the tested fields;
    // exercised by testUnmapped/testPartiallyUnmapped
    createIndex("idx_unmapped");
    prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").execute().actionGet();
    for (int i = 0; i < 2; i++) {
        builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()
                .startObject()
                // shift sequence by 1, to ensure we have negative values, and value 3 on the edge of the tested ranges
                .field(SINGLE_VALUED_FIELD_NAME, i * 2 - 1)
                .endObject()));
    }
    // index with refresh so the docs are immediately searchable
    indexRandom(true, builders);
    ensureSearchable();
}
/**
 * Terms aggregation over the multi-valued field with a range sub-aggregation
 * over the single-valued field.
 *
 * Doc i (l_value = i, for i in 1..numDocs) contributes l_values [i, i+1], so
 * term t is produced by docs t-1 and t; only terms 1 and numDocs+1 come from
 * a single doc. The range sub-agg then buckets those 1-2 parent docs by
 * their l_value into (-inf,3), [3,6) and [6,+inf).
 */
public void testRangeAsSubAggregation() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).size(100)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(
                            range("range").field(SINGLE_VALUED_FIELD_NAME)
                                    .addUnboundedTo(3)
                                    .addRange(3, 6)
                                    .addUnboundedFrom(6)))
            .execute().actionGet();
    assertSearchResponse(response);
    Terms terms = response.getAggregations().get("terms");
    assertThat(terms, notNullValue());
    assertThat(terms.getBuckets().size(), equalTo(numDocs + 1));
    for (int i = 1; i < numDocs + 2; ++i) {
        Terms.Bucket bucket = terms.getBucketByKey("" + i);
        assertThat(bucket, notNullValue());
        // edge terms (1 and numDocs+1) appear in exactly one doc, all others in two
        final long docCount = i == 1 || i == numDocs + 1 ? 1 : 2;
        assertThat(bucket.getDocCount(), equalTo(docCount));
        Range range = bucket.getAggregations().get("range");
        List<? extends Bucket> buckets = range.getBuckets();
        // bucket (-inf, 3): parent docs with l_value 1 or 2
        Range.Bucket rangeBucket = buckets.get(0);
        assertThat(rangeBucket.getKey(), equalTo("*-3.0"));
        assertThat(rangeBucket.getKeyAsString(), equalTo("*-3.0"));
        assertThat(rangeBucket, notNullValue());
        assertThat(rangeBucket.getFromAsString(), nullValue());
        assertThat(rangeBucket.getToAsString(), equalTo("3.0"));
        if (i == 1 || i == 3) {
            assertThat(rangeBucket.getDocCount(), equalTo(1L));
        } else if (i == 2) {
            assertThat(rangeBucket.getDocCount(), equalTo(2L));
        } else {
            assertThat(rangeBucket.getDocCount(), equalTo(0L));
        }
        // bucket [3, 6): parent docs with l_value 3, 4 or 5
        rangeBucket = buckets.get(1);
        assertThat(rangeBucket.getKey(), equalTo("3.0-6.0"));
        assertThat(rangeBucket.getKeyAsString(), equalTo("3.0-6.0"));
        assertThat(rangeBucket, notNullValue());
        assertThat(rangeBucket.getFromAsString(), equalTo("3.0"));
        assertThat(rangeBucket.getToAsString(), equalTo("6.0"));
        if (i == 3 || i == 6) {
            assertThat(rangeBucket.getDocCount(), equalTo(1L));
        } else if (i == 4 || i == 5) {
            assertThat(rangeBucket.getDocCount(), equalTo(2L));
        } else {
            assertThat(rangeBucket.getDocCount(), equalTo(0L));
        }
        // bucket [6, +inf): parent docs with l_value >= 6
        rangeBucket = buckets.get(2);
        assertThat(rangeBucket.getKey(), equalTo("6.0-*"));
        assertThat(rangeBucket.getKeyAsString(), equalTo("6.0-*"));
        assertThat(rangeBucket, notNullValue());
        assertThat(rangeBucket.getFromAsString(), equalTo("6.0"));
        assertThat(rangeBucket.getToAsString(), nullValue());
        if (i == 6 || i == numDocs + 1) {
            assertThat(rangeBucket.getDocCount(), equalTo(1L));
        } else if (i < 6) {
            assertThat(rangeBucket.getDocCount(), equalTo(0L));
        } else {
            assertThat(rangeBucket.getDocCount(), equalTo(2L));
        }
    }
}
/**
 * Range aggregation over the single-valued long field (values 1..numDocs)
 * with three buckets: (-inf, 3), [3, 6) and [6, +inf). They must hold
 * 2, 3 and numDocs - 5 docs respectively.
 */
public void testSingleValueField() throws Exception {
    final SearchResponse searchResponse = client().prepareSearch("idx")
            .addAggregation(range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .get();
    assertSearchResponse(searchResponse);
    final Range agg = searchResponse.getAggregations().get("range");
    assertThat(agg, notNullValue());
    assertThat(agg.getName(), equalTo("range"));
    final List<? extends Bucket> rangeBuckets = agg.getBuckets();
    assertThat(rangeBuckets.size(), equalTo(3));
    // (-inf, 3): values 1 and 2
    Range.Bucket b = rangeBuckets.get(0);
    assertThat(b, notNullValue());
    assertThat(b.getKey(), equalTo("*-3.0"));
    assertThat(((Number) b.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) b.getTo()).doubleValue(), equalTo(3.0));
    assertThat(b.getFromAsString(), nullValue());
    assertThat(b.getToAsString(), equalTo("3.0"));
    assertThat(b.getDocCount(), equalTo(2L));
    // [3, 6): values 3, 4 and 5
    b = rangeBuckets.get(1);
    assertThat(b, notNullValue());
    assertThat(b.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) b.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) b.getTo()).doubleValue(), equalTo(6.0));
    assertThat(b.getFromAsString(), equalTo("3.0"));
    assertThat(b.getToAsString(), equalTo("6.0"));
    assertThat(b.getDocCount(), equalTo(3L));
    // [6, +inf): the remaining numDocs - 5 values
    b = rangeBuckets.get(2);
    assertThat(b, notNullValue());
    assertThat(b.getKey(), equalTo("6.0-*"));
    assertThat(((Number) b.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) b.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(b.getFromAsString(), equalTo("6.0"));
    assertThat(b.getToAsString(), nullValue());
    assertThat(b.getDocCount(), equalTo(numDocs - 5L));
}
/**
 * Same three ranges as testSingleValueField, but with format "#": bucket
 * keys and boundary strings render as integers ("*-3", "3-6", "6-*")
 * instead of doubles, while the numeric from/to values are unchanged.
 */
public void testSingleValueFieldWithFormat() throws Exception {
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6).format("#"))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // (-inf, 3): values 1 and 2
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    // [3, 6): values 3, 4 and 5
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3-6"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3"));
    assertThat(bucket.getToAsString(), equalTo("6"));
    assertThat(bucket.getDocCount(), equalTo(3L));
    // [6, +inf): the remaining numDocs - 5 values
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
}
/**
 * Same three ranges as testSingleValueField, but each range is given an
 * explicit key ("r1", "r2", "r3") which replaces the auto-generated
 * "from-to" bucket key; boundary strings keep their double rendering.
 */
public void testSingleValueFieldWithCustomKey() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .addUnboundedTo("r1", 3)
                    .addRange("r2", 3, 6)
                    .addUnboundedFrom("r3", 6))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // r1 = (-inf, 3): values 1 and 2
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("r1"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    // r2 = [3, 6): values 3, 4 and 5
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("r2"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(3L));
    // r3 = [6, +inf): the remaining numDocs - 5 values
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("r3"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
}
/**
 * Range aggregation with a sum sub-aggregation over the same single-valued
 * field, also verifying the aggregation property paths "_key", "_count" and
 * "sum.value" return per-bucket arrays consistent with the buckets.
 */
public void testSingleValuedFieldWithSubAggregation() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6)
                    .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // per-bucket arrays resolved via aggregation property paths
    Object[] propertiesKeys = (Object[]) range.getProperty("_key");
    Object[] propertiesDocCounts = (Object[]) range.getProperty("_count");
    Object[] propertiesCounts = (Object[]) range.getProperty("sum.value");
    // (-inf, 3): values 1 and 2, sum 3
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    Sum sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    assertThat(sum.getValue(), equalTo(3.0)); // 1 + 2
    assertThat(propertiesKeys[0], equalTo("*-3.0"));
    assertThat(propertiesDocCounts[0], equalTo(2L));
    assertThat(propertiesCounts[0], equalTo(3.0));
    // [3, 6): values 3, 4 and 5, sum 12
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(3L));
    sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    assertThat(sum.getValue(), equalTo(12.0)); // 3 + 4 + 5
    assertThat(propertiesKeys[1], equalTo("3.0-6.0"));
    assertThat(propertiesDocCounts[1], equalTo(3L));
    assertThat(propertiesCounts[1], equalTo(12.0));
    // [6, +inf): values 6..numDocs, sum computed below
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
    sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    // expected sum of the values 6..numDocs (doc i carries value i+1)
    long total = 0;
    for (int i = 5; i < numDocs; ++i) {
        total += i + 1;
    }
    assertThat(sum.getValue(), equalTo((double) total));
    assertThat(propertiesKeys[2], equalTo("6.0-*"));
    assertThat(propertiesDocCounts[2], equalTo(numDocs - 5L));
    assertThat(propertiesCounts[2], equalTo((double) total));
}
/**
 * Value script "_value + 1" shifts every l_value up by one before bucketing,
 * so effective values are 2..numDocs+1: (-inf,3) holds 1 doc, [3,6) holds 3
 * docs, [6,+inf) holds numDocs - 4 docs.
 */
public void testSingleValuedFieldWithValueScript() throws Exception {
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    range("range")
                            .field(SINGLE_VALUED_FIELD_NAME)
                            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))
                            .addUnboundedTo(3)
                            .addRange(3, 6)
                            .addUnboundedFrom(6))
            .get();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(1L)); // 2
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(3L)); // 3, 4, 5
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    // shifted values 6..numDocs+1
    assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
}
/*
[1, 2]
[2, 3]
[3, 4]
[4, 5]
[5, 6]
[6, 7]
[7, 8]
[8, 9]
[9, 10]
[10, 11]
*/
/**
 * Range aggregation over the multi-valued field (doc i holds [i, i+1] for
 * i in 1..numDocs). A doc is counted in a bucket if ANY of its values falls
 * in that range, but at most once per bucket: (-inf,3) gets 2 docs, [3,6)
 * gets 4 docs, [6,+inf) gets numDocs - 4 docs.
 */
public void testMultiValuedField() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(range("range")
                    .field(MULTI_VALUED_FIELD_NAME)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // (-inf, 3): docs [1,2] and [2,3]
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    // [3, 6): docs [2,3], [3,4], [4,5] and [5,6]
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(4L));
    // [6, +inf): docs from [5,6] onwards
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
}
/*
[2, 3]
[3, 4]
[4, 5]
[5, 6]
[6, 7]
[7, 8]
[8, 9]
[9, 10]
[10, 11]
[11, 12]
*/
/**
 * Multi-valued field with value script "_value + 1": each doc's pair [i, i+1]
 * becomes [i+1, i+2], so (-inf,3) holds 1 doc, [3,6) holds 4 docs, and
 * [6,+inf) holds numDocs - 3 docs.
 */
public void testMultiValuedFieldWithValueScript() throws Exception {
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    range("range")
                            .field(MULTI_VALUED_FIELD_NAME)
                            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))
                            .addUnboundedTo(3)
                            .addRange(3, 6)
                            .addUnboundedFrom(6))
            .get();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // (-inf, 3): only the doc shifted to [2,3]
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(1L));
    // [3, 6): shifted docs [2,3], [3,4], [4,5] and [5,6]
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(4L));
    // [6, +inf): all remaining shifted docs
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 3L));
}
/*
[2, 3]
[3, 4]
[4, 5]
[5, 6]
[6, 7]
[7, 8]
[8, 9]
[9, 10]
[10, 11]
[11, 12]
r1: 2
r2: 3, 3, 4, 4, 5, 5
r3: 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12
*/
/**
 * Range aggregation driven entirely by a script (no field) that reads the
 * single-valued field's doc value; expected counts are identical to
 * testSingleValueField: 2 / 3 / numDocs - 5.
 */
public void testScriptSingleValue() throws Exception {
    // mock script registered by CustomScriptPlugin: returns doc['l_value'].value
    Script script =
        new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap());
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    range("range")
                            .script(script)
                            .addUnboundedTo(3)
                            .addRange(3, 6)
                            .addUnboundedFrom(6))
            .get();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // (-inf, 3): values 1 and 2
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    // [3, 6): values 3, 4 and 5
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(3L));
    // [6, +inf): the remaining numDocs - 5 values
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
}
/**
 * Ranges that no document can fall into — (-inf, -1) and [1000, +inf) for a
 * field whose values are 1..numDocs+1 — must still be returned, each with a
 * doc count of zero.
 */
public void testEmptyRange() throws Exception {
    final SearchResponse searchResponse = client().prepareSearch("idx")
            .addAggregation(range("range")
                    .field(MULTI_VALUED_FIELD_NAME)
                    .addUnboundedTo(-1)
                    .addUnboundedFrom(1000))
            .get();
    assertSearchResponse(searchResponse);
    final Range agg = searchResponse.getAggregations().get("range");
    assertThat(agg, notNullValue());
    assertThat(agg.getName(), equalTo("range"));
    final List<? extends Bucket> rangeBuckets = agg.getBuckets();
    assertThat(agg.getBuckets().size(), equalTo(2));
    // (-inf, -1.0): no values are negative
    Range.Bucket b = rangeBuckets.get(0);
    assertThat(b, notNullValue());
    assertThat(b.getKey(), equalTo("*--1.0"));
    assertThat(((Number) b.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) b.getTo()).doubleValue(), equalTo(-1.0));
    assertThat(b.getFromAsString(), nullValue());
    assertThat(b.getToAsString(), equalTo("-1.0"));
    assertThat(b.getDocCount(), equalTo(0L));
    // [1000.0, +inf): all values are far below 1000
    b = rangeBuckets.get(1);
    assertThat(b, notNullValue());
    assertThat(b.getKey(), equalTo("1000.0-*"));
    assertThat(((Number) b.getFrom()).doubleValue(), equalTo(1000d));
    assertThat(((Number) b.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(b.getFromAsString(), equalTo("1000.0"));
    assertThat(b.getToAsString(), nullValue());
    assertThat(b.getDocCount(), equalTo(0L));
}
/**
 * Range aggregation driven by a script returning the multi-valued field's
 * list of values; expected counts match testMultiValuedField:
 * 2 / 4 / numDocs - 4.
 */
public void testScriptMultiValued() throws Exception {
    // mock script registered by CustomScriptPlugin: returns doc['l_values'].values
    Script script =
        new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "'].values", Collections.emptyMap());
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    range("range")
                            .script(script)
                            .addUnboundedTo(3)
                            .addRange(3, 6)
                            .addUnboundedFrom(6))
            .get();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // (-inf, 3): docs [1,2] and [2,3]
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    // [3, 6): docs [2,3], [3,4], [4,5] and [5,6]
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(4L));
    // [6, +inf): docs from [5,6] onwards
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
}
/*
[1, 2]
[2, 3]
[3, 4]
[4, 5]
[5, 6]
[6, 7]
[7, 8]
[8, 9]
[9, 10]
[10, 11]
r1: 1, 2, 2
r2: 3, 3, 4, 4, 5, 5
r3: 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11
*/
/**
 * Searching only the index where the field is unmapped must still return all
 * three range buckets, each with a doc count of zero.
 */
public void testUnmapped() throws Exception {
    SearchResponse response = client().prepareSearch("idx_unmapped")
            .addAggregation(range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // buckets keep their structure (keys, bounds) but hold no documents
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(0L));
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(0L));
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(0L));
}
/**
 * Searching both the mapped and unmapped indices: the unmapped index
 * contributes nothing, so counts match testSingleValueField exactly
 * (2 / 3 / numDocs - 5).
 */
public void testPartiallyUnmapped() throws Exception {
    // wait for the unmapped index to be at least yellow before querying
    client().admin().cluster().prepareHealth("idx_unmapped").setWaitForYellowStatus().execute().actionGet();
    SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
            .addAggregation(range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(range.getBuckets().size(), equalTo(3));
    // (-inf, 3): values 1 and 2
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("3.0"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    // [3, 6): values 3, 4 and 5
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3.0-6.0"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
    assertThat(bucket.getFromAsString(), equalTo("3.0"));
    assertThat(bucket.getToAsString(), equalTo("6.0"));
    assertThat(bucket.getDocCount(), equalTo(3L));
    // [6, +inf): the remaining numDocs - 5 values
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getFromAsString(), equalTo("6.0"));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
}
/**
 * Verifies that overlapping ranges are allowed: each range bucket counts every
 * document matching its own interval independently, so a document may be counted
 * in several buckets. Runs against the multi-valued field.
 */
public void testOverlappingRanges() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(range("range")
.field(MULTI_VALUED_FIELD_NAME)
.addUnboundedTo(5)
.addRange(3, 6)
.addRange(4, 5)
.addUnboundedFrom(4))
.execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(4));
// Bucket 0: (-inf, 5).
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-5.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("5.0"));
assertThat(bucket.getDocCount(), equalTo(4L));
// Bucket 1: [3, 6) -- overlaps both the bucket above and the one below.
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4L));
// Bucket 2: [4, 5) -- fully contained in both previous ranges.
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("4.0-5.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0));
assertThat(bucket.getFromAsString(), equalTo("4.0"));
assertThat(bucket.getToAsString(), equalTo("5.0"));
assertThat(bucket.getDocCount(), equalTo(2L));
// Bucket 3: [4, +inf) -- count derived from the numDocs fixture field.
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("4.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("4.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
}
/**
 * Verifies that a range sub-aggregation under an empty histogram bucket
 * (minDocCount == 0) still materializes its configured range with a doc count
 * of zero, instead of being dropped.
 */
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
.addAggregation(
histogram("histo")
.field(SINGLE_VALUED_FIELD_NAME)
.interval(1L)
.minDocCount(0)
.subAggregation(
range("range")
.field(SINGLE_VALUED_FIELD_NAME)
.addRange("0-2", 0.0, 2.0)))
.get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
// Bucket 1 of the histogram is empty in the fixture; that's the case under test.
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Range range = bucket.getAggregations().get("range");
// TODO: use diamond once JI-9019884 is fixed
List<Range.Bucket> buckets = new ArrayList<>(range.getBuckets());
assertThat(range, Matchers.notNullValue());
assertThat(range.getName(), equalTo("range"));
assertThat(buckets.size(), is(1));
// The explicitly-keyed range keeps its key, bounds, and a zero doc count.
assertThat(buckets.get(0).getKey(), equalTo("0-2"));
assertThat(((Number) buckets.get(0).getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) buckets.get(0).getTo()).doubleValue(), equalTo(2.0));
assertThat(buckets.get(0).getFromAsString(), equalTo("0.0"));
assertThat(buckets.get(0).getToAsString(), equalTo("2.0"));
assertThat(buckets.get(0).getDocCount(), equalTo(0L));
}
/**
 * Make sure that a request using a script does not get cached and a request
 * not using a script does get cached.
 */
public void testDontCacheScripts() throws Exception {
assertAcked(prepareCreate("cache_test_idx").addMapping("type", "i", "type=integer")
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
.get());
indexRandom(true,
client().prepareIndex("cache_test_idx", "type", "1").setSource(jsonBuilder().startObject().field("i", 1).endObject()),
client().prepareIndex("cache_test_idx", "type", "2").setSource(jsonBuilder().startObject().field("i", 2).endObject()));
// Make sure we are starting with a clear cache
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getHitCount(), equalTo(0L));
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(0L));
// Test that a request using a script does not get cached.
// (A previously-built params map was removed here: the script below passes
// Collections.emptyMap() and the map was never used.)
SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(
range("foo").field("i").script(
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())).addRange(0, 10))
.get();
assertSearchResponse(r);
// Neither a hit nor a miss: the scripted request bypassed the cache entirely.
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getHitCount(), equalTo(0L));
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(0L));
// To make sure that the cache is working test that a request not using
// a script is cached
r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(range("foo").field("i").addRange(0, 10)).get();
assertSearchResponse(r);
// First cacheable request: recorded as a miss (and stored), not a hit.
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getHitCount(), equalTo(0L));
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.xml.security.test.stax.signature;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.Key;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.spec.DSAPublicKeySpec;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import java.security.spec.RSAPublicKeySpec;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.xml.security.exceptions.XMLSecurityException;
import org.apache.xml.security.stax.config.Init;
import org.apache.xml.security.stax.ext.InboundXMLSec;
import org.apache.xml.security.stax.ext.XMLSec;
import org.apache.xml.security.stax.ext.XMLSecurityProperties;
import org.apache.xml.security.stax.impl.securityToken.KeyNameSecurityToken;
import org.apache.xml.security.stax.impl.securityToken.X509IssuerSerialSecurityToken;
import org.apache.xml.security.stax.impl.securityToken.X509SecurityToken;
import org.apache.xml.security.stax.impl.securityToken.X509SubjectNameSecurityToken;
import org.apache.xml.security.stax.securityEvent.DefaultTokenSecurityEvent;
import org.apache.xml.security.stax.securityEvent.KeyNameTokenSecurityEvent;
import org.apache.xml.security.stax.securityEvent.SecurityEventConstants;
import org.apache.xml.security.stax.securityEvent.X509TokenSecurityEvent;
import org.apache.xml.security.stax.securityToken.SecurityTokenConstants;
import org.apache.xml.security.test.stax.utils.StAX2DOM;
import org.apache.xml.security.test.stax.utils.XMLSecEventAllocator;
import org.apache.xml.security.utils.XMLUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Document;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
* This test is to ensure interoperability with the examples provided by the IAIK
* XML Signature implementation. Thanks to Gregor Karlinger who provided these
* test vectors. They are located in the directory <CODE>data/at/iaik/ixsil/</CODE>.
*/
public class IAIKTest {
// Define the Keys
// Hard-coded IXSIL test-vector key material (decimal strings): DSA public key
// parameters (y, p, q, g) and an RSA public key (modulus + public exponent).
private static final String DSA_Y =
"33765117117947274661410382382650381161343617353664210170104406353610701044610078240124960165589268013959628883158481521066490826259260800878347905093229352801096566573507150438307560047568318507187154066482564350264253492725510108330786058643267447097509233135065057837400865193836500518383366090134577741053";
private static final String DSA_P =
"91634265413269728335373456840902298947347457680573446480385884712203252882476860316549099567586720335306748578940814977769093940974266715233397005957274714637390846659304524279579796384844387472915589310715455237001400834751102257352922064898227481939437670342534515495271294497038496656824770631295812638999";
private static final String DSA_Q =
"1429042367422631366787309673414805238266287675163";
private static final String DSA_G =
"55996752437939033808848513898546387171938363874894496914563143236312388388575433783546897866725079197988900114055877651265845210275099560192808554894952746896447422004598952101382809226581856515647962078133491799837520059128557664983865646964858235956075258101815411978037059289614937207339691043148996572947";
private static final String RSA_MOD =
"123741519167989388559377626745542702486926628431631931688706922056140679850039257167520167484412112276535334078519003803614712993739893643126140460918237455879023461779027296599477635539211426788386258873478147007239191180574000289143927884425647619290073015083375160571949522764083669597074190296532088216887";
private static final String RSA_PUB =
"3";
// StAX factory (configured in setUp with the XMLSec event allocator) used to
// build the stream readers that are fed into the inbound security processor.
private XMLInputFactory xmlInputFactory;
private TransformerFactory transformerFactory = TransformerFactory.newInstance();
/** Initializes both the StAX and the DOM flavors of the security library before each test. */
@BeforeEach
public void setUp() throws Exception {
Init.init(IAIKTest.class.getClassLoader().getResource("security-config.xml").toURI(),
this.getClass());
org.apache.xml.security.Init.init();
xmlInputFactory = XMLInputFactory.newInstance();
xmlInputFactory.setEventAllocator(new XMLSecEventAllocator());
}
/** Verifies an HMAC-SHA1 signed IXSIL document using the shared secret "secret". */
@Test
public void test_signatureAlgorithms_signatures_hMACSignature() throws Exception {
// Read in plaintext document
InputStream sourceDocument =
this.getClass().getClassLoader().getResourceAsStream(
"at/iaik/ixsil/signatureAlgorithms/signatures/hMACSignature.xml");
Document document = XMLUtils.read(sourceDocument, false);
// Set up the Key
byte[] hmacKey = "secret".getBytes(StandardCharsets.US_ASCII);
SecretKey key = new SecretKeySpec(hmacKey, "http://www.w3.org/2000/09/xmldsig#hmac-sha1");
// XMLUtils.outputDOM(document, System.out);
// Convert Document to a Stream Reader
javax.xml.transform.Transformer transformer = transformerFactory.newTransformer();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
transformer.transform(new DOMSource(document), new StreamResult(baos));
XMLStreamReader xmlStreamReader = null;
// NOTE(review): the stream is closed before the reader is consumed further down;
// this is harmless only because ByteArrayInputStream.close() is a no-op.
try (InputStream is = new ByteArrayInputStream(baos.toByteArray())) {
xmlStreamReader = xmlInputFactory.createXMLStreamReader(is);
}
// Verify signature
XMLSecurityProperties properties = new XMLSecurityProperties();
properties.setSignatureVerificationKey(key);
InboundXMLSec inboundXMLSec = XMLSec.getInboundWSSec(properties);
TestSecurityEventListener securityEventListener = new TestSecurityEventListener();
XMLStreamReader securityStreamReader =
inboundXMLSec.processInMessage(xmlStreamReader, null, securityEventListener);
StAX2DOM.readDoc(securityStreamReader);
// Check the SecurityEvents
checkSignatureToken(securityEventListener, key, SecurityTokenConstants.KeyIdentifier_NoKeyInfo);
}
/** Verifies that a signature with a truncated (too short) HMAC output is rejected. */
@Test
public void test_signatureAlgorithms_signatures_hMACShortSignature() throws Exception {
// Read in plaintext document
InputStream sourceDocument =
this.getClass().getClassLoader().getResourceAsStream(
"at/iaik/ixsil/signatureAlgorithms/signatures/hMACShortSignature.xml");
Document document = XMLUtils.read(sourceDocument, false);
// Set up the Key
byte[] hmacKey = "secret".getBytes(StandardCharsets.US_ASCII);
SecretKey key = new SecretKeySpec(hmacKey, "http://www.w3.org/2000/09/xmldsig#hmac-sha1");
// XMLUtils.outputDOM(document, System.out);
// Convert Document to a Stream Reader
javax.xml.transform.Transformer transformer = transformerFactory.newTransformer();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
transformer.transform(new DOMSource(document), new StreamResult(baos));
final XMLStreamReader xmlStreamReader =
xmlInputFactory.createXMLStreamReader(new ByteArrayInputStream(baos.toByteArray()));
// Verify signature
XMLSecurityProperties properties = new XMLSecurityProperties();
properties.setSignatureVerificationKey(key);
InboundXMLSec inboundXMLSec = XMLSec.getInboundWSSec(properties);
XMLStreamReader securityStreamReader = inboundXMLSec.processInMessage(xmlStreamReader);
// Core validation must fail; the failure surfaces as an XMLSecurityException
// wrapped in the XMLStreamException thrown while reading the secured stream.
try {
StAX2DOM.readDoc(securityStreamReader);
fail("Failure expected on a short HMAC length");
} catch (XMLStreamException ex) {
assertTrue(ex.getCause() instanceof XMLSecurityException);
assertEquals("INVALID signature -- core validation failed.", ex.getCause().getMessage());
}
}
/** Verifies a DSA-signed IXSIL document; the key is taken from the document's KeyValue. */
@Test
public void test_signatureAlgorithms_signatures_dSASignature() throws Exception {
// Read in plaintext document
InputStream sourceDocument =
this.getClass().getClassLoader().getResourceAsStream(
"at/iaik/ixsil/signatureAlgorithms/signatures/dSASignature.xml");
Document document = XMLUtils.read(sourceDocument, false);
// XMLUtils.outputDOM(document, System.out);
// Convert Document to a Stream Reader
javax.xml.transform.Transformer transformer = transformerFactory.newTransformer();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
transformer.transform(new DOMSource(document), new StreamResult(baos));
XMLStreamReader xmlStreamReader = null;
// Safe only because ByteArrayInputStream.close() is a no-op (see note above).
try (InputStream is = new ByteArrayInputStream(baos.toByteArray())) {
xmlStreamReader = xmlInputFactory.createXMLStreamReader(is);
}
// Verify signature -- no verification key is set, so it must come from the KeyInfo.
XMLSecurityProperties properties = new XMLSecurityProperties();
InboundXMLSec inboundXMLSec = XMLSec.getInboundWSSec(properties);
TestSecurityEventListener securityEventListener = new TestSecurityEventListener();
XMLStreamReader securityStreamReader =
inboundXMLSec.processInMessage(xmlStreamReader, null, securityEventListener);
StAX2DOM.readDoc(securityStreamReader);
// Check the SecurityEvents
checkSignatureToken(securityEventListener, getPublicKey("DSA"),
SecurityTokenConstants.KeyIdentifier_KeyValue);
}
/** Verifies an RSA-signed IXSIL document; the key is taken from the document's KeyValue. */
@Test
public void test_signatureAlgorithms_signatures_rSASignature() throws Exception {
// Read in plaintext document
InputStream sourceDocument =
this.getClass().getClassLoader().getResourceAsStream(
"at/iaik/ixsil/signatureAlgorithms/signatures/rSASignature.xml");
Document document = XMLUtils.read(sourceDocument, false);
// XMLUtils.outputDOM(document, System.out);
// Convert Document to a Stream Reader
javax.xml.transform.Transformer transformer = transformerFactory.newTransformer();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
transformer.transform(new DOMSource(document), new StreamResult(baos));
XMLStreamReader xmlStreamReader = null;
// Safe only because ByteArrayInputStream.close() is a no-op (see note above).
try (InputStream is = new ByteArrayInputStream(baos.toByteArray())) {
xmlStreamReader = xmlInputFactory.createXMLStreamReader(is);
}
// Verify signature -- no verification key is set, so it must come from the KeyInfo.
XMLSecurityProperties properties = new XMLSecurityProperties();
InboundXMLSec inboundXMLSec = XMLSec.getInboundWSSec(properties);
TestSecurityEventListener securityEventListener = new TestSecurityEventListener();
XMLStreamReader securityStreamReader =
inboundXMLSec.processInMessage(xmlStreamReader, null, securityEventListener);
StAX2DOM.readDoc(securityStreamReader);
// Check the SecurityEvents
checkSignatureToken(securityEventListener, getPublicKey("RSA"),
SecurityTokenConstants.KeyIdentifier_KeyValue);
}
/** Verifies an enveloped-signature transform document with an explicitly supplied RSA key. */
@Test
public void test_transforms_signatures_envelopedSignatureSignature() throws Exception {
// Read in plaintext document
InputStream sourceDocument =
this.getClass().getClassLoader().getResourceAsStream(
"at/iaik/ixsil/transforms/signatures/envelopedSignatureSignature.xml");
Document document = XMLUtils.read(sourceDocument, false);
// Set up the Key
Key publicKey = getPublicKey("RSA");
// XMLUtils.outputDOM(document, System.out);
// Convert Document to a Stream Reader
javax.xml.transform.Transformer transformer = transformerFactory.newTransformer();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
transformer.transform(new DOMSource(document), new StreamResult(baos));
XMLStreamReader xmlStreamReader = null;
// Safe only because ByteArrayInputStream.close() is a no-op (see note above).
try (InputStream is = new ByteArrayInputStream(baos.toByteArray())) {
xmlStreamReader = xmlInputFactory.createXMLStreamReader(is);
}
// Verify signature
XMLSecurityProperties properties = new XMLSecurityProperties();
properties.setSignatureVerificationKey(publicKey);
InboundXMLSec inboundXMLSec = XMLSec.getInboundWSSec(properties);
TestSecurityEventListener securityEventListener = new TestSecurityEventListener();
XMLStreamReader securityStreamReader =
inboundXMLSec.processInMessage(xmlStreamReader, null, securityEventListener);
StAX2DOM.readDoc(securityStreamReader);
// Check the SecurityEvents
checkSignatureToken(securityEventListener, getPublicKey("RSA"),
SecurityTokenConstants.KeyIdentifier_KeyValue);
}
/**
 * Rebuilds the test-vector public key from the hard-coded decimal constants.
 *
 * @param algo "DSA" or "RSA" (case-insensitive)
 * @return the reconstructed public key
 * @throws InvalidKeySpecException if the key material is rejected by the factory
 * @throws NoSuchAlgorithmException if the JRE lacks the requested algorithm
 */
private static PublicKey getPublicKey(String algo)
throws InvalidKeySpecException, NoSuchAlgorithmException {
KeyFactory kf = KeyFactory.getInstance(algo);
KeySpec kspec = null;
if ("DSA".equalsIgnoreCase(algo)) {
kspec = new DSAPublicKeySpec(new BigInteger(DSA_Y),
new BigInteger(DSA_P),
new BigInteger(DSA_Q),
new BigInteger(DSA_G));
} else if ("RSA".equalsIgnoreCase(algo)) {
kspec = new RSAPublicKeySpec(new BigInteger(RSA_MOD),
new BigInteger(RSA_PUB));
} else {
throw new RuntimeException("Unsupported key algorithm " + algo);
}
return kf.generatePublic(kspec);
}
/**
 * Asserts that the security-event listener recorded a signature token of the
 * expected kind and that the token carries the expected key.
 *
 * @param securityEventListener listener populated during signature processing
 * @param key the key the token is expected to hold
 * @param keyIdentifier the KeyInfo flavor the signature used
 * @throws XMLSecurityException if the recorded token cannot be inspected
 */
private void checkSignatureToken(
TestSecurityEventListener securityEventListener,
Key key,
SecurityTokenConstants.KeyIdentifier keyIdentifier
) throws XMLSecurityException {
// KeyValue tokens carry no extra state worth asserting here -- intentionally empty branch.
if (SecurityTokenConstants.KeyIdentifier_KeyValue.equals(keyIdentifier)) { //NOPMD
} else if (SecurityTokenConstants.KeyIdentifier_NoKeyInfo.equals(keyIdentifier)) {
// Default token: key was supplied out-of-band by the test.
DefaultTokenSecurityEvent tokenEvent =
(DefaultTokenSecurityEvent)securityEventListener.getSecurityEvent(SecurityEventConstants.DefaultToken);
assertNotNull(tokenEvent);
Key processedKey = tokenEvent.getSecurityToken().getSecretKey().values().iterator().next();
assertEquals(processedKey, key);
} else if (SecurityTokenConstants.KeyIdentifier_KeyName.equals(keyIdentifier)) {
// KeyName token: key resolved by name; the name itself must be present.
KeyNameTokenSecurityEvent tokenEvent =
(KeyNameTokenSecurityEvent)securityEventListener.getSecurityEvent(SecurityEventConstants.KeyNameToken);
assertNotNull(tokenEvent);
Key processedKey = tokenEvent.getSecurityToken().getSecretKey().values().iterator().next();
assertEquals(processedKey, key);
assertNotNull(((KeyNameSecurityToken)tokenEvent.getSecurityToken()).getKeyName());
} else {
// All remaining identifiers are X.509-based tokens.
X509TokenSecurityEvent tokenEvent =
(X509TokenSecurityEvent)securityEventListener.getSecurityEvent(SecurityEventConstants.X509Token);
assertNotNull(tokenEvent);
X509SecurityToken x509SecurityToken =
(X509SecurityToken)tokenEvent.getSecurityToken();
assertNotNull(x509SecurityToken);
if (SecurityTokenConstants.KeyIdentifier_X509SubjectName.equals(keyIdentifier)) {
Key processedKey = x509SecurityToken.getPublicKey();
assertEquals(processedKey, key);
assertNotNull(((X509SubjectNameSecurityToken)x509SecurityToken).getSubjectName());
} else if (SecurityTokenConstants.KeyIdentifier_IssuerSerial.equals(keyIdentifier)) {
Key processedKey = x509SecurityToken.getPublicKey();
assertEquals(processedKey, key);
assertNotNull(((X509IssuerSerialSecurityToken)x509SecurityToken).getIssuerName());
assertNotNull(((X509IssuerSerialSecurityToken)x509SecurityToken).getSerialNumber());
}
}
}
}
| |
package io.sentry.transport;
import static io.sentry.SentryLevel.DEBUG;
import static io.sentry.SentryLevel.ERROR;
import static java.net.HttpURLConnection.HTTP_OK;
import io.sentry.RequestDetails;
import io.sentry.SentryEnvelope;
import io.sentry.SentryOptions;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.nio.charset.Charset;
import java.util.Map;
import java.util.zip.GZIPOutputStream;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
final class HttpConnection {
@SuppressWarnings("CharsetObjectCanBeUsed")
private static final Charset UTF_8 = Charset.forName("UTF-8");
private final @Nullable Proxy proxy;
private final @NotNull RequestDetails requestDetails;
private final @NotNull SentryOptions options;
private final @NotNull RateLimiter rateLimiter;
/**
* Constructs a new HTTP transport instance. Notably, the provided {@code requestUpdater} must set
* the appropriate content encoding header for the {@link io.sentry.ISerializer} instance obtained
* from the options.
*
* @param options sentry options to read the config from
* @param requestDetails request details
* @param rateLimiter rate limiter
*/
public HttpConnection(
final @NotNull SentryOptions options,
final @NotNull RequestDetails requestDetails,
final @NotNull RateLimiter rateLimiter) {
this(options, requestDetails, AuthenticatorWrapper.getInstance(), rateLimiter);
}
HttpConnection(
final @NotNull SentryOptions options,
final @NotNull RequestDetails requestDetails,
final @NotNull AuthenticatorWrapper authenticatorWrapper,
final @NotNull RateLimiter rateLimiter) {
this.requestDetails = requestDetails;
this.options = options;
this.rateLimiter = rateLimiter;
this.proxy = resolveProxy(options.getProxy());
if (proxy != null && options.getProxy() != null) {
final String proxyUser = options.getProxy().getUser();
final String proxyPassword = options.getProxy().getPass();
if (proxyUser != null && proxyPassword != null) {
authenticatorWrapper.setDefault(new ProxyAuthenticator(proxyUser, proxyPassword));
}
}
}
private @Nullable Proxy resolveProxy(final @Nullable SentryOptions.Proxy optionsProxy) {
Proxy proxy = null;
if (optionsProxy != null) {
final String port = optionsProxy.getPort();
final String host = optionsProxy.getHost();
if (port != null && host != null) {
try {
InetSocketAddress proxyAddr = new InetSocketAddress(host, Integer.parseInt(port));
proxy = new Proxy(Proxy.Type.HTTP, proxyAddr);
} catch (NumberFormatException e) {
options
.getLogger()
.log(
ERROR,
e,
"Failed to parse Sentry Proxy port: "
+ optionsProxy.getPort()
+ ". Proxy is ignored");
}
}
}
return proxy;
}
@NotNull
HttpURLConnection open() throws IOException {
return (HttpURLConnection)
(proxy == null
? requestDetails.getUrl().openConnection()
: requestDetails.getUrl().openConnection(proxy));
}
/**
* Create a HttpURLConnection connection Sets specific content-type if its an envelope or not
*
* @return the HttpURLConnection
* @throws IOException if connection has a problem
*/
private @NotNull HttpURLConnection createConnection() throws IOException {
HttpURLConnection connection = open();
for (Map.Entry<String, String> header : requestDetails.getHeaders().entrySet()) {
connection.setRequestProperty(header.getKey(), header.getValue());
}
connection.setRequestMethod("POST");
connection.setDoOutput(true);
connection.setRequestProperty("Content-Encoding", "gzip");
connection.setRequestProperty("Content-Type", "application/x-sentry-envelope");
connection.setRequestProperty("Accept", "application/json");
// https://stackoverflow.com/questions/52726909/java-io-ioexception-unexpected-end-of-stream-on-connection/53089882
connection.setRequestProperty("Connection", "close");
connection.setConnectTimeout(options.getConnectionTimeoutMillis());
connection.setReadTimeout(options.getReadTimeoutMillis());
final HostnameVerifier hostnameVerifier = options.getHostnameVerifier();
if (connection instanceof HttpsURLConnection && hostnameVerifier != null) {
((HttpsURLConnection) connection).setHostnameVerifier(hostnameVerifier);
}
final SSLSocketFactory sslSocketFactory = options.getSslSocketFactory();
if (connection instanceof HttpsURLConnection && sslSocketFactory != null) {
((HttpsURLConnection) connection).setSSLSocketFactory(sslSocketFactory);
}
connection.connect();
return connection;
}
public @NotNull TransportResult send(final @NotNull SentryEnvelope envelope) throws IOException {
final HttpURLConnection connection = createConnection();
TransportResult result;
try (final OutputStream outputStream = connection.getOutputStream();
final GZIPOutputStream gzip = new GZIPOutputStream(outputStream)) {
options.getSerializer().serialize(envelope, gzip);
} catch (Exception e) {
options
.getLogger()
.log(
ERROR,
e,
"An exception occurred while submitting the envelope to the Sentry server.");
} finally {
result = readAndLog(connection);
}
return result;
}
/**
* Read responde code, retry after header and its error stream if there are errors and log it
*
* @param connection the HttpURLConnection
* @return TransportResult.success if responseCode is 200 or TransportResult.error otherwise
*/
private @NotNull TransportResult readAndLog(final @NotNull HttpURLConnection connection) {
try {
final int responseCode = connection.getResponseCode();
updateRetryAfterLimits(connection, responseCode);
if (!isSuccessfulResponseCode(responseCode)) {
options.getLogger().log(ERROR, "Request failed, API returned %s", responseCode);
// double check because call is expensive
if (options.isDebug()) {
String errorMessage = getErrorMessageFromStream(connection);
options.getLogger().log(ERROR, errorMessage);
}
return TransportResult.error(responseCode);
}
options.getLogger().log(DEBUG, "Envelope sent successfully.");
return TransportResult.success();
} catch (IOException e) {
options.getLogger().log(ERROR, e, "Error reading and logging the response stream");
} finally {
closeAndDisconnect(connection);
}
return TransportResult.error();
}
/**
* Read retry after headers and update the rate limit Dictionary
*
* @param connection the HttpURLConnection
* @param responseCode the responseCode
*/
public void updateRetryAfterLimits(
final @NotNull HttpURLConnection connection, final int responseCode) {
// seconds
final String retryAfterHeader = connection.getHeaderField("Retry-After");
// X-Sentry-Rate-Limits looks like: seconds:categories:scope
// it could have more than one scope so it looks like:
// quota_limit, quota_limit, quota_limit
// a real example: 50:transaction:key, 2700:default;error;security:organization
// 50::key is also a valid case, it means no categories and it should apply to all of them
final String sentryRateLimitHeader = connection.getHeaderField("X-Sentry-Rate-Limits");
rateLimiter.updateRetryAfterLimits(sentryRateLimitHeader, retryAfterHeader, responseCode);
}
/**
* Closes the Response stream and disconnect the connection
*
* @param connection the HttpURLConnection
*/
private void closeAndDisconnect(final @NotNull HttpURLConnection connection) {
try {
connection.getInputStream().close();
} catch (IOException ignored) {
// connection is already closed
} finally {
connection.disconnect();
}
}
/**
* Reads the error message from the error stream
*
* @param connection the HttpURLConnection
* @return the error message or null if none
*/
private @NotNull String getErrorMessageFromStream(final @NotNull HttpURLConnection connection) {
try (final InputStream errorStream = connection.getErrorStream();
final BufferedReader reader =
new BufferedReader(new InputStreamReader(errorStream, UTF_8))) {
final StringBuilder sb = new StringBuilder();
String line;
// ensure we do not add "\n" to the last line
boolean first = true;
while ((line = reader.readLine()) != null) {
if (!first) {
sb.append("\n");
}
sb.append(line);
first = false;
}
return sb.toString();
} catch (IOException e) {
return "Failed to obtain error message while analyzing send failure.";
}
}
/**
* Returns if response code is OK=200
*
* @param responseCode the response code
* @return true if it is OK=200 or false otherwise
*/
private boolean isSuccessfulResponseCode(final int responseCode) {
return responseCode == HTTP_OK;
}
@TestOnly
@Nullable
Proxy getProxy() {
return proxy;
}
}
| |
/*
* $RCSfile: Tuple4d.java,v $
*
* Copyright 1997-2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*
* $Revision: 1.8 $
* $Date: 2008/02/28 20:18:51 $
* $State: Exp $
*/
package jo.vecmath;
/**
* A 4 element tuple represented by double precision floating point x,y,z,w
* coordinates.
*
*/
public abstract class Tuple4d implements java.io.Serializable, Cloneable {
// Fixed serialization id so serialized tuples remain compatible across releases.
static final long serialVersionUID = -4748953690425311052L;
/**
 * The x coordinate.
 */
public double x;
/**
 * The y coordinate.
 */
public double y;
/**
 * The z coordinate.
 */
public double z;
/**
 * The w coordinate.
 */
public double w;
/**
 * Creates a Tuple4d initialized to the four supplied coordinates.
 *
 * @param x the x coordinate
 * @param y the y coordinate
 * @param z the z coordinate
 * @param w the w coordinate
 */
public Tuple4d(double x, double y, double z, double w) {
    // set(...) is final, so delegating from a constructor is safe.
    set(x, y, z, w);
}
/**
 * Creates a Tuple4d from the first four elements of the given array.
 *
 * @param t the array of length 4 containing xyzw in order
 */
public Tuple4d(double[] t) {
    // Delegate to the coordinate constructor.
    this(t[0], t[1], t[2], t[3]);
}
/**
 * Creates a Tuple4d that copies the coordinates of the given Tuple4d.
 *
 * @param t1 the Tuple4d containing the initialization x y z w data
 */
public Tuple4d(Tuple4d t1) {
    this(t1.x, t1.y, t1.z, t1.w);
}
/**
 * Creates a Tuple4d from the single-precision tuple, widening each float
 * coordinate to double.
 *
 * @param t1 the Tuple4f containing the initialization x y z w data
 */
public Tuple4d(Tuple4f t1) {
    this(t1.x, t1.y, t1.z, t1.w);
}
/**
* Constructs and initializes a Tuple4d to (0,0,0,0).
*/
public Tuple4d() {
this.x = 0.0;
this.y = 0.0;
this.z = 0.0;
this.w = 0.0;
}
/**
* Sets the value of this tuple to the specified xyzw coordinates.
*
* @param x the x coordinate
* @param y the y coordinate
* @param z the z coordinate
* @param w the w coordinate
*/
public final void set(double x, double y, double z, double w) {
this.x = x;
this.y = y;
this.z = z;
this.w = w;
}
/**
* Sets the value of this tuple to the specified xyzw coordinates.
*
* @param t the array of length 4 containing xyzw in order
*/
public final void set(double[] t) {
this.x = t[0];
this.y = t[1];
this.z = t[2];
this.w = t[3];
}
/**
* Sets the value of this tuple to the value of tuple t1.
*
* @param t1 the tuple to be copied
*/
public final void set(Tuple4d t1) {
this.x = t1.x;
this.y = t1.y;
this.z = t1.z;
this.w = t1.w;
}
/**
* Sets the value of this tuple to the value of tuple t1.
*
* @param t1 the tuple to be copied
*/
public final void set(Tuple4f t1) {
this.x = t1.x;
this.y = t1.y;
this.z = t1.z;
this.w = t1.w;
}
/**
* Gets the value of this tuple and places it into the array t of length
* four in x,y,z,w order.
*
* @param t the array of length four
*/
public final void get(double[] t) {
t[0] = this.x;
t[1] = this.y;
t[2] = this.z;
t[3] = this.w;
}
/**
* Gets the value of this tuple and places it into the Tuple4d argument of
* length four in x,y,z,w order.
*
* @param t the Tuple into which the values will be copied
*/
public final void get(Tuple4d t) {
t.x = this.x;
t.y = this.y;
t.z = this.z;
t.w = this.w;
}
/**
* Sets the value of this tuple to the tuple sum of tuples t1 and t2.
*
* @param t1 the first tuple
* @param t2 the second tuple
*/
public final void add(Tuple4d t1, Tuple4d t2) {
this.x = t1.x + t2.x;
this.y = t1.y + t2.y;
this.z = t1.z + t2.z;
this.w = t1.w + t2.w;
}
/**
* Sets the value of this tuple to the sum of itself and tuple t1.
*
* @param t1 the other tuple
*/
public final void add(Tuple4d t1) {
this.x += t1.x;
this.y += t1.y;
this.z += t1.z;
this.w += t1.w;
}
/**
* Sets the value of this tuple to the difference of tuples t1 and t2 (this
* = t1 - t2).
*
* @param t1 the first tuple
* @param t2 the second tuple
*/
public final void sub(Tuple4d t1, Tuple4d t2) {
this.x = t1.x - t2.x;
this.y = t1.y - t2.y;
this.z = t1.z - t2.z;
this.w = t1.w - t2.w;
}
/**
* Sets the value of this tuple to the difference of itself and tuple t1
* (this = this - t1).
*
* @param t1 the other tuple
*/
public final void sub(Tuple4d t1) {
this.x -= t1.x;
this.y -= t1.y;
this.z -= t1.z;
this.w -= t1.w;
}
/**
* Sets the value of this tuple to the negation of tuple t1.
*
* @param t1 the source tuple
*/
public final void negate(Tuple4d t1) {
this.x = -t1.x;
this.y = -t1.y;
this.z = -t1.z;
this.w = -t1.w;
}
/**
* Negates the value of this tuple in place.
*/
public final void negate() {
this.x = -this.x;
this.y = -this.y;
this.z = -this.z;
this.w = -this.w;
}
/**
* Sets the value of this tuple to the scalar multiplication of the scale
* factor with the tuple t1.
*
* @param s the scalar value
* @param t1 the source tuple
*/
public final void scale(double s, Tuple4d t1) {
this.x = s * t1.x;
this.y = s * t1.y;
this.z = s * t1.z;
this.w = s * t1.w;
}
/**
* Sets the value of this tuple to the scalar multiplication of the scale
* factor with this.
*
* @param s the scalar value
*/
public final void scale(double s) {
this.x *= s;
this.y *= s;
this.z *= s;
this.w *= s;
}
/**
* Sets the value of this tuple to the scalar multiplication by s of tuple
* t1 plus tuple t2 (this = s*t1 + t2).
*
* @param s the scalar value
* @param t1 the tuple to be multipled
* @param t2 the tuple to be added
*/
public final void scaleAdd(double s, Tuple4d t1, Tuple4d t2) {
this.x = s * t1.x + t2.x;
this.y = s * t1.y + t2.y;
this.z = s * t1.z + t2.z;
this.w = s * t1.w + t2.w;
}
/**
* @deprecated Use scaleAdd(double,Tuple4d) instead
*/
public final void scaleAdd(float s, Tuple4d t1) {
scaleAdd((double) s, t1);
}
/**
* Sets the value of this tuple to the scalar multiplication of itself and
* then adds tuple t1 (this = s*this + t1).
*
* @param s the scalar value
* @param t1 the tuple to be added
*/
public final void scaleAdd(double s, Tuple4d t1) {
this.x = s * this.x + t1.x;
this.y = s * this.y + t1.y;
this.z = s * this.z + t1.z;
this.w = s * this.w + t1.w;
}
/**
* Returns a string that contains the values of this Tuple4d. The form is
* (x,y,z,w).
*
* @return the String representation
*/
public String toString() {
return "(" + this.x + ", " + this.y + ", " + this.z + ", " + this.w + ")";
}
/**
* Returns true if all of the data members of Tuple4d t1 are equal to the
* corresponding data members in this Tuple4d.
*
* @param t1 the tuple with which the comparison is made
* @return true or false
*/
public boolean equals(Tuple4d t1) {
try {
return (this.x == t1.x && this.y == t1.y && this.z == t1.z
&& this.w == t1.w);
} catch (NullPointerException e2) {
return false;
}
}
/**
* Returns true if the Object t1 is of type Tuple4d and all of the data
* members of t1 are equal to the corresponding data members in this
* Tuple4d.
*
* @param t1 the object with which the comparison is made
* @return true or false
*/
public boolean equals(Object t1) {
try {
Tuple4d t2 = (Tuple4d) t1;
return (this.x == t2.x && this.y == t2.y
&& this.z == t2.z && this.w == t2.w);
} catch (NullPointerException e2) {
return false;
} catch (ClassCastException e1) {
return false;
}
}
/**
* Returns true if the L-infinite distance between this tuple and tuple t1
* is less than or equal to the epsilon parameter, otherwise returns false.
* The L-infinite distance is equal to MAX[abs(x1-x2), abs(y1-y2),
* abs(z1-z2), abs(w1-w2)].
*
* @param t1 the tuple to be compared to this tuple
* @param epsilon the threshold value
* @return true or false
*/
public boolean epsilonEquals(Tuple4d t1, double epsilon) {
double diff;
diff = x - t1.x;
if (Double.isNaN(diff)) {
return false;
}
if ((diff < 0 ? -diff : diff) > epsilon) {
return false;
}
diff = y - t1.y;
if (Double.isNaN(diff)) {
return false;
}
if ((diff < 0 ? -diff : diff) > epsilon) {
return false;
}
diff = z - t1.z;
if (Double.isNaN(diff)) {
return false;
}
if ((diff < 0 ? -diff : diff) > epsilon) {
return false;
}
diff = w - t1.w;
if (Double.isNaN(diff)) {
return false;
}
if ((diff < 0 ? -diff : diff) > epsilon) {
return false;
}
return true;
}
/**
* Returns a hash code value based on the data values in this object. Two
* different Tuple4d objects with identical data values (i.e.,
* Tuple4d.equals returns true) will return the same hash code value. Two
* objects with different data members may return the same hash value,
* although this is not likely.
*
* @return the integer hash code value
*/
public int hashCode() {
long bits = 1L;
bits = 31L * bits + VecMathUtil.doubleToLongBits(x);
bits = 31L * bits + VecMathUtil.doubleToLongBits(y);
bits = 31L * bits + VecMathUtil.doubleToLongBits(z);
bits = 31L * bits + VecMathUtil.doubleToLongBits(w);
return (int) (bits ^ (bits >> 32));
}
/**
* @deprecated Use clamp(double,double,Tuple4d) instead
*/
public final void clamp(float min, float max, Tuple4d t) {
clamp((double) min, (double) max, t);
}
/**
* Clamps the tuple parameter to the range [low, high] and places the values
* into this tuple.
*
* @param min the lowest value in the tuple after clamping
* @param max the highest value in the tuple after clamping
* @param t the source tuple, which will not be modified
*/
public final void clamp(double min, double max, Tuple4d t) {
if (t.x > max) {
x = max;
} else if (t.x < min) {
x = min;
} else {
x = t.x;
}
if (t.y > max) {
y = max;
} else if (t.y < min) {
y = min;
} else {
y = t.y;
}
if (t.z > max) {
z = max;
} else if (t.z < min) {
z = min;
} else {
z = t.z;
}
if (t.w > max) {
w = max;
} else if (t.w < min) {
w = min;
} else {
w = t.w;
}
}
/**
* @deprecated Use clampMin(double,Tuple4d) instead
*/
public final void clampMin(float min, Tuple4d t) {
clampMin((double) min, t);
}
/**
* Clamps the minimum value of the tuple parameter to the min parameter and
* places the values into this tuple.
*
* @param min the lowest value in the tuple after clamping
* @param t the source tuple, which will not be modified
*/
public final void clampMin(double min, Tuple4d t) {
if (t.x < min) {
x = min;
} else {
x = t.x;
}
if (t.y < min) {
y = min;
} else {
y = t.y;
}
if (t.z < min) {
z = min;
} else {
z = t.z;
}
if (t.w < min) {
w = min;
} else {
w = t.w;
}
}
/**
* @deprecated Use clampMax(double,Tuple4d) instead
*/
public final void clampMax(float max, Tuple4d t) {
clampMax((double) max, t);
}
/**
* Clamps the maximum value of the tuple parameter to the max parameter and
* places the values into this tuple.
*
* @param max the highest value in the tuple after clamping
* @param t the source tuple, which will not be modified
*/
public final void clampMax(double max, Tuple4d t) {
if (t.x > max) {
x = max;
} else {
x = t.x;
}
if (t.y > max) {
y = max;
} else {
y = t.y;
}
if (t.z > max) {
z = max;
} else {
z = t.z;
}
if (t.w > max) {
w = max;
} else {
w = t.z;
}
}
/**
* Sets each component of the tuple parameter to its absolute value and
* places the modified values into this tuple.
*
* @param t the source tuple, which will not be modified
*/
public final void absolute(Tuple4d t) {
x = Math.abs(t.x);
y = Math.abs(t.y);
z = Math.abs(t.z);
w = Math.abs(t.w);
}
/**
* @deprecated Use clamp(double,double) instead
*/
public final void clamp(float min, float max) {
clamp((double) min, (double) max);
}
/**
* Clamps this tuple to the range [low, high].
*
* @param min the lowest value in this tuple after clamping
* @param max the highest value in this tuple after clamping
*/
public final void clamp(double min, double max) {
if (x > max) {
x = max;
} else if (x < min) {
x = min;
}
if (y > max) {
y = max;
} else if (y < min) {
y = min;
}
if (z > max) {
z = max;
} else if (z < min) {
z = min;
}
if (w > max) {
w = max;
} else if (w < min) {
w = min;
}
}
/**
* @deprecated Use clampMin(double) instead
*/
public final void clampMin(float min) {
clampMin((double) min);
}
/**
* Clamps the minimum value of this tuple to the min parameter.
*
* @param min the lowest value in this tuple after clamping
*/
public final void clampMin(double min) {
if (x < min) {
x = min;
}
if (y < min) {
y = min;
}
if (z < min) {
z = min;
}
if (w < min) {
w = min;
}
}
/**
* @deprecated Use clampMax(double) instead
*/
public final void clampMax(float max) {
clampMax((double) max);
}
/**
* Clamps the maximum value of this tuple to the max parameter.
*
* @param max the highest value in the tuple after clamping
*/
public final void clampMax(double max) {
if (x > max) {
x = max;
}
if (y > max) {
y = max;
}
if (z > max) {
z = max;
}
if (w > max) {
w = max;
}
}
/**
* Sets each component of this tuple to its absolute value.
*/
public final void absolute() {
x = Math.abs(x);
y = Math.abs(y);
z = Math.abs(z);
w = Math.abs(w);
}
/**
* @deprecated Use interpolate(Tuple4d,Tuple4d,double) instead
*/
public void interpolate(Tuple4d t1, Tuple4d t2, float alpha) {
interpolate(t1, t2, (double) alpha);
}
/**
* Linearly interpolates between tuples t1 and t2 and places the result into
* this tuple: this = (1-alpha)*t1 + alpha*t2.
*
* @param t1 the first tuple
* @param t2 the second tuple
* @param alpha the alpha interpolation parameter
*/
public void interpolate(Tuple4d t1, Tuple4d t2, double alpha) {
this.x = (1 - alpha) * t1.x + alpha * t2.x;
this.y = (1 - alpha) * t1.y + alpha * t2.y;
this.z = (1 - alpha) * t1.z + alpha * t2.z;
this.w = (1 - alpha) * t1.w + alpha * t2.w;
}
/**
* @deprecated Use interpolate(Tuple4d,double) instead
*/
public void interpolate(Tuple4d t1, float alpha) {
interpolate(t1, (double) alpha);
}
/**
* Linearly interpolates between this tuple and tuple t1 and places the
* result into this tuple: this = (1-alpha)*this + alpha*t1.
*
* @param t1 the first tuple
* @param alpha the alpha interpolation parameter
*/
public void interpolate(Tuple4d t1, double alpha) {
this.x = (1 - alpha) * this.x + alpha * t1.x;
this.y = (1 - alpha) * this.y + alpha * t1.y;
this.z = (1 - alpha) * this.z + alpha * t1.z;
this.w = (1 - alpha) * this.w + alpha * t1.w;
}
/**
* Creates a new object of the same class as this object.
*
* @return a clone of this instance.
* @exception OutOfMemoryError if there is not enough memory.
* @see java.lang.Cloneable
* @since vecmath 1.3
*/
public Object clone() {
// Since there are no arrays we can just use Object.clone()
try {
return super.clone();
} catch (CloneNotSupportedException e) {
// this shouldn't happen, since we are Cloneable
throw new InternalError();
}
}
/**
* Get the <i>x</i> coordinate.
*
* @return the x coordinate.
*
* @since vecmath 1.5
*/
public final double getX() {
return x;
}
/**
* Set the <i>x</i> coordinate.
*
* @param x value to <i>x</i> coordinate.
*
* @since vecmath 1.5
*/
public final void setX(double x) {
this.x = x;
}
/**
* Get the <i>y</i> coordinate.
*
* @return the <i>y</i> coordinate.
*
* @since vecmath 1.5
*/
public final double getY() {
return y;
}
/**
* Set the <i>y</i> coordinate.
*
* @param y value to <i>y</i> coordinate.
*
* @since vecmath 1.5
*/
public final void setY(double y) {
this.y = y;
}
/**
* Get the <i>z</i> coordinate.
*
* @return the <i>z</i> coordinate.
*
* @since vecmath 1.5
*/
public final double getZ() {
return z;
}
/**
* Set the <i>z</i> coordinate.
*
* @param z value to <i>z</i> coordinate.
*
* @since vecmath 1.5
*/
public final void setZ(double z) {
this.z = z;
}
/**
* Get the <i>w</i> coordinate.
*
* @return the <i>w</i> coordinate.
*
* @since vecmath 1.5
*/
public final double getW() {
return w;
}
/**
* Set the <i>w</i> coordinate.
*
* @param w value to <i>w</i> coordinate.
*
* @since vecmath 1.5
*/
public final void setW(double w) {
this.w = w;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.jms;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.annotation.Nullable;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.Read.Unbounded;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An unbounded source for JMS destinations (queues or topics).
*
* <h3>Reading from a JMS destination</h3>
*
* <p>JmsIO source returns unbounded collection of JMS records as {@code PCollection<JmsRecord<T>>}.
* A {@link JmsRecord} includes JMS headers and properties, along with the JMS message payload.</p>
*
* <p>To configure a JMS source, you have to provide a {@link javax.jms.ConnectionFactory}
* and the destination (queue or topic) where to consume. The following example
* illustrates various options for configuring the source:</p>
*
* <pre>{@code
*
* pipeline.apply(JmsIO.read()
* .withConnectionFactory(myConnectionFactory)
* .withQueue("my-queue")
* // above two are required configuration, returns PCollection<JmsRecord<byte[]>>
*
* // rest of the settings are optional
*
* }</pre>
*
* <h3>Writing to a JMS destination</h3>
*
* <p>JmsIO sink supports writing text messages to a JMS destination on a broker.
* To configure a JMS sink, you must specify a {@link javax.jms.ConnectionFactory} and a
* {@link javax.jms.Destination} name.
* For instance:
*
* <pre>{@code
*
* pipeline
* .apply(...) // returns PCollection<String>
* .apply(JmsIO.write()
* .withConnectionFactory(myConnectionFactory)
* .withQueue("my-queue")
*
* }</pre>
*/
@Experimental(Experimental.Kind.SOURCE_SINK)
public class JmsIO {
private static final Logger LOG = LoggerFactory.getLogger(JmsIO.class);
public static Read read() {
return new AutoValue_JmsIO_Read.Builder().setMaxNumRecords(Long.MAX_VALUE).build();
}
public static Write write() {
return new AutoValue_JmsIO_Write.Builder().build();
}
/**
* A {@link PTransform} to read from a JMS destination. See {@link JmsIO} for more
* information on usage and configuration.
*/
@AutoValue
public abstract static class Read extends PTransform<PBegin, PCollection<JmsRecord>> {
/**
* NB: According to http://docs.oracle.com/javaee/1.4/api/javax/jms/ConnectionFactory.html
* "It is expected that JMS providers will provide the tools an administrator needs to create
* and configure administered objects in a JNDI namespace. JMS provider implementations of
* administered objects should be both javax.jndi.Referenceable and java.io.Serializable so
* that they can be stored in all JNDI naming contexts. In addition, it is recommended that
* these implementations follow the JavaBeansTM design patterns."
*
* <p>So, a {@link ConnectionFactory} implementation is serializable.
*/
@Nullable abstract ConnectionFactory getConnectionFactory();
@Nullable abstract String getQueue();
@Nullable abstract String getTopic();
@Nullable abstract String getUsername();
@Nullable abstract String getPassword();
abstract long getMaxNumRecords();
@Nullable abstract Duration getMaxReadTime();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionFactory(ConnectionFactory connectionFactory);
abstract Builder setQueue(String queue);
abstract Builder setTopic(String topic);
abstract Builder setUsername(String username);
abstract Builder setPassword(String password);
abstract Builder setMaxNumRecords(long maxNumRecords);
abstract Builder setMaxReadTime(Duration maxReadTime);
abstract Read build();
}
/**
* Specify the JMS connection factory to connect to the JMS broker.
*
* <p>For instance:
*
* <pre>
* {@code
* pipeline.apply(JmsIO.read().withConnectionFactory(myConnectionFactory)
* }
* </pre>
*
* @param connectionFactory The JMS {@link ConnectionFactory}.
* @return The corresponding {@link JmsIO.Read}.
*/
public Read withConnectionFactory(ConnectionFactory connectionFactory) {
checkArgument(connectionFactory != null, "connectionFactory can not be null");
return builder().setConnectionFactory(connectionFactory).build();
}
/**
* Specify the JMS queue destination name where to read messages from. The
* {@link JmsIO.Read} acts as a consumer on the queue.
*
* <p>This method is exclusive with {@link JmsIO.Read#withTopic(String)}. The user has to
* specify a destination: queue or topic.
*
* <p>For instance:
*
* <pre>
* {@code
* pipeline.apply(JmsIO.read().withQueue("my-queue")
* }
* </pre>
*
* @param queue The JMS queue name where to read messages from.
* @return The corresponding {@link JmsIO.Read}.
*/
public Read withQueue(String queue) {
checkArgument(queue != null, "queue can not be null");
return builder().setQueue(queue).build();
}
/**
* Specify the JMS topic destination name where to receive messages from. The
* {@link JmsIO.Read} acts as a subscriber on the topic.
*
* <p>This method is exclusive with {@link JmsIO.Read#withQueue(String)}. The user has to
* specify a destination: queue or topic.
*
* <p>For instance:
*
* <pre>
* {@code
* pipeline.apply(JmsIO.read().withTopic("my-topic")
* }
* </pre>
*
* @param topic The JMS topic name.
* @return The corresponding {@link JmsIO.Read}.
*/
public Read withTopic(String topic) {
checkArgument(topic != null, "topic can not be null");
return builder().setTopic(topic).build();
}
/**
* Define the username to connect to the JMS broker (authenticated).
*/
public Read withUsername(String username) {
checkArgument(username != null, "username can not be null");
return builder().setUsername(username).build();
}
/**
* Define the password to connect to the JMS broker (authenticated).
*/
public Read withPassword(String password) {
checkArgument(password != null, "password can not be null");
return builder().setPassword(password).build();
}
/**
* Define the max number of records that the source will read. Using a max number of records
* different from {@code Long.MAX_VALUE} means the source will be {@code Bounded}, and will
* stop once the max number of records read is reached.
*
* <p>For instance:
*
* <pre>
* {@code
* pipeline.apply(JmsIO.read().withNumRecords(1000)
* }
* </pre>
*
* @param maxNumRecords The max number of records to read from the JMS destination.
* @return The corresponding {@link JmsIO.Read}.
*/
public Read withMaxNumRecords(long maxNumRecords) {
checkArgument(maxNumRecords >= 0, "maxNumRecords must be > 0, but was: %d", maxNumRecords);
return builder().setMaxNumRecords(maxNumRecords).build();
}
/**
* Define the max read time that the source will read. Using a non null max read time
* duration means the source will be {@code Bounded}, and will stop once the max read time is
* reached.
*
* <p>For instance:
*
* <pre>
* {@code
* pipeline.apply(JmsIO.read().withMaxReadTime(Duration.minutes(10))
* }
* </pre>
*
* @param maxReadTime The max read time duration.
* @return The corresponding {@link JmsIO.Read}.
*/
public Read withMaxReadTime(Duration maxReadTime) {
checkArgument(maxReadTime != null, "maxReadTime can not be null");
return builder().setMaxReadTime(maxReadTime).build();
}
@Override
public PCollection<JmsRecord> expand(PBegin input) {
checkArgument(getConnectionFactory() != null, "withConnectionFactory() is required");
checkArgument(
getQueue() != null || getTopic() != null,
"Either withQueue() or withTopic() is required");
checkArgument(
getQueue() == null || getTopic() == null,
"withQueue() and withTopic() are exclusive");
// handles unbounded source to bounded conversion if maxNumRecords is set.
Unbounded<JmsRecord> unbounded = org.apache.beam.sdk.io.Read.from(createSource());
PTransform<PBegin, PCollection<JmsRecord>> transform = unbounded;
if (getMaxNumRecords() < Long.MAX_VALUE || getMaxReadTime() != null) {
transform = unbounded.withMaxReadTime(getMaxReadTime())
.withMaxNumRecords(getMaxNumRecords());
}
return input.getPipeline().apply(transform);
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
builder.addIfNotNull(DisplayData.item("queue", getQueue()));
builder.addIfNotNull(DisplayData.item("topic", getTopic()));
}
///////////////////////////////////////////////////////////////////////////////////////
/**
* Creates an {@link UnboundedSource UnboundedSource<JmsRecord, ?>} with the configuration
* in {@link Read}. Primary use case is unit tests, should not be used in an
* application.
*/
@VisibleForTesting
UnboundedSource<JmsRecord, JmsCheckpointMark> createSource() {
return new UnboundedJmsSource(this);
}
}
private JmsIO() {}
/**
* An unbounded JMS source.
*/
@VisibleForTesting
protected static class UnboundedJmsSource extends UnboundedSource<JmsRecord, JmsCheckpointMark> {
private final Read spec;
public UnboundedJmsSource(Read spec) {
this.spec = spec;
}
@Override
public List<UnboundedJmsSource> split(
int desiredNumSplits, PipelineOptions options) throws Exception {
List<UnboundedJmsSource> sources = new ArrayList<>();
if (spec.getTopic() != null) {
// in the case of a topic, we create a single source, so an unique subscriber, to avoid
// element duplication
sources.add(new UnboundedJmsSource(spec));
} else {
// in the case of a queue, we allow concurrent consumers
for (int i = 0; i < desiredNumSplits; i++) {
sources.add(new UnboundedJmsSource(spec));
}
}
return sources;
}
@Override
public UnboundedJmsReader createReader(PipelineOptions options,
JmsCheckpointMark checkpointMark) {
return new UnboundedJmsReader(this, checkpointMark);
}
@Override
public Coder<JmsCheckpointMark> getCheckpointMarkCoder() {
return AvroCoder.of(JmsCheckpointMark.class);
}
@Override
public Coder<JmsRecord> getOutputCoder() {
return SerializableCoder.of(JmsRecord.class);
}
}
@VisibleForTesting
static class UnboundedJmsReader extends UnboundedReader<JmsRecord> {
private UnboundedJmsSource source;
private JmsCheckpointMark checkpointMark;
private Connection connection;
private Session session;
private MessageConsumer consumer;
private JmsRecord currentRecord;
private Instant currentTimestamp;
public UnboundedJmsReader(
UnboundedJmsSource source,
JmsCheckpointMark checkpointMark) {
this.source = source;
if (checkpointMark != null) {
this.checkpointMark = checkpointMark;
} else {
this.checkpointMark = new JmsCheckpointMark();
}
this.currentRecord = null;
}
@Override
public boolean start() throws IOException {
Read spec = source.spec;
ConnectionFactory connectionFactory = spec.getConnectionFactory();
try {
Connection connection;
if (spec.getUsername() != null) {
connection =
connectionFactory.createConnection(spec.getUsername(), spec.getPassword());
} else {
connection = connectionFactory.createConnection();
}
connection.start();
this.connection = connection;
} catch (Exception e) {
throw new IOException("Error connecting to JMS", e);
}
try {
this.session = this.connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
} catch (Exception e) {
throw new IOException("Error creating JMS session", e);
}
try {
if (spec.getTopic() != null) {
this.consumer =
this.session.createConsumer(this.session.createTopic(spec.getTopic()));
} else {
this.consumer =
this.session.createConsumer(this.session.createQueue(spec.getQueue()));
}
} catch (Exception e) {
throw new IOException("Error creating JMS consumer", e);
}
return advance();
}
@Override
public boolean advance() throws IOException {
try {
TextMessage message = (TextMessage) this.consumer.receiveNoWait();
if (message == null) {
currentRecord = null;
return false;
}
Map<String, Object> properties = new HashMap<>();
@SuppressWarnings("rawtypes")
Enumeration propertyNames = message.getPropertyNames();
while (propertyNames.hasMoreElements()) {
String propertyName = (String) propertyNames.nextElement();
properties.put(propertyName, message.getObjectProperty(propertyName));
}
JmsRecord jmsRecord = new JmsRecord(
message.getJMSMessageID(),
message.getJMSTimestamp(),
message.getJMSCorrelationID(),
message.getJMSReplyTo(),
message.getJMSDestination(),
message.getJMSDeliveryMode(),
message.getJMSRedelivered(),
message.getJMSType(),
message.getJMSExpiration(),
message.getJMSPriority(),
properties,
message.getText());
checkpointMark.addMessage(message);
currentRecord = jmsRecord;
currentTimestamp = new Instant(message.getJMSTimestamp());
return true;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public JmsRecord getCurrent() throws NoSuchElementException {
if (currentRecord == null) {
throw new NoSuchElementException();
}
return currentRecord;
}
@Override
public Instant getWatermark() {
return checkpointMark.getOldestPendingTimestamp();
}
@Override
public Instant getCurrentTimestamp() {
if (currentRecord == null) {
throw new NoSuchElementException();
}
return currentTimestamp;
}
@Override
public CheckpointMark getCheckpointMark() {
return checkpointMark;
}
@Override
public UnboundedSource<JmsRecord, ?> getCurrentSource() {
return source;
}
@Override
public void close() throws IOException {
try {
if (consumer != null) {
consumer.close();
consumer = null;
}
if (session != null) {
session.close();
session = null;
}
if (connection != null) {
connection.stop();
connection.close();
connection = null;
}
} catch (Exception e) {
throw new IOException(e);
}
}
}
/**
* A {@link PTransform} to write to a JMS queue. See {@link JmsIO} for
* more information on usage and configuration.
*/
@AutoValue
public abstract static class Write extends PTransform<PCollection<String>, PDone> {
@Nullable abstract ConnectionFactory getConnectionFactory();
@Nullable abstract String getQueue();
@Nullable abstract String getTopic();
@Nullable abstract String getUsername();
@Nullable abstract String getPassword();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionFactory(ConnectionFactory connectionFactory);
abstract Builder setQueue(String queue);
abstract Builder setTopic(String topic);
abstract Builder setUsername(String username);
abstract Builder setPassword(String password);
abstract Write build();
}
/**
* Specify the JMS connection factory to connect to the JMS broker.
*
* <p>For instance:
*
* <pre>
* {@code
* .apply(JmsIO.write().withConnectionFactory(myConnectionFactory)
* }
* </pre>
*
* @param connectionFactory The JMS {@link ConnectionFactory}.
* @return The corresponding {@link JmsIO.Read}.
*/
public Write withConnectionFactory(ConnectionFactory connectionFactory) {
checkArgument(connectionFactory != null, "connectionFactory can not be null");
return builder().setConnectionFactory(connectionFactory).build();
}
/**
* Specify the JMS queue destination name where to send messages to. The
* {@link JmsIO.Write} acts as a producer on the queue.
*
* <p>This method is exclusive with {@link JmsIO.Write#withTopic(String)}. The user has to
* specify a destination: queue or topic.
*
* <p>For instance:
*
* <pre>
* {@code
* .apply(JmsIO.write().withQueue("my-queue")
* }
* </pre>
*
* @param queue The JMS queue name where to send messages to.
* @return The corresponding {@link JmsIO.Read}.
*/
public Write withQueue(String queue) {
checkArgument(queue != null, "queue can not be null");
return builder().setQueue(queue).build();
}
/**
* Specify the JMS topic destination name where to send messages to. The
* {@link JmsIO.Read} acts as a publisher on the topic.
*
* <p>This method is exclusive with {@link JmsIO.Write#withQueue(String)}. The user has to
* specify a destination: queue or topic.
*
* <p>For instance:
*
* <pre>
* {@code
* .apply(JmsIO.write().withTopic("my-topic")
* }
* </pre>
*
* @param topic The JMS topic name.
* @return The corresponding {@link JmsIO.Read}.
*/
public Write withTopic(String topic) {
checkArgument(topic != null, "topic can not be null");
return builder().setTopic(topic).build();
}
/**
* Define the username to connect to the JMS broker (authenticated).
*/
public Write withUsername(String username) {
checkArgument(username != null, "username can not be null");
return builder().setUsername(username).build();
}
/**
* Define the password to connect to the JMS broker (authenticated).
*/
public Write withPassword(String password) {
checkArgument(password != null, "password can not be null");
return builder().setPassword(password).build();
}
@Override
public PDone expand(PCollection<String> input) {
  // Validate the write spec up front so misconfiguration fails at pipeline
  // construction rather than at execution time.
  checkArgument(getConnectionFactory() != null, "withConnectionFactory() is required");
  boolean hasQueue = getQueue() != null;
  boolean hasTopic = getTopic() != null;
  checkArgument(hasQueue || hasTopic, "Either withQueue(queue) or withTopic(topic) is required");
  checkArgument(!(hasQueue && hasTopic), "withQueue(queue) and withTopic(topic) are exclusive");
  input.apply(ParDo.of(new WriterFn(this)));
  return PDone.in(input.getPipeline());
}
/** A {@link DoFn} that sends every input string to JMS as a {@link TextMessage}. */
private static class WriterFn extends DoFn<String, Void> {
  private final Write spec;
  // JMS resources: created lazily in setup() on the worker, released in teardown().
  private Connection connection;
  private Session session;
  private MessageProducer producer;

  public WriterFn(Write spec) {
    this.spec = spec;
  }

  /** Opens the JMS connection/session/producer against the configured destination. */
  @Setup
  public void setup() throws Exception {
    if (producer == null) {
      if (spec.getUsername() != null) {
        this.connection =
            spec.getConnectionFactory()
                .createConnection(spec.getUsername(), spec.getPassword());
      } else {
        this.connection = spec.getConnectionFactory().createConnection();
      }
      this.connection.start();
      // false means we don't use JMS transaction.
      this.session = this.connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      // expand() guarantees exactly one of queue/topic is set.
      Destination destination;
      if (spec.getQueue() != null) {
        destination = session.createQueue(spec.getQueue());
      } else {
        destination = session.createTopic(spec.getTopic());
      }
      this.producer = this.session.createProducer(destination);
    }
  }

  @ProcessElement
  public void processElement(ProcessContext ctx) throws Exception {
    TextMessage message = session.createTextMessage(ctx.element());
    producer.send(message);
  }

  /**
   * Closes JMS resources in reverse creation order. Null-guards protect against
   * a partially completed setup() (e.g. createConnection threw), and the nested
   * try/finally blocks ensure a failure closing one resource does not leak the
   * others — the original version NPE'd on null fields and skipped session and
   * connection cleanup whenever producer.close() threw.
   */
  @Teardown
  public void teardown() throws Exception {
    try {
      if (producer != null) {
        producer.close();
      }
    } finally {
      producer = null;
      try {
        if (session != null) {
          session.close();
        }
      } finally {
        session = null;
        if (connection != null) {
          try {
            connection.stop();
          } finally {
            connection.close();
            connection = null;
          }
        }
      }
    }
  }
}
}
}
| |
package org.usfirst.frc.team4915.stronghold;
import org.usfirst.frc.team4915.stronghold.commands.AutoCommand1;
import org.usfirst.frc.team4915.stronghold.commands.PortcullisMoveUp;
import org.usfirst.frc.team4915.stronghold.subsystems.Autonomous;
import org.usfirst.frc.team4915.stronghold.subsystems.DriveTrain;
import org.usfirst.frc.team4915.stronghold.subsystems.GearShift;
import org.usfirst.frc.team4915.stronghold.subsystems.IntakeLauncher;
import org.usfirst.frc.team4915.stronghold.subsystems.Scaler;
import org.usfirst.frc.team4915.stronghold.utils.BNO055;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Robot extends IterativeRobot {

    public static DriveTrain driveTrain;
    public static IntakeLauncher intakeLauncher;
    public static OI oi;
    public static GearShift gearShift;
    public static Scaler scaler;

    Command autonomousCommand;
    SendableChooser autonomousProgramChooser;

    // FPGA timestamp (seconds) of the last SmartDashboard refresh;
    // periodicStatusUpdate() uses it to throttle dashboard traffic.
    private volatile double lastTime;

    /**
     * Runs once at robot boot: initializes the RobotMap, conditionally
     * constructs each subsystem according to the ModuleManager flags, and
     * finally constructs the OI (which depends on the subsystems existing).
     */
    @Override
    public void robotInit() {
        RobotMap.init(); // 1. Initialize RobotMap prior to initializing modules
        // 2. conditionally create the modules
        if (ModuleManager.PORTCULLIS_MODULE_ON) {
            new PortcullisMoveUp().start();
        }
        if (ModuleManager.DRIVE_MODULE_ON) {
            driveTrain = new DriveTrain();
            SmartDashboard.putString("Drivetrain Module", "initialized");
        } else {
            SmartDashboard.putString("Drivetrain Module", "disabled");
        }
        if (ModuleManager.GEARSHIFT_MODULE_ON) {
            gearShift = new GearShift();
            SmartDashboard.putString("Shift Module", "initialized");
        } else {
            SmartDashboard.putString("Shift Module", "disabled");
        }
        if (ModuleManager.INTAKELAUNCHER_MODULE_ON) {
            /* to prevent module-manager-madness (M-cubed), we
             * place try/catch block for exceptions thrown on account of
             * missing hardware.
             */
            try {
                intakeLauncher = new IntakeLauncher();
                SmartDashboard.putString("IntakeLauncher Module", "initialized");
            } catch (Throwable e) {
                System.out.println("Disabling IntakeLauncher at runtime");
                SmartDashboard.putString("IntakeLauncher Module", "ERROR");
                ModuleManager.INTAKELAUNCHER_MODULE_ON = false;
            }
        } else {
            SmartDashboard.putString("IntakeLauncher Module", "disabled");
        }
        if (ModuleManager.SCALING_MODULE_ON) {
            scaler = new Scaler();
            SmartDashboard.putString("Scaling Module", "initialized");
        } else {
            SmartDashboard.putString("Scaling Module", "disabled");
        }
        if (ModuleManager.IMU_MODULE_ON) {
            // imu is initialized in RobotMap.init()
            SmartDashboard.putString("IMU Module", "initialized");
            SmartDashboard.putBoolean("IMU present", RobotMap.imu.isSensorPresent());
            updateIMUStatus();
        } else {
            SmartDashboard.putString("IMU Module", "disabled");
        }
        oi = new OI(); // 3. Construct OI after subsystems created
    }

    @Override
    public void disabledPeriodic() {
        Scheduler.getInstance().run();
    }

    /** Builds the autonomous command from the OI chooser selections and starts it. */
    @Override
    public void autonomousInit() {
        // schedule the autonomous command
        autonomousCommand = new AutoCommand1((Autonomous.Type) oi.barrierType.getSelected(),
                (Autonomous.Strat) oi.strategy.getSelected(),
                (Autonomous.Position) oi.startingFieldPosition.getSelected());
        // BUGFIX: the previous log printed startingFieldPosition twice (once
        // mislabeled as "Angle"); log each chooser selection exactly once.
        System.out.println("Autonomous selection: field position " + oi.startingFieldPosition.getSelected()
                + ", strategy " + oi.strategy.getSelected()
                + ", obstacle " + oi.barrierType.getSelected());
        if (this.autonomousCommand != null) {
            this.autonomousCommand.start();
        }
    }

    /**
     * This function is called periodically during autonomous
     */
    @Override
    public void autonomousPeriodic() {
        Scheduler.getInstance().run();
        periodicStatusUpdate();
    }

    @Override
    public void teleopInit() {
        // This makes sure that the autonomous stops running when
        // teleop starts running. If you want the autonomous to
        // continue until interrupted by another command, remove
        // this line or comment it out.
        System.out.println("entering teleop");
        // BUGFIX: guard on the module flag — intakeLauncher is null when the
        // module is disabled (or was disabled at runtime in robotInit), and the
        // previous unconditional dereference crashed teleopInit with an NPE.
        if (ModuleManager.INTAKELAUNCHER_MODULE_ON && intakeLauncher != null) {
            intakeLauncher.aimMotor.enableControl();
        }
        if (this.autonomousCommand != null) {
            this.autonomousCommand.cancel();
        }
    }

    /**
     * This function is called when the disabled button is hit. You can use it
     * to reset subsystems before shutting down.
     */
    @Override
    public void disabledInit() {
    }

    /**
     * This function is called periodically during operator control
     */
    @Override
    public void teleopPeriodic() {
        Scheduler.getInstance().run();
        periodicStatusUpdate();
    }

    /**
     * This function is called periodically during test mode
     */
    @Override
    public void testPeriodic() {
        LiveWindow.run();
        periodicStatusUpdate();
    }

    /** Pushes subsystem status to the SmartDashboard, rate-limited to 2 Hz. */
    public void periodicStatusUpdate() {
        double currentTime = Timer.getFPGATimestamp(); // seconds
        // only update the smart dashboard twice per second to prevent
        // network congestion.
        if (currentTime - this.lastTime > .5) {
            updateIMUStatus();
            updateLauncherStatus();
            updateDrivetrainStatus();
            this.lastTime = currentTime;
        }
    }

    /** Publishes IMU heading and calibration state when the IMU module is enabled. */
    public void updateIMUStatus() {
        if (ModuleManager.IMU_MODULE_ON) {
            BNO055.CalData calData = RobotMap.imu.getCalibration();
            SmartDashboard.putNumber("IMU heading", RobotMap.imu.getNormalizedHeading());
            // Pack the three 0-3 calibration values into one decimal readout:
            // 1abc where a=accel, b=gyro, c=mag.
            SmartDashboard.putNumber("IMU calibration",
                    (1000 + (calData.accel * 100) + calData.gyro * 10 + calData.mag));
        }
    }

    /** Publishes launcher sensor state when the IntakeLauncher module is enabled. */
    public void updateLauncherStatus() {
        if (ModuleManager.INTAKELAUNCHER_MODULE_ON) {
            SmartDashboard.putNumber("aimMotor Potentiometer: ", intakeLauncher.getPosition());
            SmartDashboard.putBoolean("Top Limit Switch: ", intakeLauncher.isLauncherAtTop());
            SmartDashboard.putBoolean("Bottom Limit Switch: ", intakeLauncher.isLauncherAtBottom());
            SmartDashboard.putBoolean("Boulder Limit Switch: ", intakeLauncher.boulderSwitch.get());
            SmartDashboard.putBoolean("Potentiometer might be broken", intakeLauncher.getIsPotentiometerScrewed());
        }
    }

    // Placeholder: no speed-toggle behavior implemented yet.
    public void toggleSpeed() {
    }

    // Placeholder: no drivetrain telemetry published yet.
    public void updateDrivetrainStatus() {
        if (ModuleManager.DRIVE_MODULE_ON) {
        }
    }
}
| |
/**
* Copyright (c) 2004-2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the University of California, Los Angeles nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package avrora.sim.clock;
import avrora.sim.*;
import cck.util.Util;
import java.util.HashMap;
import java.util.Iterator;
/**
* The <code>BarrierSynchronizer</code> class implements a global timer among multiple simulators by inserting
* periodic events into the queue of each simulator.
*
* @author Ben L. Titzer, Daniel Lee
*/
public class BarrierSynchronizer extends Synchronizer {

    /**
     * <code>period</code> is the number of cycles on a member local clock per cycle on the global clock. Some
     * re-coding must be done if microcontrollers running at difference speeds are to be accurately
     * simulated.
     */
    protected long period;

    // Maps SimulatorThread -> SynchEvent; keyed by the thread (see addNode()).
    protected final HashMap threadMap;
    protected final Simulator.Event action;
    // Shared monitor on which all barrier bookkeeping is synchronized.
    protected final Object condition;
    // Number of live threads that must meet/wait before the barrier releases.
    protected int goal;
    protected int meet_count;
    protected int wait_count;
    protected WaitSlot waitSlotList;

    /**
     * The constructor for the <code>BarrierSynchronizer</code> class creates a new synchronizer
     * with the specified period, that will fire the specified event each time all threads meet at
     * a synchronization point.
     * @param p the period in clock cycles which to synchronize the threads
     * @param a the event to fire each time all threads meet at a synchronization point
     */
    public BarrierSynchronizer(long p, Simulator.Event a) {
        period = p;
        action = a;
        threadMap = new HashMap();
        condition = new Object();
    }

    /**
     * The <code>SynchEvent</code> class represents an event that is inserted into the event
     * queue of each simulator at the same global time. When this event fires, it will stop the thread
     * running this simulator by waiting on a shared
     * condition variable. The last thread to fire the event will then notify the condition variable
     * which frees the other threads to run again in parallel.
     */
    protected class SynchEvent implements Simulator.Event {
        protected final SimulatorThread thread;
        protected final MainClock clock;
        protected boolean removed;
        protected boolean met;
        protected WaitSlot waitSlot;

        protected SynchEvent(SimulatorThread t) {
            thread = t;
            clock = t.getSimulator().getClock();
        }

        /**
         * The <code>fire()</code> method of this event is called by the individual event queues of each
         * simulator as they reach this point in time. The implementation of this method waits for all threads
         * to join.
         */
        public void fire() {
            try {
                synchronized (condition) {
                    // if we have been removed since the last synchronization, return!
                    if (removed) return;
                    met = true;
                    // increment the count of the number of threads that have entered
                    meet_count++;
                    // NOTE(review): bare wait() without a predicate loop — a spurious
                    // wakeup would release this thread early; confirm against the
                    // original Avrora design before hardening.
                    if (!signalOthers())
                        condition.wait();
                    met = false;
                }
                // if we have been removed since the last synchronization, don't insert synch event
                if (removed) return;
                // we have not been removed, we can reinsert the synch event
                clock.insertEvent(this, period);
            } catch (InterruptedException e) {
                throw Util.unexpected(e);
            }
        }
    }

    /**
     * The <code>signalOthers()</code> method is used to check whether the thread that has just arrived
     * should signal other threads to continue. Callers must hold the lock on <code>condition</code>.
     * @return true if this thread signalled the others to continue; false if this thread should stop
     * and wait for the other threads before continuing
     */
    protected boolean signalOthers() {
        // check for any waiters that need to be woken
        checkWaiters();
        // have we reached the goal?
        if (meet_count < goal) {
            return false;
        } else {
            // last thread to arrive sets the count to zero and notifies all other threads
            meet_count = 0;
            wait_count = 0;
            // perform the action that should be run while all threads are stopped (serial)
            action.fire();
            // release threads
            condition.notifyAll();
            return true;
        }
    }

    /**
     * The <code>start()</code> method starts the threads executing, and the synchronizer
     * will add whatever synchronization to their execution that is necessary to preserve
     * the global timing properties of simulation.
     */
    public synchronized void start() {
        Iterator threadIterator = threadMap.keySet().iterator();
        while (threadIterator.hasNext()) {
            SimulatorThread thread = (SimulatorThread) threadIterator.next();
            thread.start();
        }
    }

    /**
     * The <code>join()</code> method will block the caller until all of the threads in
     * this synchronization interval have terminated, either through <code>stop()</code>
     * being called, or terminating normally such as through a timeout.
     */
    public void join() throws InterruptedException {
        Iterator threadIterator = threadMap.keySet().iterator();
        while (threadIterator.hasNext()) {
            SimulatorThread thread = (SimulatorThread) threadIterator.next();
            thread.join();
        }
    }

    /**
     * The <code>stop()</code> method will terminate all the simulation threads. It is
     * not guaranteed to stop all the simulation threads at the same global time.
     */
    public synchronized void stop() {
        Iterator threadIterator = threadMap.keySet().iterator();
        while (threadIterator.hasNext()) {
            SimulatorThread thread = (SimulatorThread) threadIterator.next();
            thread.getSimulator().stop();
        }
    }

    /**
     * The <code>pause()</code> method temporarily pauses the simulation. The nodes are
     * not guaranteed to stop at the same global time. This method will return when all
     * threads in the simulation have been paused and will no longer make progress until
     * the <code>start()</code> method is called again.
     */
    public synchronized void pause() {
        throw Util.unimplemented();
    }

    /**
     * The <code>synch()</code> method will pause all of the nodes at the same global time.
     * This method can only be called when the simulation is paused. It will run all threads
     * forward until the global time specified and pause them.
     * @param globalTime the global time in clock cycles to run all threads ahead to
     */
    public synchronized void synch(long globalTime) {
        throw Util.unimplemented();
    }

    /**
     * The <code>addNode()</code> method adds a node to this synchronization group.
     * This method should only be called before the <code>start()</code> method is
     * called.
     * @param t the simulator representing the node to add to this group
     */
    public synchronized void addNode(Simulation.Node t) {
        // if we already have this thread, do nothing
        SimulatorThread st = t.getThread();
        if (threadMap.containsKey(st)) return;
        st.setSynchronizer(this);
        // create a new synchronization event for this thread's queue
        SynchEvent event = new SynchEvent(st);
        threadMap.put(st, event);
        // insert the synch event in the thread's queue
        event.clock.insertEvent(event, period);
        goal++;
    }

    /**
     * The <code>removeNode()</code> method removes a node from this synchronization
     * group, and wakes any nodes that might be waiting on it.
     * @param t the simulator thread to remove from this synchronization group
     */
    public synchronized void removeNode(Simulation.Node t) {
        // don't try to remove a thread that's not here!
        SimulatorThread st = t.getThread();
        if (!threadMap.containsKey(st)) return;
        synchronized (condition) {
            SynchEvent e = (SynchEvent) threadMap.get(st);
            e.removed = true; // just in case the thread is still running, don't let it synch
            if (e.met) meet_count--;
            if (stillWaiting(e.waitSlot)) {
                // if this wait slot hasn't happened yet, we need to decrement wait_count
                // and to decrement the number of waiters in that slot
                e.waitSlot.numWaiters--;
                wait_count--;
            }
            // BUGFIX: the map is keyed by SimulatorThread (see addNode), so the
            // entry must be removed with the thread as key. The previous code
            // called threadMap.remove(e) with the *event*, which silently removed
            // nothing and left the dead thread visible to start()/join()/stop().
            threadMap.remove(st);
            goal--;
            // signal any other threads (and wake waiters as necessary) but don't wait
            signalOthers();
        }
    }

    /**
     * The <code>waitForNeighbors()</code> method is called from within the execution
     * of a node when that node needs to wait for its neighbors to catch up to it
     * in execution time. The node will be blocked until the other nodes in other
     * threads catch up in global time.
     */
    public void waitForNeighbors(long time) {
        // get the current simulator thread
        SimulatorThread thread = (SimulatorThread) Thread.currentThread();
        SynchEvent event = (SynchEvent) threadMap.get(thread);
        // if the current thread is not in the synchronizer, do nothing
        if (event == null) return;
        WaitSlot w;
        synchronized (condition) {
            // allocate a wait slot for this thread
            w = insertWaiter(event, time);
            // check for other waiters and wake them if necessary
            WaitSlot h = checkWaiters();
            // if we were at the head and just woken up, we can just return
            if (w == h) return;
        }
        // falling through means that we are either not at the head
        // or that not all threads have performed a meet or a wait
        try {
            // we must grab the lock for this wait slot
            synchronized (w) {
                // check for intervening wakeup between dropping global lock and taking local lock
                if (w.shouldWait)
                    w.wait();
            }
        } catch (InterruptedException e) {
            throw Util.unexpected(e);
        }
    }

    /**
     * The <code>WaitSlot</code> class represents a slot in time where multiple threads are waiting
     * for others to catch up.
     */
    static class WaitSlot {
        final long time;
        int numWaiters;
        WaitSlot next;
        boolean shouldWait;

        WaitSlot(long t) {
            shouldWait = true;
            time = t;
        }
    }

    // Registers a waiter in the slot for the given time; caller holds the condition lock.
    protected WaitSlot insertWaiter(SynchEvent event, long time) {
        // get a wait slot for this waiter
        WaitSlot w = getWaitSlot(time);
        // now this thread is officially waiting
        wait_count++;
        // remember the wait slot this waiter is in
        event.waitSlot = w;
        // increment the number of waiters in this slot
        w.numWaiters++;
        return w;
    }

    // Finds or creates the slot for the given time, keeping the list sorted by time.
    private WaitSlot getWaitSlot(long time) {
        WaitSlot prev = waitSlotList;
        // search through the wait list from front to back
        for (WaitSlot slot = waitSlotList; ; slot = slot.next) {
            // if we are at the end of the list, or in-between links, create a new link
            if (slot == null || slot.time > time) {
                return insertAfter(prev, new WaitSlot(time));
            }
            // if we matched the time of some other waiter exactly
            if (slot.time == time) {
                return slot;
            }
            // keep track of previous link
            prev = slot;
        }
    }

    // Links w after prev (or at the head when prev is null) and returns it.
    private WaitSlot insertAfter(WaitSlot prev, WaitSlot w) {
        if (prev != null) {
            w.next = prev.next;
            prev.next = w;
        } else {
            waitSlotList = w;
        }
        return w;
    }

    /**
     * Wakes the earliest wait slot once every live thread has reached either a
     * meet or a wait. Caller holds the condition lock.
     * @return the slot that was woken, or null if no slot was ready
     */
    protected WaitSlot checkWaiters() {
        // have all threads reached either a meet or a wait?
        if (wait_count + meet_count < goal) return null;
        // are there any waiters at all?
        if (waitSlotList == null) return null;
        // there is a ready wait slot, wake those threads waiting on it
        WaitSlot h = waitSlotList;
        // move the wait list ahead to the next link
        waitSlotList = h.next;
        synchronized (h) {
            // notify the threads waiting on this wait slot
            h.shouldWait = false;
            h.notifyAll();
        }
        // reduce the wait count by the number of waiters in this slot
        wait_count -= h.numWaiters;
        return h;
    }

    // True if the given slot is still pending (linked in the wait list).
    protected boolean stillWaiting(WaitSlot w) {
        if (w == null) return false;
        for (WaitSlot h = waitSlotList; h != null; h = h.next)
            if (h == w) return true;
        return false;
    }
}
| |
package es.tid.pce.pcep.objects;
import java.util.LinkedList;
import es.tid.rsvp.objects.subobjects.IPv4AddressRROSubobject;
import es.tid.rsvp.objects.subobjects.IPv6AddressRROSubobject;
import es.tid.rsvp.objects.subobjects.RROSubobject;
import es.tid.rsvp.objects.subobjects.SubObjectValues;
/**
* PCEP Reported Route Object (RRO) (RFC 5440).
*
* From RFC 5440 Section 7.10. Reported Route Object
The RRO is exclusively carried within a PCReq message so as to report
the route followed by a TE LSP for which a reoptimization is desired.
The contents of this object are identical in encoding to the contents
of the Route Record Object defined in [RFC3209], [RFC3473], and
[RFC3477]. That is, the object is constructed from a series of sub-
objects. Any RSVP-TE RRO sub-object already defined or that could be
defined in the future for use in the RSVP-TE RRO is acceptable in
this object.
The meanings of all of the sub-objects and fields in this object are
identical to those defined for the RSVP-TE RRO.
PCEP RRO sub-object types correspond to RSVP-TE RRO sub-object types.
RRO Object-Class is 8.
RRO Object-Type is 1.
*
* From RFC 3209 4.4. Record Route Object
Routes can be recorded via the RECORD_ROUTE object (RRO).
Optionally, labels may also be recorded. The Record Route Class is
21. Currently one C_Type is defined, Type 1 Record Route. The
RECORD_ROUTE object has the following format:
Class = 21, C_Type = 1
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| |
// (Subobjects) //
| |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Subobjects
The contents of a RECORD_ROUTE object are a series of
variable-length data items called subobjects. The subobjects
are defined in section 4.4.1 below.
The RRO can be present in both RSVP Path and Resv messages. If a
Path message contains multiple RROs, only the first RRO is
meaningful. Subsequent RROs SHOULD be ignored and SHOULD NOT be
propagated. Similarly, if in a Resv message multiple RROs are
encountered following a FILTER_SPEC before another FILTER_SPEC is
encountered, only the first RRO is meaningful. Subsequent RROs
SHOULD be ignored and SHOULD NOT be propagated.
* @author ogondio
*
*/
public class ReportedRouteObject extends PCEPObject{

	// Ordered list of decoded RRO subobjects (IPv4/IPv6 addresses, ...).
	private LinkedList<RROSubobject> rroSubObjectList;

	/** Constructs an empty Reported Route Object (RRO). */
	public ReportedRouteObject(){
		this.setObjectClass(ObjectParameters.PCEP_OBJECT_CLASS_RRO);
		this.setOT(ObjectParameters.PCEP_OBJECT_TYPE_RRO);
		rroSubObjectList=new LinkedList<RROSubobject>();
	}

	/**
	 * Constructs a Reported Route Object (RRO) from a sequence of bytes
	 * @param bytes Sequence of bytes where the object is present
	 * @param offset Position at which the object starts
	 * @throws MalformedPCEPObjectException Exception when the object is malformed
	 */
	public ReportedRouteObject(byte[] bytes, int offset) throws MalformedPCEPObjectException {
		super(bytes,offset);
		decode();
	}

	/**
	 * Encode Reported Route Object: 4-byte PCEP header followed by the
	 * concatenated encodings of every subobject in list order.
	 */
	public void encode() {
		int len=4;//The four bytes of the header
		for (int k=0; k<rroSubObjectList.size();k=k+1){
			rroSubObjectList.get(k).encode();
			len=len+rroSubObjectList.get(k).getRrosolength();
		}
		ObjectLength=len;
		this.object_bytes=new byte[ObjectLength];
		encode_header();
		int pos=4;
		for (int k=0 ; k<rroSubObjectList.size(); k=k+1) {
			System.arraycopy(rroSubObjectList.get(k).getSubobject_bytes(),0, this.object_bytes, pos, rroSubObjectList.get(k).getRrosolength());
			pos=pos+rroSubObjectList.get(k).getRrosolength();
		}
	}

	/**
	 * Decode Reported Route Object: walks the subobject area after the 4-byte
	 * header, dispatching on each subobject's type. Unknown subobject types are
	 * skipped by their declared length.
	 * @throws MalformedPCEPObjectException if a subobject declares a
	 * non-positive length (which would otherwise loop forever)
	 */
	public void decode() throws MalformedPCEPObjectException {
		rroSubObjectList=new LinkedList<RROSubobject>();
		boolean fin=false;
		int offset=4;//Position of the next subobject
		if (ObjectLength==4){
			fin=true;
		}
		while (!fin) {
			int subojectclass=RROSubobject.getType(this.getObject_bytes(), offset);
			int subojectlength=RROSubobject.getLength(this.getObject_bytes(), offset);
			// BUGFIX: a zero (or negative) subobject length never advances the
			// offset, so malformed input used to spin this loop forever.
			if (subojectlength<=0){
				throw new MalformedPCEPObjectException();
			}
			switch(subojectclass) {
				case SubObjectValues.RRO_SUBOBJECT_IPV4ADDRESS:
					IPv4AddressRROSubobject sobjt4=new IPv4AddressRROSubobject(this.getObject_bytes(), offset);
					rroSubObjectList.add(sobjt4);
					break;
				case SubObjectValues.RRO_SUBOBJECT_IPV6ADDRESS:
					IPv6AddressRROSubobject sobjt6=new IPv6AddressRROSubobject(this.getObject_bytes(), offset);
					rroSubObjectList.add(sobjt6);
					break;
				//FIXME: support additional RRO subobject types (AS number,
				//unnumbered interface id, labels) as in the ERO decoder.
				default:
					//RRO Subobject Unknown: skip it by its declared length
					break;
			}
			offset=offset+subojectlength;
			if (offset>=ObjectLength){
				//No more subobjects in RRO
				fin=true;
			}
		}
	}

	public LinkedList<RROSubobject> getRroSubObjectList() {
		return rroSubObjectList;
	}

	public void setRroSubObjectList(LinkedList<RROSubobject> rroSubObjectList) {
		this.rroSubObjectList = rroSubObjectList;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = super.hashCode();
		result = prime
				* result
				+ ((rroSubObjectList == null) ? 0 : rroSubObjectList.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (!super.equals(obj))
			return false;
		if (getClass() != obj.getClass())
			return false;
		ReportedRouteObject other = (ReportedRouteObject) obj;
		if (rroSubObjectList == null) {
			if (other.rroSubObjectList != null)
				return false;
		} else if (!rroSubObjectList.equals(other.rroSubObjectList))
			return false;
		return true;
	}
}
| |
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.query.aggregator;
import org.apache.log4j.Logger;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.core.stream.output.StreamCallback;
import org.wso2.siddhi.core.util.EventPrinter;
public class OrAggregatorExtensionTestCase {
private static final Logger log = Logger.getLogger(OrAggregatorExtensionTestCase.class);
private volatile int count;
private volatile boolean eventArrived;
@BeforeMethod
public void init() {
count = 0;
eventArrived = false;
}
@Test
public void testOrAggregatorTrueOnlyScenario() throws InterruptedException {
log.info("OrAggregator TestCase 1");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "define stream cscStream(messageID string, isFraud bool, price double);";
String query = ("@info(name = 'query1') " +
"from cscStream#window.lengthBatch(3) " +
"select messageID, or(isFraud) as isValidTransaction " +
"group by messageID " +
"insert all events into outputStream;");
SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(inStreamDefinition +
query);
siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
@Override
public void receive(Event[] events) {
EventPrinter.print(events);
eventArrived = true;
for (Event event : events) {
count++;
switch (count) {
case 1:
AssertJUnit.assertEquals(true, event.getData(1));
break;
default:
AssertJUnit.fail();
}
}
}
});
InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cscStream");
siddhiAppRuntime.start();
inputHandler.send(new Object[]{"messageId1", true, 35.75});
inputHandler.send(new Object[]{"messageId1", true, 35.75});
inputHandler.send(new Object[]{"messageId1", true, 35.75});
Thread.sleep(2000);
AssertJUnit.assertEquals(1, count);
AssertJUnit.assertTrue(eventArrived);
siddhiAppRuntime.shutdown();
}
@Test(dependsOnMethods = "testOrAggregatorTrueOnlyScenario")
public void testOrAggregatorFalseOnlyScenario() throws InterruptedException {
log.info("OrAggregator TestCase 2");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "define stream cscStream(messageID string, isFraud bool, price double);";
String query = ("@info(name = 'query1') " +
"from cscStream#window.lengthBatch(4) " +
"select messageID, or(isFraud) as isValidTransaction " +
"group by messageID " +
"insert all events into outputStream;");
SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(inStreamDefinition +
query);
siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
@Override
public void receive(Event[] events) {
EventPrinter.print(events);
eventArrived = true;
for (Event event : events) {
count++;
switch (count) {
case 1:
AssertJUnit.assertEquals(false, event.getData(1));
break;
default:
AssertJUnit.fail();
}
}
}
});
InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cscStream");
siddhiAppRuntime.start();
inputHandler.send(new Object[]{"messageId1", false, 35.75});
inputHandler.send(new Object[]{"messageId1", false, 35.75});
inputHandler.send(new Object[]{"messageId1", false, 35.75});
inputHandler.send(new Object[]{"messageId1", false, 35.75});
Thread.sleep(2000);
AssertJUnit.assertEquals(1, count);
AssertJUnit.assertTrue(eventArrived);
siddhiAppRuntime.shutdown();
}
@Test(dependsOnMethods = "testOrAggregatorFalseOnlyScenario")
public void testOrAggregatorTrueFalseScenario() throws InterruptedException {
log.info("OrAggregator TestCase 3");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "define stream cscStream(messageID string, isFraud bool, price double);";
String query = ("@info(name = 'query1') " +
"from cscStream#window.lengthBatch(4) " +
"select messageID, or(isFraud) as isValidTransaction " +
"group by messageID " +
"insert all events into outputStream;");
SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(inStreamDefinition +
query);
siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
@Override
public void receive(Event[] events) {
EventPrinter.print(events);
eventArrived = true;
for (Event event : events) {
count++;
switch (count) {
case 1:
AssertJUnit.assertEquals(true, event.getData(1));
break;
default:
AssertJUnit.fail();
}
}
}
});
InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cscStream");
siddhiAppRuntime.start();
inputHandler.send(new Object[]{"messageId1", false, 35.75});
inputHandler.send(new Object[]{"messageId1", true, 35.75});
inputHandler.send(new Object[]{"messageId1", false, 35.75});
inputHandler.send(new Object[]{"messageId1", true, 35.75});
Thread.sleep(2000);
Thread.sleep(300);
AssertJUnit.assertEquals(1, count);
AssertJUnit.assertTrue(eventArrived);
siddhiAppRuntime.shutdown();
}
/**
 * OrAggregator TestCase 4: with a batch length of 2 and four events, two batch
 * results are expected — first batch (false,false) -> false, second batch
 * (true,true) -> true.
 */
@Test(dependsOnMethods = "testOrAggregatorTrueFalseScenario")
public void testORAggregatorMoreEventsBatchScenario() throws InterruptedException {
    // Fixed: previous message wrongly said "AndAggregator" although this test
    // exercises the OR aggregator (see or(isFraud) in the query below).
    log.info("OrAggregator TestCase 4");
    SiddhiManager siddhiManager = new SiddhiManager();
    String inStreamDefinition = "define stream cscStream(messageID string, isFraud bool, price double);";
    String query = ("@info(name = 'query1') " +
            "from cscStream#window.lengthBatch(2) " +
            "select messageID, or(isFraud) as isValidTransaction " +
            "group by messageID " +
            "insert all events into outputStream;");
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(inStreamDefinition +
            query);
    siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            eventArrived = true;
            for (Event event : events) {
                count++;
                switch (count) {
                    case 1:
                        // First batch contains only false values.
                        AssertJUnit.assertEquals(false, event.getData(1));
                        break;
                    case 2:
                        // Second batch contains true values.
                        AssertJUnit.assertEquals(true, event.getData(1));
                        break;
                    default:
                        AssertJUnit.fail();
                }
            }
        }
    });
    InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cscStream");
    siddhiAppRuntime.start();
    inputHandler.send(new Object[]{"messageId1", false, 35.75});
    inputHandler.send(new Object[]{"messageId1", false, 35.75});
    inputHandler.send(new Object[]{"messageId1", true, 35.75});
    inputHandler.send(new Object[]{"messageId1", true, 35.75});
    Thread.sleep(2000);
    AssertJUnit.assertEquals(2, count);
    AssertJUnit.assertTrue(eventArrived);
    siddhiAppRuntime.shutdown();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.AbstractQueryTestCase;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.equalTo;
/**
 * Base test case for {@link AggregatorBuilder} implementations. Each concrete subclass
 * supplies a randomized builder via {@link #createTestAggregatorBuilder()}, and this class
 * round-trips it through XContent parsing ({@link #testFromXContent()}), stream
 * serialization ({@link #testSerialization()}) and the equals/hashCode contract
 * ({@link #testEqualsAndHashcode()}).
 *
 * <p>All fixture state is static: it is built once per test class in {@link #init()}
 * and released in {@link #afterClass()}.
 */
public abstract class BaseAggregationTestCase<AB extends AggregatorBuilder<AB>> extends ESTestCase {
    // Field names subclasses may assume are available in the test index.
    protected static final String STRING_FIELD_NAME = "mapped_string";
    protected static final String INT_FIELD_NAME = "mapped_int";
    protected static final String DOUBLE_FIELD_NAME = "mapped_double";
    protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
    protected static final String DATE_FIELD_NAME = "mapped_date";
    protected static final String OBJECT_FIELD_NAME = "mapped_object";
    protected static final String[] mappedFieldNames = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME,
            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME};
    // Guice injector holding the parser/registry fixtures; created in init(), nulled in afterClass().
    private static Injector injector;
    private static Index index;
    // Randomly generated type names shared by all tests of the subclass (may be empty).
    private static String[] currentTypes;
    protected static String[] getCurrentTypes() {
        return currentTypes;
    }
    private static NamedWriteableRegistry namedWriteableRegistry;
    protected static AggregatorParsers aggParsers;
    protected static IndicesQueriesRegistry queriesRegistry;
    protected static ParseFieldMatcher parseFieldMatcher;
    /** Subclasses return a randomized builder instance; every test starts from one of these. */
    protected abstract AB createTestAggregatorBuilder();
    /**
     * Setup for the whole base test class.
     */
    @BeforeClass
    public static void init() throws IOException {
        // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually.
        Version version = randomBoolean() ? Version.CURRENT
                : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
        Settings settings = Settings.builder()
                .put("node.name", AbstractQueryTestCase.class.toString())
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
                .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
                .build();
        namedWriteableRegistry = new NamedWriteableRegistry();
        index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
        Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        final ThreadPool threadPool = new ThreadPool(settings);
        final ClusterService clusterService = createClusterService(threadPool);
        // Publish a cluster state containing the single-shard, zero-replica test index.
        setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
                .put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
        SettingsModule settingsModule = new SettingsModule(settings);
        settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
        // Custom ScriptModule that binds a MockScriptEngine instead of real script engines.
        ScriptModule scriptModule = new ScriptModule() {
            @Override
            protected void configure() {
                Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
                        // no file watching, so we don't need a
                        // ResourceWatcherService
                        .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
                MockScriptEngine mockScriptEngine = new MockScriptEngine();
                Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
                multibinder.addBinding().toInstance(mockScriptEngine);
                Set<ScriptEngineService> engines = new HashSet<>();
                engines.add(mockScriptEngine);
                List<ScriptContext.Plugin> customContexts = new ArrayList<>();
                ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections
                        .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.TYPES)));
                bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
                ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
                bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
                ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
                bind(ScriptSettings.class).toInstance(scriptSettings);
                try {
                    // null ResourceWatcherService: auto-reload is disabled above.
                    ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null,
                            scriptEngineRegistry, scriptContextRegistry, scriptSettings);
                    bind(ScriptService.class).toInstance(scriptService);
                } catch (IOException e) {
                    throw new IllegalStateException("error while binding ScriptService", e);
                }
            }
        };
        scriptModule.prepareSettings(settingsModule);
        injector = new ModulesBuilder().add(
                new EnvironmentModule(new Environment(settings)),
                settingsModule,
                new ThreadPoolModule(threadPool),
                scriptModule,
                new IndicesModule() {
                    @Override
                    protected void configure() {
                        // Only the mapper extension bindings are needed here.
                        bindMapperExtension();
                    }
                }, new SearchModule(settings, namedWriteableRegistry) {
                    @Override
                    protected void configureSearch() {
                        // Skip me
                    }
                },
                new IndexSettingsModule(index, settings),
                new AbstractModule() {
                    @Override
                    protected void configure() {
                        bind(ClusterService.class).toProvider(Providers.of(clusterService));
                        bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
                        bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
                    }
                }
        ).createInjector();
        aggParsers = injector.getInstance(AggregatorParsers.class);
        //create some random type with some default field, those types will stick around for all of the subclasses
        currentTypes = new String[randomIntBetween(0, 5)];
        for (int i = 0; i < currentTypes.length; i++) {
            String type = randomAsciiOfLengthBetween(1, 10);
            currentTypes[i] = type;
        }
        queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
        parseFieldMatcher = ParseFieldMatcher.STRICT;
    }
    /** Releases the injector, thread pool and all static fixtures created in {@link #init()}. */
    @AfterClass
    public static void afterClass() throws Exception {
        injector.getInstance(ClusterService.class).close();
        terminate(injector.getInstance(ThreadPool.class));
        injector = null;
        index = null;
        aggParsers = null;
        currentTypes = null;
        namedWriteableRegistry = null;
    }
    /**
     * Generic test that creates new AggregatorFactory from the test
     * AggregatorFactory and checks both for equality and asserts equality on
     * the two queries.
     */
    public void testFromXContent() throws IOException {
        AB testAgg = createTestAggregatorBuilder();
        AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder().addAggregator(testAgg);
        XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
        if (randomBoolean()) {
            builder.prettyPrint();
        }
        factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
        // Shuffle field order so the parser cannot rely on a fixed ordering.
        XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet());
        XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes());
        QueryParseContext parseContext = new QueryParseContext(queriesRegistry);
        parseContext.reset(parser);
        parseContext.parseFieldMatcher(parseFieldMatcher);
        // Walk the expected token structure: { "<agg name>": { "<agg type>": { ... } } }
        assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
        assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
        assertEquals(testAgg.name, parser.currentName());
        assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
        assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
        assertEquals(testAgg.type.name(), parser.currentName());
        assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
        AggregatorBuilder<?> newAgg = aggParsers.parser(testAgg.getType(), ParseFieldMatcher.STRICT).parse(testAgg.name, parseContext);
        // The parser must have consumed exactly the aggregation object and nothing beyond it.
        assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
        assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
        assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
        assertNull(parser.nextToken());
        assertNotNull(newAgg);
        assertNotSame(newAgg, testAgg);
        assertEquals(testAgg, newAgg);
        assertEquals(testAgg.hashCode(), newAgg.hashCode());
    }
    /**
     * Test serialization and deserialization of the test AggregatorFactory.
     */
    public void testSerialization() throws IOException {
        AB testAgg = createTestAggregatorBuilder();
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            output.writeAggregatorBuilder(testAgg);
            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
                AggregatorBuilder deserialized = in.readAggregatorBuilder();
                // Round-tripped builder must be an equal but distinct instance.
                assertEquals(testAgg, deserialized);
                assertEquals(testAgg.hashCode(), deserialized.hashCode());
                assertNotSame(testAgg, deserialized);
            }
        }
    }
    /** Checks the equals/hashCode contract (reflexive, symmetric, transitive) using stream copies. */
    public void testEqualsAndHashcode() throws IOException {
        AB firstAgg = createTestAggregatorBuilder();
        assertFalse("aggregation is equal to null", firstAgg.equals(null));
        assertFalse("aggregation is equal to incompatible type", firstAgg.equals(""));
        assertTrue("aggregation is not equal to self", firstAgg.equals(firstAgg));
        assertThat("same aggregation's hashcode returns different values if called multiple times", firstAgg.hashCode(),
                equalTo(firstAgg.hashCode()));
        AB secondQuery = copyAggregation(firstAgg);
        assertTrue("aggregation is not equal to self", secondQuery.equals(secondQuery));
        assertTrue("aggregation is not equal to its copy", firstAgg.equals(secondQuery));
        assertTrue("equals is not symmetric", secondQuery.equals(firstAgg));
        assertThat("aggregation copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(firstAgg.hashCode()));
        AB thirdQuery = copyAggregation(secondQuery);
        assertTrue("aggregation is not equal to self", thirdQuery.equals(thirdQuery));
        assertTrue("aggregation is not equal to its copy", secondQuery.equals(thirdQuery));
        assertThat("aggregation copy's hashcode is different from original hashcode", secondQuery.hashCode(),
                equalTo(thirdQuery.hashCode()));
        assertTrue("equals is not transitive", firstAgg.equals(thirdQuery));
        assertThat("aggregation copy's hashcode is different from original hashcode", firstAgg.hashCode(), equalTo(thirdQuery.hashCode()));
        assertTrue("equals is not symmetric", thirdQuery.equals(secondQuery));
        assertTrue("equals is not symmetric", thirdQuery.equals(firstAgg));
    }
    // we use the streaming infra to create a copy of the query provided as
    // argument
    private AB copyAggregation(AB agg) throws IOException {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            agg.writeTo(output);
            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
                @SuppressWarnings("unchecked")
                AB secondAgg = (AB) namedWriteableRegistry.getReader(AggregatorBuilder.class, agg.getWriteableName()).read(in);
                return secondAgg;
            }
        }
    }
    /** Returns a random subset of the generated types, the MetaData.ALL wildcard, or no types. */
    protected String[] getRandomTypes() {
        String[] types;
        if (currentTypes.length > 0 && randomBoolean()) {
            int numberOfQueryTypes = randomIntBetween(1, currentTypes.length);
            types = new String[numberOfQueryTypes];
            for (int i = 0; i < numberOfQueryTypes; i++) {
                types[i] = randomFrom(currentTypes);
            }
        } else {
            if (randomBoolean()) {
                types = new String[]{MetaData.ALL};
            } else {
                types = new String[0];
            }
        }
        return types;
    }
    /** Picks one of the mapped field names usable in numeric contexts at random. */
    public String randomNumericField() {
        int randomInt = randomInt(3);
        switch (randomInt) {
            case 0:
                return DATE_FIELD_NAME;
            case 1:
                return DOUBLE_FIELD_NAME;
            case 2:
            default:
                return INT_FIELD_NAME;
        }
    }
}
| |
package com.rehivetech.beeeon.gui.fragment;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import com.avast.android.dialogs.fragment.SimpleDialogFragment;
import com.github.mikephil.charting.charts.BarChart;
import com.github.mikephil.charting.charts.BarLineChartBase;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.components.XAxis;
import com.github.mikephil.charting.components.YAxis;
import com.github.mikephil.charting.data.BarData;
import com.github.mikephil.charting.data.BarDataSet;
import com.github.mikephil.charting.data.BarEntry;
import com.github.mikephil.charting.data.DataSet;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.data.LineDataSet;
import com.rehivetech.beeeon.R;
import com.rehivetech.beeeon.controller.Controller;
import com.rehivetech.beeeon.gcm.analytics.GoogleAnalyticsManager;
import com.rehivetech.beeeon.gui.activity.ModuleGraphActivity;
import com.rehivetech.beeeon.gui.view.GraphSettings;
import com.rehivetech.beeeon.gui.view.ModuleGraphMarkerView;
import com.rehivetech.beeeon.household.device.Device;
import com.rehivetech.beeeon.household.device.Module;
import com.rehivetech.beeeon.household.device.ModuleLog;
import com.rehivetech.beeeon.household.device.values.BaseValue;
import com.rehivetech.beeeon.household.device.values.EnumValue;
import com.rehivetech.beeeon.persistence.GraphSettingsPersistence;
import com.rehivetech.beeeon.util.ChartHelper;
import com.rehivetech.beeeon.util.TimeHelper;
import com.rehivetech.beeeon.util.UnitsHelper;
import com.rehivetech.beeeon.util.Utils;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import timber.log.Timber;
/**
 * Fragment that renders the history (min / avg / max series) of a single device module
 * as a chart: a {@link BarChart} for enum-valued modules, a {@link LineChart} otherwise.
 * Check-box and slider state is persisted per gate/module/range through
 * {@link GraphSettingsPersistence}; chart data is loaded asynchronously via
 * {@link ChartHelper#loadChartData}.
 *
 * @author martin on 18.8.2015.
 */
public class ModuleGraphFragment extends BaseApplicationFragment implements ModuleGraphActivity.ChartSettingListener {
    // Keys for the fragment argument bundle.
    private static final String KEY_GATE_ID = "gate_id";
    private static final String KEY_DEVICE_ID = "device_id";
    private static final String KEY_MODULE_ID = "module_id";
    private static final String KEY_DATA_RANGE = "data_range";
    private String mGateId;
    private String mDeviceId;
    private String mModuleId;
    // Time range shown by the chart (ChartHelper.RANGE_* constants).
    private @ChartHelper.DataRange int mRange;
    private ModuleGraphActivity mActivity;
    @BindView(R.id.module_graph_layout)
    RelativeLayout mRootLayout;
    // May be null when the user is not logged in (see onCreate) — TODO confirm callers tolerate null.
    private UnitsHelper mUnitsHelper;
    private TimeHelper mTimeHelper;
    private DateTimeFormatter mFormatter;
    // Either a BarChart or a LineChart; created in addGraphView().
    private BarLineChartBase mChart;
    private DataSet mDataSetMin;
    private DataSet mDataSetAvg;
    private DataSet mDataSetMax;
    // Y-axis legend text filled by ChartHelper and shown in the legend dialog.
    private StringBuffer mYlabels = new StringBuffer();
    // Persisted UI state: which series are enabled and the granularity slider position.
    private boolean mCheckboxMin;
    private boolean mCheckboxAvg;
    private boolean mCheckboxMax;
    private int mSliderProgress;
    // Receives asynchronously loaded data sets and attaches them to the current chart.
    private ChartHelper.ChartLoadListener mChartLoadCallback = new ChartHelper.ChartLoadListener() {
        @Override
        public void onChartLoaded(DataSet dataSet, List<String> xValues) {
            if (dataSet instanceof BarDataSet) {
                // Reuse the chart's existing data object if an earlier series was already added.
                BarData data = ((BarChart) mChart).getBarData() == null ? new BarData(xValues) : ((BarChart) mChart).getBarData();
                // Fewer than 2 points on an otherwise empty chart: show the "no data" text instead.
                if (dataSet.getYVals().size() < 2 && ((BarChart) mChart).getBarData() == null) {
                    mChart.setNoDataText(getString(R.string.chart_helper_chart_no_data));
                    mChart.invalidate();
                    return;
                }
                data.addDataSet((BarDataSet) dataSet);
                ((BarChart) mChart).setData(data);
            } else {
                LineData data = ((LineChart) mChart).getLineData() == null ? new LineData(xValues) : ((LineChart) mChart).getLineData();
                if (dataSet.getYVals().size() < 2 && ((LineChart) mChart).getLineData() == null) {
                    mChart.setNoDataText(getString(R.string.chart_helper_chart_no_data));
                    mChart.invalidate();
                    return;
                }
                data.addDataSet((LineDataSet) dataSet);
                ((LineChart) mChart).setData(data);
            }
            // Limit circle markers / value labels so dense data stays readable.
            ChartHelper.setDataSetCircles(dataSet, mChart.getViewPortHandler(), mChart.getData().getYValCount(), getResources().getInteger(R.integer.graph_number_circles));
            ChartHelper.setDrawDataSetValues(dataSet, mChart.getViewPortHandler(), mChart.getXValCount(), getResources().getInteger(R.integer.graph_values_count));
            mChart.invalidate();
            mActivity.setRequestRedrawActiveFragmentCalled(false);
            Timber.d("dataSet added: %s", dataSet.getLabel());
        }
    };
    /** Factory method; packs the module identifiers and time range into the argument bundle. */
    public static ModuleGraphFragment newInstance(String gateId, String deviceId, String moduleId, @ChartHelper.DataRange int range) {
        Bundle args = new Bundle();
        args.putString(KEY_GATE_ID, gateId);
        args.putString(KEY_DEVICE_ID, deviceId);
        args.putString(KEY_MODULE_ID, moduleId);
        args.putInt(KEY_DATA_RANGE, range);
        ModuleGraphFragment fragment = new ModuleGraphFragment();
        fragment.setArguments(args);
        return fragment;
    }
    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        try {
            // This fragment can only live inside a ModuleGraphActivity.
            mActivity = (ModuleGraphActivity) getActivity();
        } catch (ClassCastException e) {
            throw new ClassCastException("Must be instance of ModuleGraphActivity");
        }
    }
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Bundle args = getArguments();
        mGateId = args.getString(KEY_GATE_ID);
        mDeviceId = args.getString(KEY_DEVICE_ID);
        mModuleId = args.getString(KEY_MODULE_ID);
        //noinspection ResourceType
        mRange = args.getInt(KEY_DATA_RANGE);
        // UserSettings can be null when user is not logged in!
        Controller controller = Controller.getInstance(mActivity);
        SharedPreferences prefs = controller.getUserSettings();
        mUnitsHelper = Utils.getUnitsHelper(prefs, mActivity);
        mTimeHelper = Utils.getTimeHelper(prefs);
        // TODO do as Utils.getTimeHelper()
        // Fall back to the default time zone formatter when no TimeHelper is available.
        mFormatter = mTimeHelper != null
                ? mTimeHelper.getFormatter(ChartHelper.GRAPH_DATE_TIME_FORMAT, controller.getGatesModel().getGate(mGateId))
                : DateTimeFormat.forPattern(ChartHelper.GRAPH_DATE_TIME_FORMAT).withZone(DateTimeZone.getDefault());
        // Restore persisted checkbox/slider state for this gate+module+range combination.
        GraphSettingsPersistence persistence = controller.getGraphSettingsPersistence(mGateId, Utils.getAbsoluteModuleId(mDeviceId, mModuleId), mRange);
        mCheckboxMin = persistence.restoreCheckboxValue(GraphSettingsPersistence.CHECKBOX_MIN, false);
        mCheckboxAvg = persistence.restoreCheckboxValue(GraphSettingsPersistence.CHECKBOX_AVG, true);
        mCheckboxMax = persistence.restoreCheckboxValue(GraphSettingsPersistence.CHECKBOX_MAX, false);
        mSliderProgress = persistence.restoreSliderValue(0);
    }
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_module_graph, container, false);
        mUnbinder = ButterKnife.bind(this, view);
        // Legend button opens a simple dialog listing the y-axis labels.
        mActivity.setShowLegendButtonOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                SimpleDialogFragment.createBuilder(mActivity, getFragmentManager())
                        .setTitle(getString(R.string.chart_helper_chart_y_axis))
                        .setMessage(mYlabels.toString())
                        .setNeutralButtonText("close")
                        .show();
            }
        });
        return view;
    }
    @Override
    public void onResume() {
        super.onResume();
        GoogleAnalyticsManager.getInstance().logScreen(GoogleAnalyticsManager.MODULE_GRAPH_DETAIL_SCREEN);
        // Close the screen when the device disappeared while we were paused.
        Device device = Controller.getInstance(mActivity).getDevicesModel().getDevice(mGateId, mDeviceId);
        if (device == null) {
            Timber.e("Device #%s does not exists", mDeviceId);
            mActivity.finish();
        }
    }
    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        addGraphView();
    }
    @Override
    public void onStop() {
        super.onStop();
        // Persist the current checkbox/slider state so it survives fragment recreation.
        GraphSettingsPersistence persistence = Controller.getInstance(mActivity).getGraphSettingsPersistence(mGateId, Utils.getAbsoluteModuleId(mDeviceId, mModuleId), mRange);
        persistence.saveCheckBoxesStates(mCheckboxMin, mCheckboxAvg, mCheckboxMax);
        persistence.saveSliderValue(mSliderProgress);
    }
    /**
     * Builds the chart view (bar chart for enum values, line chart otherwise), adds it to
     * the root layout, configures the axes and creates the three (min/avg/max) data sets.
     */
    private void addGraphView() {
        Controller controller = Controller.getInstance(mActivity);
        Module module = controller.getDevicesModel().getDevice(mGateId, mDeviceId).getModuleById(mModuleId);
        BaseValue baseValue = module.getValue();
        // Enum-valued modules are rendered as a bar chart.
        boolean barchart = baseValue instanceof EnumValue;
        String deviceName = module.getDevice().getName(mActivity);
        String moduleName = module.getName(mActivity);
        //set chart
        String unit = mUnitsHelper.getStringUnit(baseValue);
        mYlabels = new StringBuffer();
        if (barchart) {
            mChart = new BarChart(mActivity);
            ChartHelper.prepareChart(mChart, mActivity, baseValue, mYlabels, null, false, true);
        } else {
            mChart = new LineChart(mActivity);
            ModuleGraphMarkerView markerView = new ModuleGraphMarkerView(mActivity, R.layout.util_chart_module_markerview, (LineChart) mChart, module);
            ChartHelper.prepareChart(mChart, mActivity, baseValue, mYlabels, markerView, false, true);
        }
        mChart.setLayoutParams(new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
        mRootLayout.addView(mChart);
        // Software rendering on Honeycomb+ — presumably to avoid hardware-layer chart glitches; TODO confirm.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mChart.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
        }
        // prepare axis bottom
        ChartHelper.prepareXAxis(mActivity, mChart.getXAxis(), null, XAxis.XAxisPosition.BOTTOM, false);
        //prepare axis left
        ChartHelper.prepareYAxis(mActivity, module.getValue(), mChart.getAxisLeft(), null, YAxis.YAxisLabelPosition.OUTSIDE_CHART, true, false, 5);
        //disable right axis
        mChart.getAxisRight().setEnabled(false);
        mChart.setDrawBorders(false);
        String dataSetMinName = String.format("%s - %s min", deviceName, moduleName);
        String dataSetAvgName = String.format("%s - %s avg", deviceName, moduleName);
        String dataSetMaxName = String.format("%s - %s max", deviceName, moduleName);
        if (barchart) {
            mDataSetMin = new BarDataSet(new ArrayList<BarEntry>(), dataSetMinName);
            mDataSetAvg = new BarDataSet(new ArrayList<BarEntry>(), dataSetAvgName);
            mDataSetMax = new BarDataSet(new ArrayList<BarEntry>(), dataSetMaxName);
            ((BarDataSet) mDataSetMin).setBarSpacePercent(0);
            ((BarDataSet) mDataSetAvg).setBarSpacePercent(0);
            ((BarDataSet) mDataSetMax).setBarSpacePercent(0);
        } else {
            mDataSetMin = new LineDataSet(new ArrayList<com.github.mikephil.charting.data.Entry>(), dataSetMinName);
            mDataSetAvg = new LineDataSet(new ArrayList<com.github.mikephil.charting.data.Entry>(), dataSetAvgName);
            mDataSetMax = new LineDataSet(new ArrayList<com.github.mikephil.charting.data.Entry>(), dataSetMaxName);
            // mShowLegendButton.setVisibility(View.GONE);
        }
        //set dataset style
        ChartHelper.prepareDataSet(mActivity, baseValue, mDataSetAvg, barchart, true, Utils.getGraphColor(mActivity, 0), ContextCompat.getColor(mActivity, R.color.beeeon_accent), true);
        ChartHelper.prepareDataSet(mActivity, baseValue, mDataSetMin, barchart, true, Utils.getGraphColor(mActivity, 1), ContextCompat.getColor(mActivity, R.color.beeeon_accent), true);
        ChartHelper.prepareDataSet(mActivity, baseValue, mDataSetMax, barchart, true, Utils.getGraphColor(mActivity, 2), ContextCompat.getColor(mActivity, R.color.beeeon_accent), true);
    }
    /**
     * Clears the chart and reloads every enabled series with the new granularity.
     * Results arrive asynchronously through {@link #mChartLoadCallback}.
     */
    @Override
    public void onChartSettingChanged(boolean drawMin, boolean drawAvg, boolean drawMax, ModuleLog.DataInterval dataGranularity, int sliderProgress) {
        mCheckboxMin = drawMin;
        mCheckboxAvg = drawAvg;
        mCheckboxMax = drawMax;
        mSliderProgress = sliderProgress;
        mChart.clear();
        mChart.setNoDataText(getString(R.string.chart_helper_chart_loading));
        mDataSetMin.clear();
        mDataSetAvg.clear();
        mDataSetMax.clear();
        if (drawMax) {
            ChartHelper.loadChartData(mActivity, mDataSetMax, mGateId, mDeviceId, mModuleId, mRange,
                    ModuleLog.DataType.MAXIMUM, dataGranularity, mChartLoadCallback, mFormatter);
        }
        if (drawAvg) {
            ChartHelper.loadChartData(mActivity, mDataSetAvg, mGateId, mDeviceId, mModuleId, mRange,
                    ModuleLog.DataType.AVERAGE, dataGranularity, mChartLoadCallback, mFormatter);
        }
        if (drawMin) {
            ChartHelper.loadChartData(mActivity, mDataSetMin, mGateId, mDeviceId, mModuleId, mRange,
                    ModuleLog.DataType.MINIMUM, dataGranularity, mChartLoadCallback, mFormatter);
        }
    }
    /** Applies the persisted settings to the settings view and triggers a data reload. */
    @Override
    public GraphSettings onFragmentChange(GraphSettings settings) {
        initGraphSetting(settings);
        ModuleLog.DataInterval interval = settings.getIntervalByProgress();
        onChartSettingChanged(mCheckboxMin, mCheckboxAvg, mCheckboxMax, interval, mSliderProgress);
        return settings;
    }
    /** Sets the slider bounds for the current time range and restores the saved UI state. */
    private void initGraphSetting(GraphSettings settings) {
        int sliderMin = 0;
        int sliderMax = 8;
        // Each range allows a different window of granularity steps.
        switch (mRange) {
            case ChartHelper.RANGE_HOUR:
                sliderMax = 5;
                break;
            case ChartHelper.RANGE_DAY:
                sliderMax = 6;
                break;
            case ChartHelper.RANGE_WEEK:
                sliderMin = 2;
                sliderMax = 7;
                break;
            case ChartHelper.RANGE_MONTH:
                sliderMin = 5;
                sliderMax = 8;
                break;
        }
        settings.initGraphSettings(mCheckboxMin, mCheckboxAvg, mCheckboxMax, sliderMin, sliderMax, mSliderProgress);
    }
}
| |
package Jetstorm.Enterprise.Entities;
import java.awt.Rectangle;
import java.util.Random;
import Jetstorm.Enterprise.Handlers.Animation;
import Jetstorm.Enterprise.Main.WindowClass;
import Jetstorm.Enterprise.TileMap.Tile;
import Jetstorm.Enterprise.TileMap.TileMap;
public class MapObject {
protected TileMap tileMap;
protected int tileSize;
protected double xmap;
protected double ymap;
protected double x;
protected double y;
protected double dx;
protected double dy;
protected double originX;
protected double originY;
protected int width;
protected int height;
protected int cwidth;
protected int cheight;
protected int currRow;
protected int currCol;
protected double xdest;
protected double ydest;
protected double xtemp;
protected double ytemp;
protected boolean topLeft;
protected boolean topRight;
protected boolean bottomLeft;
protected boolean bottomRight;
protected boolean left;
protected boolean right;
protected boolean up;
protected boolean down;
protected double moveWaitTime;
protected double aiWaitTime;
Random ran = new Random();
protected Animation animation;
protected int currentAction;
protected int previouseAction;
/**
 * Creates a map object bound to the given tile map; caches the map's tile size
 * and allocates an empty animation.
 */
public MapObject(TileMap tm) {
    this.tileMap = tm;
    this.animation = new Animation();
    this.tileSize = tm.getTileSize();
}
/** Returns true when this object's collision rectangle overlaps the other object's. */
public boolean intersects(MapObject o) {
    return getRectangle().intersects(o.getRectangle());
}
/** Returns true when the other object's collision rectangle lies entirely inside this one's. */
public boolean contains(MapObject o) {
    return getRectangle().contains(o.getRectangle());
}
/**
 * Collision rectangle anchored at (x - cwidth, y - cheight) with size cwidth x cheight.
 * NOTE(review): calculateCorners() treats (x, y) as the object's center and offsets by
 * cwidth/2 / cheight/2, while this method offsets by the full width/height — confirm
 * whether this asymmetry is intentional.
 */
public Rectangle getRectangle() {
    return new Rectangle((int) x - cwidth, (int) y - cheight, cwidth, cheight);
}
/**
 * Samples the tile map at the four corners of the collision box centered on (x, y)
 * and records whether each corner touches a BLOCKED tile in the topLeft/topRight/
 * bottomLeft/bottomRight flags. Out-of-bounds positions clear all four flags.
 */
public void calculateCorners(double x, double y) {
    // Integer half-extents, matching the original truncating arithmetic.
    int halfW = cwidth / 2;
    int halfH = cheight / 2;
    int leftTile = (int) (x - halfW) / tileSize;
    int rightTile = (int) (x + halfW - 1) / tileSize;
    int topTile = (int) (y - halfH) / tileSize;
    int bottomTile = (int) (y + halfH - 1) / tileSize;
    if (topTile < 0 || bottomTile >= tileMap.getNumRows() || leftTile < 0 || rightTile >= tileMap.getNumCols()) {
        // Outside the map: treat every corner as free.
        topLeft = false;
        topRight = false;
        bottomLeft = false;
        bottomRight = false;
        return;
    }
    topLeft = tileMap.getType(topTile, leftTile) == Tile.BLOCKED;
    topRight = tileMap.getType(topTile, rightTile) == Tile.BLOCKED;
    bottomLeft = tileMap.getType(bottomTile, leftTile) == Tile.BLOCKED;
    bottomRight = tileMap.getType(bottomTile, rightTile) == Tile.BLOCKED;
}
/**
 * Axis-separated tile collision: resolves the vertical component first (testing the
 * destination y with the current x), then the horizontal component (destination x with
 * the current y). On a blocked corner the velocity component is zeroed and the temp
 * coordinate is snapped flush to the tile edge; otherwise the component is applied.
 * Callers read xtemp/ytemp as the resolved position afterwards.
 */
public void checkTileMapCollision() {
    // Tile currently occupied — used as the snap reference.
    currCol = (int) x / tileSize;
    currRow = (int) y / tileSize;
    // Intended destination for this step.
    xdest = x + dx;
    ydest = y + dy;
    xtemp = x;
    ytemp = y;
    // --- vertical pass: x fixed, test destination y ---
    calculateCorners(x, ydest);
    if (dy < 0) {
        // Moving up: blocked above -> snap just below the row boundary.
        if (topLeft || topRight) {
            dy = 0;
            ytemp = currRow * tileSize + cheight / 2;
        } else {
            ytemp += dy;
        }
    }
    if (dy > 0) {
        // Moving down: blocked below -> snap just above the next row.
        if (bottomLeft || bottomRight) {
            dy = 0;
            ytemp = (currRow + 1) * tileSize - cheight / 2;
        } else {
            ytemp += dy;
        }
    }
    // --- horizontal pass: y fixed, test destination x ---
    calculateCorners(xdest, y);
    if (dx < 0) {
        // Moving left: blocked on the left -> snap to the column's left edge.
        if (topLeft || bottomLeft) {
            dx = 0;
            xtemp = currCol * tileSize + cwidth / 2;
        } else {
            xtemp += dx;
        }
    }
    if (dx > 0) {
        // Moving right: blocked on the right -> snap to the next column's edge.
        if (topRight || bottomRight) {
            dx = 0;
            xtemp = (currCol + 1) * tileSize - cwidth / 2;
        } else {
            xtemp += dx;
        }
    }
}
/** X position truncated to an int. */
public int getx() {
    return (int) x;
}
/** Y position truncated to an int. */
public int gety() {
    return (int) y;
}
/** Exact (double) x position. */
public double getX() {
    return x;
}
/** Exact (double) y position. */
public double getY() {
    return y;
}
/** Sprite width in pixels. */
public int getWidth() {
    return width;
}
/** Sprite height in pixels. */
public int getHeight() {
    return height;
}
/** Collision-box width in pixels. */
public int getCWidth() {
    return cwidth;
}
/** Collision-box height in pixels. */
public int getCHeight() {
    return cheight;
}
/**
 * Positions this object relative to the window center. originX/originY
 * retain the raw map coordinates; this.x/this.y become screen-space
 * coordinates offset to the middle of the window.
 * NOTE(review): the -7/-2 pixel adjustments look like sprite-alignment
 * fudge factors -- confirm against the sprite dimensions.
 */
public void setPosition(double x, double y) {
originX = x;
originY = y;
this.x = x + (WindowClass.WIDTH / 2) - 7;
this.y = y + (WindowClass.HEIGHT / 2) - 2;
}
/**
 * Positions an NPC relative to the window center (different pixel
 * tweaks than setPosition) and registers its blocked tile area via
 * createNPCArea() so the player cannot walk through it.
 * NOTE(review): the -6 pixel adjustment appears to be a sprite-alignment
 * fudge factor -- confirm.
 */
public void setNPCPosition(double x, double y) {
originX = x;
originY = y;
this.x = x + (WindowClass.WIDTH / 2);
this.y = y + (WindowClass.HEIGHT / 2) - 6;
createNPCArea();
}
/** Sets the 'left' input flag. */
public void setLeft(boolean b) {
left = b;
}
/** Sets the 'right' input flag. */
public void setRight(boolean b) {
right = b;
}
/** Sets the 'up' input flag. */
public void setUp(boolean b) {
up = b;
}
/** Sets the 'down' input flag. */
public void setDown(boolean b) {
down = b;
}
/**
 * Blocks a single 16x16 cell in the tile map at this object's origin.
 * The arithmetic translates the negated origin coordinates into the tile
 * map's offset pixel space -- assumes getXOffSet()/getYOffSet() are in
 * pixels (TODO confirm), and the +15/+2 constants mirror the ones used
 * by the other create*Area methods.
 */
public void createSingleArea() {
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2, 16, 16);
}
/**
 * Blocks the 16x16 tile-map cell occupied by an NPC.
 * The blocked rectangle is identical to the one produced for single
 * objects, so this delegates to {@link #createSingleArea()} instead of
 * duplicating the offset arithmetic (the two bodies had drifted into an
 * exact copy-paste of each other).
 */
public void createNPCArea() {
    createSingleArea();
}
/**
 * Blocks the tile-map footprint of a named building type. Each branch
 * carves out the building body plus roof/overhang strips as hard-coded
 * pixel rectangles relative to the origin.
 * NOTE(review): the rectangle constants encode sprite layouts -- verify
 * against the building sprite sheets before changing. Unknown types are
 * silently ignored.
 */
public void createBuildingArea(String type) {
if (type.equalsIgnoreCase("pokemonCenter1")) {
// Main body (80x48) plus two 32x16 strips above it.
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 - 16, 80, 48);
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 - 64, 32, 16);
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() - 33,
(int) -originY + tileMap.getYOffSet() + 2 - 64, 32, 16);
} else if (type.equalsIgnoreCase("corperate1")) {
// Main body (112x80) plus two 48x16 strips above it.
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 - 144, 112, 80);
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 - 224, 48, 16);
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() - 49,
(int) -originY + tileMap.getYOffSet() + 2 - 224, 48, 16);
}
}
/**
 * Blocks the tile-map footprint of a named desk type using hard-coded
 * pixel rectangles relative to the origin. Unknown types are silently
 * ignored.
 * NOTE(review): the "pokemonmartdesk" branch intentionally blocks
 * nothing -- confirm whether that desk is walk-through by design or a
 * missing rectangle.
 */
public void createDeskArea(String type) {
if (type.equalsIgnoreCase("fooddesk")) {
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 + 8, 128, 96);
} else if (type.equalsIgnoreCase("pokemoncenterdesk")) {
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 + 8, 128, 48);
} else if (type.equalsIgnoreCase("pokemonmartdesk")) {
} else if (type.equalsIgnoreCase("pokemonhelpdesk")) {
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 + 8, 160, 48);
} else if (type.equalsIgnoreCase("pokemonbasedesk")) {
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 + 8, 128, 48);
} else if (type.equalsIgnoreCase("pokemonmartdesk2")) {
tileMap.createBlockedArea((int) (-originX - 16) + tileMap.getXOffSet() + 15,
(int) -originY + tileMap.getYOffSet() + 2 - 4, 224, 32);
}
}
/**
 * Draws the current animation frame centered on (x, y) in map space,
 * shifted by the camera offsets xmap/ymap.
 */
public void draw(java.awt.Graphics2D g) {
    int screenX = (int) (x + xmap - width / 2);
    int screenY = (int) (y + ymap - height / 2);
    g.drawImage(animation.getImage(), screenX, screenY, null);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.apache.pig.PigException;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.streaming.ExecutableManager;
import org.apache.pig.impl.streaming.StreamingCommand;
import org.apache.pig.impl.util.IdentityHashSet;
import org.apache.pig.pen.util.ExampleTuple;
import org.apache.pig.pen.util.LineageTracer;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor;
import org.apache.pig.data.Tuple;
/**
 * Physical operator for Pig's STREAM: feeds tuples from the predecessor
 * to an external streaming binary (via an {@link ExecutableManager}) and
 * relays the binary's output downstream. Two single-slot blocking queues
 * decouple this operator from the binary's input/output handler threads.
 *
 * Changes vs. previous revision: the early-out in getNext() now returns
 * the shared EOP_RESULT constant (consistent with every other EOP path in
 * this class), parseShipCacheSpecs uses a StringBuilder, and finish()
 * tolerates being called when the binary was never started.
 */
public class POStream extends PhysicalOperator {
    private static final long serialVersionUID = 2L;

    /** Shared EOP result, returned on every "stream is drained" path. */
    private static final Result EOP_RESULT = new Result(POStatus.STATUS_EOP, null);

    private String executableManagerStr; // String representing ExecutableManager to use
    transient private ExecutableManager executableManager; // ExecutableManager to use
    private StreamingCommand command; // Actual command to be run
    private Properties properties;

    // True once the streaming binary has been started (lazily, on first input).
    protected boolean initialized = false;

    // Single-slot handoff queues between this operator and the binary's threads.
    protected BlockingQueue<Result> binaryOutputQueue = new ArrayBlockingQueue<Result>(1);
    protected BlockingQueue<Result> binaryInputQueue = new ArrayBlockingQueue<Result>(1);

    protected boolean allInputFromPredecessorConsumed = false;
    protected boolean allOutputFromBinaryProcessed = false;

    /**
     * @param k operator key
     * @param executableManager manager whose class will be instantiated lazily
     * @param command the streaming command to run
     * @param properties job properties; ship/cache specs are appended to them
     */
    public POStream(OperatorKey k, ExecutableManager executableManager,
                    StreamingCommand command, Properties properties) {
        super(k);
        this.executableManagerStr = executableManager.getClass().getName();
        this.command = command;
        this.properties = properties;

        // Setup streaming-specific properties
        if (command.getShipFiles()) {
            parseShipCacheSpecs(command.getShipSpecs(),
                                properties, "pig.streaming.ship.files");
        }
        parseShipCacheSpecs(command.getCacheSpecs(),
                            properties, "pig.streaming.cache.files");
    }

    /**
     * Appends the given specs to the comma-separated value of {@code property},
     * preserving any existing value. No-op when specs is null/empty.
     */
    private static void parseShipCacheSpecs(List<String> specs,
            Properties properties, String property) {
        String existingValue = properties.getProperty(property, "");
        if (specs == null || specs.size() == 0) {
            return;
        }

        // Setup streaming-specific properties
        StringBuilder sb = new StringBuilder();
        Iterator<String> i = specs.iterator();
        // first append any existing value
        if (!existingValue.equals("")) {
            sb.append(existingValue);
            if (i.hasNext()) {
                sb.append(", ");
            }
        }
        while (i.hasNext()) {
            sb.append(i.next());
            if (i.hasNext()) {
                sb.append(", ");
            }
        }
        properties.setProperty(property, sb.toString());
    }

    /** @return the job properties holding the ship/cache file specs. */
    public Properties getShipCacheProperties() {
        return properties;
    }

    /**
     * Get the {@link StreamingCommand} for this <code>StreamSpec</code>.
     * @return the {@link StreamingCommand} for this <code>StreamSpec</code>
     */
    public StreamingCommand getCommand() {
        return command;
    }

    /* (non-Javadoc)
     * @see org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator#getNext(org.apache.pig.data.Tuple)
     */
    @Override
    public Result getNext(Tuple t) throws ExecException {
        // The POStream Operator works with ExecutableManager to
        // send input to the streaming binary and to get output
        // from it. To achieve a tuple oriented behavior, two queues
        // are used - one for output from the binary and one for
        // input to the binary. In each getNext() call:
        // 1) If there is no more output expected from the binary, an EOP is
        // sent to successor
        // 2) If there is any output from the binary in the queue, it is passed
        // down to the successor
        // 3) if neither of these two are true and if it is possible to
        // send input to the binary, then the next tuple from the
        // predecessor is got and passed to the binary
        try {
            // if we are being called AFTER all output from the streaming
            // binary has already been sent to us then just return EOP
            // The "allOutputFromBinaryProcessed" flag is set when we see
            // an EOS (End of Stream output) from streaming binary
            if (allOutputFromBinaryProcessed) {
                return EOP_RESULT;
            }

            // if we are here AFTER all map() calls have been completed
            // AND AFTER we process all possible input to be sent to the
            // streaming binary, then all we want to do is read output from
            // the streaming binary
            if (allInputFromPredecessorConsumed) {
                Result r = binaryOutputQueue.take();
                if (r.returnStatus == POStatus.STATUS_EOS) {
                    // If we received EOS, it means all output
                    // from the streaming binary has been sent to us
                    // So we can send an EOP to the successor in
                    // the pipeline. Also since we are being called
                    // after all input from predecessor has been processed
                    // it means we got here from a call from close() in
                    // map or reduce. So once we send this EOP down,
                    // getNext() in POStream should never be called. So
                    // we don't need to set any flag noting we saw all output
                    // from binary
                    r = EOP_RESULT;
                } else if (r.returnStatus == POStatus.STATUS_OK)
                    illustratorMarkup(r.result, r.result, 0);
                return (r);
            }

            // if we are here, we haven't consumed all input to be sent
            // to the streaming binary - check if we are being called
            // from close() on the map or reduce
            if (this.parentPlan.endOfAllInput) {
                Result r = getNextHelper(t);
                if (r.returnStatus == POStatus.STATUS_EOP) {
                    // we have now seen *ALL* possible input
                    // check if we ever had any real input
                    // in the course of the map/reduce - if we did
                    // then "initialized" will be true. If not, just
                    // send EOP down.
                    if (initialized) {
                        // signal End of ALL input to the Executable Manager's
                        // Input handler thread
                        binaryInputQueue.put(r);
                        // note this state for future calls
                        allInputFromPredecessorConsumed = true;
                        // look for output from binary
                        r = binaryOutputQueue.take();
                        if (r.returnStatus == POStatus.STATUS_EOS) {
                            // If we received EOS, it means all output
                            // from the streaming binary has been sent to us
                            // So we can send an EOP to the successor in
                            // the pipeline. Also since we are being called
                            // after all input from predecessor has been processed
                            // it means we got here from a call from close() in
                            // map or reduce. So once we send this EOP down,
                            // getNext() in POStream should never be called. So
                            // we don't need to set any flag noting we saw all output
                            // from binary
                            r = EOP_RESULT;
                        }
                    }
                } else if (r.returnStatus == POStatus.STATUS_EOS) {
                    // If we received EOS, it means all output
                    // from the streaming binary has been sent to us
                    // So we can send an EOP to the successor in
                    // the pipeline. Also we are being called
                    // from close() in map or reduce (this is so because
                    // only then this.parentPlan.endOfAllInput is true).
                    // So once we send this EOP down, getNext() in POStream
                    // should never be called. So we don't need to set any
                    // flag noting we saw all output from binary
                    r = EOP_RESULT;
                } else if (r.returnStatus == POStatus.STATUS_OK)
                    illustratorMarkup(r.result, r.result, 0);
                return r;
            } else {
                // we are not being called from close() - so
                // we must be called from either map() or reduce()
                // get the next Result from helper
                Result r = getNextHelper(t);
                if (r.returnStatus == POStatus.STATUS_EOS) {
                    // If we received EOS, it means all output
                    // from the streaming binary has been sent to us
                    // So we can send an EOP to the successor in
                    // the pipeline and also note this condition
                    // for future calls
                    r = EOP_RESULT;
                    allOutputFromBinaryProcessed = true;
                } else if (r.returnStatus == POStatus.STATUS_OK)
                    illustratorMarkup(r.result, r.result, 0);
                return r;
            }
        } catch (Exception e) {
            int errCode = 2083;
            String msg = "Error while trying to get next result in POStream.";
            throw new ExecException(msg, errCode, PigException.BUG, e);
        }
    }

    /**
     * Core pump loop: drains any pending binary output first; otherwise, if
     * the binary's input queue has room, fetches the next tuple from the
     * predecessor (lazily starting the binary on the first real tuple) and
     * hands it over; otherwise blocks via wait() until one of the queues
     * changes state.
     */
    public Result getNextHelper(Tuple t) throws ExecException {
        try {
            synchronized (this) {
                while (true) {
                    // if there is something in binary output Queue
                    // return it
                    if (!binaryOutputQueue.isEmpty()) {
                        Result res = binaryOutputQueue.take();
                        return res;
                    }

                    // check if we can write tuples to
                    // input of the process
                    if (binaryInputQueue.remainingCapacity() > 0) {
                        Result input = processInput();
                        if (input.returnStatus == POStatus.STATUS_EOP ||
                                input.returnStatus == POStatus.STATUS_ERR) {
                            return input;
                        } else {
                            // we have a tuple to send as input
                            // Only when we see the first tuple which can
                            // be sent as input to the binary we want
                            // to initialize the ExecutableManager and set
                            // up the streaming binary - this is required in
                            // Unions due to a JOIN where there may never be
                            // any input to send to the binary in one of the map
                            // tasks - so we initialize only if we have to.
                            // initialize the ExecutableManager once
                            if (!initialized) {
                                // set up the executableManager
                                executableManager =
                                    (ExecutableManager) PigContext.instantiateFuncFromSpec(executableManagerStr);
                                try {
                                    executableManager.configure(this);
                                    executableManager.run();
                                } catch (IOException ioe) {
                                    int errCode = 2084;
                                    String msg = "Error while running streaming binary.";
                                    throw new ExecException(msg, errCode, PigException.BUG, ioe);
                                }
                                initialized = true;
                            }
                            // send this input to the streaming
                            // process
                            binaryInputQueue.put(input);
                        }
                    } else {
                        // wait for either input to be available
                        // or output to be consumed
                        while (binaryOutputQueue.isEmpty() && !binaryInputQueue.isEmpty())
                            wait();
                    }
                }
            }
        } catch (Exception e) {
            int errCode = 2083;
            String msg = "Error while trying to get next result in POStream.";
            throw new ExecException(msg, errCode, PigException.BUG, e);
        }
    }

    @Override
    public String toString() {
        return getAliasString() + "POStream" + "[" + command.toString() + "]"
                + " - " + mKey.toString();
    }

    @Override
    public void visit(PhyPlanVisitor v) throws VisitorException {
        v.visitStream(this);
    }

    @Override
    public String name() {
        return toString();
    }

    @Override
    public boolean supportsMultipleInputs() {
        return false;
    }

    @Override
    public boolean supportsMultipleOutputs() {
        return false;
    }

    /**
     * Shuts down the streaming binary. Safe to call even when no input was
     * ever seen (in which case the binary was never started).
     */
    public void finish() throws IOException {
        // The manager is created lazily in getNextHelper(); guard against
        // finish() being invoked on a task that received no input at all.
        if (executableManager != null) {
            executableManager.close();
        }
    }

    /**
     * @return the Queue which has input to binary
     */
    public BlockingQueue<Result> getBinaryInputQueue() {
        return binaryInputQueue;
    }

    /**
     * @return the Queue which has output from binary
     */
    public BlockingQueue<Result> getBinaryOutputQueue() {
        return binaryOutputQueue;
    }

    @Override
    public Tuple illustratorMarkup(Object in, Object out, int eqClassIndex) {
        if (illustrator != null) {
            ExampleTuple tIn = (ExampleTuple) in;
            illustrator.getEquivalenceClasses().get(eqClassIndex).add(tIn);
            illustrator.addData((Tuple) out);
        }
        return (Tuple) out;
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.forms;
import org.jboss.arquillian.drone.api.annotation.Drone;
import org.jboss.arquillian.graphene.page.Page;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.keycloak.OAuth2Constants;
import org.keycloak.common.Profile;
import org.keycloak.events.Details;
import org.keycloak.events.EventType;
import org.keycloak.models.UserModel;
import org.keycloak.representations.IDToken;
import org.keycloak.representations.idm.EventRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.testsuite.AssertEvents;
import org.keycloak.testsuite.AbstractTestRealmKeycloakTest;
import org.keycloak.testsuite.arquillian.annotation.DisableFeature;
import org.keycloak.testsuite.drone.Different;
import org.keycloak.testsuite.pages.AccountUpdateProfilePage;
import org.keycloak.testsuite.pages.AppPage;
import org.keycloak.testsuite.pages.AppPage.RequestType;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.pages.LoginPasswordUpdatePage;
import org.keycloak.testsuite.util.OAuthClient;
import org.openqa.selenium.WebDriver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
* @author Stan Silvert ssilvert@redhat.com (C) 2016 Red Hat Inc.
*/
public class SSOTest extends AbstractTestRealmKeycloakTest {
// Second browser instance, used to hold an independent SSO session.
@Drone
@Different
protected WebDriver driver2;
@Page
protected AppPage appPage;
@Page
protected LoginPage loginPage;
@Page
protected AccountUpdateProfilePage profilePage;
@Page
protected LoginPasswordUpdatePage updatePasswordPage;
@Rule
public AssertEvents events = new AssertEvents(this);
@Override
public void configureTestRealm(RealmRepresentation testRealm) {
}
// Verifies that a second login reuses the SSO session (same session id,
// acr "0") and that expiring the session forces a fresh login.
@Test
@DisableFeature(value = Profile.Feature.ACCOUNT2, skipRestart = true) // TODO remove this (KEYCLOAK-16228)
public void loginSuccess() {
loginPage.open();
loginPage.login("test-user@localhost", "password");
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
Assert.assertNotNull(oauth.getCurrentQuery().get(OAuth2Constants.CODE));
EventRepresentation loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
// Fresh credential login => acr "1".
IDToken idToken = sendTokenRequestAndGetIDToken(loginEvent);
Assert.assertEquals("1", idToken.getAcr());
appPage.open();
oauth.openLoginForm();
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
loginEvent = events.expectLogin().removeDetail(Details.USERNAME).client("test-app").assertEvent();
String sessionId2 = loginEvent.getSessionId();
// SSO re-login must reuse the existing session.
assertEquals(sessionId, sessionId2);
// acr is 0 as we authenticated through SSO cookie
idToken = sendTokenRequestAndGetIDToken(loginEvent);
Assert.assertEquals("0", idToken.getAcr());
profilePage.open();
assertTrue(profilePage.isCurrent());
// Expire session
testingClient.testing().removeUserSession("test", sessionId);
oauth.doLogin("test-user@localhost", "password");
// After expiry a brand new session must be created.
String sessionId4 = events.expectLogin().assertEvent().getSessionId();
assertNotEquals(sessionId, sessionId4);
events.clear();
}
// Verifies that two browsers get independent sessions and that logging
// out of one does not terminate the other.
@Test
public void multipleSessions() {
loginPage.open();
loginPage.login("test-user@localhost", "password");
Assert.assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
Assert.assertNotNull(oauth.getCurrentQuery().get(OAuth2Constants.CODE));
EventRepresentation login1 = events.expectLogin().assertEvent();
try {
//OAuthClient oauth2 = new OAuthClient(driver2);
OAuthClient oauth2 = new OAuthClient();
oauth2.init(driver2);
oauth2.doLogin("test-user@localhost", "password");
EventRepresentation login2 = events.expectLogin().assertEvent();
Assert.assertEquals(RequestType.AUTH_RESPONSE, RequestType.valueOf(driver2.getTitle()));
Assert.assertNotNull(oauth2.getCurrentQuery().get(OAuth2Constants.CODE));
// The two browsers must not share a session.
assertNotEquals(login1.getSessionId(), login2.getSessionId());
oauth.openLogout();
events.expectLogout(login1.getSessionId()).assertEvent();
// Browser 1 is logged out -> login form shown again.
oauth.openLoginForm();
assertTrue(loginPage.isCurrent());
// Browser 2 still has its SSO session.
oauth2.openLoginForm();
events.expectLogin().session(login2.getSessionId()).removeDetail(Details.USERNAME).assertEvent();
Assert.assertEquals(RequestType.AUTH_RESPONSE, RequestType.valueOf(driver2.getTitle()));
Assert.assertNotNull(oauth2.getCurrentQuery().get(OAuth2Constants.CODE));
oauth2.openLogout();
events.expectLogout(login2.getSessionId()).assertEvent();
oauth2.openLoginForm();
assertTrue(driver2.getTitle().equals("Sign in to test"));
} finally {
driver2.close();
}
}
// Verifies that a required action added after login is enforced on the
// next SSO re-authentication, and the session is preserved afterwards.
@Test
public void loginWithRequiredActionAddedInTheMeantime() {
// SSO login
loginPage.open();
loginPage.login("test-user@localhost", "password");
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
Assert.assertNotNull(oauth.getCurrentQuery().get(OAuth2Constants.CODE));
EventRepresentation loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
// Add update-profile required action to user now
UserRepresentation user = testRealm().users().get(loginEvent.getUserId()).toRepresentation();
user.getRequiredActions().add(UserModel.RequiredAction.UPDATE_PASSWORD.toString());
testRealm().users().get(loginEvent.getUserId()).update(user);
// Attempt SSO login. update-password form is shown
oauth.openLoginForm();
updatePasswordPage.assertCurrent();
updatePasswordPage.changePassword("password", "password");
events.expectRequiredAction(EventType.UPDATE_PASSWORD).assertEvent();
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
loginEvent = events.expectLogin().removeDetail(Details.USERNAME).client("test-app").assertEvent();
String sessionId2 = loginEvent.getSessionId();
// Completing the required action must not create a new session.
assertEquals(sessionId, sessionId2);
}
}
| |
package nam.data.src.main.java;
import java.lang.reflect.Modifier;
import java.util.Iterator;
import java.util.List;
import nam.data.DataLayerHelper;
import nam.model.Element;
import nam.model.Field;
import nam.model.ModelLayerHelper;
import nam.model.Unit;
import nam.model.util.ElementUtil;
import nam.model.util.TypeUtil;
import nam.model.util.UnitUtil;
import org.aries.util.NameUtil;
import aries.codegen.AbstractManagementBeanBuilder;
import aries.generation.engine.GenerationContext;
import aries.generation.model.AnnotationUtil;
import aries.generation.model.ModelAnnotation;
import aries.generation.model.ModelClass;
import aries.generation.model.ModelInterface;
import aries.generation.model.ModelOperation;
import aries.generation.model.ModelReference;
/**
* Builds a Repository Bean from an Aries Element or Aries Namespace;
*
* Model construction properties:
* <ul>
* <li>generateJavadoc</li>
* </ul>
*
* @author tfisher
*/
public class RepositoryBeanBuilder extends AbstractManagementBeanBuilder {
private RepositoryBeanProvider provider;
/** Creates a builder bound to the given generation context and wires up its provider. */
public RepositoryBeanBuilder(GenerationContext context) {
super(context);
initialize();
}
/** Creates the RepositoryBeanProvider for this context and registers it with the base builder. */
protected void initialize() {
provider = new RepositoryBeanProvider(context);
initialize(provider);
}
/*
* Repository interface creation
* -----------------------------
*/
/**
 * Builds the repository interface model for a unit: resolves package,
 * interface and optional parent-interface names via DataLayerHelper,
 * records the unit's namespace on this builder, then populates members
 * through initializeInterface().
 */
public ModelInterface buildInterface(Unit unit) throws Exception {
    namespace = unit.getNamespace();
    String pkg = DataLayerHelper.getRepositoryPackageName(unit);
    String ifaceName = DataLayerHelper.getRepositoryInterfaceName(unit);
    String parentIface = DataLayerHelper.getRepositoryParentInterfaceName(unit);
    ModelInterface result = new ModelInterface();
    result.setPackageName(pkg);
    result.setClassName(ifaceName);
    if (parentIface != null) {
        result.addExtendedInterface(parentIface);
    }
    result.setName(NameUtil.uncapName(ifaceName));
    initializeInterface(result, unit);
    return result;
}
// public List<ModelInterface> buildInterfaces(Unit unit) throws Exception {
// List<ModelInterface> modelInterfaces = new ArrayList<ModelInterface>();
// modelInterfaces.addAll(buildInterfaces(unit.getNamespace()));
// modelInterfaces.addAll(buildInterfaces(unit.getNamespace(), unit.getElements()));
// return modelInterfaces;
// }
// public List<ModelInterface> buildInterfaces(List<Namespace> namespaces) throws Exception {
// List<ModelInterface> modelInterfaces = new ArrayList<ModelInterface>();
// Iterator<Namespace> iterator = namespaces.iterator();
// while (iterator.hasNext()) {
// Namespace namespace = iterator.next();
// modelInterfaces.addAll(buildInterfaces(namespace));
// }
// return modelInterfaces;
// }
// public List<ModelInterface> buildInterfaces(Namespace namespace) throws Exception {
// this.namespace = namespace;
// List<Element> elements = NamespaceUtil.getElements(namespace);
// List<ModelInterface> modelInterfaces = buildInterfaces(namespace, elements);
// return modelInterfaces;
// }
//
// public List<ModelInterface> buildInterfaces(Namespace namespace, Elements elements) throws Exception {
// List<Element> list = ElementUtil.getElements(elements);
// return buildInterfaces(namespace, list);
// }
//
// public List<ModelInterface> buildInterfaces(Namespace namespace, List<Element> elements) throws Exception {
// List<ModelInterface> modelInterfaces = new ArrayList<ModelInterface>();
// Iterator<Element> iterator = elements.iterator();
// while (iterator.hasNext()) {
// Element element = iterator.next();
// if (ElementUtil.isAbstract(element))
// continue;
// if (ElementUtil.isTransient(element))
// continue;
// ModelInterface modelInterface = buildInterface(namespace, element);
// modelInterfaces.add(modelInterface);
// }
// return modelInterfaces;
// }
// public ModelInterface buildInterface(Namespace namespace, Element element) throws Exception {
// ModelInterface modelInterface = new ModelInterface();
// String packageName = DataLayerHelper.getRepositoryPackageName(namespace);
// String interfaceName = DataLayerHelper.getRepositoryInterfaceName(element);
// String parentInterfaceName = DataLayerHelper.getRepositoryParentInterfaceName(element);
//
// modelInterface.setPackageName(packageName);
// modelInterface.setClassName(interfaceName);
// if (parentInterfaceName != null)
// modelInterface.addExtendedInterface(parentInterfaceName);
// modelInterface.setName(NameUtil.uncapName(interfaceName));
// initializeInterface(modelInterface, element);
// return modelInterface;
// }
/** Populates the interface model: imports first, then method declarations. */
protected void initializeInterface(ModelInterface modelInterface, Unit unit) throws Exception {
//initializeInterfaceAnnotations(modelInterface);
initializeImportedClasses(modelInterface, unit);
initializeInterfaceMethods(modelInterface, unit);
}
/**
 * Declares the interface operations: a clear-context operation for the
 * unit, then per-element methods for every element that declares an
 * "id" field (elements without one are skipped).
 */
protected void initializeInterfaceMethods(ModelInterface modelInterface, Unit unit) throws Exception {
    modelInterface.addInstanceOperation(createInstanceOperation_ClearContext(unit, true));
    for (Element element : UnitUtil.getElements(unit)) {
        if (context.findFieldByName(element, "id") != null) {
            initializeInterfaceMethods(modelInterface, element);
        }
    }
}
/**
 * Adds the interface's imports: the optional parent interface,
 * java.util.List, and whatever the unit's elements require.
 */
protected void initializeImportedClasses(ModelInterface modelInterface, Unit unit) throws Exception {
    String parentIface = DataLayerHelper.getRepositoryParentInterfaceName(unit);
    if (parentIface != null) {
        modelInterface.addImportedClass(parentIface);
    }
    modelInterface.addImportedClass("java.util.List");
    addImportedClassesForElements(modelInterface, UnitUtil.getElements(unit));
}
/*
* Repository class creation
* -------------------------
*/
/**
 * Builds the repository implementation class model for a unit.
 *
 * BUG FIX: the package name was previously computed from the
 * {@code namespace} field BEFORE {@code this.namespace} was assigned from
 * the unit, so it used a stale namespace from a prior build call (or null
 * on the first call). The namespace is now recorded first, matching the
 * intent of buildInterface().
 */
public ModelClass buildClass(Unit unit) throws Exception {
    // Record the unit's namespace before any helper derives names from it.
    this.namespace = unit.getNamespace();
    String packageName = DataLayerHelper.getRepositoryPackageName(namespace);
    String interfaceName = DataLayerHelper.getRepositoryInterfaceName(unit);
    String className = DataLayerHelper.getRepositoryClassName(unit);
    String parentClassName = DataLayerHelper.getRepositoryParentClassName(unit);
    ModelClass modelClass = new ModelClass();
    modelClass.setPackageName(packageName);
    modelClass.setClassName(className);
    if (parentClassName != null)
        modelClass.setParentClassName(parentClassName);
    modelClass.setName(NameUtil.uncapName(interfaceName));
    modelClass.setNamespace(namespace.getUri());
    modelClass.addImplementedInterface(interfaceName);
    //modelClass.addImplementedInterface("Serializable");
    //modelClass.addImportedClass("java.io.Serializable");
    initializeClass(modelClass, unit);
    return modelClass;
}
/** Populates the class model in order: annotations, imports, fields, methods. */
public void initializeClass(ModelClass modelClass, Unit unit) throws Exception {
initializeClassAnnotations(modelClass, unit);
initializeImportedClasses(modelClass, unit);
initializeInstanceFields(modelClass, unit);
initializeInstanceMethods(modelClass, unit);
}
/**
 * Adds class-level annotations according to the configured bean type:
 * EJB gets @Stateful plus @Local(repository interface); SEAM currently
 * adds nothing (its annotations are commented out below).
 */
protected void initializeClassAnnotations(ModelClass modelClass, Unit unit) throws Exception {
List<ModelAnnotation> classAnnotations = modelClass.getClassAnnotations();
switch (context.getDataLayerBeanType()) {
case EJB:
classAnnotations.add(AnnotationUtil.createStatefulAnnotation());
String interfaceName = DataLayerHelper.getRepositoryInterfaceName(unit);
classAnnotations.add(AnnotationUtil.createLocalAnnotation(interfaceName));
break;
case SEAM:
//classAnnotations.add(AnnotationUtil.createAnnotation("AutoCreate"));
//classAnnotations.add(AnnotationUtil.createScopeAnnotation(ScopeType.SESSION));
//String contextName = DataLayerHelper.getRepositoryContextName(unit);
//classAnnotations.add(AnnotationUtil.createNameAnnotation(contextName));
break;
}
}
/**
 * Adds the implementation class's imports: optional parent class, common
 * utility classes, bean-type-specific annotation packages (EJB or Seam),
 * and the imports required by the unit's elements.
 */
protected void initializeImportedClasses(ModelClass modelClass, Unit unit) throws Exception {
// String parentClassName = DataLayerHelper.getRepositoryParentClassName(element);
// String elementPackageName = ModelLayerHelper.getElementPackageName(element);
// String elementClassName = ModelLayerHelper.getElementClassName(element);
// String entityPackageName = DataLayerHelper.getEntityPackageName(namespace);
// String entityClassName = DataLayerHelper.getEntityClassName(element);
// String daoPackageName = DataLayerHelper.getDAOPackageName(namespace);
// String daoInterfaceName = DataLayerHelper.getDAOInterfaceName(element);
// String mapperPackageName = DataLayerHelper.getMapperPackageName(namespace);
// String mapperInterfaceName = DataLayerHelper.getMapperInterfaceName(element);
// String importerPackageName = DataLayerHelper.getImporterPackageName(namespace);
// String importerInterfaceName = DataLayerHelper.getImporterInterfaceName(element);
String parentClassName = DataLayerHelper.getRepositoryParentClassName(unit);
if (parentClassName != null)
modelClass.addImportedClass(parentClassName);
modelClass.addImportedClass("java.util.List");
modelClass.addImportedClass("org.aries.Assert");
modelClass.addImportedClass("org.aries.util.ExceptionUtil");
switch (context.getDataLayerBeanType()) {
case EJB:
modelClass.addImportedClass("javax.ejb.Local");
modelClass.addImportedClass("javax.ejb.Stateful");
modelClass.addImportedClass("javax.inject.Inject");
break;
case SEAM:
modelClass.addImportedClass("org.jboss.seam.ScopeType");
modelClass.addImportedClass("org.jboss.seam.annotations.AutoCreate");
modelClass.addImportedClass("org.jboss.seam.annotations.In");
modelClass.addImportedClass("org.jboss.seam.annotations.Name");
modelClass.addImportedClass("org.jboss.seam.annotations.Scope");
//modelClass.addImportedClass("org.jboss.seam.annotations.Transactional");
break;
}
List<Element> elements = UnitUtil.getElements(unit);
addImportedClassesForElements(modelClass, elements);
}
/**
 * Adds a manager-bean reference field (plus its import) for every
 * non-transient element of the unit; transient elements are skipped.
 */
protected void initializeInstanceFields(ModelClass modelClass, Unit unit) throws Exception {
    for (Element element : UnitUtil.getElements(unit)) {
        if (ElementUtil.isTransient(element)) {
            continue;
        }
        ModelReference managerRef = createInstanceField_ManagerBean(element);
        modelClass.addInstanceReference(managerRef);
        modelClass.addImportedClass(managerRef);
    }
}
/**
 * Builds a protected, injected manager-bean field reference for the given
 * element, with getter and setter generation enabled. The injection
 * annotation itself is produced by createInstanceField_InjectionAnnotation
 * (defined elsewhere in this class -- presumably bean-type aware; confirm).
 */
protected ModelReference createInstanceField_ManagerBean(Element element) {
String packageName = DataLayerHelper.getManagerPackageName(namespace);
String interfaceName = DataLayerHelper.getManagerInterfaceName(element);
String beanName = DataLayerHelper.getManagerNameUncapped(element);
String beanType = org.aries.util.TypeUtil.getDerivedType(element.getType(), beanName);
String contextName = ModelLayerHelper.getElementTypeLocalPartUncapped(element);
ModelReference modelReference = new ModelReference();
modelReference.getAnnotations().add(createInstanceField_InjectionAnnotation(packageName, interfaceName, contextName));
modelReference.setModifiers(Modifier.PROTECTED);
modelReference.setPackageName(packageName);
modelReference.setClassName(interfaceName);
modelReference.setName(beanName);
modelReference.setType(beanType);
modelReference.setStructure("item");
modelReference.setGenerateGetter(true);
modelReference.setGenerateSetter(true);
return modelReference;
}
// protected ModelReference createInstanceField_ImporterBean(Element element) {
// String importerNameUncapped = DataLayerHelper.getImporterNameUncapped(element);
// String importerPackageName = DataLayerHelper.getImporterPackageName(namespace);
// String importerInterfaceName = DataLayerHelper.getImporterInterfaceName(element);
// String importerType = TypeUtil.getDerivedType(element.getType(), importerNameUncapped);
//
// ModelReference modelReference = new ModelReference();
// switch (context.getDataLayerBeanType()) {
// case EJB:
// modelReference.addAnnotation(AnnotationUtil.createInjectAnnotation());
// break;
// case SEAM:
// String importerContextName = importerNameUncapped;
// if (context.isEnabled("useQualifiedContextNames")) {
// String qualifiedName = importerPackageName + "." + importerInterfaceName;
// int segmentCount = NameUtil.getSegmentCount(qualifiedName);
// String contextPrefix = NameUtil.getQualifiedContextNamePrefix(qualifiedName, segmentCount-1);
// importerContextName = contextPrefix + "." + importerNameUncapped;
// }
// modelReference.addAnnotation(AnnotationUtil.createInAnnotation(true, importerContextName));
// break;
// }
// modelReference.setModifiers(Modifier.PROTECTED);
// modelReference.setPackageName(importerPackageName);
// modelReference.setClassName(importerInterfaceName);
// modelReference.setName(importerNameUncapped);
// modelReference.setType(importerType);
// modelReference.setStructure("item");
// modelReference.setGenerateGetter(false);
// modelReference.setGenerateSetter(false);
// return modelReference;
// }
// protected ModelReference createInstanceField_MapperBean(Element element) {
// String mapperNameUncapped = DataLayerHelper.getMapperNameUncapped(element);
// String mapperPackageName = DataLayerHelper.getMapperPackageName(namespace);
// String mapperInterfaceName = DataLayerHelper.getMapperInterfaceName(element);
// String mapperType = TypeUtil.getDerivedType(element.getType(), mapperNameUncapped);
//
// ModelReference modelReference = new ModelReference();
// switch (context.getDataLayerBeanType()) {
// case EJB:
// modelReference.addAnnotation(AnnotationUtil.createInjectAnnotation());
// break;
// case SEAM:
// String mapperContextName = mapperNameUncapped;
// if (context.isEnabled("useQualifiedContextNames")) {
// String qualifiedName = mapperPackageName + "." + mapperInterfaceName;
// int segmentCount = NameUtil.getSegmentCount(qualifiedName);
// String contextPrefix = NameUtil.getQualifiedContextNamePrefix(qualifiedName, segmentCount-1);
// mapperContextName = contextPrefix + "." + mapperNameUncapped;
// }
// modelReference.addAnnotation(AnnotationUtil.createInAnnotation(true, mapperContextName));
// break;
// }
// modelReference.setModifiers(Modifier.PROTECTED);
// modelReference.setPackageName(mapperPackageName);
// modelReference.setClassName(mapperInterfaceName);
// modelReference.setName(mapperNameUncapped);
// modelReference.setType(mapperType);
// modelReference.setStructure("item");
// modelReference.setGenerateGetter(false);
// modelReference.setGenerateSetter(false);
// return modelReference;
// }
// protected ModelReference createInstanceField_DAOBean(Element element) {
// String daoNameUncapped = DataLayerHelper.getDAONameUncapped(element);
// String daoPackageName = DataLayerHelper.getDAOPackageName(namespace);
// String daoInterfaceName = DataLayerHelper.getDAOInterfaceName(element);
// String daoType = TypeUtil.getDerivedType(element.getType(), daoNameUncapped);
//
// ModelReference modelReference = new ModelReference();
// switch (context.getDataLayerBeanType()) {
// case EJB:
// modelReference.addAnnotation(AnnotationUtil.createInjectAnnotation());
// break;
// case SEAM:
// String daoContextName = daoNameUncapped;
// if (context.isEnabled("useQualifiedContextNames")) {
// String qualifiedName = daoPackageName + "." + daoInterfaceName;
// int segmentCount = NameUtil.getSegmentCount(qualifiedName);
// String contextPrefix = NameUtil.getQualifiedContextNamePrefix(qualifiedName, segmentCount-1);
// daoContextName = contextPrefix + "." + daoNameUncapped;
// }
// modelReference.addAnnotation(AnnotationUtil.createInAnnotation(true, daoContextName));
// break;
// }
// modelReference.setModifiers(Modifier.PROTECTED);
// modelReference.setPackageName(daoPackageName);
// modelReference.setClassName(daoInterfaceName);
// modelReference.setName(daoNameUncapped);
// modelReference.setType(daoType);
// modelReference.setStructure("item");
// modelReference.setGenerateGetter(false);
// modelReference.setGenerateSetter(false);
// return modelReference;
// }
/**
 * Registers the clearContext operation, then the per-element instance
 * operations for every element of the unit that carries an "id" field.
 */
protected void initializeInstanceMethods(ModelClass modelClass, Unit unit) throws Exception {
    modelClass.addInstanceOperation(createInstanceOperation_ClearContext(unit, false));
    for (Element element : UnitUtil.getElements(unit)) {
        // Only identifiable elements (those with an "id" field) get operations.
        if (context.findFieldByName(element, "id") != null) {
            provider.setElement(element);
            initializeInstanceMethods(modelClass, element, false);
        }
    }
}
/**
* ClearContext operation
* ----------------------
*/
/**
 * Builds the public clearContext operation; when a concrete implementation
 * (not just an interface signature) is requested, the @Override annotation
 * and the generated method body are attached as well.
 */
protected ModelOperation createInstanceOperation_ClearContext(Unit unit, boolean interfaceOnly) {
    ModelOperation operation = new ModelOperation();
    operation.setName("clearContext");
    operation.setModifiers(Modifier.PUBLIC);
    if (!interfaceOnly) {
        operation.addAnnotation(AnnotationUtil.createOverrideAnnotation());
        provider.generateSourceCode_ClearContext(operation, unit);
    }
    return operation;
}
/**
* AddElement operation
* --------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_AddElement(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_AddElement(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
/**
* AddElementList operation
* ------------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_AddElementList(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_AddElementList(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
/**
* MoveElement operation
* ---------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_MoveElement(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_MoveElement(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
/**
* SaveElement operation
* ---------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_SaveElement(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_SaveElement(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
/**
* RemoveElement operation
* -----------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_RemoveElement(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_RemoveElement(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
/**
* RemoveElementById operation
* ---------------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_RemoveElementById(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_RemoveElementById(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
/**
* RemoveElementList operation
* ---------------------------
*/
/**
 * Delegates to the superclass, optionally decorating the implementation
 * variant; the transactional annotation is currently disabled.
 */
@Override
protected ModelOperation createInstanceOperation_RemoveElementList(Element element, boolean interfaceOnly) {
    ModelOperation modelOperation = super.createInstanceOperation_RemoveElementList(element, interfaceOnly);
    if (!interfaceOnly) {
        // Intentionally left disabled:
        //modelOperation.addAnnotation(AnnotationUtil.createTransactionalAnnotation());
    }
    return modelOperation;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.fit.console;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.Component;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.util.tester.FormTester;
import org.junit.jupiter.api.Test;
import org.apache.syncope.client.ui.commons.Constants;
import org.apache.syncope.client.console.wicket.markup.html.form.IndicatingOnConfirmAjaxLink;
import org.junit.jupiter.api.BeforeEach;
/**
 * Console UI tests for group management: reading members, filtered search,
 * cloning, editing and deleting groups via the realms group tab.
 */
public class GroupsITCase extends AbstractConsoleITCase {

    private static final String TAB_PANEL = "body:content:body:container:content:tabbedPanel:panel:searchResult:";

    private static final String CONTAINER = TAB_PANEL + "container:content:";

    @BeforeEach
    public void login() {
        doLogin(ADMIN_UNAME, ADMIN_PWD);
    }

    @Test
    public void read() {
        TESTER.clickLink("body:realmsLI:realms", false);
        TESTER.clickLink("body:content:body:container:content:tabbedPanel:tabs-container:tabs:2:link");

        Component component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", "artDirector");
        assertNotNull(component);

        TESTER.executeAjaxEvent(component.getPageRelativePath(), Constants.ON_CLICK);
        TESTER.clickLink(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:2:action:action");

        FormTester formTester = TESTER.newFormTester(TAB_PANEL
                + "outerObjectsRepeater:8:outer:container:content:togglePanelContainer:membersForm");
        formTester.select("type:dropDownChoiceField", 0);
        formTester.submit("changeit");

        TESTER.assertModelValue(TAB_PANEL
                + "outerObjectsRepeater:7:outer:dialog:header:header-label", "USER members of artDirector");

        assertNotNull(findComponentByProp("username", TAB_PANEL
                + "outerObjectsRepeater:7:outer:form:content:searchResult:container:content:"
                + "searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", "puccini"));

        TESTER.executeAjaxEvent(TAB_PANEL
                + "outerObjectsRepeater:7:outer:dialog:footer:buttons:0:button", Constants.ON_CLICK);
    }

    @Test
    public void filteredSearch() {
        TESTER.clickLink("body:realmsLI:realms", false);
        TESTER.clickLink("body:content:body:container:content:tabbedPanel:tabs-container:tabs:2:link");

        TESTER.clickLink(
                "body:content:body:container:content:tabbedPanel:panel:accordionPanel:tabs:0:title");

        TESTER.executeAjaxEvent(
                "body:content:body:container:content:tabbedPanel:panel:accordionPanel:tabs:0:body:"
                + "content:searchFormContainer:search:multiValueContainer:innerForm:content:view:0:panelPlus:add",
                Constants.ON_CLICK);

        TESTER.assertComponent(
                "body:content:body:container:content:tabbedPanel:panel:accordionPanel:tabs:0:body:"
                + "content:searchFormContainer:search:multiValueContainer:innerForm:content:view:0:panel:container:"
                + "value:textField", TextField.class);
    }

    /**
     * Clones the given group through the UI and asserts the clone
     * ({@code group + "_clone"}) shows up in the result table.
     */
    private static void cloneGroup(final String group) {
        TESTER.clickLink("body:realmsLI:realms", false);
        TESTER.clickLink("body:content:body:container:content:tabbedPanel:tabs-container:tabs:2:link");

        Component component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", group);
        assertNotNull(component);

        TESTER.executeAjaxEvent(component.getPageRelativePath(), Constants.ON_CLICK);
        TESTER.clickLink(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:9:action:action");

        TESTER.assertComponent(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:form:view:name:textField",
                TextField.class);

        FormTester formTester = TESTER.newFormTester(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:form");
        assertNotNull(formTester);
        formTester.setValue("view:name:textField", group + "_clone");
        formTester.submit("buttons:finish");

        assertSuccessMessage();
        TESTER.cleanupFeedbackMessages();

        TESTER.clickLink(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:action:actionRepeater:0:action:action");

        component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", group + "_clone");
        assertNotNull(component);
    }

    @Test
    public void clickToCloneGroup() {
        cloneGroup("director");

        Component component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", "director_clone");
        assertNotNull(component);

        TESTER.executeAjaxEvent(component.getPageRelativePath(), Constants.ON_CLICK);
        TESTER.getRequest().addParameter("confirm", "true");
        TESTER.clickLink(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:9:action:action");
        TESTER.executeAjaxEvent(TESTER.getComponentFromLastRenderedPage(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:10:action:action"), Constants.ON_CLICK);

        assertSuccessMessage();
        TESTER.cleanupFeedbackMessages();
    }

    @Test
    public void editGroup() {
        cloneGroup("director");

        Component component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", "director_clone");
        assertNotNull(component);

        TESTER.executeAjaxEvent(component.getPageRelativePath(), Constants.ON_CLICK);
        TESTER.clickLink(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:0:action:action");

        TESTER.assertComponent(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:form:view:name:textField",
                TextField.class);

        FormTester formTester = TESTER.newFormTester(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:form");
        assertNotNull(formTester);
        formTester.submit("buttons:next");

        // -------------------------
        // SYNCOPE-1026: the user owner field is pre-filled and can be reset
        // (assertEquals takes (expected, actual): expected value first)
        // -------------------------
        assertEquals("[823074dc-d280-436d-a7dd-07399fae48ec] puccini",
                TESTER.getComponentFromLastRenderedPage(
                        "body:content:body:container:content:tabbedPanel:panel:"
                        + "searchResult:outerObjectsRepeater:0:outer:form:content:form:view:ownerContainer:search:userOwner:"
                        + "textField").getDefaultModelObjectAsString());

        TESTER.clickLink(
                "body:content:body:container:content:tabbedPanel:panel:searchResult:outerObjectsRepeater:0:"
                + "outer:form:content:form:view:ownerContainer:search:userOwnerReset");

        assertEquals(StringUtils.EMPTY,
                TESTER.getComponentFromLastRenderedPage(
                        "body:content:body:container:content:tabbedPanel:panel:"
                        + "searchResult:outerObjectsRepeater:0:outer:form:content:form:view:ownerContainer:search:userOwner:"
                        + "textField").getDefaultModelObjectAsString());
        // -------------------------

        // step through the six remaining wizard pages
        for (int i = 0; i < 6; i++) {
            formTester = TESTER.newFormTester(TAB_PANEL + "outerObjectsRepeater:0:outer:form:content:form");
            assertNotNull(formTester);
            formTester.submit("buttons:next");
        }

        formTester = TESTER.newFormTester(TAB_PANEL + "outerObjectsRepeater:0:outer:form:content:form");
        assertNotNull(formTester);
        formTester.submit("buttons:finish");

        assertSuccessMessage();
        TESTER.cleanupFeedbackMessages();

        TESTER.assertComponent(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:customResultBody:resources:firstLevelContainer:first:"
                + "container:content:group:beans:0:fields:0:field", Label.class);

        TESTER.clickLink(TAB_PANEL
                + "outerObjectsRepeater:0:outer:form:content:action:actionRepeater:0:action:action");

        // clean up: delete the clone created above
        component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", "director_clone");
        assertNotNull(component);

        TESTER.executeAjaxEvent(component.getPageRelativePath(), Constants.ON_CLICK);
        TESTER.getRequest().addParameter("confirm", "true");
        TESTER.clickLink(TESTER.getComponentFromLastRenderedPage(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:10:action:action"));
        TESTER.executeAjaxEvent(TESTER.getComponentFromLastRenderedPage(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:10:action:action"), Constants.ON_CLICK);

        assertSuccessMessage();
        TESTER.cleanupFeedbackMessages();
    }

    @Test
    public void checkDeleteGroupLink() {
        TESTER.clickLink("body:realmsLI:realms", false);
        TESTER.clickLink("body:content:body:container:content:tabbedPanel:tabs-container:tabs:2:link");

        Component component = findComponentByProp("name", CONTAINER
                + ":searchContainer:resultTable:tablePanel:groupForm:checkgroup:dataTable", "director");
        assertNotNull(component);

        TESTER.executeAjaxEvent(component.getPageRelativePath(), Constants.ON_CLICK);
        TESTER.assertComponent(TAB_PANEL
                + "outerObjectsRepeater:1:outer:container:content:togglePanelContainer:container:"
                + "actions:actions:actionRepeater:10:action:action", IndicatingOnConfirmAjaxLink.class);
    }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2012 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.core;
import static org.jboss.web.CatalinaMessages.MESSAGES;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.management.ObjectName;
import javax.naming.NamingException;
import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.FilterConfig;
import javax.servlet.FilterRegistration;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import org.apache.catalina.Context;
import org.apache.catalina.Globals;
import org.apache.catalina.deploy.FilterDef;
import org.apache.catalina.deploy.FilterMap;
import org.apache.catalina.security.SecurityUtil;
import org.apache.catalina.util.Enumerator;
import org.apache.tomcat.util.modeler.Registry;
import org.jboss.web.CatalinaLogger;
/**
* Implementation of a <code>javax.servlet.FilterConfig</code> useful in
* managing the filter instances instantiated when a web application
* is first started.
*
* @author Craig R. McClanahan
* @author Remy Maucherat
* @version $Revision: 1673 $ $Date: 2011-03-12 18:58:07 +0100 (Sat, 12 Mar 2011) $
*/
public final class ApplicationFilterConfig implements FilterConfig, Serializable {
// ----------------------------------------------------------- Constructors
/**
* Construct a new ApplicationFilterConfig for the specified filter
* definition.
*
* @param context The context with which we are associated
* @param filterDef Filter definition for which a FilterConfig is to be
* constructed
*/
public ApplicationFilterConfig(Context context, FilterDef filterDef) {
this.context = context;
this.filterDef = filterDef;
}
// ----------------------------------------------------- Instance Variables
/**
* The facade associated with this wrapper.
*/
protected ApplicationFilterConfigFacade facade =
new ApplicationFilterConfigFacade(this);
/**
* The Context with which we are associated.
*/
private transient Context context = null;
/**
* Dynamic flag.
*/
protected boolean dynamic = false;
/**
* The application Filter we are configured for.
*/
private transient Filter filter = null;
/**
* The application Filter we are configured for.
*/
private transient Filter filterInstance = null;
/**
* The <code>FilterDef</code> that defines our associated Filter.
*/
private FilterDef filterDef = null;
/**
* JMX registration name
*/
private ObjectName oname;
// --------------------------------------------------- FilterConfig Methods
/**
* Return the name of the filter we are configuring.
*/
public String getFilterName() {
return (filterDef.getFilterName());
}
/**
* Return the class of the filter we are configuring.
*/
public String getFilterClass() {
return filterDef.getFilterClass();
}
/**
* Return a <code>String</code> containing the value of the named
* initialization parameter, or <code>null</code> if the parameter
* does not exist.
*
* @param name Name of the requested initialization parameter
*/
public String getInitParameter(String name) {
Map<String, String> map = filterDef.getParameterMap();
if (map == null) {
return (null);
} else {
return map.get(name);
}
}
/**
* Return an <code>Enumeration</code> of the names of the initialization
* parameters for this Filter.
*/
public Enumeration<String> getInitParameterNames() {
Map<String, String> map = filterDef.getParameterMap();
if (map == null) {
return (new Enumerator(new ArrayList<String>()));
} else {
return (new Enumerator(map.keySet()));
}
}
/**
* Return the ServletContext of our associated web application.
*/
public ServletContext getServletContext() {
return (this.context.getServletContext());
}
/**
* Get the facade FilterRegistration.
*/
public FilterRegistration getFacade() {
return facade;
}
public boolean isDynamic() {
return dynamic;
}
public void setDynamic(boolean dynamic) {
this.dynamic = dynamic;
if (dynamic) {
// Change the facade (normally, this happens when the Wrapper is created)
facade = new ApplicationFilterConfigFacade.Dynamic(this);
}
}
/**
* Return a String representation of this object.
*/
public String toString() {
StringBuilder sb = new StringBuilder("ApplicationFilterConfig[");
sb.append("name=");
sb.append(filterDef.getFilterName());
sb.append(", filterClass=");
sb.append(filterDef.getFilterClass());
sb.append("]");
return (sb.toString());
}
public boolean addMappingForServletNames(EnumSet<DispatcherType> dispatcherTypes,
boolean isMatchAfter, String... servletNames) {
if (!context.isStarting()) {
throw MESSAGES.cannotAddFilterRegistrationAfterInit(context.getPath());
}
if (servletNames == null || servletNames.length == 0) {
throw MESSAGES.invalidFilterRegistrationArguments();
}
FilterMap filterMap = new FilterMap();
for (String servletName : servletNames) {
filterMap.addServletName(servletName);
}
filterMap.setFilterName(filterDef.getFilterName());
if (dispatcherTypes != null) {
for (DispatcherType dispatcherType: dispatcherTypes) {
filterMap.setDispatcher(dispatcherType.name());
}
}
if (isMatchAfter) {
context.addFilterMap(filterMap);
} else {
context.addFilterMapBefore(filterMap);
}
return true;
}
public boolean addMappingForUrlPatterns(
EnumSet<DispatcherType> dispatcherTypes, boolean isMatchAfter,
String... urlPatterns) {
if (!context.isStarting()) {
throw MESSAGES.cannotAddFilterRegistrationAfterInit(context.getPath());
}
if (urlPatterns == null || urlPatterns.length == 0) {
throw MESSAGES.invalidFilterRegistrationArguments();
}
FilterMap filterMap = new FilterMap();
for (String urlPattern : urlPatterns) {
filterMap.addURLPattern(urlPattern);
}
filterMap.setFilterName(filterDef.getFilterName());
if (dispatcherTypes != null) {
for (DispatcherType dispatcherType: dispatcherTypes) {
filterMap.setDispatcher(dispatcherType.name());
}
}
if (isMatchAfter) {
context.addFilterMap(filterMap);
} else {
context.addFilterMapBefore(filterMap);
}
return true;
}
public Collection<String> getServletNameMappings() {
HashSet<String> result = new HashSet<String>();
FilterMap[] filterMaps = context.findFilterMaps();
for (int i = 0; i < filterMaps.length; i++) {
if (filterDef.getFilterName().equals(filterMaps[i].getFilterName())) {
FilterMap filterMap = filterMaps[i];
String[] maps = filterMap.getServletNames();
for (int j = 0; j < maps.length; j++) {
result.add(maps[j]);
}
if (filterMap.getMatchAllServletNames()) {
result.add("*");
}
}
}
return Collections.unmodifiableSet(result);
}
public Collection<String> getUrlPatternMappings() {
HashSet<String> result = new HashSet<String>();
FilterMap[] filterMaps = context.findFilterMaps();
for (int i = 0; i < filterMaps.length; i++) {
if (filterDef.getFilterName().equals(filterMaps[i].getFilterName())) {
FilterMap filterMap = filterMaps[i];
String[] maps = filterMap.getURLPatterns();
for (int j = 0; j < maps.length; j++) {
result.add(maps[j]);
}
if (filterMap.getMatchAllUrlPatterns()) {
result.add("*");
}
}
}
return Collections.unmodifiableSet(result);
}
public void setAsyncSupported(boolean asyncSupported) {
if (!context.isStarting()) {
throw MESSAGES.cannotAddFilterRegistrationAfterInit(context.getPath());
}
filterDef.setAsyncSupported(asyncSupported);
context.addFilterDef(filterDef);
}
public void setDescription(String description) {
filterDef.setDescription(description);
context.addFilterDef(filterDef);
}
public boolean setInitParameter(String name, String value) {
if (!context.isStarting()) {
throw MESSAGES.cannotAddFilterRegistrationAfterInit(context.getPath());
}
if (name == null || value == null) {
throw MESSAGES.invalidFilterRegistrationArguments();
}
if (filterDef.getInitParameter(name) != null) {
return false;
}
filterDef.addInitParameter(name, value);
context.addFilterDef(filterDef);
return true;
}
public Set<String> setInitParameters(Map<String, String> initParameters) {
if (!context.isStarting()) {
throw MESSAGES.cannotAddFilterRegistrationAfterInit(context.getPath());
}
if (initParameters == null) {
throw MESSAGES.invalidFilterRegistrationArguments();
}
Set<String> conflicts = new HashSet<String>();
Iterator<String> parameterNames = initParameters.keySet().iterator();
while (parameterNames.hasNext()) {
String parameterName = parameterNames.next();
if (filterDef.getInitParameter(parameterName) != null) {
conflicts.add(parameterName);
} else {
String value = initParameters.get(parameterName);
if (value == null) {
throw MESSAGES.invalidFilterRegistrationArguments();
}
filterDef.addInitParameter(parameterName, value);
}
}
context.addFilterDef(filterDef);
return conflicts;
}
public Map<String, String> getInitParameters() {
return Collections.unmodifiableMap(filterDef.getParameterMap());
}
// -------------------------------------------------------- Package Methods
/**
* Return the application Filter we are configured for.
*
* @exception ClassCastException if the specified class does not implement
* the <code>javax.servlet.Filter</code> interface
* @exception ClassNotFoundException if the filter class cannot be found
* @exception IllegalAccessException if the filter class cannot be
* publicly instantiated
* @exception InstantiationException if an exception occurs while
* instantiating the filter object
* @exception ServletException if thrown by the filter's init() method
* @throws NamingException
* @throws InvocationTargetException
*/
Filter getFilter() throws ClassCastException, ClassNotFoundException,
IllegalAccessException, InstantiationException, ServletException,
InvocationTargetException, NamingException {
// Return the existing filter instance, if any
if (this.filter != null)
return (this.filter);
// Identify the class loader we will be using
if (filterInstance == null) {
String filterClass = filterDef.getFilterClass();
this.filter = (Filter) context.getInstanceManager().newInstance(filterClass);
} else {
this.filter = filterInstance;
filterInstance = null;
}
filter.init(this);
// Expose filter via JMX
if (org.apache.tomcat.util.Constants.ENABLE_MODELER) {
registerJMX();
}
return (this.filter);
}
/**
* Set the filter instance programmatically.
*/
public void setFilter(Filter filter) {
filterInstance = filter;
}
/**
* Return the filter instance.
*/
public Filter getFilterInstance() {
return (filterInstance != null) ? filterInstance : filter;
}
/**
* Return the filter definition we are configured for.
*/
public FilterDef getFilterDef() {
return (this.filterDef);
}
/**
* Release the Filter instance associated with this FilterConfig,
* if there is one.
*/
void release() {
if (org.apache.tomcat.util.Constants.ENABLE_MODELER) {
unregsiterJMX();
}
if (this.filter != null)
{
try {
if (Globals.IS_SECURITY_ENABLED) {
SecurityUtil.doAsPrivilege("destroy", filter);
SecurityUtil.remove(filter);
} else {
filter.destroy();
}
} catch(java.lang.Exception ex){
context.getLogger().error(MESSAGES.errorDestroyingFilter(getFilterName()), ex);
}
try {
((StandardContext) context).getInstanceManager().destroyInstance(this.filter);
} catch (Exception e) {
context.getLogger().error(MESSAGES.preDestroyException(), e);
}
}
this.filter = null;
}
// -------------------------------------------------------- Private Methods
private void registerJMX() {
String parentName = context.getName();
parentName = ("".equals(parentName)) ? "/" : parentName;
String hostName = context.getParent().getName();
hostName = (hostName == null) ? "DEFAULT" : hostName;
// domain == engine name
String domain = context.getParent().getParent().getName();
String webMod = "//" + hostName + parentName;
String onameStr = null;
if (context instanceof StandardContext) {
StandardContext standardContext = (StandardContext) context;
onameStr = domain + ":j2eeType=Filter,name=" +
filterDef.getFilterName() + ",WebModule=" + webMod +
",J2EEApplication=" +
standardContext.getJ2EEApplication() + ",J2EEServer=" +
standardContext.getJ2EEServer();
} else {
onameStr = domain + ":j2eeType=Filter,name=" +
filterDef.getFilterName() + ",WebModule=" + webMod;
}
try {
oname = new ObjectName(onameStr);
Registry.getRegistry(null, null).registerComponent(this, oname,
null);
} catch (Exception ex) {
CatalinaLogger.CORE_LOGGER.filterJmxRegistrationFailed(getFilterClass(), getFilterName(), ex);
}
}
// Remove this component's JMX registration, if any was made.
// NOTE(review): the method name carries a historical typo ("unregsiter");
// it is kept because release() calls it by this exact name.
private void unregsiterJMX() {
    if (oname == null) {
        return;
    }
    try {
        Registry.getRegistry(null, null).unregisterComponent(oname);
    } catch (Exception ex) {
        CatalinaLogger.CORE_LOGGER.filterJmxUnregistrationFailed(getFilterClass(), getFilterName(), ex);
    }
}
}
| |
package org.apache.commons.ssl.asn1;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
/**
 * ASN.1 GeneralizedTime object.
 * <p>
 * The time is stored exactly as the string that appeared in (or will be
 * written to) the encoding: YYYYMMDDHHMMSS[.f], optionally followed by
 * 'Z' for UTC or a +-HHMM offset, and converted on demand.
 */
public class DERGeneralizedTime
    extends ASN1Object {
    String time;

    /**
     * return a generalized time from the passed in object
     *
     * @throws IllegalArgumentException if the object cannot be converted.
     */
    public static DERGeneralizedTime getInstance(
        Object obj) {
        // null is passed through unchanged, as is an existing instance.
        if (obj == null || obj instanceof DERGeneralizedTime) {
            return (DERGeneralizedTime) obj;
        }
        if (obj instanceof ASN1OctetString) {
            return new DERGeneralizedTime(((ASN1OctetString) obj).getOctets());
        }
        throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
    }

    /**
     * return a Generalized Time object from a tagged object.
     *
     * @param obj      the tagged object holding the object we want
     * @param explicit true if the object is meant to be explicitly
     *                 tagged false otherwise.
     * @throws IllegalArgumentException if the tagged object cannot
     *                                  be converted.
     */
    public static DERGeneralizedTime getInstance(
        ASN1TaggedObject obj,
        boolean explicit) {
        return getInstance(obj.getObject());
    }

    /**
     * The correct format for this is YYYYMMDDHHMMSS[.f]Z, or without the Z
     * for local time, or Z+-HHMM on the end, for difference between local
     * time and UTC time. The fractional second amount f must consist of at
     * least one number with trailing zeroes removed.
     *
     * @param time the time string.
     * @throws IllegalArgumentException if String is an illegal format.
     */
    public DERGeneralizedTime(
        String time) {
        this.time = time;
        try {
            // Validate eagerly so a malformed string fails here rather than
            // at the first getDate()/getTime() call.
            this.getDate();
        }
        catch (ParseException e) {
            // Chain the original cause so callers can diagnose the failure.
            throw new IllegalArgumentException("invalid date string: " + e.getMessage(), e);
        }
    }

    /** base constructor from a java.util.Date object (rendered as UTC). */
    public DERGeneralizedTime(
        Date time) {
        SimpleDateFormat dateF = new SimpleDateFormat("yyyyMMddHHmmss'Z'");
        dateF.setTimeZone(new SimpleTimeZone(0, "Z"));
        this.time = dateF.format(time);
    }

    DERGeneralizedTime(
        byte[] bytes) {
        //
        // explicitly convert to characters (the encoding is 8-bit)
        //
        char[] dateC = new char[bytes.length];
        for (int i = 0; i != dateC.length; i++) {
            dateC[i] = (char) (bytes[i] & 0xff);
        }
        this.time = new String(dateC);
    }

    /**
     * Return the time.
     *
     * @return The time string as it appeared in the encoded object.
     */
    public String getTimeString() {
        return time;
    }

    /**
     * return the time - always in the form of
     * YYYYMMDDhhmmssGMT(+hh:mm|-hh:mm).
     * <p/>
     * Normally in a certificate we would expect "Z" rather than "GMT",
     * however adding the "GMT" means we can just use:
     * <pre>
     *     dateF = new SimpleDateFormat("yyyyMMddHHmmssz");
     * </pre>
     * To read in the time and get a date which is compatible with our local
     * time zone.
     */
    public String getTime() {
        //
        // standardise the format.
        //
        if (time.charAt(time.length() - 1) == 'Z') {
            // Trailing 'Z' == UTC.
            return time.substring(0, time.length() - 1) + "GMT+00:00";
        } else {
            // Look for a +hhmm / -hhmm offset ...
            int signPos = time.length() - 5;
            char sign = time.charAt(signPos);
            if (sign == '-' || sign == '+') {
                return time.substring(0, signPos)
                    + "GMT"
                    + time.substring(signPos, signPos + 3)
                    + ":"
                    + time.substring(signPos + 3);
            } else {
                // ... or a short +hh / -hh offset.
                signPos = time.length() - 3;
                sign = time.charAt(signPos);
                if (sign == '-' || sign == '+') {
                    return time.substring(0, signPos)
                        + "GMT"
                        + time.substring(signPos)
                        + ":00";
                }
            }
        }
        // No zone designator at all: treat as local time.
        return time + calculateGMTOffset();
    }

    /**
     * Render the default time zone's offset from GMT as "GMT+hh:mm" or
     * "GMT-hh:mm", adjusting for daylight saving when the stored time
     * falls inside it.
     */
    private String calculateGMTOffset() {
        String sign = "+";
        TimeZone timeZone = TimeZone.getDefault();
        int offset = timeZone.getRawOffset();
        if (offset < 0) {
            sign = "-";
            offset = -offset;
        }
        int hours = offset / (60 * 60 * 1000);
        int minutes = (offset - (hours * 60 * 60 * 1000)) / (60 * 1000);
        try {
            if (timeZone.useDaylightTime() && timeZone.inDaylightTime(this.getDate())) {
                hours += sign.equals("+") ? 1 : -1;
            }
        }
        catch (ParseException e) {
            // we'll do our best and ignore daylight savings
        }
        return "GMT" + sign + convert(hours) + ":" + convert(minutes);
    }

    /** Zero-pad a non-negative value to at least two digits. */
    private String convert(int value) {
        if (value < 10) {
            return "0" + value;
        }
        return Integer.toString(value);
    }

    /**
     * Parse the stored string into a java.util.Date, choosing the pattern
     * from the zone designator and the presence of fractional seconds.
     *
     * @throws ParseException if the string does not match the expected format.
     */
    public Date getDate()
        throws ParseException {
        SimpleDateFormat dateF;
        String d = time;
        // NOTE(review): the ".SSSS" patterns assume a fixed-width fraction;
        // SimpleDateFormat parses 'S' as milliseconds, so unusual fraction
        // lengths may be interpreted oddly — kept as-is for compatibility.
        if (time.endsWith("Z")) {
            if (hasFractionalSeconds()) {
                dateF = new SimpleDateFormat("yyyyMMddHHmmss.SSSS'Z'");
            } else {
                dateF = new SimpleDateFormat("yyyyMMddHHmmss'Z'");
            }
            dateF.setTimeZone(new SimpleTimeZone(0, "Z"));
        } else if (time.indexOf('-') > 0 || time.indexOf('+') > 0) {
            // Offset form: normalise to the "GMT+hh:mm" shape first so the
            // 'z' pattern letter can parse it.
            d = this.getTime();
            if (hasFractionalSeconds()) {
                dateF = new SimpleDateFormat("yyyyMMddHHmmss.SSSSz");
            } else {
                dateF = new SimpleDateFormat("yyyyMMddHHmmssz");
            }
            dateF.setTimeZone(new SimpleTimeZone(0, "Z"));
        } else {
            if (hasFractionalSeconds()) {
                dateF = new SimpleDateFormat("yyyyMMddHHmmss.SSSS");
            } else {
                dateF = new SimpleDateFormat("yyyyMMddHHmmss");
            }
            // NOTE(review): raw offset 0 paired with the local zone ID looks
            // suspicious for local-time values, but is preserved to avoid
            // changing the behaviour existing callers depend on.
            dateF.setTimeZone(new SimpleTimeZone(0, TimeZone.getDefault().getID()));
        }
        return dateF.parse(d);
    }

    /** True when a '.' sits right after the seconds field (position 14). */
    private boolean hasFractionalSeconds() {
        return time.indexOf('.') == 14;
    }

    /** Encode the time string as 8-bit octets. */
    private byte[] getOctets() {
        char[] cs = time.toCharArray();
        byte[] bs = new byte[cs.length];
        for (int i = 0; i != cs.length; i++) {
            bs[i] = (byte) cs[i];
        }
        return bs;
    }

    void encode(
        DEROutputStream out)
        throws IOException {
        out.writeEncoded(GENERALIZED_TIME, this.getOctets());
    }

    boolean asn1Equals(
        DERObject o) {
        if (!(o instanceof DERGeneralizedTime)) {
            return false;
        }
        // Equality is on the raw string, not the instant it denotes.
        return time.equals(((DERGeneralizedTime) o).time);
    }

    public int hashCode() {
        return time.hashCode();
    }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.javaFX.packaging;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.compiler.CompilerMessageCategory;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.Computable;
import com.intellij.packaging.artifacts.Artifact;
import com.intellij.packaging.artifacts.ArtifactManager;
import com.intellij.packaging.artifacts.ArtifactProperties;
import com.intellij.packaging.elements.PackagingElement;
import com.intellij.packaging.impl.artifacts.ArtifactUtil;
import com.intellij.packaging.impl.elements.ArchivePackagingElement;
import com.intellij.packaging.impl.elements.ArtifactPackagingElement;
import com.intellij.packaging.ui.ArtifactEditorContext;
import com.intellij.packaging.ui.ArtifactPropertiesEditor;
import com.intellij.util.xmlb.XmlSerializerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.javaFX.packaging.preloader.JavaFxPreloaderArtifactProperties;
import org.jetbrains.plugins.javaFX.packaging.preloader.JavaFxPreloaderArtifactPropertiesProvider;
import org.jetbrains.plugins.javaFX.packaging.preloader.JavaFxPreloaderArtifactType;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Persistent per-artifact settings for building a JavaFX application
 * artifact: deployment metadata, HTML template, signing and native
 * bundling options.
 * <p>
 * NOTE: state is persisted reflectively via {@link XmlSerializerUtil#copyBean},
 * so the bean getter/setter names must stay in sync with the fields.
 * <p>
 * User: anna
 * Date: 3/12/13
 */
public class JavaFxArtifactProperties extends ArtifactProperties<JavaFxArtifactProperties> {
  // Deployment descriptor values written into the generated package.
  private String myTitle;
  private String myVendor;
  private String myDescription;
  private String myAppClass;
  private String myVersion;
  private String myWidth = JavaFxPackagerConstants.DEFAULT_WEIGHT;
  private String myHeight = JavaFxPackagerConstants.DEFAULT_HEIGHT;
  // Custom HTML template and parameter files for browser deployment.
  private String myHtmlTemplateFile;
  private String myHtmlPlaceholderId;
  private String myHtmlParamFile;
  private String myParamFile;
  private String myUpdateMode = JavaFxPackagerConstants.UPDATE_MODE_BACKGROUND;
  // Jar signing configuration; a self-signed certificate is the default.
  private boolean myEnabledSigning = false;
  private boolean mySelfSigning = true;
  private String myAlias;
  private String myKeystore;
  private String myStorepass;
  private String myKeypass;
  private boolean myConvertCss2Bin;
  private String myNativeBundle = JavaFxPackagerConstants.NativeBundles.none.name();
  private List<JavaFxManifestAttribute> myCustomManifestAttributes = new ArrayList<>();
  private JavaFxApplicationIcons myIcons = new JavaFxApplicationIcons();

  /**
   * Runs the JavaFX packager over a freshly compiled JavaFX artifact.
   * Non-JavaFX artifacts are ignored; a JDK 7+ SDK is required because the
   * packager tooling ships with Java 7's JavaFX.
   */
  @Override
  public void onBuildFinished(@NotNull final Artifact artifact, @NotNull final CompileContext compileContext) {
    if (!(artifact.getArtifactType() instanceof JavaFxApplicationArtifactType)) {
      return;
    }
    final Project project = compileContext.getProject();
    // Module lookup touches the project model, so it runs inside a read action.
    final Set<Module> modules = ApplicationManager.getApplication().runReadAction(
      (Computable<Set<Module>>)() -> ArtifactUtil.getModulesIncludedInArtifacts(Collections.singletonList(artifact), project));
    if (modules.isEmpty()) {
      return;
    }
    // Find any module SDK that is a JDK of version 7 or newer.
    Sdk fxCompatibleSdk = null;
    for (Module module : modules) {
      final Sdk sdk = ModuleRootManager.getInstance(module).getSdk();
      if (sdk != null && sdk.getSdkType() instanceof JavaSdk) {
        if (((JavaSdk)sdk.getSdkType()).isOfVersionOrHigher(sdk, JavaSdkVersion.JDK_1_7)) {
          fxCompatibleSdk = sdk;
          break;
        }
      }
    }
    if (fxCompatibleSdk == null) {
      compileContext.addMessage(CompilerMessageCategory.ERROR, "Java version 7 or higher is required to build JavaFX package", null, -1, -1);
      return;
    }
    final JavaFxArtifactProperties properties =
      (JavaFxArtifactProperties)artifact.getProperties(JavaFxArtifactPropertiesProvider.getInstance());
    // Packager errors are routed back into the build log as compiler errors.
    final JavaFxPackager javaFxPackager = new JavaFxPackager(artifact, properties, project) {
      @Override
      protected void registerJavaFxPackagerError(String message) {
        compileContext.addMessage(CompilerMessageCategory.ERROR, message, null, -1, -1);
      }
    };
    javaFxPackager.buildJavaFxArtifact(fxCompatibleSdk.getHomePath());
  }

  @Override
  public ArtifactPropertiesEditor createEditor(@NotNull ArtifactEditorContext context) {
    return new JavaFxArtifactPropertiesEditor(this, context.getProject(), context.getArtifact());
  }

  @Nullable
  @Override
  public JavaFxArtifactProperties getState() {
    // The object itself is the serializable state.
    return this;
  }

  @Override
  public void loadState(JavaFxArtifactProperties state) {
    // Reflective copy of all bean properties from the deserialized state.
    XmlSerializerUtil.copyBean(state, this);
  }

  // Plain bean accessors below; names are part of the persistence contract.

  public String getTitle() {
    return myTitle;
  }

  public void setTitle(String title) {
    myTitle = title;
  }

  public String getVendor() {
    return myVendor;
  }

  public void setVendor(String vendor) {
    myVendor = vendor;
  }

  public String getDescription() {
    return myDescription;
  }

  public void setDescription(String description) {
    myDescription = description;
  }

  public String getAppClass() {
    return myAppClass;
  }

  public void setAppClass(String appClass) {
    myAppClass = appClass;
  }

  public String getVersion() {
    return myVersion;
  }

  public void setVersion(String version) {
    myVersion = version;
  }

  public String getWidth() {
    return myWidth;
  }

  public String getHeight() {
    return myHeight;
  }

  public void setWidth(String width) {
    myWidth = width;
  }

  public void setHeight(String height) {
    myHeight = height;
  }

  public String getHtmlTemplateFile() {
    return myHtmlTemplateFile;
  }

  public void setHtmlTemplateFile(String htmlTemplateFile) {
    myHtmlTemplateFile = htmlTemplateFile;
  }

  public String getHtmlPlaceholderId() {
    return myHtmlPlaceholderId;
  }

  public void setHtmlPlaceholderId(String htmlPlaceholderId) {
    myHtmlPlaceholderId = htmlPlaceholderId;
  }

  public String getHtmlParamFile() {
    return myHtmlParamFile;
  }

  public void setHtmlParamFile(String htmlParamFile) {
    myHtmlParamFile = htmlParamFile;
  }

  public String getParamFile() {
    return myParamFile;
  }

  public void setParamFile(String paramFile) {
    myParamFile = paramFile;
  }

  public String getUpdateMode() {
    return myUpdateMode;
  }

  public void setUpdateMode(String updateMode) {
    myUpdateMode = updateMode;
  }

  public boolean isEnabledSigning() {
    return myEnabledSigning;
  }

  public void setEnabledSigning(boolean enabledSigning) {
    myEnabledSigning = enabledSigning;
  }

  public boolean isSelfSigning() {
    return mySelfSigning;
  }

  public void setSelfSigning(boolean selfSigning) {
    mySelfSigning = selfSigning;
  }

  public String getAlias() {
    return myAlias;
  }

  public void setAlias(String alias) {
    myAlias = alias;
  }

  public String getKeystore() {
    return myKeystore;
  }

  public void setKeystore(String keystore) {
    myKeystore = keystore;
  }

  public String getStorepass() {
    return myStorepass;
  }

  public void setStorepass(String storepass) {
    myStorepass = storepass;
  }

  public String getKeypass() {
    return myKeypass;
  }

  public void setKeypass(String keypass) {
    myKeypass = keypass;
  }

  public boolean isConvertCss2Bin() {
    return myConvertCss2Bin;
  }

  public void setConvertCss2Bin(boolean convertCss2Bin) {
    myConvertCss2Bin = convertCss2Bin;
  }

  /**
   * Returns the preloader's main class, or null when the artifact has no
   * embedded preloader artifact.
   */
  public String getPreloaderClass(Artifact rootArtifact, Project project) {
    final Artifact artifact = getPreloaderArtifact(rootArtifact, project);
    if (artifact != null) {
      final JavaFxPreloaderArtifactProperties properties =
        (JavaFxPreloaderArtifactProperties)artifact.getProperties(JavaFxPreloaderArtifactPropertiesProvider.getInstance());
      return properties.getPreloaderClass();
    }
    return null;
  }

  /**
   * Returns the preloader jar file name, or null when the artifact has no
   * embedded preloader artifact.
   */
  public String getPreloaderJar(Artifact rootArtifact, Project project) {
    final Artifact artifact = getPreloaderArtifact(rootArtifact, project);
    if (artifact != null) {
      return ((ArchivePackagingElement)artifact.getRootElement()).getArchiveFileName();
    }
    return null;
  }

  // Finds the first child artifact element of the preloader artifact type.
  private static Artifact getPreloaderArtifact(Artifact rootArtifact, Project project) {
    for (PackagingElement<?> element : rootArtifact.getRootElement().getChildren()) {
      if (element instanceof ArtifactPackagingElement) {
        final Artifact artifact = ((ArtifactPackagingElement)element)
          .findArtifact(ArtifactManager.getInstance(project).getResolvingContext());
        if (artifact != null && artifact.getArtifactType() instanceof JavaFxPreloaderArtifactType) {
          return artifact;
        }
      }
    }
    return null;
  }

  public String getNativeBundle() {
    return myNativeBundle;
  }

  public void setNativeBundle(String nativeBundle) {
    myNativeBundle = nativeBundle;
  }

  public List<JavaFxManifestAttribute> getCustomManifestAttributes() {
    return myCustomManifestAttributes;
  }

  public void setCustomManifestAttributes(List<JavaFxManifestAttribute> customManifestAttributes) {
    myCustomManifestAttributes = customManifestAttributes;
  }

  public JavaFxApplicationIcons getIcons() {
    return myIcons;
  }

  public void setIcons(JavaFxApplicationIcons icons) {
    myIcons = icons;
  }

  /**
   * Adapter that feeds the values held by a {@link JavaFxArtifactProperties}
   * instance into the abstract packager; subclasses supply error reporting.
   */
  public static abstract class JavaFxPackager extends AbstractJavaFxPackager {
    private final Artifact myArtifact;
    private final JavaFxArtifactProperties myProperties;
    private final Project myProject;

    public JavaFxPackager(Artifact artifact, JavaFxArtifactProperties properties, Project project) {
      myArtifact = artifact;
      myProperties = properties;
      myProject = project;
    }

    @Override
    protected String getArtifactName() {
      return myArtifact.getName();
    }

    @Override
    protected String getArtifactOutputPath() {
      return myArtifact.getOutputPath();
    }

    // Points at the archive child when the artifact has one, otherwise at
    // the artifact's own output file path.
    @Override
    protected String getArtifactOutputFilePath() {
      for (PackagingElement<?> element : myArtifact.getRootElement().getChildren()) {
        if (element instanceof ArchivePackagingElement) {
          return myArtifact.getOutputFilePath() + File.separator + ((ArchivePackagingElement)element).getArchiveFileName();
        }
      }
      return myArtifact.getOutputFilePath();
    }

    @Override
    protected String getAppClass() {
      return myProperties.getAppClass();
    }

    @Override
    protected String getTitle() {
      return myProperties.getTitle();
    }

    @Override
    protected String getVendor() {
      return myProperties.getVendor();
    }

    @Override
    protected String getDescription() {
      return myProperties.getDescription();
    }

    @Override
    protected String getVersion() {
      return myProperties.getVersion();
    }

    @Override
    protected JavaFxApplicationIcons getIcons() {
      return myProperties.getIcons();
    }

    @Override
    protected String getWidth() {
      return myProperties.getWidth();
    }

    @Override
    protected String getHeight() {
      return myProperties.getHeight();
    }

    @Override
    public String getPreloaderClass() {
      return myProperties.getPreloaderClass(myArtifact, myProject);
    }

    @Override
    public String getPreloaderJar() {
      return myProperties.getPreloaderJar(myArtifact, myProject);
    }

    @Override
    public boolean convertCss2Bin() {
      return myProperties.isConvertCss2Bin();
    }

    @Override
    protected String getHtmlTemplateFile() {
      return myProperties.getHtmlTemplateFile();
    }

    @Override
    protected String getHtmlPlaceholderId() {
      return myProperties.getHtmlPlaceholderId();
    }

    @Override
    protected String getHtmlParamFile() {
      return myProperties.getHtmlParamFile();
    }

    @Override
    protected String getParamFile() {
      return myProperties.getParamFile();
    }

    @Override
    protected String getUpdateMode() {
      return myProperties.getUpdateMode();
    }

    @Override
    protected JavaFxPackagerConstants.NativeBundles getNativeBundle() {
      return JavaFxPackagerConstants.NativeBundles.valueOf(myProperties.getNativeBundle());
    }

    @Override
    public String getKeypass() {
      return myProperties.getKeypass();
    }

    @Override
    public String getStorepass() {
      return myProperties.getStorepass();
    }

    @Override
    public String getKeystore() {
      return myProperties.getKeystore();
    }

    @Override
    public String getAlias() {
      return myProperties.getAlias();
    }

    @Override
    public boolean isSelfSigning() {
      return myProperties.isSelfSigning();
    }

    @Override
    public boolean isEnabledSigning() {
      return myProperties.isEnabledSigning();
    }

    @Override
    public List<JavaFxManifestAttribute> getCustomManifestAttributes() {
      return myProperties.getCustomManifestAttributes();
    }
  }
}
| |
/*
* Copyright 2012 Uwe Trottmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.uwetrottmann.tmdb.services;
import com.google.myjson.reflect.TypeToken;
import com.uwetrottmann.tmdb.TmdbApiBuilder;
import com.uwetrottmann.tmdb.TmdbApiService;
import com.uwetrottmann.tmdb.entities.Casts;
import com.uwetrottmann.tmdb.entities.Movie;
import com.uwetrottmann.tmdb.entities.ResultsPage;
import com.uwetrottmann.tmdb.entities.Trailers;
/**
 * Endpoint wrappers for the TMDb "/movie" resources. Each method returns a
 * pre-configured builder; the caller sets optional parameters and executes
 * it. The anonymous {@code TypeToken} subclasses below capture the generic
 * response type for JSON deserialization and must stay anonymous.
 */
public class MoviesService extends TmdbApiService {
    /**
     * Get the cast information for a specific movie id.
     *
     * @param id TMDb id.
     * @return Builder instance.
     */
    public CastsBuilder casts(int id) {
        return new CastsBuilder(this, id);
    }

    /**
     * Get the basic movie information for a specific movie id.
     *
     * @param id TMDb id.
     * @return Builder instance.
     */
    public SummaryBuilder summary(int id) {
        return new SummaryBuilder(this, id);
    }

    /**
     * Get the trailers for a specific movie id.
     *
     * @param id TMDb id.
     * @return Builder instance.
     */
    public TrailerBuilder trailers(int id) {
        return new TrailerBuilder(this, id);
    }

    /**
     * Get the list of movies playing in theaters. This list refreshes every
     * day. The maximum number of items this list will include is 100.
     *
     * @return Builder instance.
     */
    public NowPlayingBuilder nowPlaying() {
        return new NowPlayingBuilder(this);
    }

    /**
     * Get the list of popular movies on The Movie Database. This list refreshes
     * every day.
     *
     * @return Builder instance.
     */
    public PopularBuilder popular() {
        return new PopularBuilder(this);
    }

    /**
     * Get the similar movies for a specific movie id.
     *
     * @param id TMDb id.
     * @return Builder instance.
     */
    public SimilarBuilder similarMovies(int id) {
        return new SimilarBuilder(this, id);
    }

    /**
     * Get the list of top rated movies. By default, this list will only include
     * movies that have 10 or more votes. This list refreshes every day.
     *
     * @return Builder instance.
     */
    public TopRatedBuilder topRated() {
        return new TopRatedBuilder(this);
    }

    /**
     * Get the list of upcoming movies. This list refreshes every day. The
     * maximum number of items this list will include is 100.
     *
     * @return Builder instance.
     */
    public UpcomingBuilder upcoming() {
        return new UpcomingBuilder(this);
    }

    /** Builder for "/movie/{id}/casts". */
    public static final class CastsBuilder extends TmdbApiBuilder<Casts> {
        private static final String URI = "/movie/" + FIELD_ID + "/casts";

        private CastsBuilder(MoviesService service, int id) {
            super(service, new TypeToken<Casts>() {
            }, URI);
            field(FIELD_ID, id);
        }
    }

    /** Builder for "/movie/{id}". */
    public static final class SummaryBuilder extends TmdbApiBuilder<Movie> {
        private static final String URI = "/movie/" + FIELD_ID;

        private SummaryBuilder(MoviesService service, int id) {
            super(service, new TypeToken<Movie>() {
            }, URI);
            field(FIELD_ID, id);
        }

        /**
         * Set the language. Attention: will not default to English, but instead
         * will return empty field.
         *
         * @param languageCode ISO 639-1 code.
         */
        public SummaryBuilder language(String languageCode) {
            parameter(PARAMETER_LANGUAGE, languageCode);
            return this;
        }
    }

    /** Builder for "/movie/{id}/trailers". */
    public static final class TrailerBuilder extends TmdbApiBuilder<Trailers> {
        private static final String URI = "/movie/" + FIELD_ID + "/trailers";

        private TrailerBuilder(MoviesService service, int id) {
            super(service, new TypeToken<Trailers>() {
            }, URI);
            field(FIELD_ID, id);
        }
    }

    /** Builder for the paged "/movie/now_playing" list. */
    public static final class NowPlayingBuilder extends TmdbApiBuilder<ResultsPage> {
        private static final String URI = "/movie/now_playing";

        private NowPlayingBuilder(MoviesService service) {
            super(service, new TypeToken<ResultsPage>() {
            }, URI);
        }

        /**
         * Set the language (optional). Attention: will not default to English,
         * but instead will return empty field.
         *
         * @param languageCode ISO 639-1 code.
         */
        public NowPlayingBuilder language(String languageCode) {
            parameter(PARAMETER_LANGUAGE, languageCode);
            return this;
        }

        /**
         * Set the page to return (optional). Values start at 1.
         *
         * @param page Index of the page.
         */
        public NowPlayingBuilder page(int page) {
            parameter(PARAMETER_PAGE, page);
            return this;
        }
    }

    /** Builder for the paged "/movie/popular" list. */
    public static final class PopularBuilder extends TmdbApiBuilder<ResultsPage> {
        private static final String URI = "/movie/popular";

        private PopularBuilder(MoviesService service) {
            super(service, new TypeToken<ResultsPage>() {
            }, URI);
        }

        /**
         * Set the language (optional). Attention: will not default to English,
         * but instead will return empty field.
         *
         * @param languageCode ISO 639-1 code.
         */
        public PopularBuilder language(String languageCode) {
            parameter(PARAMETER_LANGUAGE, languageCode);
            return this;
        }

        /**
         * Set the page to return (optional). Values start at 1.
         *
         * @param page Index of the page.
         */
        public PopularBuilder page(int page) {
            parameter(PARAMETER_PAGE, page);
            return this;
        }
    }

    /** Builder for the paged "/movie/{id}/similar_movies" list. */
    public static final class SimilarBuilder extends TmdbApiBuilder<ResultsPage> {
        private static final String URI = "/movie/" + FIELD_ID + "/similar_movies";

        private SimilarBuilder(MoviesService service, int id) {
            super(service, new TypeToken<ResultsPage>() {
            }, URI);
            field(FIELD_ID, id);
        }

        /**
         * Set the language (optional). Attention: will not default to English,
         * but instead will return empty field.
         *
         * @param languageCode ISO 639-1 code.
         */
        public SimilarBuilder language(String languageCode) {
            parameter(PARAMETER_LANGUAGE, languageCode);
            return this;
        }

        /**
         * Set the page to return (optional). Values start at 1.
         *
         * @param page Index of the page.
         */
        public SimilarBuilder page(int page) {
            parameter(PARAMETER_PAGE, page);
            return this;
        }
    }

    /** Builder for the paged "/movie/top_rated" list. */
    public static final class TopRatedBuilder extends TmdbApiBuilder<ResultsPage> {
        private static final String URI = "/movie/top_rated";

        private TopRatedBuilder(MoviesService service) {
            super(service, new TypeToken<ResultsPage>() {
            }, URI);
        }

        /**
         * Set the language (optional). Attention: will not default to English,
         * but instead will return empty field.
         *
         * @param languageCode ISO 639-1 code.
         */
        public TopRatedBuilder language(String languageCode) {
            parameter(PARAMETER_LANGUAGE, languageCode);
            return this;
        }

        /**
         * Set the page to return (optional). Values start at 1.
         *
         * @param page Index of the page.
         */
        public TopRatedBuilder page(int page) {
            parameter(PARAMETER_PAGE, page);
            return this;
        }
    }

    /** Builder for the paged "/movie/upcoming" list. */
    public static final class UpcomingBuilder extends TmdbApiBuilder<ResultsPage> {
        private static final String URI = "/movie/upcoming";

        private UpcomingBuilder(MoviesService service) {
            super(service, new TypeToken<ResultsPage>() {
            }, URI);
        }

        /**
         * Set the language (optional). Attention: will not default to English,
         * but instead will return empty field.
         *
         * @param languageCode ISO 639-1 code.
         */
        public UpcomingBuilder language(String languageCode) {
            parameter(PARAMETER_LANGUAGE, languageCode);
            return this;
        }

        /**
         * Set the page to return (optional). Values start at 1.
         *
         * @param page Index of the page.
         */
        public UpcomingBuilder page(int page) {
            parameter(PARAMETER_PAGE, page);
            return this;
        }
    }
}
| |
package org.spongycastle.cms;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.spongycastle.asn1.ASN1EncodableVector;
import org.spongycastle.asn1.ASN1InputStream;
import org.spongycastle.asn1.ASN1ObjectIdentifier;
import org.spongycastle.asn1.ASN1OctetString;
import org.spongycastle.asn1.ASN1Sequence;
import org.spongycastle.asn1.ASN1Set;
import org.spongycastle.asn1.BERSequence;
import org.spongycastle.asn1.DERSet;
import org.spongycastle.asn1.cms.ContentInfo;
import org.spongycastle.asn1.cms.SignedData;
import org.spongycastle.asn1.cms.SignerInfo;
import org.spongycastle.operator.DefaultSignatureAlgorithmIdentifierFinder;
import org.spongycastle.operator.OperatorCreationException;
import org.spongycastle.operator.SignatureAlgorithmIdentifierFinder;
import org.spongycastle.util.Store;
/**
* general class for handling a pkcs7-signature message.
*
* A simple example of usage - note, in the example below the validity of
* the certificate isn't verified, just the fact that one of the certs
* matches the given signer...
*
* <pre>
* Store certStore = s.getCertificates();
* SignerInformationStore signers = s.getSignerInfos();
* Collection c = signers.getSigners();
* Iterator it = c.iterator();
*
* while (it.hasNext())
* {
* SignerInformation signer = (SignerInformation)it.next();
* Collection certCollection = certStore.getMatches(signer.getSID());
*
* Iterator certIt = certCollection.iterator();
* X509CertificateHolder cert = (X509CertificateHolder)certIt.next();
*
* if (signer.verify(new JcaSimpleSignerInfoVerifierBuilder().setProvider("SC").build(cert)))
* {
* verified++;
* }
* }
* </pre>
*/
public class CMSSignedData
{
private static final CMSSignedHelper HELPER = CMSSignedHelper.INSTANCE;
SignedData signedData;
ContentInfo contentInfo;
CMSTypedData signedContent;
SignerInformationStore signerInfoStore;
private Map hashes;
/**
 * Copy constructor: shares (does not deep-copy) the underlying ASN.1
 * structures, the parsed signer store, and any precomputed digest map of
 * the source object.
 */
private CMSSignedData(
    CMSSignedData c)
{
    this.signedData = c.signedData;
    this.contentInfo = c.contentInfo;
    this.signedContent = c.signedContent;
    this.signerInfoStore = c.signerInfoStore;
    // Fix: also carry over the precomputed digests; previously a copy of a
    // detached-with-hashes CMSSignedData silently lost them, breaking
    // later signer lookup against the map in getSignerInfos().
    this.hashes = c.hashes;
}
/**
 * Create a CMSSignedData from the BER/DER encoding of a pkcs7-signature
 * message with encapsulated content.
 *
 * @param sigBlock the encoded message.
 */
public CMSSignedData(byte[] sigBlock) throws CMSException {
    this(CMSUtils.readContentInfo(sigBlock));
}
/**
 * Create a CMSSignedData for content with a detached signature.
 *
 * @param signedContent the content that was signed.
 * @param sigBlock the encoded signature object.
 */
public CMSSignedData(CMSProcessable signedContent, byte[] sigBlock) throws CMSException {
    this(signedContent, CMSUtils.readContentInfo(sigBlock));
}
/**
 * Content with detached signature, digests precomputed.
 *
 * @param hashes a map of precomputed digests for content indexed by name of hash.
 * @param sigBlock the signature object.
 */
public CMSSignedData(Map hashes, byte[] sigBlock) throws CMSException {
    this(hashes, CMSUtils.readContentInfo(sigBlock));
}
/**
 * base constructor - content with detached signature.
 *
 * @param signedContent the content that was signed.
 * @param sigData the signature object, read as a BER/DER stream.
 */
public CMSSignedData(CMSProcessable signedContent, InputStream sigData) throws CMSException {
    this(signedContent, CMSUtils.readContentInfo(new ASN1InputStream(sigData)));
}
/**
 * base constructor - with encapsulated content read from a stream.
 */
public CMSSignedData(InputStream sigData) throws CMSException {
    this(CMSUtils.readContentInfo(sigData));
}
/**
 * Base constructor for content with a detached signature supplied as a
 * ContentInfo. A plain CMSProcessable is wrapped in a CMSTypedData adapter
 * whose content type is read lazily from the parsed SignedData.
 *
 * @param signedContent the content that was signed.
 * @param sigData the ContentInfo carrying the SignedData structure.
 */
public CMSSignedData(
    final CMSProcessable signedContent,
    ContentInfo sigData)
    throws CMSException
{
    if (signedContent instanceof CMSTypedData)
    {
        this.signedContent = (CMSTypedData)signedContent;
    }
    else
    {
        // Adapter: delegates the data to signedContent but answers the
        // content type from signedData — safe because getContentType() is
        // only invoked after the constructor has assigned signedData below.
        this.signedContent = new CMSTypedData()
        {
            public ASN1ObjectIdentifier getContentType()
            {
                return signedData.getEncapContentInfo().getContentType();
            }
            public void write(OutputStream out)
                throws IOException, CMSException
            {
                signedContent.write(out);
            }
            public Object getContent()
            {
                return signedContent.getContent();
            }
        };
    }
    // Order matters: the anonymous adapter above relies on signedData
    // being populated before its getContentType() is first called.
    this.contentInfo = sigData;
    this.signedData = getSignedData();
}
/**
 * Detached signature with precomputed content digests, supplied as a
 * ContentInfo.
 *
 * @param hashes map of digests, keyed by algorithm name or OID.
 * @param sigData the ContentInfo carrying the SignedData structure.
 */
public CMSSignedData(Map hashes, ContentInfo sigData) throws CMSException {
    this.hashes = hashes;
    this.contentInfo = sigData;
    this.signedData = getSignedData();
}
public CMSSignedData(
ContentInfo sigData)
throws CMSException
{
this.contentInfo = sigData;
this.signedData = getSignedData();
//
// this can happen if the signed message is sent simply to send a
// certificate chain.
//
if (signedData.getEncapContentInfo().getContent() != null)
{
this.signedContent = new CMSProcessableByteArray(signedData.getEncapContentInfo().getContentType(),
((ASN1OctetString)(signedData.getEncapContentInfo()
.getContent())).getOctets());
}
else
{
this.signedContent = null;
}
}
/**
 * Parse the SignedData structure out of the wrapped ContentInfo.
 *
 * @throws CMSException if the content is not a well-formed SignedData.
 */
private SignedData getSignedData() throws CMSException {
    try {
        return SignedData.getInstance(contentInfo.getContent());
    } catch (ClassCastException e) {
        throw new CMSException("Malformed content.", e);
    } catch (IllegalArgumentException e) {
        throw new CMSException("Malformed content.", e);
    }
}
/**
 * Return the CMS version number of the underlying SignedData structure.
 */
public int getVersion() {
    return signedData.getVersion().getValue().intValue();
}
/**
 * return the collection of signers that are associated with the
 * signatures for the message. The store is built lazily on first call
 * and cached.
 */
public SignerInformationStore getSignerInfos()
{
    if (signerInfoStore == null)
    {
        ASN1Set s = signedData.getSignerInfos();
        List signerInfos = new ArrayList();
        // The content type is a property of the whole SignedData, so it is
        // computed once rather than per signer. (Also removed: an unused
        // DefaultSignatureAlgorithmIdentifierFinder that was constructed
        // on every call and never referenced.)
        ASN1ObjectIdentifier contentType = signedData.getEncapContentInfo().getContentType();
        for (int i = 0; i != s.size(); i++)
        {
            SignerInfo info = SignerInfo.getInstance(s.getObjectAt(i));
            if (hashes == null)
            {
                signerInfos.add(new SignerInformation(info, contentType, signedContent, null));
            }
            else
            {
                // Detached-with-digests mode: the map may be keyed by the
                // algorithm OID string or by ASN1ObjectIdentifier; probe one
                // key to discover which convention the caller used.
                Object obj = hashes.keySet().iterator().next();
                byte[] hash = (obj instanceof String)
                    ? (byte[])hashes.get(info.getDigestAlgorithm().getAlgorithm().getId())
                    : (byte[])hashes.get(info.getDigestAlgorithm().getAlgorithm());
                signerInfos.add(new SignerInformation(info, contentType, null, hash));
            }
        }
        signerInfoStore = new SignerInformationStore(signerInfos);
    }
    return signerInfoStore;
}
/**
 * Return any X.509 certificate objects in this SignedData structure as a
 * Store of X509CertificateHolder objects.
 *
 * @return a Store of X509CertificateHolder objects.
 */
public Store getCertificates() {
    return HELPER.getCertificates(signedData.getCertificates());
}
/**
 * Return any X.509 CRL objects in this SignedData structure as a Store of
 * X509CRLHolder objects.
 *
 * @return a Store of X509CRLHolder objects.
 */
public Store getCRLs() {
    return HELPER.getCRLs(signedData.getCRLs());
}
/**
 * Return any X.509 attribute certificate objects in this SignedData
 * structure as a Store of X509AttributeCertificateHolder objects.
 *
 * @return a Store of X509AttributeCertificateHolder objects.
 */
public Store getAttributeCertificates() {
    return HELPER.getAttributeCertificates(signedData.getCertificates());
}
/**
 * Return any OtherRevocationInfo OtherRevInfo objects of the type indicated by otherRevocationInfoFormat in
 * this SignedData structure.
 *
 * @param otherRevocationInfoFormat OID of the format type been looked for.
 *
 * @return a Store of ASN1Encodable objects representing any objects of otherRevocationInfoFormat found.
 */
public Store getOtherRevocationInfo(ASN1ObjectIdentifier otherRevocationInfoFormat)
{
    // OtherRevocationInfoFormat entries live in the SignedData "crls" field
    // alongside regular CRLs; the helper selects only those matching the OID.
    return HELPER.getOtherRevocationInfo(otherRevocationInfoFormat, signedData.getCRLs());
}
/**
 * Return the a string representation of the OID associated with the
 * encapsulated content info structure carried in the signed data.
 *
 * @return the OID for the content type.
 */
public String getSignedContentTypeOID()
{
    return signedData.getEncapContentInfo().getContentType().getId();
}
/**
 * Return the content carried by this SignedData object.
 * NOTE(review): presumably null when the signature is detached and the
 * content was not supplied at construction — confirm against the constructors.
 */
public CMSTypedData getSignedContent()
{
    return signedContent;
}
/**
 * return the ContentInfo
 */
public ContentInfo toASN1Structure()
{
    // Returns the underlying ContentInfo object itself, not a copy.
    return contentInfo;
}
/**
 * return the ASN.1 encoded representation of this object.
 *
 * @throws IOException if the structure cannot be encoded.
 */
public byte[] getEncoded()
    throws IOException
{
    return contentInfo.getEncoded();
}
/**
 * Verify all the SignerInformation objects and their associated counter signatures attached
 * to this CMS SignedData object.
 *
 * @param verifierProvider a provider of SignerInformationVerifier objects.
 * @return true if all verify, false otherwise.
 * @throws CMSException if an exception occurs during the verification process.
 */
public boolean verifySignatures(SignerInformationVerifierProvider verifierProvider)
    throws CMSException
{
    // Convenience overload: counter signatures are checked as well
    // (ignoreCounterSignatures = false).
    return verifySignatures(verifierProvider, false);
}
/**
 * Verify all the SignerInformation objects and optionally their associated counter signatures attached
 * to this CMS SignedData object.
 *
 * @param verifierProvider a provider of SignerInformationVerifier objects.
 * @param ignoreCounterSignatures if true don't check counter signatures. If false check counter signatures as well.
 * @return true if all verify, false otherwise.
 * @throws CMSException if an exception occurs during the verification process.
 */
public boolean verifySignatures(SignerInformationVerifierProvider verifierProvider, boolean ignoreCounterSignatures)
    throws CMSException
{
    Collection signers = this.getSignerInfos().getSigners();

    for (Iterator it = signers.iterator(); it.hasNext();)
    {
        SignerInformation signer = (SignerInformation)it.next();

        try
        {
            SignerInformationVerifier verifier = verifierProvider.get(signer.getSID());

            if (!signer.verify(verifier))
            {
                return false;
            }

            if (!ignoreCounterSignatures)
            {
                Collection counterSigners = signer.getCounterSignatures().getSigners();

                for (Iterator cIt = counterSigners.iterator(); cIt.hasNext();)
                {
                    SignerInformation counterSigner = (SignerInformation)cIt.next();
                    // BUGFIX: look up the verifier for the *counter* signer's
                    // identity, not the primary signer's. The old code passed
                    // signer.getSID(), so counter signatures were verified
                    // against the wrong key and valid ones could fail (or
                    // bogus ones pass if both SIDs shared a key).
                    SignerInformationVerifier counterVerifier = verifierProvider.get(counterSigner.getSID());

                    if (!counterSigner.verify(counterVerifier))
                    {
                        return false;
                    }
                }
            }
        }
        catch (OperatorCreationException e)
        {
            throw new CMSException("failure in verifier provider: " + e.getMessage(), e);
        }
    }

    return true;
}
/**
 * Replace the SignerInformation store associated with this
 * CMSSignedData object with the new one passed in. You would
 * probably only want to do this if you wanted to change the unsigned
 * attributes associated with a signer, or perhaps delete one.
 *
 * @param signedData the signed data object to be used as a base.
 * @param signerInformationStore the new signer information store to use.
 * @return a new signed data object.
 */
public static CMSSignedData replaceSigners(
    CMSSignedData signedData,
    SignerInformationStore signerInformationStore)
{
    //
    // copy
    //
    CMSSignedData cms = new CMSSignedData(signedData);

    //
    // replace the store
    //
    cms.signerInfoStore = signerInformationStore;

    //
    // replace the signers in the SignedData object
    //
    // Collect each new signer's digest algorithm (normalised via fixAlgID)
    // and its ASN.1 SignerInfo structure.
    ASN1EncodableVector digestAlgs = new ASN1EncodableVector();
    ASN1EncodableVector vec = new ASN1EncodableVector();

    Iterator it = signerInformationStore.getSigners().iterator();
    while (it.hasNext())
    {
        SignerInformation signer = (SignerInformation)it.next();
        digestAlgs.add(CMSSignedHelper.INSTANCE.fixAlgID(signer.getDigestAlgorithmID()));
        vec.add(signer.toASN1Structure());
    }

    ASN1Set digests = new DERSet(digestAlgs);
    ASN1Set signers = new DERSet(vec);

    // Rebuild the SignedData SEQUENCE around the new digest/signers sets:
    // element 0 is the version, element 1 the digest algorithms, and the
    // signer set is always the final element.
    ASN1Sequence sD = (ASN1Sequence)signedData.signedData.toASN1Primitive();

    vec = new ASN1EncodableVector();

    //
    // signers are the last item in the sequence.
    //
    vec.add(sD.getObjectAt(0)); // version
    vec.add(digests);

    // Copy everything between the digest algorithms and the trailing signer
    // set unchanged (encapContentInfo plus optional certificates/crls).
    for (int i = 2; i != sD.size() - 1; i++)
    {
        vec.add(sD.getObjectAt(i));
    }

    vec.add(signers);

    cms.signedData = SignedData.getInstance(new BERSequence(vec));

    //
    // replace the contentInfo with the new one
    //
    cms.contentInfo = new ContentInfo(cms.contentInfo.getContentType(), cms.signedData);

    return cms;
}
/**
 * Replace the certificate and CRL information associated with this
 * CMSSignedData object with the new one passed in.
 *
 * @param signedData the signed data object to be used as a base.
 * @param certificates the new certificates to be used.
 * @param attrCerts the new attribute certificates to be used.
 * @param revocations the new CRLs to be used - a collection of X509CRLHolder objects, OtherRevocationInfoFormat, or both.
 * @return a new signed data object.
 * @exception CMSException if there is an error processing the CertStore
 */
public static CMSSignedData replaceCertificatesAndCRLs(
    CMSSignedData signedData,
    Store certificates,
    Store attrCerts,
    Store revocations)
    throws CMSException
{
    // Work on a copy so the caller's object is left untouched.
    CMSSignedData modified = new CMSSignedData(signedData);

    // Gather the replacement certificates / attribute certificates, if any
    // were supplied. An empty result is represented as null (field absent).
    ASN1Set replacementCerts = null;
    if (certificates != null || attrCerts != null)
    {
        List encodables = new ArrayList();
        if (certificates != null)
        {
            encodables.addAll(CMSUtils.getCertificatesFromStore(certificates));
        }
        if (attrCerts != null)
        {
            encodables.addAll(CMSUtils.getAttributeCertificatesFromStore(attrCerts));
        }

        ASN1Set candidate = CMSUtils.createBerSetFromList(encodables);
        if (candidate.size() != 0)
        {
            replacementCerts = candidate;
        }
    }

    // Same treatment for the revocation information.
    ASN1Set replacementCrls = null;
    if (revocations != null)
    {
        ASN1Set candidate = CMSUtils.createBerSetFromList(CMSUtils.getCRLsFromStore(revocations));
        if (candidate.size() != 0)
        {
            replacementCrls = candidate;
        }
    }

    // Rebuild the SignedData with the new cert/CRL sets, keeping the digest
    // algorithms, encapsulated content and signer infos from the original.
    modified.signedData = new SignedData(signedData.signedData.getDigestAlgorithms(),
        signedData.signedData.getEncapContentInfo(),
        replacementCerts,
        replacementCrls,
        signedData.signedData.getSignerInfos());

    // Wrap the rebuilt SignedData in a fresh ContentInfo of the same type.
    modified.contentInfo = new ContentInfo(modified.contentInfo.getContentType(), modified.signedData);

    return modified;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import java.io.EOFException;
import java.io.IOException;
import java.io.PrintStream;
import java.io.RandomAccessFile;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;
/**
* OfflineImageViewerPB to dump the contents of an Hadoop image file to XML or
* the console. Main entry point into utility, either via the command line or
* programmatically.
*/
@InterfaceAudience.Private
public class OfflineImageViewerPB {
  public static final Log LOG = LogFactory.getLog(OfflineImageViewerPB.class);

  // Help text printed by printUsage(). Every clause must end with '\n' so the
  // lines do not run together when the literal is concatenated.
  private final static String usage = "Usage: bin/hdfs oiv [OPTIONS] -i INPUTFILE -o OUTPUTFILE\n"
      + "Offline Image Viewer\n"
      + "View a Hadoop fsimage INPUTFILE using the specified PROCESSOR,\n"
      + "saving the results in OUTPUTFILE.\n"
      + "\n"
      + "The oiv utility will attempt to parse correctly formed image files\n"
      + "and will abort fail with mal-formed image files.\n"
      + "\n"
      + "The tool works offline and does not require a running cluster in\n"
      + "order to process an image file.\n"
      + "\n"
      + "The following image processors are available:\n"
      + " * XML: This processor creates an XML document with all elements of\n"
      + " the fsimage enumerated, suitable for further analysis by XML\n"
      + " tools.\n"
      + " * FileDistribution: This processor analyzes the file size\n"
      + " distribution in the image.\n"
      + " -maxSize specifies the range [0, maxSize] of file sizes to be\n"
      + " analyzed (128GB by default).\n"
      + " -step defines the granularity of the distribution. (2MB by default)\n"
      + " * Web: Run a viewer to expose read-only WebHDFS API.\n"
      + " -addr specifies the address to listen. (localhost:5978 by default)\n"
      + " * Delimited: Generate a text file with all of the elements common\n"
      + " to both inodes and inodes-under-construction, separated by a\n"
      + " delimiter. The default delimiter is \\t, though this may be\n"
      + " changed via the -delimiter argument.\n"
      + "\n"
      + "Required command line arguments:\n"
      + "-i,--inputFile <arg> FSImage file to process.\n"
      + "\n"
      + "Optional command line arguments:\n"
      + "-o,--outputFile <arg> Name of output file. If the specified\n"
      + " file exists, it will be overwritten.\n"
      + " (output to stdout by default)\n"
      + "-p,--processor <arg> Select which type of processor to apply\n"
      + " against image file. (XML|FileDistribution|Web|Delimited)\n"
      + " (Web by default)\n"
      + "-delimiter <arg> Delimiting string to use with Delimited processor\n"
      + "-t,--temp <arg> Use temporary dir to cache intermediate result to generate\n"
      + " Delimited outputs. If not set, Delimited processor constructs\n"
      // BUGFIX: this clause was missing its trailing '\n', which glued the
      // "-h,--help" line onto the end of the previous sentence in the output.
      + " the namespace in memory before outputting text.\n"
      + "-h,--help Display usage information and exit\n";

  /**
   * Build command-line options and descriptions
   */
  private static Options buildOptions() {
    Options options = new Options();

    // Build in/output file arguments, which are required, but there is no
    // addOption method that can specify this
    OptionBuilder.isRequired();
    OptionBuilder.hasArgs();
    OptionBuilder.withLongOpt("inputFile");
    options.addOption(OptionBuilder.create("i"));

    options.addOption("o", "outputFile", true, "");
    options.addOption("p", "processor", true, "");
    options.addOption("h", "help", false, "");
    options.addOption("maxSize", true, "");
    options.addOption("step", true, "");
    options.addOption("addr", true, "");
    options.addOption("delimiter", true, "");
    options.addOption("t", "temp", true, "");

    return options;
  }

  /**
   * Entry point to command-line-driven operation. User may specify options and
   * start fsimage viewer from the command line. Program will process image file
   * and exit cleanly or, if an error is encountered, inform user and exit.
   *
   * @param args
   *          Command line options
   * @throws IOException
   */
  public static void main(String[] args) throws Exception {
    int status = run(args);
    System.exit(status);
  }

  /**
   * Parse the command line and dispatch to the selected image processor.
   *
   * @param args command line arguments
   * @return 0 on success, -1 on a parse failure or processing error
   */
  public static int run(String[] args) throws Exception {
    Options options = buildOptions();
    if (args.length == 0) {
      printUsage();
      return 0;
    }

    CommandLineParser parser = new PosixParser();
    CommandLine cmd;

    try {
      cmd = parser.parse(options, args);
    } catch (ParseException e) {
      // BUGFIX: include the parse failure reason; the old message ended with
      // a dangling colon and no detail.
      System.out.println("Error parsing command-line options: " + e.getMessage());
      printUsage();
      return -1;
    }

    if (cmd.hasOption("h")) { // print help and exit
      printUsage();
      return 0;
    }

    String inputFile = cmd.getOptionValue("i");
    String processor = cmd.getOptionValue("p", "Web");
    String outputFile = cmd.getOptionValue("o", "-");
    String delimiter = cmd.getOptionValue("delimiter",
        PBImageDelimitedTextWriter.DEFAULT_DELIMITER);
    String tempPath = cmd.getOptionValue("t", "");

    Configuration conf = new Configuration();
    // "-" means stdout; otherwise write UTF-8 text to the named file.
    try (PrintStream out = outputFile.equals("-") ?
        System.out : new PrintStream(outputFile, "UTF-8")) {
      switch (processor) {
      case "FileDistribution":
        long maxSize = Long.parseLong(cmd.getOptionValue("maxSize", "0"));
        int step = Integer.parseInt(cmd.getOptionValue("step", "0"));
        new FileDistributionCalculator(conf, maxSize, step, out).visit(
            new RandomAccessFile(inputFile, "r"));
        break;
      case "XML":
        new PBImageXmlWriter(conf, out).visit(
            new RandomAccessFile(inputFile, "r"));
        break;
      case "Web":
        String addr = cmd.getOptionValue("addr", "localhost:5978");
        try (WebImageViewer viewer = new WebImageViewer(
            NetUtils.createSocketAddr(addr))) {
          viewer.start(inputFile);
        }
        break;
      case "Delimited":
        try (PBImageDelimitedTextWriter writer =
            new PBImageDelimitedTextWriter(out, delimiter, tempPath)) {
          writer.visit(new RandomAccessFile(inputFile, "r"));
        }
        break;
      // NOTE(review): an unrecognized processor name silently falls through
      // and returns 0 — consider rejecting it with a usage message.
      }
      return 0;
    } catch (EOFException e) {
      System.err.println("Input file ended unexpectedly. Exiting");
    } catch (IOException e) {
      System.err.println("Encountered exception. Exiting: " + e.getMessage());
    }
    return -1;
  }

  /**
   * Print application usage instructions.
   */
  private static void printUsage() {
    System.out.println(usage);
  }
}
| |
package org.rabix.engine.processor.handler.impl;
import com.google.inject.Inject;
import org.apache.commons.configuration.Configuration;
import org.rabix.bindings.BindingException;
import org.rabix.bindings.Bindings;
import org.rabix.bindings.BindingsFactory;
import org.rabix.bindings.helper.URIHelper;
import org.rabix.bindings.model.Application;
import org.rabix.bindings.model.Job;
import org.rabix.bindings.model.Job.JobStatus;
import org.rabix.bindings.model.LinkMerge;
import org.rabix.bindings.model.dag.DAGContainer;
import org.rabix.bindings.model.dag.DAGLink;
import org.rabix.bindings.model.dag.DAGLinkPort;
import org.rabix.bindings.model.dag.DAGLinkPort.LinkPortType;
import org.rabix.bindings.model.dag.DAGNode;
import org.rabix.common.helper.CloneHelper;
import org.rabix.common.helper.InternalSchemaHelper;
import org.rabix.common.helper.JSONHelper;
import org.rabix.engine.JobHelper;
import org.rabix.engine.event.Event;
import org.rabix.engine.event.impl.ContextStatusEvent;
import org.rabix.engine.event.impl.InputUpdateEvent;
import org.rabix.engine.event.impl.JobStatusEvent;
import org.rabix.engine.event.impl.OutputUpdateEvent;
import org.rabix.engine.processor.EventProcessor;
import org.rabix.engine.processor.handler.EventHandler;
import org.rabix.engine.processor.handler.EventHandlerException;
import org.rabix.engine.service.*;
import org.rabix.engine.store.model.*;
import org.rabix.engine.store.model.ContextRecord.ContextStatus;
import org.rabix.engine.store.model.JobRecord.PortCounter;
import org.rabix.engine.store.repository.JobRepository;
import org.rabix.engine.validator.JobStateValidationException;
import org.rabix.engine.validator.JobStateValidator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Handles {@link JobStatusEvent}s: drives job records through their state
 * machine (READY/RUNNING/COMPLETED/ABORTED/FAILED), rolls out container jobs,
 * triggers scattering and propagates results up to the root job.
 */
public class JobStatusEventHandler implements EventHandler<JobStatusEvent> {

  private final Logger logger = LoggerFactory.getLogger(JobStatusEventHandler.class);

  // Collaborating services, all injected through the constructor.
  private final DAGNodeService dagNodeService;
  private final AppService appService;
  private final ScatterHandler scatterHelper;
  private final EventProcessor eventProcessor;

  private final JobRecordService jobRecordService;
  private final LinkRecordService linkRecordService;
  private final VariableRecordService variableRecordService;
  private final ContextRecordService contextRecordService;
  private final JobStatsRecordService jobStatsRecordService;
  private final IntermediaryFilesService intermediaryFilesService;

  private final JobRepository jobRepository;
  private final JobService jobService;

  // Value of the "engine.set_resources" configuration flag (defaults to false);
  // passed through to JobHelper.createReadyJob.
  private final boolean setResources;

  private JobHelper jobHelper;
/**
 * Wires up the services and repositories this handler depends on.
 * The {@code engine.set_resources} flag (default {@code false}) controls
 * whether resources are resolved when ready jobs are created.
 */
@Inject
public JobStatusEventHandler(final DAGNodeService dagNodeService, final AppService appService,
    final JobRecordService jobRecordService, final LinkRecordService linkRecordService,
    final VariableRecordService variableRecordService, final ContextRecordService contextRecordService,
    final EventProcessor eventProcessor, final ScatterHandler scatterHelper, final JobRepository jobRepository,
    final JobService jobService, final JobStatsRecordService jobStatsRecordService,
    final Configuration configuration, final JobHelper jobHelper, final IntermediaryFilesService intermediaryFilesService) {
  this.dagNodeService = dagNodeService;
  this.scatterHelper = scatterHelper;
  this.eventProcessor = eventProcessor;
  this.jobRecordService = jobRecordService;
  this.linkRecordService = linkRecordService;
  this.contextRecordService = contextRecordService;
  this.jobStatsRecordService = jobStatsRecordService;
  this.variableRecordService = variableRecordService;
  this.appService = appService;
  this.jobService = jobService;
  this.jobHelper = jobHelper;
  this.jobRepository = jobRepository;
  this.setResources = configuration.getBoolean("engine.set_resources", false);
  this.intermediaryFilesService = intermediaryFilesService;
}
/**
 * Handles a job state-transition event: validates the transition, updates the
 * job record, and fires the follow-up events (output updates, root status
 * changes, failure propagation) that the new state requires.
 *
 * @param event the state transition to apply
 * @param mode  normal processing or REPLAY (replay suppresses stats/callbacks)
 * @throws EventHandlerException if a downstream callback or event send fails
 */
@Override
public void handle(JobStatusEvent event, EventHandlingMode mode) throws EventHandlerException {
  JobRecord jobRecord = jobRecordService.find(event.getJobId(), event.getContextId());
  if (jobRecord == null) {
    logger.info("Possible stale message. Job {} for root {} doesn't exist.", event.getJobId(), event.getContextId());
    return;
  }

  // Stats are only tracked for top-level jobs, and never while replaying events.
  JobStatsRecord jobStatsRecord = null;
  if (mode != EventHandlingMode.REPLAY && jobRecord.isTopLevel()) {
    jobStatsRecord = jobStatsRecordService.findOrCreate(jobRecord.getRootId());
  }

  // Reject illegal transitions (e.g. COMPLETED -> RUNNING) up front.
  try {
    JobStateValidator.checkState(jobRecord, event.getState());
  } catch (JobStateValidationException e) {
    logger.warn("Cannot transition from state {} to {}", jobRecord.getState(), event.getState());
    return;
  }

  switch (event.getState()) {
  case READY:
    ready(jobRecord, event);
    // ready() may have already completed the job or decided to skip scattering.
    if (jobRecord.getState().equals(JobRecord.JobState.COMPLETED)) {
      break;
    }
    if (jobRecord.getScatterStrategy() != null && jobRecord.getScatterStrategy().skipScatter()) {
      break;
    }
    if (shouldGenerateReadyJob(jobRecord)) {
      Job job = null;
      try {
        job = jobHelper.createReadyJob(jobRecord, JobStatus.READY, setResources);
        if (!job.getName().equals(InternalSchemaHelper.ROOT_NAME)) {
          jobRepository.insert(job, event.getEventGroupId(), event.getProducedByNode());
        } else {
          jobRepository.update(job);
        }
      } catch (BindingException e1) {
        // FIXME: is this really safe to ignore?
        logger.info("Failed to create job", e1);
      }
      // BUGFIX: 'job' stays null when createReadyJob throws BindingException
      // (caught and logged above); the old code dereferenced it
      // unconditionally here and raised a NullPointerException.
      if (job != null && job.isRoot()) {
        jobService.handleJobContainerReady(job);
      }
    } else {
      Job containerJob = null;
      try {
        containerJob = jobHelper.createJob(jobRecord, JobStatus.READY, false);
      } catch (BindingException e) {
        throw new EventHandlerException("Failed to call onReady callback for Job " + containerJob, e);
      }
      jobService.handleJobContainerReady(containerJob);
    }
    break;
  case RUNNING:
    jobRecord.setState(JobRecord.JobState.RUNNING);
    jobRecordService.update(jobRecord);
    if (jobStatsRecord != null) {
      jobStatsRecord.increaseRunning();
      jobStatsRecordService.update(jobStatsRecord);
    }
    break;
  case COMPLETED:
    updateJobStats(jobRecord, jobStatsRecord);
    // Plain (non-container, non-scatter-wrapper) jobs publish their outputs.
    if ((!jobRecord.isScatterWrapper() || jobRecord.isRoot()) && !jobRecord.isContainer()) {
      for (PortCounter portCounter : jobRecord.getOutputCounters()) {
        Object output = event.getResult().get(portCounter.getPort());
        eventProcessor.send(new OutputUpdateEvent(jobRecord.getRootId(), jobRecord.getId(), portCounter.getPort(), output,
            jobRecord.getNumberOfGlobalOutputs(), 1, event.getEventGroupId(), event.getProducedByNode()));
      }
    }
    jobRecord.setState(JobRecord.JobState.COMPLETED);
    jobRecordService.update(jobRecord);
    if (!jobRecord.isContainer() && !jobRecord.isScatterWrapper()) {
      Job job = jobRepository.get(event.getEventGroupId());
      intermediaryFilesService.decrementInputFilesReferences(event.getContextId(), job.getInputs());
    }
    if (jobRecord.isRoot()) {
      eventProcessor.send(new ContextStatusEvent(event.getContextId(), ContextStatus.COMPLETED));
      try {
        Job rootJob = jobHelper.createJob(jobRecord, JobStatus.COMPLETED, event.getResult());
        if (!jobRecord.isContainer()) {
          jobService.handleJobRootPartiallyCompleted(jobRecord.getRootId(), rootJob.getOutputs(), jobRecord.getExternalId());
        }
        jobService.handleJobRootCompleted(rootJob);
      } catch (BindingException e) {
        // BUGFIX: was an auto-generated e.printStackTrace(); route the failure
        // through the logger so it is not lost in stderr.
        logger.error("Failed to create completed root job for " + jobRecord.getRootId(), e);
      }
    } else {
      try {
        Job job = jobHelper.createJob(jobRecord, JobStatus.COMPLETED, event.getResult());
        jobRepository.update(job);
        jobService.handleJobCompleted(job);
      } catch (BindingException e) {
        logger.warn("Could not create completed job for {}", event.getJobId());
      }
      if (!jobRecord.isScattered()) {
        checkJobRootPartiallyCompleted(jobRecord, mode);
      }
    }
    break;
  case ABORTED:
    // Abort every job still pending/ready/running under this root, then the context.
    Set<JobRecord.JobState> jobRecordStatuses = new HashSet<>();
    jobRecordStatuses.add(JobRecord.JobState.PENDING);
    jobRecordStatuses.add(JobRecord.JobState.READY);
    jobRecordStatuses.add(JobRecord.JobState.RUNNING);

    List<JobRecord> records = jobRecordService.find(jobRecord.getRootId(), jobRecordStatuses);
    for (JobRecord record : records) {
      record.setState(JobRecord.JobState.ABORTED);
      jobRecordService.update(record);
    }

    ContextRecord contextRecord = contextRecordService.find(jobRecord.getRootId());
    contextRecord.setStatus(ContextStatus.ABORTED);
    contextRecordService.update(contextRecord);
    break;
  case FAILED:
    // NOTE(review): the record is put back to READY on failure — presumably to
    // allow a retry/restart; confirm against the engine's retry logic.
    jobRecord.setState(JobRecord.JobState.READY);
    jobRecordService.update(jobRecord);
    if (jobRecord.isRoot()) {
      try {
        Job rootJob = jobHelper.createJob(jobRecord, JobStatus.FAILED, null);
        rootJob = Job.cloneWithMessage(rootJob, event.getMessage());
        jobService.handleJobRootFailed(rootJob);
        eventProcessor.send(new ContextStatusEvent(event.getContextId(), ContextStatus.FAILED));
      } catch (Exception e) {
        throw new EventHandlerException("Failed to call onRootFailed callback for Job " + jobRecord.getRootId(), e);
      }
    } else {
      try {
        Job failedJob = jobHelper.createJob(jobRecord, JobStatus.FAILED);
        failedJob = Job.cloneWithMessage(failedJob, event.getMessage());
        jobService.handleJobFailed(failedJob);
        // Escalate the failure to the root job.
        eventProcessor.send(new JobStatusEvent(InternalSchemaHelper.ROOT_NAME, event.getContextId(), JobRecord.JobState.FAILED, event.getMessage(), event.getEventGroupId(), event.getProducedByNode()));
      } catch (Exception e) {
        throw new EventHandlerException("Failed to call onFailed callback for Job " + jobRecord.getId(), e);
      }
    }
    break;
  default:
    break;
  }
}
/**
 * Counts a completion in the root's stats record, when stats are being kept.
 */
private void updateJobStats(JobRecord jobRecord, JobStatsRecord jobStatsRecord) {
  // No stats record means replay mode or a non-top-level job — nothing to do.
  if (jobStatsRecord == null) {
    return;
  }
  // A root container's own completion is not counted as a completed task.
  if (jobRecord.isRoot() && jobRecord.isContainer()) {
    return;
  }
  jobStatsRecord.increaseCompleted();
  jobStatsRecordService.update(jobStatsRecord);
}
/**
 * Notifies the job service when this job's outputs feed root-level output
 * ports, passing the current values of those root outputs. Skipped in replay
 * mode so callbacks are not re-fired.
 */
private void checkJobRootPartiallyCompleted(JobRecord jobRecord, EventHandlingMode mode) {
  if (mode == EventHandlingMode.REPLAY) {
    return;
  }
  // Find this job's OUTPUT links whose destination is the root node, and
  // collect the current value of each corresponding root output variable.
  Map<String, Object> outs = new HashMap<>();
  List<LinkRecord> outputLinks =
      linkRecordService.findBySourceAndSourceType(jobRecord.getId(), LinkPortType.OUTPUT, jobRecord.getRootId());
  for (LinkRecord link : outputLinks) {
    if (!link.getDestinationJobId().equals(InternalSchemaHelper.ROOT_NAME)) {
      continue;
    }
    outs.put(link.getDestinationJobPort(),
        variableRecordService.find(InternalSchemaHelper.ROOT_NAME, link.getDestinationJobPort(), LinkPortType.OUTPUT, jobRecord.getRootId()).getValue());
  }
  if (!outs.isEmpty()) {
    jobService.handleJobRootPartiallyCompleted(jobRecord.getRootId(), outs, jobRecord.getExternalId());
  }
}
/**
 * A ready job is only dispatched for plain task records; containers and
 * scatter wrappers are orchestration-only and never run themselves.
 */
private boolean shouldGenerateReadyJob(JobRecord jobRecord) {
  boolean orchestrationOnly = jobRecord.isContainer() || jobRecord.isScatterWrapper();
  return !orchestrationOnly;
}
/*
 * Job is ready: marks the record READY and then either scatters it, rolls out
 * its container children, or (for plain jobs) leaves dispatching to handle().
 */
private void ready(JobRecord job, Event event) throws EventHandlerException {
  // The scatter/container branches below may push the state straight to RUNNING.
  job.setState(JobRecord.JobState.READY);

  UUID rootId = event.getContextId();
  if (!job.isScattered() && job.getScatterPorts().size() > 0) {
    // Not yet scattered but has scatter ports: fan out one scatter per port.
    job.setState(JobRecord.JobState.RUNNING);

    for (String port : job.getScatterPorts()) {
      VariableRecord variable = variableRecordService.find(job.getId(), port, LinkPortType.INPUT, rootId);
      scatterHelper.scatterPort(job, event, port, variableRecordService.getValue(variable), 1, null, false, false);
      // assumes scatterPort installed a scatter strategy on the record — TODO confirm
      if (job.getScatterStrategy().skipScatter()) {
        return;
      }
    }
  } else if (job.isContainer()) {
    // Container job: expand children, wire links, then start whatever is ready.
    DAGNode node = dagNodeService.get(InternalSchemaHelper.normalizeId(job.getId()), rootId, job.getDagHash());
    job.setState(JobRecord.JobState.RUNNING);

    DAGContainer containerNode = (DAGContainer) node;
    rollOutContainer(job, containerNode, rootId);
    handleTransform(job, containerNode);

    List<LinkRecord> containerLinks = linkRecordService.findBySourceAndSourceType(job.getId(), LinkPortType.INPUT, rootId);
    if (containerLinks.isEmpty()) {
      // No inbound links into the container: children that are not the
      // destination of any internal link can start immediately.
      Set<String> immediateReadyNodeIds = findImmediateReadyNodes(containerNode);
      for (String readyNodeId : immediateReadyNodeIds) {
        JobRecord childJobRecord = jobRecordService.find(readyNodeId, rootId);
        if (childJobRecord.isContainer() || childJobRecord.isScatterWrapper()) {
          // Nested containers/scatter wrappers are expanded recursively in-line.
          ready(childJobRecord, event);
        }
        else {
          JobStatusEvent jobStatusEvent = new JobStatusEvent(childJobRecord.getId(), rootId, JobRecord.JobState.READY, event.getEventGroupId(), event.getProducedByNode());
          eventProcessor.send(jobStatusEvent);
        }
      }
    } else {
      // Propagate the container's input values down each inbound link,
      // creating missing destination variables on the fly.
      for (LinkRecord link : containerLinks) {
        VariableRecord sourceVariable = variableRecordService.find(link.getSourceJobId(), link.getSourceJobPort(), LinkPortType.INPUT, rootId);
        VariableRecord destinationVariable = variableRecordService.find(link.getDestinationJobId(), link.getDestinationJobPort(), LinkPortType.INPUT, rootId);
        if (destinationVariable == null) {
          destinationVariable = new VariableRecord(rootId, link.getDestinationJobId(), link.getDestinationJobPort(), LinkPortType.INPUT, variableRecordService.getValue(sourceVariable), node.getLinkMerge(sourceVariable.getPortId(), sourceVariable.getType()));
          variableRecordService.create(destinationVariable);
        }
        // Send an input or output update depending on the destination port kind.
        Event updateEvent;
        if (link.getDestinationVarType().equals(LinkPortType.INPUT))
          updateEvent = new InputUpdateEvent(rootId, link.getDestinationJobId(), link.getDestinationJobPort(), variableRecordService.getValue(sourceVariable), link.getPosition(), event.getEventGroupId(), event.getProducedByNode());
        else
          updateEvent = new OutputUpdateEvent(rootId, link.getDestinationJobId(), link.getDestinationJobPort(), variableRecordService.getValue(sourceVariable), link.getPosition(), event.getEventGroupId(), event.getProducedByNode());
        eventProcessor.send(updateEvent);
      }
    }
  }
}
/**
 * Applies input transforms, if any input port of {@code node} declares one,
 * to this job's input variable records.
 *
 * @throws EventHandlerException if transform evaluation fails
 */
private void handleTransform(JobRecord job, DAGNode node) throws EventHandlerException {
  try {
    // Fast exit when no input port declares a transform.
    boolean hasTransform = false;
    for (DAGLinkPort p : node.getInputPorts()) {
      if (p.getTransform() != null) {
        hasTransform = true;
        break;
      }
    }
    if (!hasTransform) {
      return;
    }

    Application app = appService.get(node.getAppHash());

    // Resolve bindings either from the declared protocol type or by encoding
    // the application itself into a data URI.
    Bindings bindings = null;
    if (node.getProtocolType() != null) {
      bindings = BindingsFactory.create(node.getProtocolType());
    } else {
      String encodedApp = URIHelper.createDataURI(JSONHelper.writeObject(appService.get(node.getAppHash())));
      bindings = BindingsFactory.create(encodedApp);
    }

    // Snapshot of all current input values; used as the evaluation context
    // passed to every transform below.
    List<VariableRecord> inputVariables = variableRecordService.find(job.getId(), LinkPortType.INPUT, job.getRootId());
    Map<String, Object> preprocesedInputs = new HashMap<>();
    for (VariableRecord inputVariable : inputVariables) {
      Object value = variableRecordService.getValue(inputVariable);
      preprocesedInputs.put(inputVariable.getPortId(), value);
    }

    for (VariableRecord inputVariable : inputVariables) {
      // Deep-copy so a transform cannot mutate the stored value in place.
      Object value = CloneHelper.deepCopy(variableRecordService.getValue(inputVariable));
      for (DAGLinkPort p : node.getInputPorts()) {
        if (p.getId().equals(inputVariable.getPortId())) {
          if (p.getTransform() != null) {
            Object transform = p.getTransform();
            // NOTE(review): this inner null check is redundant — getTransform()
            // was already checked non-null just above.
            if (transform != null) {
              value = bindings.transformInputs(value, new Job(JSONHelper.writeObject(app), preprocesedInputs), transform);
              inputVariable.setValue(value);
              variableRecordService.update(inputVariable);
            }
          }
        }
      }
    }
  } catch (BindingException e) {
    throw new EventHandlerException("Failed to set evaluate transform", e);
  }
}
/**
 * Returns the ids of a container's children that are the destination of no
 * internal link — such nodes have no pending inputs and can start at once.
 * Non-container nodes yield an empty set.
 */
private Set<String> findImmediateReadyNodes(DAGNode node) {
  if (!(node instanceof DAGContainer)) {
    return Collections.<String>emptySet();
  }
  DAGContainer container = (DAGContainer) node;

  // Start with every child id, then discard each one that some link feeds.
  Set<String> readyIds = new HashSet<>();
  for (DAGNode child : container.getChildren()) {
    readyIds.add(child.getId());
  }
  for (DAGLink link : container.getLinks()) {
    readyIds.remove(link.getDestination().getDagNodeId());
  }
  return readyIds;
}
/**
 * Unwraps {@link DAGContainer}: creates job/variable records for every child,
 * re-creates the container's links with ids rewritten into this job's
 * namespace, and fires READY events for children that can start immediately.
 */
private void rollOutContainer(JobRecord job, DAGContainer containerNode, UUID contextId) {
  // 1) Create a job record plus input/output variable records for each child.
  for (DAGNode node : containerNode.getChildren()) {
    String newJobId = InternalSchemaHelper.concatenateIds(job.getId(), InternalSchemaHelper.getLastPart(node.getId()));

    JobRecord childJob = scatterHelper.createJobRecord(newJobId, job.getExternalId(), node, false, contextId, job.getDagHash());
    jobRecordService.create(childJob);

    for (DAGLinkPort port : node.getInputPorts()) {
      // A port that carries a transform forces the child to be blocking.
      if (port.getTransform() != null) {
        childJob.setBlocking(true);
      }
      VariableRecord childVariable = new VariableRecord(contextId, newJobId, port.getId(), LinkPortType.INPUT, port.getDefaultValue(), node.getLinkMerge(port.getId(), port.getType()));
      variableRecordService.create(childVariable);
    }
    for (DAGLinkPort port : node.getOutputPorts()) {
      VariableRecord childVariable = new VariableRecord(contextId, newJobId, port.getId(), LinkPortType.OUTPUT, null, node.getLinkMerge(port.getId(), port.getType()));
      variableRecordService.create(childVariable);
    }
  }

  // 2) Re-create the links, mapping each endpoint id: the container itself
  //    maps to this job's id, a child maps to "<jobId>.<lastPart>".
  for (DAGLink link : containerNode.getLinks()) {
    String originalJobID = InternalSchemaHelper.normalizeId(job.getId());

    String sourceNodeId = originalJobID;
    String linkSourceNodeId = link.getSource().getDagNodeId();
    if (linkSourceNodeId.startsWith(originalJobID)) {
      if (linkSourceNodeId.equals(sourceNodeId)) {
        sourceNodeId = job.getId();
      } else {
        sourceNodeId = InternalSchemaHelper.concatenateIds(job.getId(), InternalSchemaHelper.getLastPart(linkSourceNodeId));
      }
    }
    // Same mapping for the destination end.
    String destinationNodeId = originalJobID;
    String linkDestinationNodeId = link.getDestination().getDagNodeId();
    if (linkDestinationNodeId.startsWith(originalJobID)) {
      if (linkDestinationNodeId.equals(destinationNodeId)) {
        destinationNodeId = job.getId();
      } else {
        destinationNodeId = InternalSchemaHelper.concatenateIds(job.getId(), InternalSchemaHelper.getLastPart(linkDestinationNodeId));
      }
    }
    LinkRecord childLink = new LinkRecord(contextId, sourceNodeId, link.getSource().getId(), LinkPortType.valueOf(link.getSource().getType().toString()), destinationNodeId, link.getDestination().getId(), LinkPortType.valueOf(link.getDestination().getType().toString()), link.getPosition());
    linkRecordService.create(childLink);

    // Update the port counters on both ends of the new link.
    handleLinkPort(jobRecordService.find(sourceNodeId, contextId), link.getSource(), true);
    handleLinkPort(jobRecordService.find(destinationNodeId, contextId), link.getDestination(), false);
  }

  // 3) Fire READY for children that are already ready (plain tasks only).
  for (DAGNode node : containerNode.getChildren()) {
    String newJobId = InternalSchemaHelper.concatenateIds(job.getId(), InternalSchemaHelper.getLastPart(node.getId()));

    JobRecord childJob = jobRecordService.find(newJobId, contextId);
    if (childJob.isReady() && !childJob.isContainer()) {
      JobStatusEvent jobStatusEvent = new JobStatusEvent(childJob.getId(), job.getRootId(), JobRecord.JobState.READY, job.getRootId(), null);
      try {
        eventProcessor.send(jobStatusEvent);
      } catch (EventHandlerException e) {
        logger.error("Failed to start ready job:" + childJob.getId(), e);
      }
    }
  }
}
/**
 * Handle links for roll-out: updates the counters on one end of a newly
 * created link and persists the record.
 */
private void handleLinkPort(JobRecord job, DAGLinkPort linkPort, boolean isSource) {
  boolean isInputEnd = linkPort.getType().equals(LinkPortType.INPUT);

  if (isInputEnd) {
    // Input counters are only adjusted while the job has not started yet.
    if (job.getState().equals(JobRecord.JobState.PENDING)) {
      jobRecordService.incrementPortCounter(job, linkPort, LinkPortType.INPUT);
      jobRecordService.increaseInputPortIncoming(job, linkPort.getId());

      boolean multipleIncoming = job.getInputPortIncoming(linkPort.getId()) > 1;
      // Multiple feeds into one port with a blocking merge strategy make
      // the whole job blocking.
      if (multipleIncoming && LinkMerge.isBlocking(linkPort.getLinkMerge())) {
        job.setBlocking(true);
      }
    }
  } else {
    jobRecordService.increaseOutputPortIncoming(job, linkPort.getId());
    jobRecordService.incrementPortCounter(job, linkPort, LinkPortType.OUTPUT);
    if (isSource) {
      job.getOutputCounter(linkPort.getId()).updatedAsSource(1);
    }
  }
  jobRecordService.update(job);
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.app.engine.impl.db;
import java.sql.Connection;
import java.sql.SQLException;
import org.apache.commons.lang3.StringUtils;
import org.flowable.app.engine.AppEngineConfiguration;
import org.flowable.app.engine.impl.util.CommandContextUtil;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.impl.db.SchemaManager;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.DatabaseException;
import liquibase.exception.LiquibaseException;
import liquibase.resource.ClassLoaderResourceAccessor;
public class AppDbSchemaManager implements SchemaManager {

    private static final Logger LOGGER = LoggerFactory.getLogger(AppDbSchemaManager.class);

    public static final String LIQUIBASE_CHANGELOG = "org/flowable/app/db/liquibase/flowable-app-db-changelog.xml";

    /** Initialises the app engine schema using the configuration of the current command context. */
    public void initSchema() {
        initSchema(CommandContextUtil.getAppEngineConfiguration());
    }

    /** Initialises the schema with the strategy configured on the given engine configuration. */
    public void initSchema(AppEngineConfiguration appEngineConfiguration) {
        initSchema(appEngineConfiguration, appEngineConfiguration.getDatabaseSchemaUpdate());
    }

    /**
     * Dispatches to the create / drop-create / update / validate strategy selected by
     * {@code databaseSchemaUpdate}. Unknown values are silently ignored, matching the
     * behaviour of the other Flowable engine schema managers.
     *
     * @throws FlowableException wrapping any failure of the selected strategy
     */
    public void initSchema(AppEngineConfiguration appEngineConfiguration, String databaseSchemaUpdate) {
        try {
            if (AppEngineConfiguration.DB_SCHEMA_UPDATE_CREATE_DROP.equals(databaseSchemaUpdate)) {
                schemaCreate();

            } else if (AppEngineConfiguration.DB_SCHEMA_UPDATE_DROP_CREATE.equals(databaseSchemaUpdate)) {
                schemaDrop();
                schemaCreate();

            } else if (AppEngineConfiguration.DB_SCHEMA_UPDATE_TRUE.equals(databaseSchemaUpdate)) {
                schemaUpdate();

            } else if (AppEngineConfiguration.DB_SCHEMA_UPDATE_FALSE.equals(databaseSchemaUpdate)) {
                schemaCheckVersion();
            }

        } catch (Exception e) {
            throw new FlowableException("Error initialising app data model", e);
        }
    }

    /**
     * Builds a Liquibase instance over either the command-context connection (when one
     * is active) or a fresh connection from the configured data source.
     */
    protected Liquibase createLiquibaseInstance(AppEngineConfiguration appEngineConfiguration)
            throws SQLException, DatabaseException, LiquibaseException {

        // If a command context is currently active, the current connection needs to be reused.
        Connection jdbcConnection = null;
        CommandContext commandContext = CommandContextUtil.getCommandContext();
        if (commandContext == null) {
            jdbcConnection = appEngineConfiguration.getDataSource().getConnection();
        } else {
            jdbcConnection = CommandContextUtil.getDbSqlSession(commandContext).getSqlSession().getConnection();
        }

        // A commit is needed here, because one of the things that Liquibase does when acquiring its lock
        // is doing a rollback, which removes all changes done so far.
        // For most databases, this is not a problem as DDL statements are not transactional.
        // However for some (e.g. sql server), this would remove all previous statements, which is not wanted,
        // hence the extra commit here.
        if (!jdbcConnection.getAutoCommit()) {
            jdbcConnection.commit();
        }

        DatabaseConnection connection = new JdbcConnection(jdbcConnection);
        Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(connection);

        // Prefix the Liquibase bookkeeping tables so they do not clash with other engines sharing the database.
        database.setDatabaseChangeLogTableName(AppEngineConfiguration.LIQUIBASE_CHANGELOG_PREFIX + database.getDatabaseChangeLogTableName());
        database.setDatabaseChangeLogLockTableName(AppEngineConfiguration.LIQUIBASE_CHANGELOG_PREFIX + database.getDatabaseChangeLogLockTableName());

        String databaseSchema = appEngineConfiguration.getDatabaseSchema();
        if (StringUtils.isNotEmpty(databaseSchema)) {
            database.setDefaultSchemaName(databaseSchema);
            database.setLiquibaseSchemaName(databaseSchema);
        }

        String databaseCatalog = appEngineConfiguration.getDatabaseCatalog();
        if (StringUtils.isNotEmpty(databaseCatalog)) {
            database.setDefaultCatalogName(databaseCatalog);
            database.setLiquibaseCatalogName(databaseCatalog);
        }

        return createLiquibaseInstance(database);
    }

    public Liquibase createLiquibaseInstance(Database database) throws LiquibaseException {
        return new Liquibase(LIQUIBASE_CHANGELOG, new ClassLoaderResourceAccessor(), database);
    }

    @Override
    public void schemaCreate() {
        Liquibase liquibase = null;
        try {
            // Dependent service tables must exist before the app tables are created.
            getCommonSchemaManager().schemaCreate();
            getIdentityLinkSchemaManager().schemaCreate();
            getVariableSchemaManager().schemaCreate();

            liquibase = createLiquibaseInstance(CommandContextUtil.getAppEngineConfiguration());
            liquibase.update("app");
        } catch (Exception e) {
            throw new FlowableException("Error creating App engine tables", e);
        } finally {
            closeDatabase(liquibase);
        }
    }

    @Override
    public void schemaDrop() {
        Liquibase liquibase = null;
        try {
            liquibase = createLiquibaseInstance(CommandContextUtil.getAppEngineConfiguration());
            liquibase.dropAll();
        } catch (Exception e) {
            // Dropping is best-effort: log and continue so the remaining table groups still get dropped.
            LOGGER.info("Error dropping App engine tables", e);
        } finally {
            closeDatabase(liquibase);
        }

        try {
            getVariableSchemaManager().schemaDrop();
        } catch (Exception e) {
            LOGGER.info("Error dropping variable tables", e);
        }

        try {
            getIdentityLinkSchemaManager().schemaDrop();
        } catch (Exception e) {
            LOGGER.info("Error dropping identity link tables", e);
        }

        try {
            getCommonSchemaManager().schemaDrop();
        } catch (Exception e) {
            LOGGER.info("Error dropping common tables", e);
        }
    }

    @Override
    public String schemaUpdate() {
        Liquibase liquibase = null;
        try {
            getCommonSchemaManager().schemaUpdate();

            if (CommandContextUtil.getAppEngineConfiguration().isExecuteServiceSchemaManagers()) {
                getIdentityLinkSchemaManager().schemaUpdate();
                getVariableSchemaManager().schemaUpdate();
            }

            liquibase = createLiquibaseInstance(CommandContextUtil.getAppEngineConfiguration());
            // Fixed: previously passed the "cmmn" context (copy-paste from the CMMN engine),
            // which did not match the "app" context used by schemaCreate() for this changelog.
            liquibase.update("app");
        } catch (Exception e) {
            throw new FlowableException("Error updating App engine tables", e);
        } finally {
            closeDatabase(liquibase);
        }
        return null;
    }

    @Override
    public void schemaCheckVersion() {
        Liquibase liquibase = null;
        try {
            liquibase = createLiquibaseInstance(CommandContextUtil.getAppEngineConfiguration());
            liquibase.validate();
        } catch (Exception e) {
            throw new FlowableException("Error validating app engine schema", e);
        } finally {
            closeDatabase(liquibase);
        }
    }

    protected SchemaManager getCommonSchemaManager() {
        return CommandContextUtil.getAppEngineConfiguration().getCommonSchemaManager();
    }

    protected SchemaManager getIdentityLinkSchemaManager() {
        return CommandContextUtil.getAppEngineConfiguration().getIdentityLinkSchemaManager();
    }

    protected SchemaManager getVariableSchemaManager() {
        return CommandContextUtil.getAppEngineConfiguration().getVariableSchemaManager();
    }

    /** Closes the underlying database unless a command context owns (and will close) the connection. */
    private void closeDatabase(Liquibase liquibase) {
        if (liquibase != null) {
            Database database = liquibase.getDatabase();
            if (database != null) {
                // do not close the shared connection if a command context is currently active
                if (CommandContextUtil.getCommandContext() == null) {
                    try {
                        database.close();
                    } catch (DatabaseException e) {
                        LOGGER.warn("Error closing database", e);
                    }
                }
            }
        }
    }
}
| |
/*
* Copyright 2014 - 2015 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.aeron.driver.media;
import uk.co.real_logic.aeron.driver.event.EventCode;
import uk.co.real_logic.aeron.driver.event.EventLogger;
import uk.co.real_logic.aeron.protocol.HeaderFlyweight;
import uk.co.real_logic.aeron.driver.Configuration;
import uk.co.real_logic.aeron.driver.LossGenerator;
import uk.co.real_logic.agrona.LangUtil;
import uk.co.real_logic.agrona.concurrent.UnsafeBuffer;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.SocketOption;
import java.net.StandardSocketOptions;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedByInterruptException;
import java.nio.channels.DatagramChannel;
import java.nio.channels.SelectionKey;
import static uk.co.real_logic.aeron.logbuffer.FrameDescriptor.frameLength;
import static uk.co.real_logic.aeron.logbuffer.FrameDescriptor.frameVersion;
public abstract class UdpChannelTransport implements AutoCloseable
{
    private final UdpChannel udpChannel;
    private final LossGenerator lossGenerator;
    private final EventLogger logger;
    private final ByteBuffer receiveByteBuffer = ByteBuffer.allocateDirect(Configuration.RECEIVE_BYTE_BUFFER_LENGTH);
    private final UnsafeBuffer receiveBuffer = new UnsafeBuffer(receiveByteBuffer);
    private DatagramChannel datagramChannel;
    private SelectionKey selectionKey;
    private TransportPoller transportPoller;
    private InetSocketAddress bindSocketAddress;
    private InetSocketAddress endPointSocketAddress;

    /**
     * Construct a transport for the given UDP channel.
     *
     * @param udpChannel            channel configuration for this transport
     * @param endPointSocketAddress remote end point (or multicast group) address
     * @param bindSocketAddress     local address to bind for unicast
     * @param lossGenerator         generator used to (optionally) drop received frames
     * @param logger                event logger for frames and exceptions
     */
    public UdpChannelTransport(
        final UdpChannel udpChannel,
        final InetSocketAddress endPointSocketAddress,
        final InetSocketAddress bindSocketAddress,
        final LossGenerator lossGenerator,
        final EventLogger logger)
    {
        this.udpChannel = udpChannel;
        this.lossGenerator = lossGenerator;
        this.logger = logger;
        this.endPointSocketAddress = endPointSocketAddress;
        this.bindSocketAddress = bindSocketAddress;
    }

    /**
     * Create the underlying channel for reading and writing.
     */
    public void openDatagramChannel()
    {
        try
        {
            datagramChannel = DatagramChannel.open(udpChannel.protocolFamily());
            if (udpChannel.isMulticast())
            {
                final NetworkInterface localInterface = udpChannel.localInterface();

                // Bind to the group port (any address) and join the group on the chosen interface.
                datagramChannel.setOption(StandardSocketOptions.SO_REUSEADDR, true);
                datagramChannel.bind(new InetSocketAddress(endPointSocketAddress.getPort()));
                datagramChannel.join(endPointSocketAddress.getAddress(), localInterface);
                datagramChannel.setOption(StandardSocketOptions.IP_MULTICAST_IF, localInterface);
            }
            else
            {
                datagramChannel.bind(bindSocketAddress);
            }

            // 0 means "leave the OS default buffer sizes in place".
            if (0 != Configuration.SOCKET_SNDBUF_LENGTH)
            {
                datagramChannel.setOption(StandardSocketOptions.SO_SNDBUF, Configuration.SOCKET_SNDBUF_LENGTH);
            }

            if (0 != Configuration.SOCKET_RCVBUF_LENGTH)
            {
                datagramChannel.setOption(StandardSocketOptions.SO_RCVBUF, Configuration.SOCKET_RCVBUF_LENGTH);
            }

            datagramChannel.configureBlocking(false);
        }
        catch (final IOException ex)
        {
            throw new RuntimeException(String.format(
                "channel \"%s\" : %s", udpChannel.originalUriString(), ex.toString()), ex);
        }
    }

    /**
     * Register this transport for reading from a {@link TransportPoller}.
     *
     * @param transportPoller to register read with
     */
    public void registerForRead(final TransportPoller transportPoller)
    {
        this.transportPoller = transportPoller;
        selectionKey = transportPoller.registerForRead(this);
    }

    /**
     * Return underlying {@link UdpChannel}
     *
     * @return underlying channel
     */
    public UdpChannel udpChannel()
    {
        return udpChannel;
    }

    /**
     * The {@link DatagramChannel} for this transport channel.
     *
     * @return {@link DatagramChannel} for this transport channel.
     */
    public DatagramChannel datagramChannel()
    {
        return datagramChannel;
    }

    /**
     * Send contents of {@link java.nio.ByteBuffer} to remote address
     *
     * @param buffer        to send
     * @param remoteAddress to send to
     * @return number of bytes sent
     */
    public int sendTo(final ByteBuffer buffer, final InetSocketAddress remoteAddress)
    {
        logger.logFrameOut(buffer, remoteAddress);

        int bytesSent = 0;
        try
        {
            bytesSent = datagramChannel.send(buffer, remoteAddress);
        }
        catch (final IOException ex)
        {
            LangUtil.rethrowUnchecked(ex);
        }

        return bytesSent;
    }

    /**
     * Close transport, canceling any pending read operations and closing channel.
     *
     * Safe to call even when {@link #openDatagramChannel()} was never invoked or failed,
     * in which case there is no channel to close.
     */
    public void close()
    {
        try
        {
            if (null != selectionKey)
            {
                selectionKey.cancel();
            }

            if (null != transportPoller)
            {
                transportPoller.cancelRead(this);
            }

            // Fixed: guard against NPE when the channel was never opened (or open failed).
            if (null != datagramChannel)
            {
                datagramChannel.close();
            }
        }
        catch (final Exception ex)
        {
            logger.logException(ex);
        }
    }

    /**
     * Is transport representing a multicast media or unicast
     *
     * @return if transport is multicast media
     */
    public boolean isMulticast()
    {
        return udpChannel.isMulticast();
    }

    /**
     * Return socket option value
     *
     * @param name of the socket option
     * @param <T>  type of option
     * @return option value
     */
    public <T> T getOption(final SocketOption<T> name)
    {
        T option = null;
        try
        {
            option = datagramChannel.getOption(name);
        }
        catch (final IOException ex)
        {
            LangUtil.rethrowUnchecked(ex);
        }

        return option;
    }

    /**
     * Return the capacity of the {@link ByteBuffer} used for reception
     *
     * @return capacity of receiving byte buffer
     */
    public int receiveBufferCapacity()
    {
        return receiveByteBuffer.capacity();
    }

    /**
     * Dispatch a received, validated frame to the concrete transport.
     *
     * @param receiveBuffer buffer wrapping the received bytes
     * @param length        number of bytes received
     * @param srcAddress    source address of the datagram
     * @return number of bytes consumed
     */
    protected abstract int dispatch(final UnsafeBuffer receiveBuffer, final int length, final InetSocketAddress srcAddress);

    /**
     * Attempt to receive waiting data.
     *
     * @return number of bytes received.
     */
    public int pollForData()
    {
        int bytesReceived = 0;
        final InetSocketAddress srcAddress = receive();

        if (null != srcAddress)
        {
            final int length = receiveByteBuffer.position();
            if (lossGenerator.shouldDropFrame(srcAddress, receiveBuffer, length))
            {
                logger.logFrameInDropped(receiveByteBuffer, 0, length, srcAddress);
            }
            else
            {
                logger.logFrameIn(receiveByteBuffer, 0, length, srcAddress);

                if (isValidFrame(receiveBuffer, length))
                {
                    bytesReceived = dispatch(receiveBuffer, length, srcAddress);
                }
            }
        }

        return bytesReceived;
    }

    protected UnsafeBuffer receiveBuffer()
    {
        return receiveBuffer;
    }

    // Frame is valid when the version matches and the datagram is at least one header long.
    private boolean isValidFrame(final UnsafeBuffer receiveBuffer, final int length)
    {
        boolean isFrameValid = true;

        if (frameVersion(receiveBuffer, 0) != HeaderFlyweight.CURRENT_VERSION)
        {
            logger.log(EventCode.INVALID_VERSION, receiveBuffer, 0, frameLength(receiveBuffer, 0));
            isFrameValid = false;
        }
        else if (length < HeaderFlyweight.HEADER_LENGTH)
        {
            logger.log(EventCode.MALFORMED_FRAME_LENGTH, receiveBuffer, 0, length);
            isFrameValid = false;
        }

        return isFrameValid;
    }

    // Receives one datagram into receiveByteBuffer; null when nothing was available
    // or the thread was interrupted while blocked in receive.
    private InetSocketAddress receive()
    {
        receiveByteBuffer.clear();

        InetSocketAddress address = null;
        try
        {
            address = (InetSocketAddress)datagramChannel.receive(receiveByteBuffer);
        }
        catch (final ClosedByInterruptException ignored)
        {
            // intentionally ignored: shutdown path, caller sees a null source address
        }
        catch (final Exception ex)
        {
            LangUtil.rethrowUnchecked(ex);
        }

        return address;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger;
import org.springframework.shell.core.CommandMarker;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.functions.ExportLogsFunction;
import org.apache.geode.management.internal.cli.functions.ExportedLogsSizeInfo;
import org.apache.geode.management.internal.cli.functions.SizeExportLogsFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.cli.util.ExportLogsCacheWriter;
import org.apache.geode.management.internal.configuration.utils.ZipUtils;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;
public class ExportLogsCommand implements CommandMarker {

  private static final Logger logger = LogService.getLogger();

  public static final String FORMAT = "yyyy/MM/dd/HH/mm/ss/SSS/z";
  public static final String ONLY_DATE_FORMAT = "yyyy/MM/dd";
  public final static String DEFAULT_EXPORT_LOG_LEVEL = "ALL";

  // "<digits><optional unit>" where the unit is m/g/t, case-insensitive; no unit means megabytes.
  private static final Pattern DISK_SPACE_LIMIT_PATTERN = Pattern.compile("(\\d+)([mgtMGT]?)");

  private InternalCache getCache() {
    return (InternalCache) CacheFactory.getAnyInstance();
  }

  /**
   * Exports logs (and/or stats) from the selected members: each member zips its own
   * matching files and streams them back through a replicated region; the locator
   * unzips them into a per-member directory tree and re-zips the whole tree for the
   * client.
   */
  @CliCommand(value = CliStrings.EXPORT_LOGS, help = CliStrings.EXPORT_LOGS__HELP)
  @CliMetaData(shellOnly = false, isFileDownloadOverHttp = true,
      interceptor = "org.apache.geode.management.internal.cli.commands.ExportLogsInterceptor",
      relatedTopic = {CliStrings.TOPIC_GEODE_SERVER, CliStrings.TOPIC_GEODE_DEBUG_UTIL})
  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
      operation = ResourcePermission.Operation.READ)
  public Result exportLogs(
      @CliOption(key = CliStrings.EXPORT_LOGS__DIR, help = CliStrings.EXPORT_LOGS__DIR__HELP,
          mandatory = false) String dirName,
      @CliOption(key = CliStrings.EXPORT_LOGS__GROUP,
          unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
          optionContext = ConverterHint.MEMBERGROUP,
          help = CliStrings.EXPORT_LOGS__GROUP__HELP) String[] groups,
      @CliOption(key = CliStrings.EXPORT_LOGS__MEMBER,
          unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
          optionContext = ConverterHint.ALL_MEMBER_IDNAME,
          help = CliStrings.EXPORT_LOGS__MEMBER__HELP) String[] memberIds,
      @CliOption(key = CliStrings.EXPORT_LOGS__LOGLEVEL,
          unspecifiedDefaultValue = DEFAULT_EXPORT_LOG_LEVEL,
          optionContext = ConverterHint.LOG_LEVEL,
          help = CliStrings.EXPORT_LOGS__LOGLEVEL__HELP) String logLevel,
      @CliOption(key = CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL, unspecifiedDefaultValue = "false",
          help = CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL__HELP) boolean onlyLogLevel,
      @CliOption(key = CliStrings.EXPORT_LOGS__MERGELOG, unspecifiedDefaultValue = "false",
          help = CliStrings.EXPORT_LOGS__MERGELOG__HELP) boolean mergeLog,
      @CliOption(key = CliStrings.EXPORT_LOGS__STARTTIME,
          unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
          help = CliStrings.EXPORT_LOGS__STARTTIME__HELP) String start,
      @CliOption(key = CliStrings.EXPORT_LOGS__ENDTIME,
          unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
          help = CliStrings.EXPORT_LOGS__ENDTIME__HELP) String end,
      @CliOption(key = CliStrings.EXPORT_LOGS__LOGSONLY, unspecifiedDefaultValue = "false",
          specifiedDefaultValue = "true",
          help = CliStrings.EXPORT_LOGS__LOGSONLY__HELP) boolean logsOnly,
      @CliOption(key = CliStrings.EXPORT_LOGS__STATSONLY, unspecifiedDefaultValue = "false",
          specifiedDefaultValue = "true",
          help = CliStrings.EXPORT_LOGS__STATSONLY__HELP) boolean statsOnly) {
    // @CliOption(key = CliStrings.EXPORT_LOGS__FILESIZELIMIT,
    // unspecifiedDefaultValue = CliStrings.EXPORT_LOGS__FILESIZELIMIT__UNSPECIFIED_DEFAULT,
    // specifiedDefaultValue = CliStrings.EXPORT_LOGS__FILESIZELIMIT__SPECIFIED_DEFAULT,
    // help = CliStrings.EXPORT_LOGS__FILESIZELIMIT__HELP) String fileSizeLimit) {

    Result result = null;
    InternalCache cache = getCache();
    try {
      Set<DistributedMember> targetMembers =
          CliUtil.findMembersIncludingLocators(groups, memberIds);

      if (targetMembers.isEmpty()) {
        return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
      }

      // TODO:GEODE-2420: estimate the size of the exported logs on each member (via
      // SizeExportLogsFunction) and check it against the disk-space limit on the
      // locator before pulling any zip files across.
      // (An unreachable `if (false)` prototype of that check was removed here.)

      // get zipped files from all servers next
      Map<String, Path> zipFilesFromMembers = new HashMap<>();
      for (DistributedMember server : targetMembers) {
        Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion(true, cache);

        ExportLogsCacheWriter cacheWriter =
            (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();

        cacheWriter.startFile(server.getName());

        CliUtil.executeFunction(new ExportLogsFunction(),
            new ExportLogsFunction.Args(start, end, logLevel, onlyLogLevel, logsOnly, statsOnly),
            server).getResult();
        Path zipFile = cacheWriter.endFile();
        ExportLogsFunction.destroyExportLogsRegion(cache);

        // only put the zipfile in the map if it is not null
        if (zipFile != null) {
          logger.info("Received zip file from member {}: {}", server.getId(), zipFile);
          zipFilesFromMembers.put(server.getId(), zipFile);
        }
      }

      if (zipFilesFromMembers.isEmpty()) {
        return ResultBuilder.createUserErrorResult("No files to be exported.");
      }

      Path tempDir = Files.createTempDirectory("exportedLogs");
      // make sure the directory is created, so that even if there is no files unzipped to this dir,
      // we can still zip it and send an empty zip file back to the client
      Path exportedLogsDir = tempDir.resolve("exportedLogs");
      FileUtils.forceMkdir(exportedLogsDir.toFile());

      // Unzip each member's bundle into its own sub-directory named after the zip file.
      for (Path zipFile : zipFilesFromMembers.values()) {
        Path unzippedMemberDir =
            exportedLogsDir.resolve(zipFile.getFileName().toString().replace(".zip", ""));
        ZipUtils.unzip(zipFile.toAbsolutePath().toString(), unzippedMemberDir.toString());
        FileUtils.deleteQuietly(zipFile.toFile());
      }

      // Default target directory is the locator's working directory.
      Path dirPath;
      if (StringUtils.isBlank(dirName)) {
        dirPath = Paths.get(System.getProperty("user.dir"));
      } else {
        dirPath = Paths.get(dirName);
      }
      Path exportedLogsZipFile =
          dirPath.resolve("exportedLogs_" + System.currentTimeMillis() + ".zip").toAbsolutePath();

      logger.info("Zipping into: {}", exportedLogsZipFile);
      ZipUtils.zipDirectory(exportedLogsDir, exportedLogsZipFile);
      FileUtils.deleteDirectory(tempDir.toFile());

      result = ResultBuilder.createInfoResult(exportedLogsZipFile.toString());
    } catch (Exception ex) {
      logger.error(ex.getMessage(), ex);
      result = ResultBuilder.createGemFireErrorResult(ex.getMessage());
    } finally {
      ExportLogsFunction.destroyExportLogsRegion(cache);
    }

    logger.debug("Exporting logs returning = {}", result);
    return result;
  }

  /**
   * Returns file size limit in bytes, or 0 when no limit string was supplied.
   *
   * The multiplication is done in long arithmetic and clamped to Integer.MAX_VALUE:
   * previously "3g" overflowed int to a negative value, which silently disabled the
   * size check in isFileSizeCheckEnabledAndWithinLimit.
   */
  int parseFileSizeLimit(String fileSizeLimit) {
    if (StringUtils.isEmpty(fileSizeLimit)) {
      return 0;
    }

    long limitBytes = (long) parseSize(fileSizeLimit) * parseByteMultiplier(fileSizeLimit);
    return limitBytes > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) limitBytes;
  }

  /**
   * Throws IllegalArgumentException if file size is over fileSizeLimitBytes
   */
  void checkOverDiskSpaceThreshold(int fileSizeLimitBytes, File file) {
    // TODO:GEODE-2420: warn user if exportedLogsZipFile size > threshold
    if (FileUtils.sizeOf(file) > fileSizeLimitBytes) {
      throw new IllegalArgumentException("TOO BIG"); // FileTooBigException
    }
  }

  /**
   * Throws IllegalArgumentException if file size is over fileSizeLimitBytes false == limit is zero
   * true == file size is less than limit exception == file size is over limit
   */
  boolean isFileSizeCheckEnabledAndWithinLimit(int fileSizeLimitBytes, File file) {
    // TODO:GEODE-2420: warn user if exportedLogsZipFile size > threshold
    if (fileSizeLimitBytes < 1) {
      return false;
    }
    if (FileUtils.sizeOf(file) < fileSizeLimitBytes) {
      return true;
    }
    throw new IllegalArgumentException("TOO BIG: fileSizeLimit = " + fileSizeLimitBytes
        + ", fileSize = " + FileUtils.sizeOf(file)); // FileTooBigException
  }

  /** Parses the numeric part of a disk-space limit such as "100m"; throws on malformed input. */
  static int parseSize(String diskSpaceLimit) {
    Matcher matcher = DISK_SPACE_LIMIT_PATTERN.matcher(diskSpaceLimit);
    if (matcher.matches()) {
      return Integer.parseInt(matcher.group(1));
    } else {
      throw new IllegalArgumentException();
    }
  }

  /** Parses the unit suffix of a disk-space limit; megabytes when absent or "m". */
  static int parseByteMultiplier(String diskSpaceLimit) {
    Matcher matcher = DISK_SPACE_LIMIT_PATTERN.matcher(diskSpaceLimit);
    if (!matcher.matches()) {
      throw new IllegalArgumentException();
    }
    switch (matcher.group(2).toLowerCase()) {
      case "t":
        return (int) TERABYTE;
      case "g":
        return (int) GIGABYTE;
      case "m":
      default:
        return (int) MEGABYTE;
    }
  }

  static final int MEGABYTE = (int) Math.pow(1024, 2);
  static final int GIGABYTE = (int) Math.pow(1024, 3);
  // NOTE(review): (int) Math.pow(1024, 4) saturates to Integer.MAX_VALUE — "t" limits
  // are effectively capped at ~2GB; consider widening these constants to long.
  static final int TERABYTE = (int) Math.pow(1024, 4);
}
| |
package org.myrobotlab.service;
import java.io.File;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.myrobotlab.framework.Registration;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.interfaces.ServiceInterface;
import org.myrobotlab.framework.interfaces.Attachable;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.data.LeapData;
import org.myrobotlab.service.data.LeapHand;
import org.myrobotlab.service.data.PinData;
import org.myrobotlab.service.interfaces.LeapDataListener;
import org.myrobotlab.service.interfaces.PinArrayListener;
import org.myrobotlab.service.config.InMoov2HandConfig;
import org.myrobotlab.service.config.ServiceConfig;
import org.myrobotlab.service.config.ServoConfig;
import org.myrobotlab.service.interfaces.ServoControl;
import org.myrobotlab.service.interfaces.ServoController;
import org.slf4j.Logger;
/**
* InMoovHand - The Hand sub service for the InMoov Robot. This service has 6
* servos controlled by an ServoController.
* thumb,index,majeure,ringFinger,pinky, and wrist
*
* There is also leap motion support.
*/
public class InMoov2Hand extends Service implements LeapDataListener, PinArrayListener {
public final static Logger log = LoggerFactory.getLogger(InMoov2Hand.class);
private static final long serialVersionUID = 1L;
/**
* peer services FIXME - need to be protected !
*/
transient public LeapMotion leap;
transient public ServoController controller;
transient public ServoControl thumb;
transient public ServoControl index;
transient public ServoControl majeure;
transient public ServoControl ringFinger;
transient public ServoControl pinky;
transient public ServoControl wrist;
// The pins for the finger tip sensors
public String[] sensorPins = new String[] { "A0", "A1", "A2", "A3", "A4" };
// public int[] sensorLastValues = new int[] {0,0,0,0,0};
public boolean sensorsEnabled = false;
public int[] sensorThresholds = new int[] { 500, 500, 500, 500, 500 };
/**
* list of names of possible controllers
*/
public List<String> controllers = Runtime.getServiceNamesFromInterface(ServoController.class);
public String controllerName;
boolean isAttached = false;
/**
 * @param n  service name
 * @param id runtime/process instance id
 */
public InMoov2Hand(String n, String id) {
    super(n, id);
}
/** Starts this service and its six servo peers (thumb, index, majeure, ringFinger, pinky, wrist). */
public void startService() {
    super.startService();
    thumb = (ServoControl) startPeer("thumb");
    index = (ServoControl) startPeer("index");
    majeure = (ServoControl) startPeer("majeure");
    ringFinger = (ServoControl) startPeer("ringFinger");
    pinky = (ServoControl) startPeer("pinky");
    wrist = (ServoControl) startPeer("wrist");
}
/** Gesture: extends the middle finger (majeure) while curling the others. */
public void bird() {
    moveTo(150.0, 180.0, 0.0, 180.0, 180.0, 90.0);
}
/** Refreshes the list of available controllers whenever a new service registers with the runtime. */
public void onRegistered(Registration s) {
    refreshControllers();
    broadcastState();
}
/**
 * Re-queries the runtime for all services implementing ServoController.
 *
 * @return the refreshed controller name list (also cached in the controllers field)
 */
public List<String> refreshControllers() {
    controllers = Runtime.getServiceNamesFromInterface(ServoController.class);
    return controllers;
}
/** @return the currently attached servo controller, or null when not attached */
// @Override
public ServoController getController() {
    return controller;
}
/**
 * Null-safe lookup of the attached controller's service name.
 *
 * @return the controller's name, or null when no controller is attached
 */
public String getControllerName() {
    return (controller == null) ? null : controller.getName();
}
/**
 * Checks whether this hand is attached to its controller.
 *
 * Side effects: updates the cached isAttached flag, and clears the controller
 * reference when the controller no longer knows this device.
 *
 * NOTE(review): hard-casts controller to Arduino — this throws ClassCastException
 * for any other ServoController implementation; confirm intended controller types.
 */
public boolean isAttached() {
    if (controller != null) {
        if (((Arduino) controller).getDeviceId((Attachable) this) != null) {
            isAttached = true;
            return true;
        }
        // controller no longer has a device id for us — treat as detached
        controller = null;
    }
    isAttached = false;
    return false;
}
/** Fans out a state broadcast to every servo peer that has been started. */
@Override
public void broadcastState() {
    ServoControl[] servos = { thumb, index, majeure, ringFinger, pinky, wrist };
    for (ServoControl servo : servos) {
        if (servo != null) {
            servo.broadcastState();
        }
    }
}
/** Gesture: closes the hand into a fist; the wrist position is left unchanged. */
public void close() {
    moveTo(130, 180, 180, 180, 180);
}
/** Gesture: closes the hand with the index finger less curled than a full fist (pinch grip). */
public void closePinch() {
    moveTo(130, 140, 180, 180, 180);
}
/** Disables all servos before releasing the service; failures are reported via error(), not thrown. */
public void releaseService() {
    try {
        disable();
        super.releaseService();
    } catch (Exception e) {
        error(e);
    }
}
/**
 * Gesture sequence: counts 1 through 5 on the fingers with a pause between steps.
 *
 * NOTE(review): sleep(1) is 1 millisecond if Service.sleep takes millis — the pause
 * is then imperceptible; confirm whether 1 second was intended.
 */
public void count() {
    one();
    sleep(1);
    two();
    sleep(1);
    three();
    sleep(1);
    four();
    sleep(1);
    five();
}
/** Gesture: extends index and pinky, curls the rest ("devil horns"). */
public void devilHorns() {
    moveTo(150.0, 0.0, 180.0, 180.0, 0.0, 90.0);
}
/** De-energizes every servo peer that has been started; missing peers are skipped. */
public void disable() {
    ServoControl[] servos = { thumb, index, majeure, ringFinger, pinky, wrist };
    for (ServoControl servo : servos) {
        if (servo != null) {
            servo.disable();
        }
    }
}
/**
 * Energizes every servo peer that has been started; missing peers are skipped.
 *
 * @return always true
 */
public boolean enable() {
    ServoControl[] servos = { thumb, index, majeure, ringFinger, pinky, wrist };
    for (ServoControl servo : servos) {
        if (servo != null) {
            servo.enable();
        }
    }
    return true;
}
/** @deprecated use setAutoDisable(Boolean) directly; this simply delegates to it */
@Deprecated
public void enableAutoDisable(Boolean param) {
    setAutoDisable(param);
}
/** @deprecated intentional no-op kept only for backwards script compatibility */
@Deprecated
public void enableAutoEnable(Boolean param) {
}
/** Gesture: shows five fingers (same as an open hand). */
public void five() {
    open();
}
/** Gesture: shows four fingers (thumb partially curled, all fingers extended). */
public void four() {
    moveTo(150.0, 0.0, 0.0, 0.0, 0.0, 90.0);
}
/** Sets every started servo peer to full speed; missing peers are skipped. */
public void fullSpeed() {
    ServoControl[] servos = { thumb, index, majeure, ringFinger, pinky, wrist };
    for (ServoControl servo : servos) {
        if (servo != null) {
            servo.fullSpeed();
        }
    }
}
/**
 * Returns the analog pins that the hand is listening to. The InMoovHand
 * listens on analog pins A0-A4 for the finger tip sensors.
 *
 * @return the configured sensor pin names (A0-A4 by default)
 */
@Override
public String[] getActivePins() {
    // TODO Auto-generated method stub
    // for the InMoov hand, we're just going to say A0 - A4 ... for now..
    return sensorPins;
}
/**
 * Returns the most recent activity time reported by any attached servo.
 *
 * BUG FIX: the previous version read {@code index.getLastActivityTime()}
 * twice and performed no null checks, throwing NPE on a partially
 * configured hand — every sibling method in this class null-guards.
 *
 * @return the latest activity timestamp across all attached servos, or 0
 *         if no servo is attached
 */
public long getLastActivityTime() {
  long lastActivityTime = 0;
  if (thumb != null)
    lastActivityTime = Math.max(lastActivityTime, thumb.getLastActivityTime());
  if (index != null)
    lastActivityTime = Math.max(lastActivityTime, index.getLastActivityTime());
  if (majeure != null)
    lastActivityTime = Math.max(lastActivityTime, majeure.getLastActivityTime());
  if (ringFinger != null)
    lastActivityTime = Math.max(lastActivityTime, ringFinger.getLastActivityTime());
  if (pinky != null)
    lastActivityTime = Math.max(lastActivityTime, pinky.getLastActivityTime());
  if (wrist != null)
    lastActivityTime = Math.max(lastActivityTime, wrist.getLastActivityTime());
  return lastActivityTime;
}
/**
 * Builds a python script line reproducing the hand's current pose.
 *
 * @deprecated use LangUtils
 * @param inMoovServiceName name of the owning InMoov service in the script
 * @return a python moveHand(...) call with the current servo input positions
 */
@Deprecated /* use LangUtils */
public String getScript(String inMoovServiceName) {
  // side is derived from the service name, e.g. "i01.leftHand" -> "left"
  String side = getName().contains("left") ? "left" : "right";
  return String.format(Locale.ENGLISH, "%s.moveHand(\"%s\",%.2f,%.2f,%.2f,%.2f,%.2f,%.2f)\n", inMoovServiceName, side, thumb.getCurrentInputPos(), index.getCurrentInputPos(),
      majeure.getCurrentInputPos(), ringFinger.getCurrentInputPos(), pinky.getCurrentInputPos(), wrist.getCurrentInputPos());
}

// "Hang ten" / shaka gesture: thumb and pinky extended, middle fingers flexed.
public void hangTen() {
  moveTo(0.0, 180.0, 180.0, 180.0, 0.0, 90.0);
}

/**
 * Applies the same input/output mapping to each finger servo.
 * NOTE(review): the wrist is not remapped here — confirm this is intended.
 */
public void map(double minX, double maxX, double minY, double maxY) {
  if (thumb != null) {
    thumb.map(minX, maxX, minY, maxY);
  }
  if (index != null) {
    index.map(minX, maxX, minY, maxY);
  }
  if (majeure != null) {
    majeure.map(minX, maxX, minY, maxY);
  }
  if (ringFinger != null) {
    ringFinger.map(minX, maxX, minY, maxY);
  }
  if (pinky != null) {
    pinky.map(minX, maxX, minY, maxY);
  }
}

// TODO - waving thread fun
// Moves the five fingers; the wrist is left unchanged (null target).
public void moveTo(double thumb, double index, double majeure, double ringFinger, double pinky) {
  moveTo(thumb, index, majeure, ringFinger, pinky, null);
}

/**
 * Moves fingers and wrist to the given positions. A null position (or a
 * missing servo) leaves that joint unchanged.
 */
public void moveTo(Double thumbPos, Double indexPos, Double majeurePos, Double ringFingerPos, Double pinkyPos, Double wristPos) {
  if (log.isDebugEnabled()) {
    log.debug("{}.moveTo {} {} {} {} {} {}", getName(), thumbPos, indexPos, majeurePos, ringFingerPos, pinkyPos, wristPos);
  }
  if (thumb != null && thumbPos != null) {
    thumb.moveTo(thumbPos);
  }
  if (index != null && indexPos != null) {
    index.moveTo(indexPos);
  }
  if (majeure != null && majeurePos != null) {
    majeure.moveTo(majeurePos);
  }
  if (ringFinger != null && ringFingerPos != null) {
    ringFinger.moveTo(ringFingerPos);
  }
  if (pinky != null && pinkyPos != null) {
    pinky.moveTo(pinkyPos);
  }
  if (wrist != null && wristPos != null) {
    wrist.moveTo(wristPos);
  }
}

// Blocking move of the five fingers; the wrist is left unchanged.
public void moveToBlocking(double thumb, double index, double majeure, double ringFinger, double pinky) {
  moveToBlocking(thumb, index, majeure, ringFinger, pinky, null);
}

/**
 * Moves fingers and wrist, then blocks until every servo reports it has
 * reached its target position.
 */
public void moveToBlocking(double thumb, double index, double majeure, double ringFinger, double pinky, Double wrist) {
  log.info("init {} moveToBlocking ", getName());
  moveTo(thumb, index, majeure, ringFinger, pinky, wrist);
  waitTargetPos();
  log.info("end {} moveToBlocking ", getName());
}

// "OK" sign: thumb and index touching, remaining fingers extended.
public void ok() {
  moveTo(150.0, 180.0, 0.0, 0.0, 0.0, 90.0);
}

// Shows one finger: index extended, everything else flexed.
public void one() {
  moveTo(150.0, 0.0, 180.0, 180.0, 180.0, 90.0);
}

/**
 * Attaches by controller service name and numeric sensor pin.
 */
public void attach(String controllerName, int sensorPin) throws Exception {
  attach((ServoController) Runtime.getService(controllerName), sensorPin);
}

/**
 * Attaches by controller service name; the sensor pin is parsed from a
 * string.
 */
public void attach(String controllerName, String sensorPin) throws Exception {
  attach((ServoController) Runtime.getService(controllerName), Integer.parseInt(sensorPin));
}
/**
 * Attaches this hand to the given servo controller.
 *
 * @param controller the controller to attach to; ignored if null
 * @param sensorPin sensor pin number
 *          (NOTE(review): currently unused in this method — confirm wiring)
 */
public void attach(ServoController controller, int sensorPin) {
  try {
    if (controller == null) {
      error("setting null as controller");
      return;
    }
    if (isAttached) {
      log.info("Sensor already attached");
      return;
    }
    // BUG FIX: previously this called controller.attach(controller),
    // attaching the controller to itself. Attach this device instead,
    // mirroring detach(), which calls controller.detach(this).
    controller.attach(this);
    log.info("{} setController {}", getName(), controller.getName());
    this.controller = controller;
    controllerName = this.controller.getName();
    isAttached = true;
    broadcastState();
  } catch (Exception e) {
    error(e);
  }
}
/**
 * Detaches this hand from its servo controller and broadcasts the new
 * state.
 *
 * @param controller the controller to detach from (may be null)
 */
public void detach(ServoController controller) {
  // let the controller know you want to detach this device
  if (controller != null) {
    controller.detach(this);
  }
  // setting controller reference to null
  this.controller = null;
  isAttached = false;
  refreshControllers();
  broadcastState();
}

// Re-publishes the current state to listeners.
public void refresh() {
  broadcastState();
}

/**
 * LeapMotion frame callback: selects the tracked hand matching this
 * service's side (derived from the service name) and drives each attached
 * finger servo from the corresponding tracked finger value. Invalid frames
 * are logged and returned unchanged.
 *
 * @param data the incoming leap frame data
 * @return the (possibly consumed) leap data, for pub/sub chaining
 */
@Override
public LeapData onLeapData(LeapData data) {
  String side = getName().contains("left") ? "left" : "right";
  if (!data.frame.isValid()) {
    // TODO: we could return void here? not sure
    // who wants the return value form this method.
    log.info("Leap data frame not valid.");
    return data;
  }
  LeapHand h;
  if ("right".equalsIgnoreCase(side)) {
    if (data.frame.hands().rightmost().isValid()) {
      h = data.rightHand;
    } else {
      log.info("Right hand frame not valid.");
      // return this hand isn't valid
      return data;
    }
  } else if ("left".equalsIgnoreCase(side)) {
    if (data.frame.hands().leftmost().isValid()) {
      h = data.leftHand;
    } else {
      log.info("Left hand frame not valid.");
      // return this frame isn't valid.
      return data;
    }
  } else {
    // side could be null?
    log.info("Unknown Side or side not set on hand (Side = {})", side);
    // we can default to the right side?
    // TODO: come up with a better default or at least document this
    // behavior.
    if (data.frame.hands().rightmost().isValid()) {
      h = data.rightHand;
    } else {
      log.info("Right(unknown) hand frame not valid.");
      // return this hand isn't valid
      return data;
    }
  }
  // If the hand data came from a valid frame, update the finger postions.
  // move all fingers
  if (index != null) {
    index.moveTo(h.index);
  } else {
    log.debug("Index finger isn't attached or is null.");
  }
  if (thumb != null) {
    thumb.moveTo(h.thumb);
  } else {
    log.debug("Thumb isn't attached or is null.");
  }
  if (pinky != null) {
    pinky.moveTo(h.pinky);
  } else {
    log.debug("Pinky finger isn't attached or is null.");
  }
  if (ringFinger != null) {
    ringFinger.moveTo(h.ring);
  } else {
    log.debug("Ring finger isn't attached or is null.");
  }
  if (majeure != null) {
    majeure.moveTo(h.middle);
  } else {
    log.debug("Middle(Majeure) finger isn't attached or is null.");
  }
  return data;
}
// FIXME - use pub/sub attach to set this up without having this method !
/**
 * Pin-array callback for the finger-tip sensors. Currently only logs the
 * incoming pin data; the threshold-based finger stop logic below is
 * commented out pending rework.
 *
 * @param pindata the sampled pin values
 */
@Override
public void onPinArray(PinData[] pindata) {
  log.info("On Pin Data: {}", pindata.length);
  if (!sensorsEnabled)
    return;
  // just return ? TOOD: maybe still track the last read values...
  // TODO : change the interface to get a map of pin data, keyed off the name.
  // ?
  for (PinData pin : pindata) {
    log.info("Pin Data: {}", pin);
    // p
    // if (sensorPins.contains(pin.pin)) {
    // // it's one of our finger pins.. let's operate on it.
    // log.info("Pin Data : {} value {}", pin.pin, pin.value );
    // if (sensorPins[0].equalsIgnoreCase(pin.pin)) {
    // // thumb / A0
    // // here we want to test the pin state.. and potentially take an action
    // // based on the updated sensor pin state
    // if (pin.value > sensorThresholds[0])
    // thumb.stop();
    // } else if (sensorPins[1].equalsIgnoreCase(pin.pin)) {
    // // index / A1
    // if (pin.value > sensorThresholds[1])
    // index.stop();
    //
    // } else if (sensorPins[2].equalsIgnoreCase(pin.pin)) {
    // // middle / A2
    // if (pin.value > sensorThresholds[2])
    // majeure.stop();
    //
    // } else if (sensorPins[3].equalsIgnoreCase(pin.pin)) {
    // // ring / A3
    // if (pin.value > sensorThresholds[3])
    // ringFinger.stop();
    //
    // } else if (sensorPins[4].equalsIgnoreCase(pin.pin)) {
    // // pinky / A4
    // if (pin.value > sensorThresholds[4])
    // pinky.stop();
    // }
    // }
  }
}

// Opens the hand fully — same as moving every joint to its rest position.
public void open() {
  rest();
}

// Opens thumb and index for a pinch approach; other fingers stay flexed.
public void openPinch() {
  moveTo(0, 0, 180, 180, 180);
}

// De-energizes all servos (alias for disable()).
public void release() {
  disable();
}
/**
 * Moves every attached servo to its configured rest position.
 */
public void rest() {
  if (thumb != null)
    thumb.rest();
  if (index != null)
    index.rest();
  if (majeure != null)
    majeure.rest();
  if (ringFinger != null)
    ringFinger.rest();
  if (pinky != null)
    pinky.rest();
  if (wrist != null)
    wrist.rest();
}

/**
 * Saves this service's configuration and that of every attached servo.
 *
 * @return always true
 */
@Override
public boolean save() {
  super.save();
  if (thumb != null)
    thumb.save();
  if (index != null)
    index.save();
  if (majeure != null)
    majeure.save();
  if (ringFinger != null)
    ringFinger.save();
  if (pinky != null)
    pinky.save();
  if (wrist != null)
    wrist.save();
  return true;
}

/**
 * Loads and executes a python script file in the "python" service.
 *
 * @deprecated
 * @param file path of the python file to execute
 * @return true if the script loaded and executed successfully
 */
@Deprecated
public boolean loadFile(String file) {
  File f = new File(file);
  Python p = (Python) Runtime.getService("python");
  log.info("Loading Python file {}", f.getAbsolutePath());
  if (p == null) {
    log.error("Python instance not found");
    return false;
  }
  String script = null;
  try {
    script = FileIO.toString(f.getAbsolutePath());
  } catch (IOException e) {
    log.error("IO Error loading file : ", e);
    return false;
  }
  // evaluate the scripts in a blocking way.
  boolean result = p.exec(script, true);
  if (!result) {
    log.error("Error while loading file {}", f.getAbsolutePath());
    return false;
  } else {
    log.debug("Successfully loaded {}", f.getAbsolutePath());
  }
  return true;
}

/**
 * Enables or disables auto-disable on every attached servo.
 */
public void setAutoDisable(Boolean param) {
  if (thumb != null)
    thumb.setAutoDisable(param);
  if (index != null)
    index.setAutoDisable(param);
  if (majeure != null)
    majeure.setAutoDisable(param);
  if (ringFinger != null)
    ringFinger.setAutoDisable(param);
  if (pinky != null)
    pinky.setAutoDisable(param);
  if (wrist != null)
    wrist.setAutoDisable(param);
}

/**
 * Assigns controller pins to each attached servo.
 */
public void setPins(int thumbPin, int indexPin, int majeurePin, int ringFingerPin, int pinkyPin, int wristPin) {
  log.info("setPins {} {} {} {} {} {}", thumbPin, indexPin, majeurePin, ringFingerPin, pinkyPin, wristPin);
  if (thumb != null)
    thumb.setPin(thumbPin);
  if (index != null)
    index.setPin(indexPin);
  if (majeure != null)
    majeure.setPin(majeurePin);
  if (ringFinger != null)
    ringFinger.setPin(ringFingerPin);
  if (pinky != null)
    pinky.setPin(pinkyPin);
  if (wrist != null)
    wrist.setPin(wristPin);
}

// Sets rest positions for the five fingers; wrist rest is left unchanged.
public void setRest(double thumb, double index, double majeure, double ringFinger, double pinky) {
  setRest(thumb, index, majeure, ringFinger, pinky, null);
}
/**
 * Sets the rest position of each attached servo. A null wristRest (as
 * passed by the 5-argument overload) leaves the wrist rest unchanged.
 */
public void setRest(double thumbRest, double indexRest, double majeureRest, double ringFingerRest, double pinkyRest, Double wristRest) {
  // BUG FIX: previously logged the servo field objects (thumb, index, ...)
  // instead of the requested rest values.
  log.info("setRest {} {} {} {} {} {}", thumbRest, indexRest, majeureRest, ringFingerRest, pinkyRest, wristRest);
  if (thumb != null)
    thumb.setRest(thumbRest);
  if (index != null)
    index.setRest(indexRest);
  if (majeure != null)
    majeure.setRest(majeureRest);
  if (ringFinger != null)
    ringFinger.setRest(ringFingerRest);
  if (pinky != null)
    pinky.setRest(pinkyRest);
  // BUG FIX: guard against null wristRest — the 5-arg overload passes null,
  // which previously risked a NullPointerException on unboxing.
  if (wrist != null && wristRest != null) {
    wrist.setRest(wristRest);
  }
}
/**
 * @param pins
 *          Set the array of pins that should be listened to.
 *
 */
public void setSensorPins(String[] pins) {
  // TODO, this should probably be a sorted set.. and sensorPins itself should
  // probably be a map to keep the mapping of pin to finger
  this.sensorPins = pins;
}

/**
 * Sets the movement speed of each attached servo.
 */
public void setSpeed(Double thumbSpeed, Double indexSpeed, Double majeureSpeed, Double ringFingerSpeed, Double pinkySpeed, Double wristSpeed) {
  if (thumb != null)
    thumb.setSpeed(thumbSpeed);
  if (index != null)
    index.setSpeed(indexSpeed);
  if (majeure != null)
    majeure.setSpeed(majeureSpeed);
  if (ringFinger != null)
    ringFinger.setSpeed(ringFingerSpeed);
  if (pinky != null)
    pinky.setSpeed(pinkySpeed);
  if (wrist != null)
    wrist.setSpeed(wristSpeed);
}
/**
 * @deprecated use {@link #setSpeed(Double, Double, Double, Double, Double, Double)}.
 */
@Deprecated
public void setVelocity(Double thumb, Double index, Double majeure, Double ringFinger, Double pinky, Double wrist) {
  // BUG FIX: the warning previously read "setspeed deprecated" — it is
  // setVelocity that is deprecated; setSpeed is the replacement.
  log.warn("setVelocity deprecated please use setSpeed");
  setSpeed(thumb, index, majeure, ringFinger, pinky, wrist);
}
// FIXME - if multiple systems are dependent on the ServoControl map and
// limits to be a certain value
// leap should change its output, and leave the map and limits here alone
// FIXME !!! - should not have LeapMotion defined here at all - it should be
// pub/sub !!!
/**
 * Starts the leap-motion peer (if needed), remaps each finger servo into
 * the leap input range, and subscribes this hand to leap data.
 * NOTE(review): unlike most methods here, the finger fields are
 * dereferenced without null checks — an unconfigured finger will NPE.
 *
 * @throws Exception if the leap peer cannot be started
 */
public void startLeapTracking() throws Exception {
  if (leap == null) {
    leap = (LeapMotion) startPeer("leap");
  }
  this.index.map(90.0, 0.0, this.index.getMin(), this.index.getMax());
  this.thumb.map(90.0, 50.0, this.thumb.getMin(), this.thumb.getMax());
  this.majeure.map(90.0, 0.0, this.majeure.getMin(), this.majeure.getMax());
  this.ringFinger.map(90.0, 0.0, this.ringFinger.getMin(), this.ringFinger.getMax());
  this.pinky.map(90.0, 0.0, this.pinky.getMin(), this.pinky.getMax());
  leap.addLeapDataListener(this);
  leap.startTracking();
  return;
}

/**
 * Stops any in-progress movement on every attached servo.
 */
public void stop() {
  if (thumb != null)
    thumb.stop();
  if (index != null)
    index.stop();
  if (majeure != null)
    majeure.stop();
  if (ringFinger != null)
    ringFinger.stop();
  if (pinky != null)
    pinky.stop();
  if (wrist != null)
    wrist.stop();
}

// FIXME !!! - should not have LeapMotion defined here at all - it should be
// pub/sub !!!
/**
 * Stops leap tracking and restores each finger's identity input/output
 * mapping, then moves the hand to rest.
 * NOTE(review): assumes leap and all finger fields are non-null — will NPE
 * otherwise.
 */
public void stopLeapTracking() {
  leap.stopTracking();
  index.map(index.getMin(), index.getMax(), index.getMin(), index.getMax());
  thumb.map(thumb.getMin(), thumb.getMax(), thumb.getMin(), thumb.getMax());
  majeure.map(majeure.getMin(), majeure.getMax(), majeure.getMin(), majeure.getMax());
  ringFinger.map(ringFinger.getMin(), ringFinger.getMax(), ringFinger.getMin(), ringFinger.getMax());
  pinky.map(pinky.getMin(), pinky.getMax(), pinky.getMin(), pinky.getMax());
  rest();
  return;
}

/**
 * Quick hardware smoke test: nudges every attached servo 2 units from its
 * current input position.
 */
public void test() {
  if (thumb != null)
    thumb.moveTo(thumb.getCurrentInputPos() + 2);
  if (index != null)
    index.moveTo(index.getCurrentInputPos() + 2);
  if (majeure != null)
    majeure.moveTo(majeure.getCurrentInputPos() + 2);
  if (ringFinger != null)
    ringFinger.moveTo(ringFinger.getCurrentInputPos() + 2);
  if (pinky != null)
    pinky.moveTo(pinky.getCurrentInputPos() + 2);
  if (wrist != null)
    wrist.moveTo(wrist.getCurrentInputPos() + 2);
  info("test completed");
}

// Shows three fingers: index, middle and ring extended.
public void three() {
  moveTo(150.0, 0.0, 0.0, 0.0, 180.0, 90.0);
}

// Thumbs-up gesture: thumb extended, fingers flexed.
public void thumbsUp() {
  moveTo(0.0, 180.0, 180.0, 180.0, 180.0, 90.0);
}

// Shows two fingers — same as the victory sign.
public void two() {
  victory();
}

// Victory / peace sign: index and middle extended.
public void victory() {
  moveTo(150.0, 0.0, 0.0, 180.0, 180.0, 90.0);
}

/**
 * Blocks until every attached servo reports it has reached its target
 * position.
 */
public void waitTargetPos() {
  if (thumb != null)
    thumb.waitTargetPos();
  if (index != null)
    index.waitTargetPos();
  if (majeure != null)
    majeure.waitTargetPos();
  if (ringFinger != null)
    ringFinger.waitTargetPos();
  if (pinky != null)
    pinky.waitTargetPos();
  if (wrist != null)
    wrist.waitTargetPos();
}

/**
 * @return true if the named service is this hand's current controller
 */
@Override
public boolean isAttached(String name) {
  return controller != null && name.equals(controller.getName());
}

/**
 * @return the set of attached service names (at most the controller)
 */
@Override
public Set<String> getAttached() {
  Set<String> ret = new HashSet<String>();
  if (controller != null) {
    ret.add(controller.getName());
  }
  return ret;
}

/**
 * Manual test harness: starts an InMoov2 right hand plus GUIs and runs a
 * few canned gestures.
 */
public static void main(String[] args) {
  LoggingFactory.init(Level.INFO);
  try {
    InMoov2 i01 = (InMoov2) Runtime.start("i01", "InMoov2");
    i01.startRightHand();
    ServoController controller = (ServoController) Runtime.getService("i01.right");
    InMoov2Hand rightHand = (InMoov2Hand) Runtime.start("r01", "InMoov2Hand");// InMoovHand("r01");
    Runtime.createAndStart("gui", "SwingGui");
    Runtime.createAndStart("webgui", "WebGui");
    // rightHand.connect("COM12"); TEST RECOVERY !!!
    rightHand.close();
    rightHand.open();
    rightHand.openPinch();
    rightHand.closePinch();
    rightHand.rest();
  } catch (Exception e) {
    log.error("main threw", e);
  }
}
}
| |
/*
* Copyright 2009-2016 DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package org.mrgeo.data.raster;
import org.gdal.gdal.Dataset;
import org.junit.*;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mrgeo.junit.UnitTest;
import org.mrgeo.test.TestUtils;
import java.awt.image.DataBuffer;
import java.awt.image.Raster;
import java.io.IOException;
@SuppressWarnings("all") // test code, not included in production
/**
 * Unit tests for MrGeoRaster: round-trips through GDAL Datasets and AWT
 * Rasters for each supported DataBuffer type, plus nearest-neighbor
 * scaling checks against baseline TIFFs.
 */
public class MrGeoRasterTest
{
// When true, test runs regenerate the baseline TIFFs instead of comparing
// against them (see compareResult).
private static boolean GEN_BASELINE_DATA_ONLY = false;
private static int width;
private static int height;
// Shared "numbered" rasters (each pixel value is its index) in three types.
private static MrGeoRaster numberedInt;
private static MrGeoRaster numberedFloat;
private static MrGeoRaster numberedDouble;
@Rule
public TestName testname = new TestName();
private TestUtils testutils;
@BeforeClass
public static void init() throws MrGeoRaster.MrGeoRasterException
{
width = 13;
height = 13;
numberedInt = TestUtils.createNumberedRaster(width, height, DataBuffer.TYPE_INT);
numberedFloat = TestUtils.createNumberedRaster(width, height, DataBuffer.TYPE_FLOAT);
numberedDouble = TestUtils.createNumberedRaster(width, height, DataBuffer.TYPE_DOUBLE);
}
@AfterClass
public static void tearDown() throws Exception
{
// release the shared rasters
numberedInt = null;
numberedFloat = null;
numberedDouble = null;
}
@Before
public void setUp() throws Exception
{
testutils = new TestUtils(MrGeoRasterTest.class);
}
// Round-trip a constant byte raster through a GDAL Dataset.
@Test
@Category(UnitTest.class)
public void toFromDatasetByte() throws IOException
{
MrGeoRaster src = TestUtils.createConstRaster(10, 10, DataBuffer.TYPE_BYTE, 1);
Dataset ds = src.toDataset();
MrGeoRaster dst = MrGeoRaster.fromDataset(ds);
TestUtils.compareRasters(src, dst);
}
// Round-trip a constant short raster through a GDAL Dataset.
@Test
@Category(UnitTest.class)
public void toFromDatasetShort() throws IOException
{
MrGeoRaster src = TestUtils.createConstRaster(10, 10, DataBuffer.TYPE_SHORT, 1);
Dataset ds = src.toDataset();
MrGeoRaster dst = MrGeoRaster.fromDataset(ds);
TestUtils.compareRasters(src, dst);
}
// Round-trip a constant unsigned-short raster through a GDAL Dataset.
@Test
@Category(UnitTest.class)
public void toFromDatasetUShort() throws IOException
{
MrGeoRaster src = TestUtils.createConstRaster(10, 10, DataBuffer.TYPE_USHORT, 1);
Dataset ds = src.toDataset();
MrGeoRaster dst = MrGeoRaster.fromDataset(ds);
TestUtils.compareRasters(src, dst);
}
// Round-trip a constant int raster through a GDAL Dataset.
@Test
@Category(UnitTest.class)
public void toFromDatasetInt() throws IOException
{
MrGeoRaster src = TestUtils.createConstRaster(10, 10, DataBuffer.TYPE_INT, 1);
Dataset ds = src.toDataset();
MrGeoRaster dst = MrGeoRaster.fromDataset(ds);
TestUtils.compareRasters(src, dst);
}
// Round-trip a constant float raster through a GDAL Dataset.
@Test
@Category(UnitTest.class)
public void toFromDatasetFloat() throws IOException
{
MrGeoRaster src = TestUtils.createConstRaster(10, 10, DataBuffer.TYPE_FLOAT, 1);
Dataset ds = src.toDataset();
MrGeoRaster dst = MrGeoRaster.fromDataset(ds);
TestUtils.compareRasters(src, dst);
}
// Round-trip a constant double raster through a GDAL Dataset.
@Test
@Category(UnitTest.class)
public void toFromDatasetDouble() throws IOException
{
MrGeoRaster src = TestUtils.createConstRaster(10, 10, DataBuffer.TYPE_DOUBLE, 1);
Dataset ds = src.toDataset();
MrGeoRaster dst = MrGeoRaster.fromDataset(ds);
TestUtils.compareRasters(src, dst);
}
// Nearest-neighbor scaling of the int raster, up (1x-14x) and down (1/2-1/7).
@Test
@Category(UnitTest.class)
public void scaleRasterNearestInt() throws IOException
{
int scale;
MrGeoRaster scaled;
// scale up
for (scale = 1; scale < 15; scale++)
{
scaled = numberedInt.scale(width * scale, height * scale, false, new double[]{Double.NaN});
compareResult(scale, scaled);
}
// scale down
for (scale = 2; scale < 8; scale++)
{
scaled = numberedInt.scale(width / scale, height / scale, false, new double[]{Double.NaN});
compareResult(1.0 / scale, scaled);
}
}
// Nearest-neighbor scaling of the float raster, up and down.
@Test
@Category(UnitTest.class)
public void scaleRasterNearestFloat() throws IOException
{
int scale;
MrGeoRaster scaled;
// scale up
for (scale = 1; scale < 15; scale++)
{
scaled = numberedFloat.scale(width * scale, height * scale, false, new double[]{Double.NaN});
compareResult(scale, scaled);
}
// scale down
for (scale = 2; scale < 8; scale++)
{
scaled = numberedFloat.scale(width / scale, height / scale, false, new double[]{Double.NaN});
compareResult(1.0 / scale, scaled);
}
}
// Nearest-neighbor scaling of the double raster, up and down.
@Test
@Category(UnitTest.class)
public void scaleRasterNearestDouble() throws IOException
{
int scale;
MrGeoRaster scaled;
// scale up
for (scale = 1; scale < 15; scale++)
{
scaled = numberedDouble.scale(width * scale, height * scale, false, new double[]{Double.NaN});
compareResult(scale, scaled);
}
// scale down
for (scale = 2; scale < 8; scale += 1)
{
scaled = numberedDouble.scale(width / scale, height / scale, false, new double[]{Double.NaN});
compareResult(1.0 / scale, scaled);
}
}
// Convert a numbered byte raster to an AWT Raster and compare per pixel.
@Test
@Category(UnitTest.class)
public void toRasterByte() throws IOException
{
MrGeoRaster numberedByte = TestUtils.createNumberedRaster(width, height, DataBuffer.TYPE_BYTE);
Raster raster = numberedByte.toRaster();
compareRaster(numberedByte, raster);
}
// Convert a numbered short raster to an AWT Raster and compare per pixel.
@Test
@Category(UnitTest.class)
public void toRasterShort() throws IOException
{
MrGeoRaster numberedShort = TestUtils.createNumberedRaster(width, height, DataBuffer.TYPE_SHORT);
Raster raster = numberedShort.toRaster();
compareRaster(numberedShort, raster);
}
// Convert a numbered unsigned-short raster to an AWT Raster and compare.
@Test
@Category(UnitTest.class)
public void toRasterUShort() throws IOException
{
MrGeoRaster numberedUShort = TestUtils.createNumberedRaster(width, height, DataBuffer.TYPE_USHORT);
Raster raster = numberedUShort.toRaster();
compareRaster(numberedUShort, raster);
}
// Convert the shared int raster to an AWT Raster and compare per pixel.
@Test
@Category(UnitTest.class)
public void toRasterInt() throws IOException
{
Raster raster = numberedInt.toRaster();
compareRaster(numberedInt, raster);
}
// Convert the shared float raster to an AWT Raster and compare per pixel.
@Test
@Category(UnitTest.class)
public void toRasterFloat() throws IOException
{
Raster raster = numberedFloat.toRaster();
compareRaster(numberedFloat, raster);
}
// Convert the shared double raster to an AWT Raster and compare per pixel.
@Test
@Category(UnitTest.class)
public void toRasterDouble() throws IOException
{
Raster raster = numberedDouble.toRaster();
compareRaster(numberedDouble, raster);
}
/**
 * Compares a MrGeoRaster with an AWT Raster pixel by pixel. Integral types
 * use a fixed delta of ~1; floating types use a delta relative to the
 * pixel value.
 */
private void compareRaster(MrGeoRaster mrgeoraster, Raster raster)
{
boolean intish = mrgeoraster.datatype() == DataBuffer.TYPE_BYTE || mrgeoraster.datatype() == DataBuffer.TYPE_INT
|| mrgeoraster.datatype() == DataBuffer.TYPE_SHORT || mrgeoraster.datatype() == DataBuffer.TYPE_USHORT;
for (int b = 0; b < mrgeoraster.bands(); b++)
{
for (int y = 0; y < mrgeoraster.height(); y++)
{
for (int x = 0; x < mrgeoraster.width(); x++)
{
float v1 = mrgeoraster.getPixelFloat(x, y, b);
float v2 = raster.getSampleFloat(x, y, b);
if (Float.isNaN(v1) != Float.isNaN(v2))
{
org.junit.Assert.assertEquals("Pixel NaN mismatch: px: " + x + " py: " + y
+ " b: " + b + " v1: " + v1 + " v2: " + v2, v1, v2, 0);
}
// make delta something reasonable relative to the data
//NOTE: this formula is not very reliable. An error of 2e-3f for pixel v1=1 fails, but passes for v1=2.
float delta = intish ? 1.0001f : Math.max(Math.abs(v1 * 1e-3f), 1e-3f);
org.junit.Assert.assertEquals("Pixel value mismatch: px: " + x + " py: " + y
+ " b: " + b + " v1: " + v1 + " v2: " + v2, v1, v2, delta);
}
}
}
}
/**
 * Either saves the scaled raster as a new baseline TIFF (generation mode)
 * or compares it against the stored baseline, keyed by test name + scale.
 */
private void compareResult(double scale, MrGeoRaster scaled) throws IOException
{
String name = testname.getMethodName() + String.format("-%.3f", scale);
if (GEN_BASELINE_DATA_ONLY)
{
testutils.saveBaselineTif(name, scaled);
}
else
{
testutils.compareRasters(name, scaled);
}
}
//@Test
//@Category(UnitTest.class)
//public void scaleRasterInterp()
//{
// int scale;
// WritableRaster scaled;
//
// scale = 2;
// scaled = RasterUtils.scaleRasterInterp(numberedFloat,
// numberedFloat.width() * scale, numberedFloat.height() * scale, Double.NaN);
//}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
 * A unit class that encapsulates all in-exact search
 * parsing and conversion from similarities to edit distances
 * etc.
 */
public final class Fuzziness implements ToXContentFragment, Writeable {

    public static final String X_FIELD_NAME = "fuzziness";
    public static final Fuzziness ZERO = new Fuzziness(0);
    public static final Fuzziness ONE = new Fuzziness(1);
    public static final Fuzziness TWO = new Fuzziness(2);
    public static final Fuzziness AUTO = new Fuzziness("AUTO");
    public static final ParseField FIELD = new ParseField(X_FIELD_NAME);
    private static final int DEFAULT_LOW_DISTANCE = 3;
    private static final int DEFAULT_HIGH_DISTANCE = 6;

    // Canonical representation: "0"/"1"/"2" for fixed edit distances, or
    // "AUTO" for term-length-dependent fuzziness.
    private final String fuzziness;
    // Term-length thresholds used by AUTO — see asDistance(String).
    private int lowDistance = DEFAULT_LOW_DISTANCE;
    private int highDistance = DEFAULT_HIGH_DISTANCE;

    private Fuzziness(int fuzziness) {
        if (fuzziness != 0 && fuzziness != 1 && fuzziness != 2) {
            throw new IllegalArgumentException("Valid edit distances are [0, 1, 2] but was [" + fuzziness + "]");
        }
        this.fuzziness = Integer.toString(fuzziness);
    }

    private Fuzziness(String fuzziness) {
        if (fuzziness == null || fuzziness.isEmpty()) {
            // BUG FIX: message now covers the empty-string case this branch also rejects
            throw new IllegalArgumentException("fuzziness can't be null or empty!");
        }
        this.fuzziness = fuzziness.toUpperCase(Locale.ROOT);
    }

    private Fuzziness(String fuzziness, int lowDistance, int highDistance) {
        this(fuzziness);
        if (lowDistance < 0 || highDistance < 0 || lowDistance > highDistance) {
            // BUG FIX: message now matches the actual constraint (>= 0, not > 0)
            throw new IllegalArgumentException("fuzziness wrongly configured, must be: lowDistance >= 0, highDistance" +
                " >= 0 and lowDistance <= highDistance ");
        }
        this.lowDistance = lowDistance;
        this.highDistance = highDistance;
    }

    /**
     * Read from a stream.
     */
    public Fuzziness(StreamInput in) throws IOException {
        fuzziness = in.readString();
        // custom AUTO bounds were introduced in 6.1.0; the flag boolean says
        // whether the sender serialized them
        if (in.getVersion().onOrAfter(Version.V_6_1_0) && in.readBoolean()) {
            lowDistance = in.readVInt();
            highDistance = in.readVInt();
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(fuzziness);
        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
            // we cannot serialize the low/high bounds since the other node does not know about them.
            // This is a best-effort to not fail queries in case the cluster is being upgraded and users
            // start using features that are not available on all nodes.
            if (isAutoWithCustomValues()) {
                out.writeBoolean(true);
                out.writeVInt(lowDistance);
                out.writeVInt(highDistance);
            } else {
                out.writeBoolean(false);
            }
        }
    }

    /**
     * Creates a {@link Fuzziness} instance from an edit distance. The value must be one of <tt>[0, 1, 2]</tt>
     *
     * Note: Using this method only makes sense if the field you are applying Fuzziness to is some sort of string.
     */
    public static Fuzziness fromEdits(int edits) {
        return new Fuzziness(edits);
    }

    /**
     * Builds a {@link Fuzziness} from an arbitrary object: an existing
     * instance is returned as-is; "AUTO" / "AUTO:low,high" strings map to
     * the AUTO forms; anything else is taken as a literal fuzziness value.
     */
    public static Fuzziness build(Object fuzziness) {
        if (fuzziness instanceof Fuzziness) {
            return (Fuzziness) fuzziness;
        }
        String string = fuzziness.toString();
        if (AUTO.asString().equalsIgnoreCase(string)) {
            return AUTO;
        } else if (string.toUpperCase(Locale.ROOT).startsWith(AUTO.asString() + ":")) {
            return parseCustomAuto(string);
        }
        return new Fuzziness(string);
    }

    /**
     * Parses the "AUTO:low,high" form into an AUTO fuzziness with custom
     * term-length thresholds.
     *
     * @throws ElasticsearchParseException if the bounds are missing or not integers
     */
    private static Fuzziness parseCustomAuto( final String string) {
        assert string.toUpperCase(Locale.ROOT).startsWith(AUTO.asString() + ":");
        String[] fuzzinessLimit = string.substring(AUTO.asString().length() + 1).split(",");
        if (fuzzinessLimit.length == 2) {
            try {
                int lowerLimit = Integer.parseInt(fuzzinessLimit[0]);
                int highLimit = Integer.parseInt(fuzzinessLimit[1]);
                return new Fuzziness("AUTO", lowerLimit, highLimit);
            } catch (NumberFormatException e) {
                throw new ElasticsearchParseException("failed to parse [{}] as a \"auto:int,int\"", e,
                    string);
            }
        } else {
            throw new ElasticsearchParseException("failed to find low and high distance values");
        }
    }

    /**
     * Parses a fuzziness value from the parser's current token (string or
     * number). The shared ZERO/ONE/TWO/AUTO constants are reused when they
     * apply.
     */
    public static Fuzziness parse(XContentParser parser) throws IOException {
        XContentParser.Token token = parser.currentToken();
        switch (token) {
            case VALUE_STRING:
            case VALUE_NUMBER:
                final String fuzziness = parser.text();
                if (AUTO.asString().equalsIgnoreCase(fuzziness)) {
                    return AUTO;
                } else if (fuzziness.toUpperCase(Locale.ROOT).startsWith(AUTO.asString() + ":")) {
                    return parseCustomAuto(fuzziness);
                }
                try {
                    final int minimumSimilarity = Integer.parseInt(fuzziness);
                    switch (minimumSimilarity) {
                        case 0:
                            return ZERO;
                        case 1:
                            return ONE;
                        case 2:
                            return TWO;
                        default:
                            return build(fuzziness);
                    }
                } catch (NumberFormatException ex) {
                    return build(fuzziness);
                }
            default:
                throw new IllegalArgumentException("Can't parse fuzziness on token: [" + token + "]");
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(X_FIELD_NAME, fuzziness);
        return builder;
    }

    /**
     * @return the edit distance for a term of average length (see termLen)
     */
    public int asDistance() {
        return asDistance(null);
    }

    /**
     * Converts this fuzziness to a concrete edit distance for the given
     * text: for AUTO the distance is 0/1/2 depending on the term length vs
     * the low/high thresholds; otherwise the numeric value capped at 2.
     */
    public int asDistance(String text) {
        if (this.equals(AUTO)) { //AUTO
            final int len = termLen(text);
            if (len < lowDistance) {
                return 0;
            } else if (len < highDistance) {
                return 1;
            } else {
                return 2;
            }
        }
        return Math.min(2, (int) asFloat());
    }

    /**
     * @return the fuzziness as a float; any AUTO form maps to 1f
     */
    public float asFloat() {
        if (this.equals(AUTO) || isAutoWithCustomValues()) {
            return 1f;
        }
        // fuzziness is already a String — the previous redundant toString() call was dropped
        return Float.parseFloat(fuzziness);
    }

    private int termLen(String text) {
        return text == null ? 5 : text.codePointCount(0, text.length()); // 5 avg term length in english
    }

    /**
     * @return the canonical string form, including custom AUTO bounds as
     *         "AUTO:low,high" when configured
     */
    public String asString() {
        if (isAutoWithCustomValues()) {
            return fuzziness + ":" + lowDistance + "," + highDistance;
        }
        return fuzziness;
    }

    private boolean isAutoWithCustomValues() {
        return fuzziness.startsWith("AUTO") && (lowDistance != DEFAULT_LOW_DISTANCE ||
            highDistance != DEFAULT_HIGH_DISTANCE);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        Fuzziness other = (Fuzziness) obj;
        // NOTE: custom AUTO low/high bounds are deliberately not compared, so
        // asDistance's this.equals(AUTO) check matches custom-AUTO instances.
        return Objects.equals(fuzziness, other.fuzziness);
    }

    @Override
    public int hashCode() {
        return fuzziness.hashCode();
    }
}
| |
/*
* Copyright ScenPro, Inc and SemanticBits, LLC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/labviewer/LICENSE.txt for details.
*/
package gov.nih.nci.cagrid.labviewer.service;
import gov.nih.nci.cabig.ctms.suite.authorization.SuiteAuthorizationAccessException;
import gov.nih.nci.cabig.ctms.suite.authorization.SuiteRole;
import gov.nih.nci.cabig.ctms.suite.authorization.SuiteRoleMembership;
import gov.nih.nci.cagrid.labviewer.service.globus.LabLoaderAuthorization;
import gov.nih.nci.cagrid.labviewer.xml.HL7v3CtLabUnMarshaller;
import gov.nih.nci.lv.auth.LabViewerAuthorizationHelper;
import gov.nih.nci.ctom.ctlab.handler.ProtocolHandler;
import java.rmi.RemoteException;
import java.sql.Connection;
import java.util.Map;
import org.apache.log4j.Logger;
/**
* LabLoaderImpl is the implementation of the Lab Loader grid service to accept
* HL7v3 messages and persist them to the CTODS database.
*
* @author Michael Holck
*/
public class LabLoaderImpl extends LabLoaderImplBase
{
// Per-instance logger for this service implementation.
Logger log = Logger.getLogger(getClass());

// Helper used by checkAuthorization to look up a user's suite role memberships.
private LabViewerAuthorizationHelper authorizationHelper;

/**
 * Default constructor required by the grid service framework.
 *
 * @throws RemoteException if the base service fails to initialize
 */
public LabLoaderImpl() throws RemoteException
{
	super();
}
/**
 * loadLab method unmarshalls the Lab message and calls into the DAO to
 * persist the Lab message data. The caller's grid identity is checked for
 * the required role before anything is parsed.
 *
 * @param string the raw HL7v3 lab message XML
 * @throws RemoteException if the caller is unauthorized or the message
 *           cannot be parsed or persisted
 */
public void loadLab(java.lang.String string) throws RemoteException
{
	log.info("LabLoader loadLab method called.");
	checkAuthorization(LabLoaderAuthorization.getCallerIdentity(), string);
	// Now unmarshall the HL7v3 message
	HL7v3CtLabUnMarshaller unMarshaller = new HL7v3CtLabUnMarshaller();
	Object obj = null;
	Connection con = null;
	try
	{
		obj = unMarshaller.parseXmlToObject(string);
		// Now save the lab
		ProtocolHandler dao = new ProtocolHandler();
		// obtain the connection; commit manually so the whole message is atomic
		con = dao.getConnection();
		con.setAutoCommit(false);
		if (obj != null)
		{
			// Call into the DAO save Protocol method.
			dao.persist(con, (gov.nih.nci.ctom.ctlab.domain.Protocol) obj);
		}
		// call connection commit
		con.commit();
		log.debug("Message successfully saved to the CTODS Database");
	}// end of try
	catch (Exception ex)
	{
		// BUG FIX: log the full stack trace at error level (was debug,
		// message-only) and propagate the failure instead of silently
		// swallowing it — previously the caller saw success even when the
		// save failed.
		log.error("Error saving lab message to the CTODS Database", ex);
		// BUG FIX: guard the rollback — con is null when getConnection or
		// parseXmlToObject itself failed, which previously caused an NPE here.
		if (con != null)
		{
			try
			{
				// issue rollback in case of exception
				con.rollback();
			}
			catch (Exception e)
			{
				log.error("Error rolling back transaction", e);
			}
		}
		throw new RemoteException("Error saving lab message: " + ex.getMessage());
	}
	finally
	{
		// BUG FIX: only close a connection that was actually obtained
		if (con != null)
		{
			try
			{
				con.close();
			}
			catch (Exception ex)
			{
				log.error("Error closing connection", ex);
			}
		}
	}
}
/**
* @param callerId
* @throws RemoteException
*/
private void checkAuthorization(String callerId, String xml) throws RemoteException
{
if (callerId == null)
{
log.error("Error saving lab: no user credentials provided");
throw new RemoteException("No user credentials provided");
}
log.debug("Service called by: " + callerId);
int beginIndex = callerId.lastIndexOf("=") + 1;
int endIndex = callerId.length();
String username = callerId.substring(beginIndex, endIndex);
log.debug("Username = " + username);
try
{
Map<SuiteRole, SuiteRoleMembership> userRoleMemberships = getAuthorizationHelper().getUserRoleMemberships(username);
SuiteRole labLoaderRole = SuiteRole.LAB_DATA_USER;
if (userRoleMemberships.containsKey(labLoaderRole))
{
log.debug("User role memberships contains role: " + labLoaderRole.toString());
if (labLoaderRole.isScoped())
{
log.debug("Role is scoped: " + labLoaderRole.toString());
SuiteRoleMembership userRoleMembership = userRoleMemberships.get(labLoaderRole);
if (labLoaderRole.isStudyScoped())
{
log.debug("Role is study scoped: " + labLoaderRole.toString());
HL7v3CtLabUnMarshaller unMarshaller = new HL7v3CtLabUnMarshaller();
String studyId = unMarshaller.getStudyId(xml);
if (studyId == null)
{
throw new SuiteAuthorizationAccessException("Role %s is study scoped - study identifier is null", labLoaderRole.getDisplayName());
}
log.debug("StudyId = " + studyId);
// if the user has permission to access specific studies (not all studies), then verify the study
if (!userRoleMembership.isAllStudies() && !userRoleMembership.getStudyIdentifiers().contains(studyId))
{
throw new SuiteAuthorizationAccessException("Username %s is not authorized for study %s", username, studyId);
}
log.debug("User is authorized for study");
}
if (labLoaderRole.isSiteScoped())
{
log.debug("Role is site scoped: " + labLoaderRole.toString());
HL7v3CtLabUnMarshaller unMarshaller = new HL7v3CtLabUnMarshaller();
String siteNciInstituteCode = unMarshaller.getSiteNciInstituteCode(xml);
if (siteNciInstituteCode == null)
{
throw new SuiteAuthorizationAccessException("Role %s is site scoped - site NCI institute code is null", labLoaderRole.getDisplayName());
}
log.debug("Site NCI institute code = " + siteNciInstituteCode);
// if the user has permission to access specific sites (not all sites), then verify the sites
if (!userRoleMembership.isAllSites() && !userRoleMembership.getSiteIdentifiers().contains(siteNciInstituteCode))
{
throw new SuiteAuthorizationAccessException("Username %s is not authorized for site %s", username, siteNciInstituteCode);
}
log.debug("User is authorized for site");
}
}
}
else
{
throw new SuiteAuthorizationAccessException("Username %s is not authorized for role %s", username, labLoaderRole.getDisplayName());
}
}
catch (SuiteAuthorizationAccessException e)
{
log.error("Error saving lab: ", e);
throw new RemoteException(e.getMessage());
}
catch (Exception e)
{
log.error("Error saving lab: ", e);
throw new RemoteException(e.getMessage());
}
}
private synchronized LabViewerAuthorizationHelper getAuthorizationHelper()
{
if (authorizationHelper == null)
{
authorizationHelper = new LabViewerAuthorizationHelper();
}
return authorizationHelper;
}
/**
* @param string
* @throws RemoteException
*/
public void rollback(java.lang.String string) throws RemoteException
{
log.info("LabLoader rollback method called: Not Implemented");
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.util.io;
import com.intellij.openapi.Forceable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.vfs.CharsetToolkit;
import org.jetbrains.annotations.NotNull;
import java.io.*;
import java.nio.ByteBuffer;
/**
 * Random-access file wrapper that routes all reads and writes through a
 * {@link PagePool} page cache, sharing the underlying {@link RandomAccessFile}
 * channels via a static {@link OpenChannelsCache}. The logical length
 * ({@code mySize}) is tracked independently of the physical file length and
 * grows as data is written past the end.
 */
public class RandomAccessDataFile implements Forceable, Closeable {
    protected static final Logger LOG = Logger.getInstance("#com.intellij.util.io.RandomAccessDataFile");
    // Shared cache of open "rw" channels, keyed by file, capped at 150 entries.
    private static final OpenChannelsCache ourCache = new OpenChannelsCache(150, "rw");
    private static int ourFilesCount = 0;
    // Creation ordinal; doubles as the hashCode (see hashCode()).
    private final int myCount = ourFilesCount++;
    private final File myFile;
    private final PagePool myPool;
    // Last known file position, used only for DEBUG seek accounting.
    private long lastSeek = -1L;
    // Scratch buffer for the fixed-size putInt/getLong-style accessors.
    // NOTE(review): shared without synchronization — concurrent typed reads or
    // writes on the same instance would race on this buffer; confirm callers
    // serialize access.
    private final byte[] myTypedIOBuffer = new byte[8];
    // DEBUG-only trace writer; null unless DEBUG is true.
    // NOTE(review): never closed/flushed anywhere in this class.
    private final FileWriter log;
    // Logical length of the data; may exceed the physical file length until flushed.
    private volatile long mySize;
    private volatile boolean myIsDirty = false;
    private volatile boolean myIsDisposed = false;
    private static final boolean DEBUG = false;

    public RandomAccessDataFile(@NotNull File file) throws IOException {
        this(file, PagePool.SHARED);
    }

    /**
     * Opens an existing file for paged random access.
     *
     * @throws FileNotFoundException if {@code file} does not already exist —
     *             this class never creates the file itself
     */
    public RandomAccessDataFile(@NotNull File file, @NotNull PagePool pool) throws IOException {
        myPool = pool;
        myFile = file;
        if (!file.exists()) {
            throw new FileNotFoundException(file.getPath() + " does not exist");
        }
        mySize = file.length();
        if (DEBUG) {
            log = new FileWriter(file.getPath() + ".log");
        }
        else {
            log = null;
        }
    }

    @NotNull
    public File getFile() {
        return myFile;
    }

    /**
     * Writes {@code len} bytes at {@code addr} through the page cache,
     * extending the logical size and marking the file dirty. May touch
     * several pages when the range crosses page boundaries.
     */
    public void put(long addr, byte[] bytes, int off, int len) {
        assertNotDisposed();
        myIsDirty = true;
        mySize = Math.max(mySize, addr + len);
        while (len > 0) {
            final Page page = myPool.alloc(this, addr);
            int written = page.put(addr, bytes, off, len);
            len -= written;
            addr += written;
            off += written;
        }
    }

    /**
     * Reads {@code len} bytes starting at {@code addr} through the page cache.
     */
    public void get(long addr, byte[] bytes, int off, int len) {
        assertNotDisposed();
        while (len > 0) {
            final Page page = myPool.alloc(this, addr);
            int read = page.get(addr, bytes, off, len);
            len -= read;
            addr += read;
            off += read;
        }
    }

    // Returns the channel borrowed via getRandomAccessFile() to the shared cache.
    private void releaseFile() {
        ourCache.releaseChannel(myFile);
    }

    // Borrows a shared channel; every call must be paired with releaseFile().
    private RandomAccessFile getRandomAccessFile() throws FileNotFoundException {
        return ourCache.getChannel(myFile);
    }

    public void putInt(long addr, int value) {
        Bits.putInt(myTypedIOBuffer, 0, value);
        put(addr, myTypedIOBuffer, 0, 4);
    }

    public int getInt(long addr) {
        get(addr, myTypedIOBuffer, 0, 4);
        return Bits.getInt(myTypedIOBuffer, 0);
    }

    public void putLong(long addr, long value) {
        Bits.putLong(myTypedIOBuffer, 0, value);
        put(addr, myTypedIOBuffer, 0, 8);
    }

    public void putByte(final long addr, final byte b) {
        myTypedIOBuffer[0] = b;
        put(addr, myTypedIOBuffer, 0, 1);
    }

    public byte getByte(long addr) {
        get(addr, myTypedIOBuffer, 0, 1);
        return myTypedIOBuffer[0];
    }

    public long getLong(long addr) {
        get(addr, myTypedIOBuffer, 0, 8);
        return Bits.getLong(myTypedIOBuffer, 0);
    }

    /**
     * Reads a string stored as a 4-byte length prefix followed by UTF-8 bytes
     * (the layout written by {@link #putUTF}).
     */
    public String getUTF(long addr) {
        int len = getInt(addr);
        byte[] bytes = new byte[len];
        get(addr + 4, bytes, 0, len);
        return new String(bytes, CharsetToolkit.UTF8_CHARSET);
    }

    public void putUTF(long addr, String value) {
        final byte[] bytes = value.getBytes(CharsetToolkit.UTF8_CHARSET);
        putInt(addr, bytes.length);
        put(addr + 4, bytes, 0, bytes.length);
    }

    /** Logical length (includes unflushed writes), not the on-disk length. */
    public long length() {
        assertNotDisposed();
        return mySize;
    }

    /** On-disk length; returns 0 if the file cannot be opened. */
    public long physicalLength() {
        assertNotDisposed();
        long res;
        try {
            RandomAccessFile file = getRandomAccessFile();
            try {
                synchronized (file) {
                    res = file.length();
                }
            }
            finally {
                releaseFile();
            }
        }
        catch (IOException e) {
            return 0;
        }
        return res;
    }

    /**
     * Flushes this file's pages and permanently closes its cached channel.
     * Idempotent; all other operations fail the disposed assertion afterwards.
     */
    public void dispose() {
        if (myIsDisposed) return;
        myPool.flushPages(this);
        ourCache.closeChannel(myFile);
        myIsDisposed = true;
    }

    @Override
    public void close() {
        dispose();
    }

    /**
     * Flushes dirty pages to underlying buffers
     */
    @Override
    public void force() {
        assertNotDisposed();
        if (isDirty()) {
            myPool.flushPages(this);
            myIsDirty = false;
        }
    }

    /**
     * Flushes dirty pages to buffers and saves them to disk
     */
    public void sync() {
        force();
        try {
            RandomAccessFile file = getRandomAccessFile();
            file.getChannel().force(true);
        }
        catch (IOException ignored) {
        }
        finally {
            releaseFile();
        }
    }

    /**
     * Flushes at most {@code maxPagesToFlush} dirty pages; the file stays
     * dirty if the pool reports pages remaining.
     */
    public void flushSomePages(int maxPagesToFlush) {
        assertNotDisposed();
        if (isDirty()) {
            myIsDirty = !myPool.flushPages(this, maxPagesToFlush);
        }
    }

    @Override
    public boolean isDirty() {
        assertNotDisposed();
        return myIsDirty;
    }

    public boolean isDisposed() {
        return myIsDisposed;
    }

    // Logs (does not throw) on use-after-dispose, then lets the caller proceed.
    private void assertNotDisposed() {
        if (myIsDisposed) {
            LOG.error("storage file is disposed: " + myFile);
        }
    }

    // Global I/O statistics, mutated without synchronization (best-effort counters).
    public static int totalReads = 0;
    public static long totalReadBytes = 0;
    public static int seekcount = 0;
    public static int totalWrites = 0;
    public static long totalWriteBytes = 0;

    /**
     * Fills {@code page}'s buffer from disk. Called by the page pool on a
     * cache miss; the read is serialized on the shared channel monitor.
     */
    void loadPage(final Page page) {
        assertNotDisposed();
        try {
            final RandomAccessFile file = getRandomAccessFile();
            try {
                synchronized (file) {
                    seek(file, page.getOffset());
                    final ByteBuffer buf = page.getBuf();
                    totalReads++;
                    totalReadBytes += Page.PAGE_SIZE;
                    if (DEBUG) {
                        log.write("Read at: \t" + page.getOffset() + "\t len: " + Page.PAGE_SIZE + ", size: " + mySize + "\n");
                    }
                    // NOTE(review): the return value of read() is ignored; a
                    // short read (e.g. the final partial page) is not retried —
                    // confirm whether callers tolerate this.
                    file.read(buf.array(), 0, Page.PAGE_SIZE);
                    lastSeek += Page.PAGE_SIZE;
                }
            }
            finally {
                releaseFile();
            }
        }
        catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Writes the dirty region [start, end) of {@code page} back to disk. */
    void flushPage(final Page page, int start, int end) {
        assertNotDisposed();
        try {
            flush(page.getBuf(), page.getOffset() + start, start, end - start);
        }
        catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // Writes length bytes of buf to disk, clamped so the file never grows past
    // the logical size mySize.
    private void flush(final ByteBuffer buf, final long fileOffset, final int bufOffset, int length) throws IOException {
        if (fileOffset + length > mySize) {
            length = (int)(mySize - fileOffset);
        }
        final RandomAccessFile file = getRandomAccessFile();
        try {
            synchronized (file) {
                seek(file, fileOffset);
                totalWrites++;
                totalWriteBytes += length;
                if (DEBUG) {
                    log.write("Write at: \t" + fileOffset + "\t len: " + length + ", size: " + mySize + ", filesize: " + file.length() + "\n");
                }
                file.write(buf.array(), bufOffset, length);
                lastSeek += length;
            }
        }
        finally {
            releaseFile();
        }
    }

    // Positions the channel; in DEBUG builds also records non-sequential seeks.
    private void seek(final RandomAccessFile file, final long fileOffset) throws IOException {
        if (DEBUG) {
            if (lastSeek != -1L && fileOffset != lastSeek) {
                long delta = fileOffset - lastSeek;
                seekcount++;
                log.write("Seeking: " + delta + "\n");
            }
            lastSeek = fileOffset;
        }
        file.seek(fileOffset);
    }

    @Override
    public int hashCode() {
        // Identity is the creation ordinal, not file-based equality.
        return myCount;
    }

    @Override
    public synchronized String toString() {
        return "RandomAccessFile[" + myFile + ", dirty=" + myIsDirty + "]";
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.builder.impl;
import java.io.Externalizable;
import java.io.IOException;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.compiler.TypeDeclarationError;
import org.drools.compiler.lang.descr.AbstractClassTypeDeclarationDescr;
import org.drools.compiler.lang.descr.AnnotationDescr;
import org.drools.compiler.lang.descr.EnumDeclarationDescr;
import org.drools.compiler.lang.descr.EnumLiteralDescr;
import org.drools.compiler.lang.descr.PatternDescr;
import org.drools.compiler.lang.descr.QualifiedName;
import org.drools.compiler.lang.descr.TypeDeclarationDescr;
import org.drools.compiler.lang.descr.TypeFieldDescr;
import org.drools.compiler.rule.builder.util.AnnotationFactory;
import org.drools.core.factmodel.AnnotationDefinition;
import org.drools.core.factmodel.ClassDefinition;
import org.drools.core.factmodel.EnumClassDefinition;
import org.drools.core.factmodel.EnumLiteralDefinition;
import org.drools.core.factmodel.FieldDefinition;
import org.drools.core.factmodel.traits.Thing;
import org.drools.core.factmodel.traits.Traitable;
import org.drools.core.factmodel.traits.TraitableBean;
import org.drools.core.rule.TypeDeclaration;
import org.drools.core.util.ClassUtils;
import org.drools.core.util.asm.ClassFieldInspector;
import org.kie.api.definition.type.Key;
import org.kie.api.definition.type.Position;
import org.kie.api.io.Resource;
import org.kie.soup.project.datamodel.commons.types.TypeResolver;
/**
 * Builds runtime {@link ClassDefinition}s (classes, traits, enums) from parsed
 * type-declaration descriptors, wiring annotations, enum literals and sorted
 * field definitions. Errors are reported through the owning
 * {@link KnowledgeBuilderImpl} rather than thrown.
 */
public class ClassDefinitionFactory {

    protected KnowledgeBuilderImpl kbuilder;

    public ClassDefinitionFactory(KnowledgeBuilderImpl kbuilder) {
        this.kbuilder = kbuilder;
    }

    /**
     * Generates a bean, and adds it to the composite class loader that
     * everything is using.
     *
     * @param unprocesseableDescrs receives this descriptor (keyed by full type
     *            name) if any wiring step fails, so it can be retried/reported
     * @return the (possibly incomplete) class definition, also attached to
     *         {@code type}
     */
    public ClassDefinition generateDeclaredBean(AbstractClassTypeDeclarationDescr typeDescr,
                                                TypeDeclaration type,
                                                PackageRegistry pkgRegistry,
                                                List<TypeDefinition> unresolvedTypeDefinitions,
                                                Map<String, AbstractClassTypeDeclarationDescr> unprocesseableDescrs) {
        ClassDefinition def = createClassDefinition(typeDescr, type);
        boolean success = true;
        success &= wireAnnotationDefs(typeDescr, type, def, pkgRegistry.getTypeResolver());
        success &= wireEnumLiteralDefs(typeDescr, type, def);
        success &= wireFields(typeDescr, type, def, pkgRegistry, unresolvedTypeDefinitions);
        if (!success) {
            unprocesseableDescrs.put(typeDescr.getType().getFullName(), typeDescr);
        }
        // attach the class definition, it will be completed later
        type.setTypeClassDef(def);
        return def;
    }

    /**
     * Creates the bare definition for the declared type: resolves supertypes
     * (defaulting to Object, rejecting multiple inheritance for classes),
     * the implemented interfaces, and the TRAIT/ENUM/CLASS kind.
     *
     * @return the new definition, or {@code null} if the declaration has more
     *         than one supertype (an error is reported on the builder)
     */
    protected ClassDefinition createClassDefinition(AbstractClassTypeDeclarationDescr typeDescr, TypeDeclaration type) {
        // extracts type, supertype and interfaces
        String fullName = typeDescr.getType().getFullName();
        if (type.getKind().equals(TypeDeclaration.Kind.CLASS)) {
            TypeDeclarationDescr tdescr = (TypeDeclarationDescr) typeDescr;
            if (tdescr.getSuperTypes().size() > 1) {
                kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, "Declared class " + fullName + " - has more than one supertype;"));
                return null;
            } else if (tdescr.getSuperTypes().isEmpty()) {
                tdescr.addSuperType("java.lang.Object");
            }
        }

        Traitable traitableAnn = typeDescr.getTypedAnnotation(Traitable.class);
        boolean traitable = traitableAnn != null;

        // Declared supertypes plus the Thing marker (used by the TRAIT case).
        String[] fullSuperTypes = new String[typeDescr.getSuperTypes().size() + 1];
        int j = 0;
        for (QualifiedName qname : typeDescr.getSuperTypes()) {
            fullSuperTypes[j++] = qname.getFullName();
        }
        fullSuperTypes[j] = Thing.class.getName();

        // Traitable beans must be Externalizable; plain beans are Serializable.
        List<String> interfaceList = new ArrayList<String>();
        interfaceList.add(traitable ? Externalizable.class.getName() : Serializable.class.getName());
        if (traitable) {
            interfaceList.add(TraitableBean.class.getName());
        }
        String[] interfaces = interfaceList.toArray(new String[interfaceList.size()]);

        // prepares a class definition
        ClassDefinition def;
        switch (type.getKind()) {
            case TRAIT:
                def = new ClassDefinition(fullName,
                                          Object.class.getName(),
                                          fullSuperTypes);
                break;
            case ENUM:
                def = new EnumClassDefinition(fullName,
                                              fullSuperTypes[0],
                                              null);
                break;
            case CLASS:
            default:
                def = new ClassDefinition(fullName,
                                          fullSuperTypes[0],
                                          interfaces);
                def.setTraitable(traitable, traitableAnn != null && traitableAnn.logical());
        }
        return def;
    }

    /**
     * Attaches the declaration's annotations to the definition. Unresolvable
     * annotation names are kept as plain metadata; undefined annotation
     * properties are reported as builder errors.
     *
     * @return always {@code true} (failures are reported, not fatal)
     */
    protected boolean wireAnnotationDefs(AbstractClassTypeDeclarationDescr typeDescr, TypeDeclaration type, ClassDefinition def, TypeResolver resolver) {
        for (AnnotationDescr annotationDescr : typeDescr.getAnnotations()) {
            Class annotation = null;
            try {
                annotation = annotationDescr.getFullyQualifiedName() != null ? resolver.resolveType(annotationDescr.getFullyQualifiedName()) : null;
            } catch (ClassNotFoundException e) {
                // Not a resolvable annotation class: fall through and keep it
                // as metadata below.
                continue;
            }
            if (annotation != null && annotation.isAnnotation()) {
                try {
                    AnnotationDefinition annotationDefinition = AnnotationDefinition.build(annotation,
                                                                                          annotationDescr.getValueMap(),
                                                                                          resolver);
                    def.addAnnotation(annotationDefinition);
                } catch (NoSuchMethodException nsme) {
                    kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr,
                                                                      "Annotated type " + typeDescr.getType().getFullName() +
                                                                      " - undefined property in @annotation " +
                                                                      annotationDescr.getName() + ": " +
                                                                      nsme.getMessage() + ";"));
                }
            }
            // Kie built-in annotations (and unresolved ones) also double as metadata.
            if (annotation == null || annotation.getCanonicalName().startsWith("org.kie.api.definition.type")) {
                def.addMetaData(annotationDescr.getName(), annotationDescr.getSingleValue());
            }
        }
        return true;
    }

    /**
     * Copies enum literals from the descriptor into the (enum) definition.
     * No-op for non-enum kinds.
     */
    protected boolean wireEnumLiteralDefs(AbstractClassTypeDeclarationDescr typeDescr, TypeDeclaration type, ClassDefinition def) {
        // add enum literals, if appropriate
        if (type.getKind() == TypeDeclaration.Kind.ENUM) {
            for (EnumLiteralDescr lit : ((EnumDeclarationDescr) typeDescr).getLiterals()) {
                ((EnumClassDefinition) def).addLiteral(
                        new EnumLiteralDefinition(lit.getName(), lit.getConstructorArgs())
                );
            }
        }
        return true;
    }

    /**
     * Creates sorted field definitions for the declaration and adds them to
     * the definition.
     *
     * @return {@code false} if any field's type is still unresolved (the
     *         caller will park the descriptor for a later pass), {@code true}
     *         otherwise
     */
    protected boolean wireFields(AbstractClassTypeDeclarationDescr typeDescr,
                                 TypeDeclaration type,
                                 ClassDefinition def,
                                 PackageRegistry pkgRegistry,
                                 List<TypeDefinition> unresolvedTypeDefinitions) {
        // fields definitions are created. will be used by subclasses, if any.
        // Fields are SORTED in the process
        if (!typeDescr.getFields().isEmpty()) {
            if (unresolvedTypeDefinitions != null && !unresolvedTypeDefinitions.isEmpty()) {
                // (removed a redundant inner null-check on unresolvedTypeDefinitions,
                // which is already guaranteed non-null by the enclosing condition)
                for (TypeFieldDescr fld : typeDescr.getFields().values()) {
                    for (TypeDefinition typeDef : unresolvedTypeDefinitions) {
                        if (fld.getPattern().getObjectType().equals(typeDef.getTypeClassName())) {
                            return false;
                        }
                    }
                }
            }

            List<FieldDefinition> fieldDefs = sortFields(typeDescr.getFields(), pkgRegistry.getTypeResolver(), kbuilder);
            int i = 0;
            for (FieldDefinition fieldDef : fieldDefs) {
                fieldDef.setIndex(i++);
                def.addField(fieldDef);
            }
        }
        return true;
    }

    /**
     * Builds field definitions from descriptors and orders them by declared
     * position: explicit indexes and @Position values claim slots first, the
     * remaining fields are assigned the lowest free slots (priority =
     * slot * 256 + insertion order, so ties keep declaration order).
     */
    private static List<FieldDefinition> sortFields(Map<String, TypeFieldDescr> fields,
                                                    TypeResolver typeResolver,
                                                    KnowledgeBuilderImpl kbuilder) {
        List<FieldDefinition> fieldDefs = new ArrayList<FieldDefinition>(fields.size());
        int maxDeclaredPos = 0;
        BitSet occupiedPositions = new BitSet(fields.size());

        for (TypeFieldDescr field : fields.values()) {
            String typeName = field.getPattern().getObjectType();
            // (dropped the no-op alias `typeNameKey` that simply mirrored typeName)
            String fullFieldType = kbuilder != null ?
                    TypeDeclarationUtils.toBuildableType(typeName, kbuilder.getRootClassLoader()) :
                    typeName;

            FieldDefinition fieldDef = new FieldDefinition(field.getFieldName(), fullFieldType);
            fieldDefs.add(fieldDef);

            if (field.hasOverride()) {
                fieldDef.setOverriding(field.getOverriding().getPattern().getObjectType());
            }
            fieldDef.setInherited(field.isInherited());
            fieldDef.setRecursive(field.isRecursive());
            fieldDef.setInitExpr(TypeDeclarationUtils.rewriteInitExprWithImports(field.getInitExpr(), typeResolver));

            if (field.getIndex() >= 0) {
                // Explicit index from the descriptor claims its position outright.
                int pos = field.getIndex();
                occupiedPositions.set(pos);
                maxDeclaredPos = Math.max(maxDeclaredPos, pos);
                fieldDef.addMetaData("position", pos);
            } else {
                // Otherwise honour an @Position annotation, if present.
                Position position = field.getTypedAnnotation(Position.class);
                if (position != null) {
                    int pos = position.value();
                    field.setIndex(pos);
                    occupiedPositions.set(pos);
                    maxDeclaredPos = Math.max(maxDeclaredPos, pos);
                    fieldDef.addMetaData("position", pos);
                }
            }

            if (field.hasAnnotation(Key.class)) {
                fieldDef.setKey(true);
                fieldDef.addMetaData("key", null);
            }

            for (AnnotationDescr annotationDescr : field.getAnnotations()) {
                if (annotationDescr.getFullyQualifiedName() == null) {
                    if (annotationDescr.isStrict()) {
                        kbuilder.addBuilderResult(new TypeDeclarationError(field,
                                "Unknown annotation @" + annotationDescr.getName() + " on field " + field.getFieldName()));
                    } else {
                        // Annotation is custom metadata
                        fieldDef.addMetaData(annotationDescr.getName(), annotationDescr.getSingleValue());
                        continue;
                    }
                }
                Annotation annotation = AnnotationFactory.buildAnnotation(typeResolver, annotationDescr);
                if (annotation != null) {
                    try {
                        AnnotationDefinition annotationDefinition = AnnotationDefinition.build(annotation.annotationType(),
                                field.getAnnotation(annotationDescr.getFullyQualifiedName()).getValueMap(),
                                typeResolver);
                        fieldDef.addAnnotation(annotationDefinition);
                    } catch (Exception e) {
                        kbuilder.addBuilderResult(new TypeDeclarationError(field,
                                "Annotated field " + field.getFieldName() +
                                " - undefined property in @annotation " +
                                annotationDescr.getName() + ": " + e.getMessage() + ";"));
                    }
                } else {
                    if (annotationDescr.isStrict()) {
                        kbuilder.addBuilderResult(new TypeDeclarationError(field,
                                "Unknown annotation @" + annotationDescr.getName() + " on field " + field.getFieldName()));
                    }
                }
            }

            fieldDef.setDeclIndex(field.getIndex());
        }

        // Assign slots to fields without a declared position: reuse gaps below
        // the highest declared position, then append past it.
        int curr = 0;
        for (FieldDefinition fieldDef : fieldDefs) {
            if (fieldDef.getDeclIndex() < 0) {
                int freePos = occupiedPositions.nextClearBit(0);
                if (freePos < maxDeclaredPos) {
                    occupiedPositions.set(freePos);
                } else {
                    freePos = maxDeclaredPos + 1;
                }
                fieldDef.setPriority(freePos * 256 + curr++);
            } else {
                fieldDef.setPriority(fieldDef.getDeclIndex() * 256 + curr++);
            }
        }

        Collections.sort(fieldDefs);
        return fieldDefs;
    }

    /**
     * Populates a definition by introspecting an existing compiled class:
     * getter/setter pairs become fields (positions taken from @Position),
     * and all implemented interfaces are merged in.
     */
    public static void populateDefinitionFromClass(ClassDefinition def, Resource resource, Class<?> concrete, boolean asTrait) {
        try {
            def.setClassName(concrete.getName());
            if (concrete.getSuperclass() != null) {
                def.setSuperClass(concrete.getSuperclass().getName());
            }
            ClassFieldInspector inspector = new ClassFieldInspector(concrete);
            Map<String, Method> methods = inspector.getGetterMethods();
            Map<String, Method> setters = inspector.getSetterMethods();
            // (removed unused local `int j = 0;`)
            Map<String, TypeFieldDescr> fields = new HashMap<String, TypeFieldDescr>();
            for (String fieldName : methods.keySet()) {
                // Trait plumbing accessors are not user fields.
                if (asTrait && ("core".equals(fieldName) || "fields".equals(fieldName))) {
                    continue;
                }
                if (!inspector.isNonGetter(fieldName) && setters.keySet().contains(fieldName)) {
                    Position position = null;
                    if (!concrete.isInterface()) {
                        try {
                            Field fld = concrete.getDeclaredField(fieldName);
                            position = fld.getAnnotation(Position.class);
                        } catch (NoSuchFieldException nsfe) {
                            // @Position can only annotate fields. This means that a getter/setter pair was found with no field
                        }
                    }
                    Class ret = methods.get(fieldName).getReturnType();
                    TypeFieldDescr field = new TypeFieldDescr();
                    field.setResource(resource);
                    field.setFieldName(fieldName);
                    field.setPattern(new PatternDescr(ret.getName()));
                    field.setIndex(position != null ? position.value() : -1);
                    fields.put(fieldName, field);
                }
            }
            if (!fields.isEmpty()) {
                List<FieldDefinition> fieldDefs = sortFields(fields, null, null);
                int i = 0;
                for (FieldDefinition fieldDef : fieldDefs) {
                    fieldDef.setIndex(i++);
                    def.addField(fieldDef);
                }
            }

            Set<String> interfaces = new HashSet<String>();
            Collections.addAll(interfaces, def.getInterfaces());
            for (Class iKlass : ClassUtils.getAllImplementedInterfaceNames(concrete)) {
                interfaces.add(iKlass.getName());
            }
            def.setInterfaces(interfaces.toArray(new String[interfaces.size()]));
            def.setDefinedClass(concrete);
        } catch (IOException e) {
            // NOTE(review): swallows the inspection failure after printing;
            // ideally this would be reported through a logger/builder result,
            // but no logger is available in this static context.
            e.printStackTrace();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.kafka;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
import org.apache.drill.common.expression.BooleanOperator;
import org.apache.drill.common.expression.FunctionCall;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
public class KafkaPartitionScanSpecBuilder extends
AbstractExprVisitor<List<KafkaPartitionScanSpec>,Void,RuntimeException> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(KafkaPartitionScanSpecBuilder.class);
private final LogicalExpression le;
private final KafkaGroupScan groupScan;
private final KafkaConsumer<?, ?> kafkaConsumer;
private ImmutableMap<TopicPartition, KafkaPartitionScanSpec> fullScanSpec;
private static final long CLOSE_TIMEOUT_MS = 200;
/**
 * Builds a pushdown visitor for the given group scan and filter condition.
 * Opens a dedicated consumer (byte-array deserializers) used only to resolve
 * timestamps to offsets.
 */
public KafkaPartitionScanSpecBuilder(KafkaGroupScan groupScan, LogicalExpression conditionExp) {
    this.groupScan = groupScan;
    this.le = conditionExp;
    this.kafkaConsumer = new KafkaConsumer<>(
        groupScan.getKafkaStoragePluginConfig().getKafkaConsumerProps(),
        new ByteArrayDeserializer(),
        new ByteArrayDeserializer());
}
/**
 * Walks the filter expression and returns the per-partition scan specs that
 * survive pushdown, or {@code null} when nothing can be pushed down.
 */
public List<KafkaPartitionScanSpec> parseTree() {
    // Snapshot the full (unfiltered) spec for every topic-partition; the
    // visitor methods merge against this baseline.
    ImmutableMap.Builder<TopicPartition, KafkaPartitionScanSpec> specsByPartition = ImmutableMap.builder();
    for (KafkaPartitionScanSpec spec : groupScan.getPartitionScanSpecList()) {
        specsByPartition.put(new TopicPartition(spec.getTopicName(), spec.getPartitionId()), spec);
    }
    fullScanSpec = specsByPartition.build();

    List<KafkaPartitionScanSpec> pushdownSpec = le.accept(this, null);

    // Non-existing / invalid partitions may leave the spec list empty, but
    // Drill currently requires at least one reader per ScanBatch — so emit a
    // zero-length (end, end) range on partition 0 of the topic.
    if (pushdownSpec != null && pushdownSpec.isEmpty()) {
        TopicPartition firstPartition = new TopicPartition(groupScan.getKafkaScanSpec().getTopicName(), 0);
        long endOffset = fullScanSpec.get(firstPartition).getEndOffset();
        pushdownSpec.add(new KafkaPartitionScanSpec(
            firstPartition.topic(), firstPartition.partition(), endOffset, endOffset));
    }
    return pushdownSpec;
}
/** Any expression this visitor does not recognize cannot be pushed down. */
@Override
public List<KafkaPartitionScanSpec> visitUnknown(LogicalExpression e, Void value)
        throws RuntimeException {
    return null; // null == "no pushdown possible"
}
/**
 * Combines child scan specs for booleanOr / booleanAnd. OR requires every
 * argument to be pushable (else the whole operator is abandoned); AND starts
 * from the full spec and narrows it with each pushable argument.
 */
@Override
public List<KafkaPartitionScanSpec> visitBooleanOperator(BooleanOperator op, Void value)
        throws RuntimeException {
    Map<TopicPartition, KafkaPartitionScanSpec> merged = Maps.newHashMap();
    ImmutableList<LogicalExpression> args = op.args;

    if (op.getName().equals("booleanOr")) {
        for (LogicalExpression arg : args) {
            List<KafkaPartitionScanSpec> childSpecs = arg.accept(this, null);
            if (childSpecs == null) {
                // Every disjunct must support pushdown, otherwise give up.
                return null;
            }
            for (KafkaPartitionScanSpec childSpec : childSpecs) {
                TopicPartition tp = new TopicPartition(childSpec.getTopicName(), childSpec.getPartitionId());
                KafkaPartitionScanSpec current = merged.get(tp);
                if (current == null) {
                    // First time this topic-partition appears: union it in.
                    merged.put(tp, childSpec);
                } else {
                    current.mergeScanSpec(op.getName(), childSpec);
                    merged.put(tp, current);
                }
            }
        }
    } else { // booleanAnd
        merged.putAll(fullScanSpec);
        for (LogicalExpression arg : args) {
            List<KafkaPartitionScanSpec> childSpecs = arg.accept(this, null);
            if (childSpecs == null) {
                continue; // non-pushable conjunct: leave current bounds as-is
            }
            // Remember which topic-partitions this conjunct mentioned.
            Set<TopicPartition> mentioned = Sets.newHashSet();
            for (KafkaPartitionScanSpec childSpec : childSpecs) {
                TopicPartition tp = new TopicPartition(childSpec.getTopicName(), childSpec.getPartitionId());
                mentioned.add(tp);
                KafkaPartitionScanSpec current = merged.get(tp);
                if (current != null) {
                    current.mergeScanSpec(op.getName(), childSpec);
                    merged.put(tp, current);
                }
            }
            // An AND on `kafkaPartitionId` can shrink the partition set:
            // drop partitions the conjunct did not mention so we never scan
            // topic-partitions that cannot match.
            merged.keySet().removeIf(tp -> !mentioned.contains(tp));
        }
    }
    return Lists.newArrayList(merged.values());
}
/**
 * Translates a single pushdown-capable predicate (on message timestamp,
 * message offset, or partition id) into per-partition scan specs.
 * Returns {@code null} when the call cannot be pushed down.
 */
@Override
public List<KafkaPartitionScanSpec> visitFunctionCall(FunctionCall call, Void value)
        throws RuntimeException {
    if (!KafkaNodeProcessor.isPushdownFunction(call.getName())) {
        return null; // not a comparison we can translate
    }
    KafkaNodeProcessor processor = KafkaNodeProcessor.process(call);
    if (!processor.isSuccess()) {
        return null; // shape of the call didn't match (e.g. non-constant operand)
    }
    switch (processor.getPath()) {
        case "kafkaMsgTimestamp":
            return createScanSpecForTimestamp(processor.getFunctionName(), processor.getValue());
        case "kafkaMsgOffset":
            return createScanSpecForOffset(processor.getFunctionName(), processor.getValue());
        case "kafkaPartitionId":
            return createScanSpecForPartition(processor.getFunctionName(), processor.getValue());
        default:
            return null;
    }
}
/**
 * Builds scan specs for a timestamp predicate by asking Kafka for the first
 * offset at or after the requested timestamp in every topic-partition.
 */
private List<KafkaPartitionScanSpec> createScanSpecForTimestamp(String functionName,
    Long fieldValue) {
    List<KafkaPartitionScanSpec> scanSpecs = Lists.newArrayList();
    ImmutableSet<TopicPartition> topicPartitions = fullScanSpec.keySet();

    // Strict "greater_than" searches from fieldValue + 1; every other
    // supported comparison searches from fieldValue itself.
    long searchTimestamp = functionName.equals("greater_than") ? fieldValue + 1 : fieldValue;
    Map<TopicPartition, Long> searchByPartition = Maps.newHashMap();
    for (TopicPartition tp : topicPartitions) {
        searchByPartition.put(tp, searchTimestamp);
    }

    Map<TopicPartition, OffsetAndTimestamp> resolved = kafkaConsumer.offsetsForTimes(searchByPartition);
    for (TopicPartition tp : topicPartitions) {
        long endOffset = fullScanSpec.get(tp).getEndOffset();
        OffsetAndTimestamp match = resolved.get(tp);
        // A null entry means no message at/after the timestamp exists in this
        // partition: emit an empty (end, end) range so nothing is read.
        long startOffset = match == null ? endOffset : match.offset();
        scanSpecs.add(new KafkaPartitionScanSpec(tp.topic(), tp.partition(), startOffset, endOffset));
    }
    return scanSpecs;
}
/**
 * Builds scan specs for a predicate on {@code kafkaMsgOffset}. Every range is
 * clamped to each partition's [startOffset, endOffset] so that the consumer is
 * never asked for a non-existent offset (which would make poll() time out).
 */
private List<KafkaPartitionScanSpec> createScanSpecForOffset(String functionName,
    Long fieldValue) {
    List<KafkaPartitionScanSpec> scanSpec = Lists.newArrayList();
    ImmutableSet<TopicPartition> topicPartitions = fullScanSpec.keySet();
    /*
    We should handle the case where the specified offset does not exist in the current context,
    i.e., fieldValue < startOffset or fieldValue > endOffset in a particular topic-partition.
    Else, KafkaConsumer.poll will throw "TimeoutException".
    */
    switch (functionName) {
    case "equal":
        for (TopicPartition tp : topicPartitions) {
            if (fieldValue < fullScanSpec.get(tp).getStartOffset()) {
                // Requested offset precedes the partition's earliest retained
                // offset: emit an empty (end, end) range.
                scanSpec.add(
                    new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
                        fullScanSpec.get(tp).getEndOffset(), fullScanSpec.get(tp).getEndOffset()));
            } else {
                // Clamp to endOffset; the range [val, val+1) reads exactly one
                // message (or nothing when val == endOffset).
                long val = Math.min(fieldValue, fullScanSpec.get(tp).getEndOffset());
                long nextVal = Math.min(val + 1, fullScanSpec.get(tp).getEndOffset());
                scanSpec.add(new KafkaPartitionScanSpec(tp.topic(), tp.partition(), val, nextVal));
            }
        }
        break;
    case "greater_than_or_equal_to":
        for (TopicPartition tp : topicPartitions) {
            // Clamp the lower bound into [startOffset, endOffset].
            long val = bindOffsetToRange(tp, fieldValue);
            scanSpec.add(
                new KafkaPartitionScanSpec(tp.topic(), tp.partition(), val,
                    fullScanSpec.get(tp).getEndOffset()));
        }
        break;
    case "greater_than":
        for (TopicPartition tp : topicPartitions) {
            // Strict comparison: start one past the requested offset, clamped.
            long val = bindOffsetToRange(tp, fieldValue + 1);
            scanSpec.add(
                new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
                    val, fullScanSpec.get(tp).getEndOffset()));
        }
        break;
    case "less_than_or_equal_to":
        for (TopicPartition tp : topicPartitions) {
            // Inclusive upper bound: the exclusive end is fieldValue + 1, clamped.
            long val = bindOffsetToRange(tp, fieldValue + 1);
            scanSpec.add(
                new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
                    fullScanSpec.get(tp).getStartOffset(), val));
        }
        break;
    case "less_than":
        for (TopicPartition tp : topicPartitions) {
            // Exclusive upper bound, clamped into the valid range.
            long val = bindOffsetToRange(tp, fieldValue);
            scanSpec.add(
                new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
                    fullScanSpec.get(tp).getStartOffset(), val));
        }
        break;
    }
    return scanSpec;
}
private List<KafkaPartitionScanSpec> createScanSpecForPartition(String functionName,
Long fieldValue) {
List<KafkaPartitionScanSpec> scanSpecList = Lists.newArrayList();
ImmutableSet<TopicPartition> topicPartitions = fullScanSpec.keySet();
switch (functionName) {
case "equal":
for(TopicPartition tp : topicPartitions) {
if(tp.partition() == fieldValue) {
scanSpecList.add(
new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
fullScanSpec.get(tp).getStartOffset(),
fullScanSpec.get(tp).getEndOffset()));
}
}
break;
case "not_equal":
for(TopicPartition tp : topicPartitions) {
if(tp.partition() != fieldValue) {
scanSpecList.add(
new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
fullScanSpec.get(tp).getStartOffset(),
fullScanSpec.get(tp).getEndOffset()));
}
}
break;
case "greater_than_or_equal_to":
for(TopicPartition tp : topicPartitions) {
if(tp.partition() >= fieldValue) {
scanSpecList.add(
new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
fullScanSpec.get(tp).getStartOffset(),
fullScanSpec.get(tp).getEndOffset()));
}
}
break;
case "greater_than":
for(TopicPartition tp : topicPartitions) {
if(tp.partition() > fieldValue) {
scanSpecList.add(
new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
fullScanSpec.get(tp).getStartOffset(),
fullScanSpec.get(tp).getEndOffset()));
}
}
break;
case "less_than_or_equal_to":
for(TopicPartition tp : topicPartitions) {
if(tp.partition() <= fieldValue) {
scanSpecList.add(
new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
fullScanSpec.get(tp).getStartOffset(),
fullScanSpec.get(tp).getEndOffset()));
}
}
break;
case "less_than":
for(TopicPartition tp : topicPartitions) {
if(tp.partition() < fieldValue) {
scanSpecList.add(
new KafkaPartitionScanSpec(tp.topic(), tp.partition(),
fullScanSpec.get(tp).getStartOffset(),
fullScanSpec.get(tp).getEndOffset()));
}
}
break;
}
return scanSpecList;
}
  // Closes the underlying consumer with a bounded wait so shutdown cannot
  // hang indefinitely on a wedged broker connection.
  void close() {
    kafkaConsumer.close(CLOSE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
  }
private long bindOffsetToRange(TopicPartition tp, long offset) {
return Math.max(fullScanSpec.get(tp).getStartOffset(), Math.min(offset, fullScanSpec.get(tp).getEndOffset()));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.conf;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.commons.configuration.tree.ConfigurationNode;
import org.apache.zeppelin.notebook.repo.ElasticSearchRepo;
import org.apache.zeppelin.notebook.repo.VFSNotebookRepo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Zeppelin configuration.
*/
public class ZeppelinConfiguration extends XMLConfiguration {
private static final String ZEPPELIN_SITE_XML = "zeppelin-site.xml";
private static final long serialVersionUID = 4749305895693848035L;
private static final Logger LOG = LoggerFactory.getLogger(ZeppelinConfiguration.class);
private static ZeppelinConfiguration conf;
  /**
   * Loads configuration from the given zeppelin-site.xml resource.
   *
   * @param url location of the XML configuration file
   * @throws ConfigurationException when the resource cannot be read or parsed
   */
  public ZeppelinConfiguration(URL url) throws ConfigurationException {
    // Keep comma-separated values (e.g. interpreter class lists) as one string
    // instead of letting Commons Configuration split them into multiple values.
    setDelimiterParsingDisabled(true);
    load(url);
  }
public ZeppelinConfiguration() {
ConfVars[] vars = ConfVars.values();
for (ConfVars v : vars) {
if (v.getType() == ConfVars.VarType.BOOLEAN) {
this.setProperty(v.getVarName(), v.getBooleanValue());
} else if (v.getType() == ConfVars.VarType.LONG) {
this.setProperty(v.getVarName(), v.getLongValue());
} else if (v.getType() == ConfVars.VarType.INT) {
this.setProperty(v.getVarName(), v.getIntValue());
} else if (v.getType() == ConfVars.VarType.FLOAT) {
this.setProperty(v.getVarName(), v.getFloatValue());
} else if (v.getType() == ConfVars.VarType.STRING) {
this.setProperty(v.getVarName(), v.getStringValue());
} else {
throw new RuntimeException("Unsupported VarType");
}
}
}
/**
* Load from resource. url = ZeppelinConfiguration.class.getResource(ZEPPELIN_SITE_XML);
*/
public static synchronized ZeppelinConfiguration create() {
if (conf != null) {
return conf;
}
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
URL url;
url = ZeppelinConfiguration.class.getResource(ZEPPELIN_SITE_XML);
if (url == null) {
ClassLoader cl = ZeppelinConfiguration.class.getClassLoader();
if (cl != null) {
url = cl.getResource(ZEPPELIN_SITE_XML);
}
}
if (url == null) {
url = classLoader.getResource(ZEPPELIN_SITE_XML);
}
if (url == null) {
LOG.warn("Failed to load configuration, proceeding with a default");
conf = new ZeppelinConfiguration();
} else {
try {
LOG.info("Load configuration from " + url);
conf = new ZeppelinConfiguration(url);
} catch (ConfigurationException e) {
LOG.warn("Failed to load configuration from " + url + " proceeding with a default", e);
conf = new ZeppelinConfiguration();
}
}
return conf;
}
private String getStringValue(String name, String d) {
List<ConfigurationNode> properties = getRootNode().getChildren();
if (properties == null || properties.size() == 0) {
return d;
}
for (ConfigurationNode p : properties) {
if (p.getChildren("name") != null && p.getChildren("name").size() > 0
&& name.equals(p.getChildren("name").get(0).getValue())) {
return (String) p.getChildren("value").get(0).getValue();
}
}
return d;
}
private int getIntValue(String name, int d) {
List<ConfigurationNode> properties = getRootNode().getChildren();
if (properties == null || properties.size() == 0) {
return d;
}
for (ConfigurationNode p : properties) {
if (p.getChildren("name") != null && p.getChildren("name").size() > 0
&& name.equals(p.getChildren("name").get(0).getValue())) {
return Integer.parseInt((String) p.getChildren("value").get(0).getValue());
}
}
return d;
}
private long getLongValue(String name, long d) {
List<ConfigurationNode> properties = getRootNode().getChildren();
if (properties == null || properties.size() == 0) {
return d;
}
for (ConfigurationNode p : properties) {
if (p.getChildren("name") != null && p.getChildren("name").size() > 0
&& name.equals(p.getChildren("name").get(0).getValue())) {
return Long.parseLong((String) p.getChildren("value").get(0).getValue());
}
}
return d;
}
private float getFloatValue(String name, float d) {
List<ConfigurationNode> properties = getRootNode().getChildren();
if (properties == null || properties.size() == 0) {
return d;
}
for (ConfigurationNode p : properties) {
if (p.getChildren("name") != null && p.getChildren("name").size() > 0
&& name.equals(p.getChildren("name").get(0).getValue())) {
return Float.parseFloat((String) p.getChildren("value").get(0).getValue());
}
}
return d;
}
private boolean getBooleanValue(String name, boolean d) {
List<ConfigurationNode> properties = getRootNode().getChildren();
if (properties == null || properties.size() == 0) {
return d;
}
for (ConfigurationNode p : properties) {
if (p.getChildren("name") != null && p.getChildren("name").size() > 0
&& name.equals(p.getChildren("name").get(0).getValue())) {
return Boolean.parseBoolean((String) p.getChildren("value").get(0).getValue());
}
}
return d;
}
public String getString(ConfVars c) {
return getString(c.name(), c.getVarName(), c.getStringValue());
}
public String getString(String envName, String propertyName, String defaultValue) {
if (System.getenv(envName) != null) {
return System.getenv(envName);
}
if (System.getProperty(propertyName) != null) {
return System.getProperty(propertyName);
}
return getStringValue(propertyName, defaultValue);
}
public int getInt(ConfVars c) {
return getInt(c.name(), c.getVarName(), c.getIntValue());
}
public int getInt(String envName, String propertyName, int defaultValue) {
if (System.getenv(envName) != null) {
return Integer.parseInt(System.getenv(envName));
}
if (System.getProperty(propertyName) != null) {
return Integer.parseInt(System.getProperty(propertyName));
}
return getIntValue(propertyName, defaultValue);
}
public long getLong(ConfVars c) {
return getLong(c.name(), c.getVarName(), c.getLongValue());
}
public long getLong(String envName, String propertyName, long defaultValue) {
if (System.getenv(envName) != null) {
return Long.parseLong(System.getenv(envName));
}
if (System.getProperty(propertyName) != null) {
return Long.parseLong(System.getProperty(propertyName));
}
return getLongValue(propertyName, defaultValue);
}
public float getFloat(ConfVars c) {
return getFloat(c.name(), c.getVarName(), c.getFloatValue());
}
public float getFloat(String envName, String propertyName, float defaultValue) {
if (System.getenv(envName) != null) {
return Float.parseFloat(System.getenv(envName));
}
if (System.getProperty(propertyName) != null) {
return Float.parseFloat(System.getProperty(propertyName));
}
return getFloatValue(propertyName, defaultValue);
}
public boolean getBoolean(ConfVars c) {
return getBoolean(c.name(), c.getVarName(), c.getBooleanValue());
}
public boolean getBoolean(String envName, String propertyName, boolean defaultValue) {
if (System.getenv(envName) != null) {
return Boolean.parseBoolean(System.getenv(envName));
}
if (System.getProperty(propertyName) != null) {
return Boolean.parseBoolean(System.getProperty(propertyName));
}
return getBooleanValue(propertyName, defaultValue);
}
  /** Whether the server should serve over SSL. */
  public boolean useSsl() {
    return getBoolean(ConfVars.ZEPPELIN_SSL);
  }

  /** Whether SSL client-certificate authentication is enabled. */
  public boolean useClientAuth() {
    return getBoolean(ConfVars.ZEPPELIN_SSL_CLIENT_AUTH);
  }

  /** Bind address of the Zeppelin server (default "0.0.0.0"). */
  public String getServerAddress() {
    return getString(ConfVars.ZEPPELIN_ADDR);
  }

  /** Listen port of the Zeppelin server (default 8080). */
  public int getServerPort() {
    return getInt(ConfVars.ZEPPELIN_PORT);
  }

  /** Web-application context path (default "/"). */
  public String getServerContextPath() {
    return getString(ConfVars.ZEPPELIN_SERVER_CONTEXT_PATH);
  }
public String getKeyStorePath() {
String path = getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_PATH);
if (path != null && path.startsWith("/") || isWindowsPath(path)) {
return path;
} else {
return getRelativeDir(
String.format("%s/%s",
getConfDir(),
getString(path)));
}
}
  /** Keystore type, e.g. "JKS" (the default). */
  public String getKeyStoreType() {
    return getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_TYPE);
  }

  public String getKeyStorePassword() {
    return getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_PASSWORD);
  }

  /** Key-manager password; falls back to the keystore password when unset. */
  public String getKeyManagerPassword() {
    String password = getString(ConfVars.ZEPPELIN_SSL_KEY_MANAGER_PASSWORD);
    if (password == null) {
      return getKeyStorePassword();
    } else {
      return password;
    }
  }
public String getTrustStorePath() {
String path = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_PATH);
if (path != null && path.startsWith("/") || isWindowsPath(path)) {
return path;
} else {
return getRelativeDir(
String.format("%s/%s",
getConfDir(),
getString(path)));
}
}
  /** Truststore type; falls back to the keystore type when unset. */
  public String getTrustStoreType() {
    String type = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_TYPE);
    if (type == null) {
      return getKeyStoreType();
    } else {
      return type;
    }
  }

  /** Truststore password; falls back to the keystore password when unset. */
  public String getTrustStorePassword() {
    String password = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_PASSWORD);
    if (password == null) {
      return getKeyStorePassword();
    } else {
      return password;
    }
  }
  /** Directory notebooks are stored in (default "notebook"). */
  public String getNotebookDir() {
    return getString(ConfVars.ZEPPELIN_NOTEBOOK_DIR);
  }

  /** S3 user name used to build the notebook key prefix. */
  public String getUser() {
    return getString(ConfVars.ZEPPELIN_NOTEBOOK_S3_USER);
  }

  /** S3 bucket holding notebooks. */
  public String getBucketName() {
    return getString(ConfVars.ZEPPELIN_NOTEBOOK_S3_BUCKET);
  }

  /** S3 endpoint host (default "s3.amazonaws.com"). */
  public String getEndpoint() {
    return getString(ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT);
  }

  /** KMS key id for S3 server-side encryption; null when unset. */
  public String getS3KMSKeyID() {
    return getString(ConfVars.ZEPPELIN_NOTEBOOK_S3_KMS_KEY_ID);
  }

  /** Class name of the S3 EncryptionMaterialsProvider; null when unset. */
  public String getS3EncryptionMaterialsProviderClass() {
    return getString(ConfVars.ZEPPELIN_NOTEBOOK_S3_EMP);
  }

  public String getInterpreterListPath() {
    return getRelativeDir(String.format("%s/interpreter-list", getConfDir()));
  }

  public String getInterpreterDir() {
    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_DIR);
  }

  public String getInterpreterJson() {
    return getString(ConfVars.ZEPPELIN_INTERPRETER_JSON);
  }

  public String getInterpreterSettingPath() {
    return getRelativeDir(String.format("%s/interpreter.json", getConfDir()));
  }

  public String getHeliumConfPath() {
    return getRelativeDir(String.format("%s/helium.json", getConfDir()));
  }

  public String getHeliumDefaultLocalRegistryPath() {
    return getRelativeDir(ConfVars.ZEPPELIN_HELIUM_LOCALREGISTRY_DEFAULT);
  }

  public String getNotebookAuthorizationPath() {
    return getRelativeDir(String.format("%s/notebook-authorization.json", getConfDir()));
  }

  /** Whether credentials should be persisted to disk. */
  public Boolean credentialsPersist() {
    return getBoolean(ConfVars.ZEPPELIN_CREDENTIALS_PERSIST);
  }

  public String getCredentialsPath() {
    return getRelativeDir(String.format("%s/credentials.json", getConfDir()));
  }

  public String getShiroPath() {
    return getRelativeDir(String.format("%s/shiro.ini", getConfDir()));
  }

  /** Script used to launch remote interpreter processes. */
  public String getInterpreterRemoteRunnerPath() {
    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER);
  }

  /** Local repository for interpreter dependency downloads. */
  public String getInterpreterLocalRepoPath() {
    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_LOCALREPO);
  }
public String getRelativeDir(ConfVars c) {
return getRelativeDir(getString(c));
}
public String getRelativeDir(String path) {
if (path != null && path.startsWith("/") || isWindowsPath(path)) {
return path;
} else {
return getString(ConfVars.ZEPPELIN_HOME) + "/" + path;
}
}
public boolean isWindowsPath(String path) {
return path.matches("^[A-Za-z]:\\\\.*");
}
  /** Configuration directory (default "conf"). */
  public String getConfDir() {
    return getString(ConfVars.ZEPPELIN_CONF_DIR);
  }

  /**
   * Origins allowed for REST/websocket access. An empty setting yields an
   * empty list; otherwise the comma-separated value is lower-cased and split
   * (the default "*" becomes a single-element list).
   */
  public List<String> getAllowedOrigins() {
    if (getString(ConfVars.ZEPPELIN_ALLOWED_ORIGINS).isEmpty()) {
      return Arrays.asList(new String[0]);
    }
    return Arrays.asList(getString(ConfVars.ZEPPELIN_ALLOWED_ORIGINS).toLowerCase().split(","));
  }

  /** Maximum websocket text-message size, as a string (default "1024000"). */
  public String getWebsocketMaxTextMessageSize() {
    return getString(ConfVars.ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE);
  }
public Map<String, String> dumpConfigurations(ZeppelinConfiguration conf,
ConfigurationKeyPredicate predicate) {
Map<String, String> configurations = new HashMap<>();
for (ZeppelinConfiguration.ConfVars v : ZeppelinConfiguration.ConfVars.values()) {
String key = v.getVarName();
if (!predicate.apply(key)) {
continue;
}
ConfVars.VarType type = v.getType();
Object value = null;
if (type == ConfVars.VarType.BOOLEAN) {
value = conf.getBoolean(v);
} else if (type == ConfVars.VarType.LONG) {
value = conf.getLong(v);
} else if (type == ConfVars.VarType.INT) {
value = conf.getInt(v);
} else if (type == ConfVars.VarType.FLOAT) {
value = conf.getFloat(v);
} else if (type == ConfVars.VarType.STRING) {
value = conf.getString(v);
}
if (value != null) {
configurations.put(key, value.toString());
}
}
return configurations;
}
/**
* Predication whether key/value pair should be included or not
*/
public interface ConfigurationKeyPredicate {
boolean apply(String key);
}
/**
* Wrapper class.
*/
public static enum ConfVars {
ZEPPELIN_HOME("zeppelin.home", "../"), //TODO(qy): restore to ./
ZEPPELIN_ADDR("zeppelin.server.addr", "0.0.0.0"),
ZEPPELIN_PORT("zeppelin.server.port", 8080),
ZEPPELIN_SERVER_CONTEXT_PATH("zeppelin.server.context.path", "/"),
ZEPPELIN_SSL("zeppelin.ssl", false),
ZEPPELIN_SSL_CLIENT_AUTH("zeppelin.ssl.client.auth", false),
ZEPPELIN_SSL_KEYSTORE_PATH("zeppelin.ssl.keystore.path", "keystore"),
ZEPPELIN_SSL_KEYSTORE_TYPE("zeppelin.ssl.keystore.type", "JKS"),
ZEPPELIN_SSL_KEYSTORE_PASSWORD("zeppelin.ssl.keystore.password", ""),
ZEPPELIN_SSL_KEY_MANAGER_PASSWORD("zeppelin.ssl.key.manager.password", null),
ZEPPELIN_SSL_TRUSTSTORE_PATH("zeppelin.ssl.truststore.path", null),
ZEPPELIN_SSL_TRUSTSTORE_TYPE("zeppelin.ssl.truststore.type", null),
ZEPPELIN_SSL_TRUSTSTORE_PASSWORD("zeppelin.ssl.truststore.password", null),
ZEPPELIN_WAR("zeppelin.war", "../zeppelin-web/dist"),
ZEPPELIN_WAR_TEMPDIR("zeppelin.war.tempdir", "webapps"),
ZEPPELIN_INTERPRETERS("zeppelin.interpreters", "org.apache.zeppelin.spark.SparkInterpreter,"
+ "org.apache.zeppelin.spark.PySparkInterpreter,"
+ "org.apache.zeppelin.rinterpreter.RRepl,"
+ "org.apache.zeppelin.rinterpreter.KnitR,"
+ "org.apache.zeppelin.spark.SparkRInterpreter,"
+ "org.apache.zeppelin.spark.SparkSqlInterpreter,"
+ "org.apache.zeppelin.spark.DepInterpreter,"
+ "org.apache.zeppelin.markdown.Markdown,"
+ "org.apache.zeppelin.angular.AngularInterpreter,"
+ "org.apache.zeppelin.shell.ShellInterpreter,"
+ "org.apache.zeppelin.livy.LivySparkInterpreter,"
+ "org.apache.zeppelin.livy.LivySparkSQLInterpreter,"
+ "org.apache.zeppelin.livy.LivyPySparkInterpreter,"
+ "org.apache.zeppelin.livy.LivySparkRInterpreter,"
+ "org.apache.zeppelin.alluxio.AlluxioInterpreter,"
+ "org.apache.zeppelin.file.HDFSFileInterpreter,"
+ "org.apache.zeppelin.postgresql.PostgreSqlInterpreter,"
+ "org.apache.zeppelin.flink.FlinkInterpreter,"
+ "org.apache.zeppelin.python.PythonInterpreter,"
+ "org.apache.zeppelin.python.PythonInterpreterPandasSql,"
+ "org.apache.zeppelin.ignite.IgniteInterpreter,"
+ "org.apache.zeppelin.ignite.IgniteSqlInterpreter,"
+ "org.apache.zeppelin.lens.LensInterpreter,"
+ "org.apache.zeppelin.cassandra.CassandraInterpreter,"
+ "org.apache.zeppelin.geode.GeodeOqlInterpreter,"
+ "org.apache.zeppelin.kylin.KylinInterpreter,"
+ "org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter,"
+ "org.apache.zeppelin.scalding.ScaldingInterpreter,"
+ "org.apache.zeppelin.jdbc.JDBCInterpreter,"
+ "org.apache.zeppelin.hbase.HbaseInterpreter,"
+ "org.apache.zeppelin.bigquery.BigQueryInterpreter"),
ZEPPELIN_INTERPRETER_JSON("zeppelin.interpreter.setting", "interpreter-setting.json"),
ZEPPELIN_INTERPRETER_DIR("zeppelin.interpreter.dir", "interpreter"),
ZEPPELIN_INTERPRETER_LOCALREPO("zeppelin.interpreter.localRepo", "local-repo"),
ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT("zeppelin.interpreter.connect.timeout", 30000),
ZEPPELIN_INTERPRETER_MAX_POOL_SIZE("zeppelin.interpreter.max.poolsize", 10),
ZEPPELIN_INTERPRETER_GROUP_ORDER("zeppelin.interpreter.group.order", "spark,md,angular,sh,"
+ "livy,alluxio,file,psql,flink,python,ignite,lens,cassandra,geode,kylin,elasticsearch,"
+ "scalding,jdbc,hbase,bigquery"),
ZEPPELIN_ENCODING("zeppelin.encoding", "UTF-8"),
ZEPPELIN_NOTEBOOK_DIR("zeppelin.notebook.dir", "notebook"),
// use specified notebook (id) as homescreen
ZEPPELIN_NOTEBOOK_HOMESCREEN("zeppelin.notebook.homescreen", null),
// whether homescreen notebook will be hidden from notebook list or not
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE("zeppelin.notebook.homescreen.hide", false),
ZEPPELIN_NOTEBOOK_S3_BUCKET("zeppelin.notebook.s3.bucket", "zeppelin"),
ZEPPELIN_NOTEBOOK_S3_ENDPOINT("zeppelin.notebook.s3.endpoint", "s3.amazonaws.com"),
ZEPPELIN_NOTEBOOK_S3_USER("zeppelin.notebook.s3.user", "user"),
ZEPPELIN_NOTEBOOK_S3_EMP("zeppelin.notebook.s3.encryptionMaterialsProvider", null),
ZEPPELIN_NOTEBOOK_S3_KMS_KEY_ID("zeppelin.notebook.s3.kmsKeyID", null),
ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING("zeppelin.notebook.azure.connectionString", null),
ZEPPELIN_NOTEBOOK_AZURE_SHARE("zeppelin.notebook.azure.share", "zeppelin"),
ZEPPELIN_NOTEBOOK_AZURE_USER("zeppelin.notebook.azure.user", "user"),
ZEPPELIN_NOTEBOOK_STORAGE("zeppelin.notebook.storage", ElasticSearchRepo.class.getName() + "," + VFSNotebookRepo.class.getName()),
ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC("zeppelin.notebook.one.way.sync", false),
ZEPPELIN_INTERPRETER_REMOTE_RUNNER("zeppelin.interpreter.remoterunner",
System.getProperty("os.name")
.startsWith("Windows") ? "bin/interpreter.cmd" : "bin/interpreter.sh"),
// Decide when new note is created, interpreter settings will be binded automatically or not.
ZEPPELIN_NOTEBOOK_AUTO_INTERPRETER_BINDING("zeppelin.notebook.autoInterpreterBinding", true),
ZEPPELIN_CONF_DIR("zeppelin.conf.dir", "conf"),
ZEPPELIN_DEP_LOCALREPO("zeppelin.dep.localrepo", "local-repo"),
ZEPPELIN_HELIUM_LOCALREGISTRY_DEFAULT("zeppelin.helium.localregistry.default", "helium"),
// Allows a way to specify a ',' separated list of allowed origins for rest and websockets
// i.e. http://localhost:8080
ZEPPELIN_ALLOWED_ORIGINS("zeppelin.server.allowed.origins", "*"),
ZEPPELIN_ANONYMOUS_ALLOWED("zeppelin.anonymous.allowed", true),
ZEPPELIN_CREDENTIALS_PERSIST("zeppelin.credentials.persist", true),
//default page size when searching note
ZEPPELIN_NOTE_SEARCH_PAGE_SIZE("zeppelin.es.search.repo.page.size", 20),
ZEPPELIN_NOTE_REPO_ES_HOST("zeppelin.es.search.repo.host", "localhost"),
ZEPPELIN_NOTE_REPO_ES_PORT("zeppelin.es.search.repo.port", 9300),
ZEPPELIN_NOTE_REPO_ES_INDEX_NAME("zeppelin.es.search.repo.index.name", "zeppelin"),
ZEPPELIN_NOTE_REPO_ES_NOTE_TYPE_NAME("zeppelin.es.search.repo.note.index.type", "note"),
ZEPPELIN_NOTE_REPO_ES_PARAGRAPH_TYPE_NAME("zeppelin.es.search.repo.paragraph.index.type", "paragraph"),
ZEPPELIN_NOTE_REPO_ES_TERMS_AGGREGATION_SIZE("zeppelin.es.search.aggregation.size", 10),//max terms aggregation size
ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE("zeppelin.websocket.max.text.message.size", "1024000");
private String varName;
@SuppressWarnings("rawtypes")
private Class varClass;
private String stringValue;
private VarType type;
private int intValue;
private float floatValue;
private boolean booleanValue;
private long longValue;
ConfVars(String varName, String varValue) {
this.varName = varName;
this.varClass = String.class;
this.stringValue = varValue;
this.intValue = -1;
this.floatValue = -1;
this.longValue = -1;
this.booleanValue = false;
this.type = VarType.STRING;
}
ConfVars(String varName, int intValue) {
this.varName = varName;
this.varClass = Integer.class;
this.stringValue = null;
this.intValue = intValue;
this.floatValue = -1;
this.longValue = -1;
this.booleanValue = false;
this.type = VarType.INT;
}
ConfVars(String varName, long longValue) {
this.varName = varName;
this.varClass = Integer.class;
this.stringValue = null;
this.intValue = -1;
this.floatValue = -1;
this.longValue = longValue;
this.booleanValue = false;
this.type = VarType.LONG;
}
ConfVars(String varName, float floatValue) {
this.varName = varName;
this.varClass = Float.class;
this.stringValue = null;
this.intValue = -1;
this.longValue = -1;
this.floatValue = floatValue;
this.booleanValue = false;
this.type = VarType.FLOAT;
}
ConfVars(String varName, boolean booleanValue) {
this.varName = varName;
this.varClass = Boolean.class;
this.stringValue = null;
this.intValue = -1;
this.longValue = -1;
this.floatValue = -1;
this.booleanValue = booleanValue;
this.type = VarType.BOOLEAN;
}
    /** Property key of this variable. */
    public String getVarName() {
      return varName;
    }

    /** Java type corresponding to this variable's VarType. */
    @SuppressWarnings("rawtypes")
    public Class getVarClass() {
      return varClass;
    }

    // The typed default accessors below return a -1/null/false sentinel when
    // the variable is of a different type; check getType() first.
    public int getIntValue() {
      return intValue;
    }

    public long getLongValue() {
      return longValue;
    }

    public float getFloatValue() {
      return floatValue;
    }

    public String getStringValue() {
      return stringValue;
    }

    public boolean getBooleanValue() {
      return booleanValue;
    }

    public VarType getType() {
      return type;
    }
    /**
     * Runtime type of a configuration variable. Each constant knows how to
     * validate a raw string value against its type.
     */
    enum VarType {
      STRING {
        @Override
        void checkType(String value) throws Exception {
          // Any string is a valid STRING value.
        }
      },
      INT {
        @Override
        void checkType(String value) throws Exception {
          Integer.valueOf(value);
        }
      },
      LONG {
        @Override
        void checkType(String value) throws Exception {
          Long.valueOf(value);
        }
      },
      FLOAT {
        @Override
        void checkType(String value) throws Exception {
          Float.valueOf(value);
        }
      },
      BOOLEAN {
        @Override
        void checkType(String value) throws Exception {
          Boolean.valueOf(value);
        }
      };

      // True when the raw string parses as this type; parse failures are
      // logged and reported as false rather than propagated.
      boolean isType(String value) {
        try {
          checkType(value);
        } catch (Exception e) {
          LOG.error("Exception in ZeppelinConfiguration while isType", e);
          return false;
        }
        return true;
      }

      String typeString() {
        return name().toUpperCase();
      }

      // Throws when the raw string is not a valid value of this type.
      abstract void checkType(String value) throws Exception;
    }
}
}
| |
package fr.lip6.reden.nelinker;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.io.output.FileWriterWithEncoding;
import org.apache.log4j.Logger;
import org.jgrapht.graph.SimpleDirectedWeightedGraph;
import dk.aaue.sna.alg.centrality.BrandesBetweennessCentrality;
import dk.aaue.sna.alg.centrality.CentralityMeasure;
import dk.aaue.sna.alg.centrality.CentralityResult;
import dk.aaue.sna.alg.centrality.DegreeCentrality;
import dk.aaue.sna.alg.centrality.EigenvectorCentrality;
import dk.aaue.sna.alg.centrality.FreemanClosenessCentrality;
/**
* This class implements the method for graph centrality calculation.
*
* @author @author Brando & Frontini
*/
public class CentralityHandler {

	private static Logger logger = Logger.getLogger(CentralityHandler.class);

	/**
	 * Keeps only meaningful edges and vertices of the fused graph, then ranks
	 * every candidate URI by graph centrality and chooses the best URI for
	 * each mention of the paragraph.
	 *
	 * @param graph the fused graph of URIs
	 * @param mentionsWithURIs the mentions and their URIs for every possible candidate
	 * @param mentionsPerParagraph mentions of the paragraph
	 * @param baseURIS reference source (URI prefix candidates must contain)
	 * @param invertedIndex index of URI to obtain mention
	 * @param measure the centrality measure name
	 * @param preferedURI URI base preferred for the XML-TEI output ("ALL" keeps every URI)
	 * @param namefile name of the source TEI-XML file
	 * @param countParagraph number of the current paragraph
	 * @param writerGraph where to write the final graph (for debugging purposes)
	 * @param edgeFrequenceByLabel frequency by predicate, updated in place
	 * @param choosenScoresperMention centrality score of the chosen URI per mention, updated in place
	 * @return the chosen URIs per mention, or null for an unknown measure
	 */
	@SuppressWarnings("rawtypes")
	public static Map<String, String> simplifyGraphsAndCalculateCentrality(
			SimpleDirectedWeightedGraph<String, LabeledEdge> graph,
			Map<String, List<List<String>>> mentionsWithURIs,
			List<String> mentionsPerParagraph, String baseURIS,
			Map<String, String> invertedIndex, String measure,
			String preferedURI, String namefile,
			Integer countParagraph, FileWriterWithEncoding writerGraph,
			Map<String, Double> edgeFrequenceByLabel,
			Map<String, Double> choosenScoresperMention) {
		Map<String, String> choosenUris = new HashMap<String, String>();
		try {
			// Collect the candidate ("colored") URIs of this paragraph.
			List<String> urisColoredNodes = new ArrayList<String>();
			String baseURL = baseURIS.trim(); // hoisted: invariant over all loops
			for (String mention : mentionsPerParagraph) {
				List<List<String>> listUrisCurrentMention = mentionsWithURIs
						.get(mention);
				if (listUrisCurrentMention != null) {
					for (List<String> listUris : listUrisCurrentMention) {
						for (String uri : listUris) {
							if (uri.contains(baseURL)) { // avoid URIs from other sources
								urisColoredNodes.add(uri);
								if (!urisColoredNodes
										.contains(Util.decompose(uri)))
									urisColoredNodes.add(Util.decompose(uri));
								//the candidates
							}
						}
					}
				}
			}
			// Filter out intermediate nodes that connect fewer than two
			// distinct mentions: they do not help disambiguation.
			List<String> vertexToDelete = new ArrayList<String>();
			for (String vertex : graph.vertexSet()) {
				if (!urisColoredNodes.contains(vertex)) {
					Set<String> vertexCheck = new HashSet<String>();
					for (LabeledEdge edgeOfVertex : graph.edgesOf(vertex)) {
						String vertex1 = graph.getEdgeSource(edgeOfVertex);
						String vertex2 = graph.getEdgeTarget(edgeOfVertex);
						if (!vertex1.equals(vertex)
								&& urisColoredNodes.contains(vertex1)) {
							vertexCheck.add(invertedIndex.get(vertex1));
						}
						if (!vertex2.equals(vertex)
								&& urisColoredNodes.contains(vertex2)) {
							vertexCheck.add(invertedIndex.get(vertex2));
						}
					}
					if (vertexCheck.size() < 2) {
						vertexToDelete.add(vertex);
					}
				}
			}
			graph.removeAllVertices(vertexToDelete);

			// For debugging: relation (predicate) frequency.
			for (LabeledEdge edge : graph.edgeSet()) {
				Double count = edgeFrequenceByLabel.get(edge.toString());
				edgeFrequenceByLabel.put(edge.toString(), count == null ? 1.0 : count + 1);
			}

			// Select the centrality measure.
			CentralityMeasure<String> cm;
			logger.info("Centrality measure used is " + measure);
			if (measure.equals("DegreeCentrality")) {
				cm = new DegreeCentrality<String, LabeledEdge>(graph);
			} else if (measure.equals("BrandesBetweennessCentrality")) {
				cm = new BrandesBetweennessCentrality<String, LabeledEdge>(
						graph);
			} else if (measure.equals("FreemanClosenessCentrality")) {
				cm = new FreemanClosenessCentrality<String, LabeledEdge>(
						graph);
			} else if (measure.equals("EigenvectorCentrality")) {
				cm = new EigenvectorCentrality<String, LabeledEdge>(graph);
			} else {
				System.out.println("please provide valid centrality measure");
				return null;
			}

			// Performance fix: compute centrality ONCE for the (now fixed)
			// graph. The old code called cm.calculate() inside the innermost
			// loop, re-running the full centrality algorithm for every
			// candidate URI of every mention.
			CentralityResult<String> cr = cm.calculate();
			for (String key : mentionsWithURIs.keySet()) {
				Map<String, Double> results = new HashMap<String, Double>();
				List<List<String>> listuris = mentionsWithURIs.get(key);
				if (listuris != null) {
					for (List<String> uris : listuris) {
						for (String uri : uris) {
							String decomposed = Util.decompose(uri);
							if (urisColoredNodes.contains(decomposed)) {
								Double val = cr.get(decomposed);
								// Candidates absent from the result score 0.
								results.put(decomposed, val != null ? val : 0.0);
							}
						}
					}
				}
				Map<String, Double> orderedMap = Util.sortByValue(results);
				logger.info("For mention: " + key);
				for (String ur : orderedMap.keySet()) {
					if (orderedMap.get(ur) != 0) {
						logger.info("Centrality of " + ur + " is: "
								+ orderedMap.get(ur));
					}
				}
				// Choose the highest-ranked URI (last key of the ordered map).
				String[] o = {};
				o = orderedMap.keySet().toArray(o);
				if (o.length > 0) { // there are uris
					String best = o[o.length - 1];
					// Select preferred URI, the one defined in config.parameters.
					String selectedURI = "";
					String correspondingMention = invertedIndex.get(best);
					List<List<String>> correspondingURIs = mentionsWithURIs
							.get(correspondingMention);
					for (List<String> uris : correspondingURIs) {
						if (uris.contains(best)) { // it is the right list
							boolean found = false;
							for (String uri : uris) {
								if (preferedURI.equals("ALL")) {
									selectedURI += uri + " ";
									found = true;
								} else if (uri.contains(preferedURI)) {
									selectedURI = uri;
									found = true;
								}
							}
							if (!found) {
								selectedURI = best; // default URI
							}
						}
					}
					choosenUris.put(key, selectedURI.trim());
					choosenScoresperMention.put(key, orderedMap.get(best));
				}
			}
			// Printing graph for debugging purposes.
			writerGraph.write("Paragraph# " + countParagraph + "\n");
			Util.printGraph(graph, writerGraph);
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
		return choosenUris;
	}
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metric;
import com.codahale.metrics.Timer;
import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.google.common.collect.ImmutableMap;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.metrics.MetricsService;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.TestServiceDirectory;
import org.onlab.rest.BaseResource;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.impl.CodecManager;
import javax.ws.rs.client.WebTarget;
import java.util.concurrent.TimeUnit;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* Unit tests for Metrics REST APIs.
*/
/**
 * Unit tests for Metrics REST APIs.
 */
public class MetricsResourceTest extends ResourceTest {

    MetricsService mockMetricsService;

    /**
     * Initializes test mocks and environment.
     */
    @Before
    public void setUpTest() {
        mockMetricsService = createMock(MetricsService.class);

        // Register the services needed for the test
        final CodecManager codecService = new CodecManager();
        codecService.activate();
        ServiceDirectory testDirectory =
                new TestServiceDirectory()
                        .add(MetricsService.class, mockMetricsService)
                        .add(CodecService.class, codecService);
        BaseResource.setServiceDirectory(testDirectory);
    }

    /**
     * Verifies mocks.
     */
    @After
    public void tearDownTest() {
        verify(mockMetricsService);
    }

    /**
     * Tests GetAllMetrics method: registers one metric of each supported
     * type and checks that the JSON returned by /metrics matches them.
     */
    @Test
    public void testGetAllMetrics() {
        Counter onosCounter = new Counter();
        onosCounter.inc();

        Meter onosMeter = new Meter();
        onosMeter.mark();

        Timer onosTimer = new Timer();
        onosTimer.update(1, TimeUnit.MILLISECONDS);

        ImmutableMap<String, Metric> metrics =
                new ImmutableMap.Builder<String, Metric>()
                        .put("onosCounter", onosCounter)
                        .put("onosMeter", onosMeter)
                        .put("onosTimer", onosTimer)
                        .build();

        expect(mockMetricsService.getMetrics())
                .andReturn(metrics)
                .anyTimes();
        replay(mockMetricsService);

        WebTarget wt = target();
        String response = wt.path("metrics").request().get(String.class);
        assertThat(response, containsString("{\"metrics\":["));

        JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());

        JsonArray jsonMetrics = result.get("metrics").asArray();
        assertThat(jsonMetrics, notNullValue());
        assertThat(jsonMetrics.size(), is(3));

        // ImmutableMap preserves insertion order, so the JSON array order
        // matches the registration order above.
        assertTrue(matchesMetric(metrics.get("onosCounter")).matchesSafely(jsonMetrics.get(0).asObject()));
        assertTrue(matchesMetric(metrics.get("onosMeter")).matchesSafely(jsonMetrics.get(1).asObject()));
        assertTrue(matchesMetric(metrics.get("onosTimer")).matchesSafely(jsonMetrics.get(2).asObject()));
    }

    /**
     * Hamcrest matcher to check that a metric representation in JSON matches
     * the actual metric.
     */
    public static class MetricJsonMatcher extends TypeSafeMatcher<JsonObject> {

        private final Metric metric;
        // Populated on mismatch so describeTo() can report what differed.
        private String reason = "";

        public MetricJsonMatcher(Metric metricValue) {
            this.metric = metricValue;
        }

        @Override
        public boolean matchesSafely(JsonObject jsonObject) {
            JsonObject jsonMetric = jsonObject.get("metric").asObject();
            JsonObject jsonCounter;
            JsonObject jsonMeter;
            JsonObject jsonTimer;
            Counter counter;
            Meter meter;
            Timer timer;

            // check counter metric
            if (jsonMetric.get("counter") != null) {
                jsonCounter = jsonMetric.get("counter").asObject();
                counter = (Counter) metric;
                if (jsonCounter.get("counter").asLong() != counter.getCount()) {
                    reason = "counter " + counter.getCount();
                    return false;
                }
            }

            // check meter metric
            if (jsonMetric.get("meter") != null) {
                jsonMeter = jsonMetric.get("meter").asObject();
                meter = (Meter) metric;
                if (jsonMeter.get("counter").asLong() != meter.getCount()) {
                    reason = "counter " + meter.getCount();
                    return false;
                }
                if (jsonMeter.get("1_min_rate").asDouble() != meter.getOneMinuteRate()) {
                    reason = "1 minute rate " + meter.getOneMinuteRate();
                    return false;
                }
                // FIX: compare "5_min_rate" against getFiveMinuteRate();
                // the original compared it to getOneMinuteRate() (copy-paste
                // bug — the reason string already used the 5-minute rate).
                if (jsonMeter.get("5_min_rate").asDouble() != meter.getFiveMinuteRate()) {
                    reason = "5 minute rate " + meter.getFiveMinuteRate();
                    return false;
                }
                if (jsonMeter.get("15_min_rate").asDouble() != meter.getFifteenMinuteRate()) {
                    reason = "15 minute rate " + meter.getFifteenMinuteRate();
                    return false;
                }
            }

            // check timer metric
            if (jsonMetric.get("timer") != null) {
                jsonTimer = jsonMetric.get("timer").asObject();
                timer = (Timer) metric;
                if (jsonTimer.get("counter").asLong() != timer.getCount()) {
                    reason = "counter " + timer.getCount();
                    return false;
                }
                if (jsonTimer.get("1_min_rate").asDouble() != timer.getOneMinuteRate()) {
                    reason = "1 minute rate " + timer.getOneMinuteRate();
                    return false;
                }
                // FIX: same copy-paste bug as the meter branch above.
                if (jsonTimer.get("5_min_rate").asDouble() != timer.getFiveMinuteRate()) {
                    reason = "5 minute rate " + timer.getFiveMinuteRate();
                    return false;
                }
                if (jsonTimer.get("15_min_rate").asDouble() != timer.getFifteenMinuteRate()) {
                    reason = "15 minute rate " + timer.getFifteenMinuteRate();
                    return false;
                }
                if (jsonTimer.get("mean").asDouble() != nanoToMs(timer.getSnapshot().getMean())) {
                    reason = "mean " + timer.getSnapshot().getMean();
                    return false;
                }
                if (jsonTimer.get("min").asDouble() != nanoToMs(timer.getSnapshot().getMin())) {
                    reason = "min " + timer.getSnapshot().getMin();
                    return false;
                }
                if (jsonTimer.get("max").asDouble() != nanoToMs(timer.getSnapshot().getMax())) {
                    reason = "max " + timer.getSnapshot().getMax();
                    return false;
                }
                if (jsonTimer.get("stddev").asDouble() != nanoToMs(timer.getSnapshot().getStdDev())) {
                    reason = "stddev " + timer.getSnapshot().getStdDev();
                    return false;
                }
            }

            return true;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText(reason);
        }

        /**
         * Converts nanoseconds to milliseconds (timer snapshot values are
         * reported in nanoseconds; the REST representation uses ms).
         */
        private double nanoToMs(double nano) {
            return nano / 1_000_000D;
        }
    }

    /**
     * Factory to allocate a metric matcher.
     *
     * @param metric metric object we are looking for
     * @return matcher
     */
    private static MetricJsonMatcher matchesMetric(Metric metric) {
        return new MetricJsonMatcher(metric);
    }
}
| |
/*
* Copyright 2016 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.navercorp.pinpoint.test.classloader;
import com.navercorp.pinpoint.test.util.BytecodeUtils;
import java.io.InputStream;
import java.security.ProtectionDomain;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* copy & modify javassist.Loader
*
* @author Woonduk Kang(emeroad)
*/
public class TransformClassLoader extends ClassLoader {

    private final Logger logger = Logger.getLogger(TransformClassLoader.class.getName());

    // Per-class-name locks for parallel-capable class loading.
    private final ConcurrentMap<String, Object> lockMap = new ConcurrentHashMap<String, Object>();

    // Class names / package prefixes whose loading is delegated to the parent.
    private final Set<String> notDefinedClass = new CopyOnWriteArraySet<String>();
    private final List<String> notDefinedPackages = new CopyOnWriteArrayList<String>();

    private Translator translator;
    private ProtectionDomain domain;

    /**
     * Specifies the algorithm of class loading.
     * <p>
     * <p>This class loader uses the parent class loader for
     * <code>java.*</code> and <code>javax.*</code> classes.
     * If this variable <code>doDelegation</code>
     * is <code>false</code>, this class loader does not delegate those
     * classes to the parent class loader.
     * <p>
     * <p>The default value is <code>true</code>.
     */
    public boolean doDelegation = true;

    /**
     * Creates a new class loader.
     */
    public TransformClassLoader() {
        // FIX: call init() here as well. The parent-arg constructor (and the
        // javassist.Loader this class copies) initializes state and registers
        // self-delegation of TransformClassLoader; without it, a loader built
        // with the no-arg constructor would try to define this class itself.
        init();
    }

    /**
     * Creates a new class loader
     * using the specified parent class loader for delegation.
     *
     * @param parent the parent class loader.
     */
    public TransformClassLoader(ClassLoader parent) {
        super(parent);
        init();
    }

    private void init() {
        translator = null;
        domain = null;
        // This loader's own class must always come from the parent so that
        // instances of it share one Class object across loaders.
        delegateLoadingOf("com.navercorp.pinpoint.test.classloader.TransformClassLoader");
    }

    /**
     * Adds a translator, which is called whenever a class is loaded.
     *
     * @param t a translator.
     */
    public void addTranslator(Translator t) {
        translator = t;
        t.start();
    }

    /**
     * Records a class so that the loading of that class is delegated
     * to the parent class loader.
     * <p>
     * <p>If the given class name ends with <code>.</code> (dot), then
     * that name is interpreted as a package name. All the classes
     * in that package and the sub packages are delegated.
     */
    public void delegateLoadingOf(String classname) {
        if (classname.endsWith(".")) {
            notDefinedPackages.add(classname);
        } else {
            notDefinedClass.add(classname);
        }
    }

    /**
     * Sets the protection domain for the classes handled by this class
     * loader. Without registering an appropriate protection domain,
     * the program loaded by this loader will not work with a security
     * manager or a signed jar file.
     */
    public void setDomain(ProtectionDomain d) {
        domain = d;
    }

    /**
     * Requests the class loader to load a class.
     * <p>
     * Resolution order: already-loaded class, then JDK/registered delegation,
     * then this loader's own findClass(), and finally the parent loader.
     */
    protected Class<?> loadClass(String name, boolean resolve)
            throws ClassFormatError, ClassNotFoundException {
        synchronized (getClassLoadingLock(name)) {
            Class<?> c = findLoadedClass(name);
            if (c == null) {
                c = loadClassByDelegation(name);
            }
            if (c == null) {
                c = findClass(name);
            }
            if (c == null) {
                // Throws ClassNotFoundException if the parent cannot load it.
                c = delegateToParent(name);
            }
            if (resolve) {
                resolveClass(c);
            }
            return c;
        }
    }

    /**
     * Returns a lock object dedicated to the given class name, creating it
     * on first use (first writer wins).
     */
    protected Object getClassLoadingLock(String className) {
        final Object newLock = new Object();
        final Object existLock = lockMap.putIfAbsent(className, newLock);
        if (existLock != null) {
            return existLock;
        }
        return newLock;
    }

    /**
     * Finds the specified class using <code>ClassPath</code>.
     * If the source throws an exception, this returns null.
     * <p>
     * <p>This method can be overridden by a subclass of
     * <code>Loader</code>. Note that the overridden method must not throw
     * an exception when it just fails to find a class file.
     *
     * @return null if the specified class could not be found.
     * @throws ClassNotFoundException if an exception is thrown while
     *                                obtaining a class file.
     */
    protected Class<?> findClass(String name) throws ClassNotFoundException {
        byte[] classfile;
        try {
            if (translator != null) {
                try {
                    classfile = translator.transform(name);
                } catch (ClassNotFoundException e) {
                    // Not transformable; let loadClass() fall through to the parent.
                    return null;
                }
            } else {
                // No translator: read the raw class bytes off the classpath.
                String jarname = "/" + name.replace('.', '/') + ".class";
                InputStream in = this.getClass().getClassLoader().getResourceAsStream(jarname);
                if (in == null) {
                    return null;
                }
                classfile = BytecodeUtils.readClass(in, true);
            }
        } catch (Exception e) {
            throw new ClassNotFoundException("caught an exception while obtaining a class file for " + name, e);
        }

        // Ensure the package object exists before defining the class.
        final int i = name.lastIndexOf('.');
        if (i != -1) {
            String pname = name.substring(0, i);
            if (getPackage(pname) == null) {
                try {
                    definePackage(pname, null, null, null, null, null, null, null);
                } catch (IllegalArgumentException e) {
                    // ignore. maybe the package object for the same
                    // name has been created just right away.
                }
            }
        }

        if (domain == null) {
            if (logger.isLoggable(Level.FINE)) {
                this.logger.fine("defineClass:" + name);
            }
            return defineClass(name, classfile, 0, classfile.length);
        } else {
            if (logger.isLoggable(Level.FINE)) {
                this.logger.fine("defineClass:" + name);
            }
            return defineClass(name, classfile, 0, classfile.length, domain);
        }
    }

    protected Class<?> loadClassByDelegation(String name)
            throws ClassNotFoundException {
        /* The swing components must be loaded by a system
         * class loader.
         * javax.swing.UIManager loads a (concrete) subclass
         * of LookAndFeel by a system class loader and cast
         * an instance of the class to LookAndFeel for
         * (maybe) a security reason. To avoid failure of
         * type conversion, LookAndFeel must not be loaded
         * by this class loader.
         */
        Class<?> c = null;
        if (doDelegation) {
            if (isJdkPackage(name) || notDelegated(name)) {
                c = delegateToParent(name);
            }
        }
        return c;
    }

    /** Returns true for JDK/platform packages that must come from the parent. */
    private boolean isJdkPackage(String name) {
        return name.startsWith("java.")
                || name.startsWith("javax.")
                || name.startsWith("sun.")
                || name.startsWith("com.sun.")
                || name.startsWith("org.w3c.")
                || name.startsWith("org.xml.");
    }

    /** Returns true if the name was registered via delegateLoadingOf(). */
    private boolean notDelegated(String name) {
        if (notDefinedClass.contains(name)) {
            return true;
        }
        for (String notDefinedPackage : notDefinedPackages) {
            if (name.startsWith(notDefinedPackage)) {
                return true;
            }
        }
        return false;
    }

    protected Class<?> delegateToParent(String classname)
            throws ClassNotFoundException {
        ClassLoader cl = getParent();
        if (cl != null) {
            return cl.loadClass(classname);
        } else {
            return findSystemClass(classname);
        }
    }
}
| |
/*
* Copyright 2013-2014 Richard M. Hightower
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* __________ _____ __ .__
* \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____
* | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\
* | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ >
* |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ /
* \/ \/ \/ \/ \/ \//_____/
* ____. ___________ _____ ______________.___.
* | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | |
* | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | |
* /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ |
* \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______|
* \/ \/ \/ \/ \/ \/
*/
package io.advantageous.boon.collections;
import io.advantageous.boon.core.reflection.fields.FieldAccess;
import io.advantageous.boon.primitive.Lng;
import io.advantageous.boon.core.StringScanner;
import io.advantageous.boon.core.reflection.BeanUtils;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import static io.advantageous.boon.primitive.Lng.grow;
/**
* Holds primitive values in a list like object for long.
*
* <p>
* Has sum, mean, median, standardDeviation, reduceBy,
* variance.
* </p>
*
* @author Rick Hightower
*/
/**
 * Holds primitive values in a list like object for long.
 *
 * <p>
 * Has sum, mean, median, standardDeviation, reduceBy,
 * variance.
 * </p>
 *
 * @author Rick Hightower
 */
public class LongList extends AbstractList<Long> {

    /**
     * Values in this list.
     */
    private long[] values;

    /**
     * Index of last value added.
     */
    private int end;

    /**
     * Create a new list with this many items in it.
     * @param capacity capacity
     */
    public LongList(final int capacity) {
        this.values = new long[capacity];
    }

    /**
     * Create a new list with exactly 10 items in it.
     */
    public LongList() {
        this.values = new long[10];
    }

    /**
     * Create a new list backed directly by the given array (no copy).
     * @param values values
     */
    public LongList(long values[]) {
        this.values = values;
        this.end = values.length;
    }

    /**
     * Creates a primitive list based on an input list and a property path
     *
     * @param inputList input list
     * @param propertyPath property path
     * @return primitive list
     */
    public static LongList toLongList(Collection<?> inputList, String propertyPath) {
        if (inputList.size() == 0) {
            return new LongList(0);
        }

        LongList outputList = new LongList(inputList.size());

        if (propertyPath.contains(".") || propertyPath.contains("[")) {
            // Nested/indexed path: resolve via the reflective path walker.
            String[] properties = StringScanner.splitByDelimiters(propertyPath, ".[]");
            for (Object o : inputList) {
                outputList.add(BeanUtils.getPropertyLong(o, properties));
            }
        } else {
            // Simple property: look the field up once and reuse the accessor.
            Map<String, FieldAccess> fields = BeanUtils.getFieldsFromObject(inputList.iterator().next());
            FieldAccess fieldAccess = fields.get(propertyPath);
            for (Object o : inputList) {
                outputList.add(fieldAccess.getLong(o));
            }
        }
        return outputList;
    }

    /**
     * Get the value at index
     *
     * @param index index
     * @return value
     */
    @Override
    public Long get(int index) {
        return values[index];
    }

    /**
     * Get the value at index but don't use a wrapper.
     * <p>
     * NOTE: despite the name this returns a {@code long}; kept for backward
     * compatibility. Prefer {@link #getLong(int)}.
     *
     * @param index index
     * @return value
     */
    public final long getInt(int index) {
        return getLong(index);
    }

    /**
     * Get the value at index but don't use a wrapper.
     *
     * @param index index
     * @return value
     */
    public final long getLong(int index) {
        return values[index];
    }

    /**
     * Add a new value to the list.
     *
     * @param integer new value
     * @return was able to add.
     */
    @Override
    public boolean add(Long integer) {
        if (end + 1 >= values.length) {
            values = Lng.grow(values);
        }
        values[end] = integer;
        end++;
        return true;
    }

    /**
     * Add a new value to the list but don't employ a wrapper.
     *
     * @param integer new value
     * @return was able to add.
     */
    public boolean addLong(long integer) {
        if (end + 1 >= values.length) {
            values = Lng.grow(values);
        }
        values[end] = integer;
        end++;
        return true;
    }

    /**
     * Add a new value to the list but don't employ a wrapper.
     *
     * @param integer new value
     * @return this list, for chaining.
     */
    public LongList add(long integer) {
        if (end + 1 >= values.length) {
            values = Lng.grow(values);
        }
        values[end] = integer;
        end++;
        return this;
    }

    /**
     * Add a new array to the list.
     *
     * @param newValues new values
     * @return was able to add.
     */
    public boolean addArray(long... newValues) {
        if (end + newValues.length >= values.length) {
            values = Lng.grow(values, (values.length + newValues.length) * 2);
        }
        System.arraycopy(newValues, 0, values, end, newValues.length);
        end += newValues.length;
        return true;
    }

    /**
     * Set a value in the list.
     *
     * @param index index
     * @param element new value
     * @return old value at this index
     */
    @Override
    public Long set(int index, Long element) {
        long oldValue = values[index];
        values[index] = element;
        return oldValue;
    }

    /**
     * Set in a new value no wrapper.
     * <p>
     * Kept for backward compatibility; only accepts {@code int}-range values.
     * Prefer {@link #setLong(int, long)}.
     *
     * @param index index
     * @param element new value
     * @return old value at this index
     */
    public long setLong(int index, int element) {
        return setLong(index, (long) element);
    }

    /**
     * Set in a new value no wrapper.
     * <p>
     * FIX: the original only offered an {@code int}-typed setter, so a
     * full-range {@code long} could not be stored without boxing.
     *
     * @param index index
     * @param element new value
     * @return old value at this index
     */
    public long setLong(int index, long element) {
        long oldValue = values[index];
        values[index] = element;
        return oldValue;
    }

    /**
     * Return the current size.
     *
     * @return size
     */
    @Override
    public int size() {
        return end;
    }

    /**
     * Sums the values with bounds checking.
     * @return sum
     */
    public long sum() {
        return Lng.sum(values, end);
    }

    /**
     * Get a copy of the array up to the end element.
     *
     * @return array
     */
    public long[] toValueArray() {
        return java.util.Arrays.copyOfRange(values, 0, end);
    }

    /**
     * This would be a good opportunity to reintroduce dynamic invoke
     *
     * @param function function
     * @return array
     */
    public long reduceBy(Object function) {
        return Lng.reduceBy(values, end, function);
    }

    /**
     * This would be a good opportunity to reintroduce dynamic invoke
     *
     * @param function function
     * @param name name
     * @return result
     */
    public long reduceBy(Object function, String name) {
        return Lng.reduceBy(values, end, function, name);
    }

    /**
     * @param reduceBy reduceBy function
     * @return the reduction
     */
    public long reduceBy(Lng.ReduceBy reduceBy) {
        return Lng.reduceBy(values, end, reduceBy);
    }

    /**
     * Mean
     *
     * @return mean
     */
    public long mean() {
        return Lng.mean(values, end);
    }

    /**
     * standardDeviation
     *
     * @return standardDeviation
     */
    public long standardDeviation() {
        return Lng.standardDeviation(values, end);
    }

    /**
     * variance
     *
     * @return variance
     */
    public long variance() {
        return Lng.variance(values, end);
    }

    /**
     * max
     *
     * @return max
     */
    public long max() {
        return Lng.max(values, end);
    }

    /**
     * min
     *
     * @return min
     */
    public long min() {
        return Lng.min(values, end);
    }

    /**
     * median
     *
     * @return median
     */
    public long median() {
        return Lng.median(values, end);
    }

    /**
     * Sorts only the live portion of the backing array in place.
     */
    public void sort() {
        Arrays.sort(values, 0, end);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        LongList longs = (LongList) o;

        // Compare only the live [0, end) range; spare capacity is ignored.
        if (end != longs.end) return false;
        if (!Lng.equals(0, end, values, longs.values)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = 131313;
        result = 31 * result + (values != null ? Lng.hashCode(0, end, values) : 0);
        result = 31 * result + end;
        return result;
    }

    /**
     * Resets the list to an empty state with default capacity.
     */
    public void clear() {
        this.values = new long[10];
        this.end = 0;
    }
}
| |
/*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.packet;
import com.google.common.net.InetAddresses;
import com.google.common.testing.EqualsTester;
import org.junit.Test;
import java.net.InetAddress;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.onlab.junit.ImmutableClassChecker.assertThatClassIsImmutable;
/**
* Tests for class {@link Ip6Address}.
*/
public class Ip6AddressTest {
/**
* Tests the immutability of {@link Ip6Address}.
*/
@Test
public void testImmutable() {
assertThatClassIsImmutable(Ip6Address.class);
}
/**
* Tests the IPv4 address version constant.
*/
@Test
public void testAddressVersion() {
assertThat(Ip6Address.VERSION, is(IpAddress.Version.INET6));
}
/**
* Tests the length of the address in bytes (octets).
*/
@Test
public void testAddrByteLength() {
assertThat(Ip6Address.BYTE_LENGTH, is(16));
}
/**
* Tests the length of the address in bits.
*/
@Test
public void testAddrBitLength() {
assertThat(Ip6Address.BIT_LENGTH, is(128));
}
/**
* Tests returning the IP address version.
*/
@Test
public void testVersion() {
IpAddress ipAddress;
// IPv6
ipAddress = IpAddress.valueOf("::");
assertThat(ipAddress.version(), is(IpAddress.Version.INET6));
}
/**
* Tests returning an IPv6 address as a byte array.
*/
@Test
public void testAddressToOctetsIPv6() {
Ip6Address ipAddress;
byte[] value;
value = new byte[] {0x11, 0x11, 0x22, 0x22,
0x33, 0x33, 0x44, 0x44,
0x55, 0x55, 0x66, 0x66,
0x77, 0x77,
(byte) 0x88, (byte) 0x88};
ipAddress =
Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888");
assertThat(ipAddress.toOctets(), is(value));
value = new byte[] {0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00};
ipAddress = Ip6Address.valueOf("::");
assertThat(ipAddress.toOctets(), is(value));
value = new byte[] {(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff};
ipAddress =
Ip6Address.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
assertThat(ipAddress.toOctets(), is(value));
}
/**
* Tests valueOf() converter for IPv6 byte array.
*/
@Test
public void testValueOfByteArrayIPv6() {
Ip6Address ipAddress;
byte[] value;
value = new byte[] {0x11, 0x11, 0x22, 0x22,
0x33, 0x33, 0x44, 0x44,
0x55, 0x55, 0x66, 0x66,
0x77, 0x77,
(byte) 0x88, (byte) 0x88};
ipAddress = Ip6Address.valueOf(value);
assertThat(ipAddress.toString(),
is("1111:2222:3333:4444:5555:6666:7777:8888"));
value = new byte[] {0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00};
ipAddress = Ip6Address.valueOf(value);
assertThat(ipAddress.toString(), is("::"));
value = new byte[] {(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff};
ipAddress = Ip6Address.valueOf(value);
assertThat(ipAddress.toString(),
is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"));
}
/**
* Tests invalid valueOf() converter for a null array for IPv6.
*/
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullArrayIPv6() {
Ip6Address ipAddress;
byte[] value;
value = null;
ipAddress = Ip6Address.valueOf(value);
}
/**
* Tests invalid valueOf() converger for an array that is too short for
* IPv6.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfShortArrayIPv6() {
Ip6Address ipAddress;
byte[] value;
value = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9};
ipAddress = Ip6Address.valueOf(value);
}
/**
* Tests valueOf() converter for IPv6 byte array and an offset.
*/
@Test
public void testValueOfByteArrayOffsetIPv6() {
Ip6Address ipAddress;
byte[] value;
value = new byte[] {11, 22, 33, // Preamble
0x11, 0x11, 0x22, 0x22,
0x33, 0x33, 0x44, 0x44,
0x55, 0x55, 0x66, 0x66,
0x77, 0x77,
(byte) 0x88, (byte) 0x88,
44, 55}; // Extra bytes
ipAddress = Ip6Address.valueOf(value, 3);
assertThat(ipAddress.toString(),
is("1111:2222:3333:4444:5555:6666:7777:8888"));
value = new byte[] {11, 22, // Preamble
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
33}; // Extra bytes
ipAddress = Ip6Address.valueOf(value, 2);
assertThat(ipAddress.toString(), is("::"));
value = new byte[] {11, 22, // Preamble
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff,
33}; // Extra bytes
ipAddress = Ip6Address.valueOf(value, 2);
assertThat(ipAddress.toString(),
is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"));
}
/**
* Tests invalid valueOf() converger for an array and an invalid offset
* for IPv6.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfArrayInvalidOffsetIPv6() {
Ip6Address ipAddress;
byte[] value;
value = new byte[] {11, 22, 33, // Preamble
0x11, 0x11, 0x22, 0x22,
0x33, 0x33, 0x44, 0x44,
0x55, 0x55, 0x66, 0x66,
0x77, 0x77,
(byte) 0x88, (byte) 0x88,
44, 55}; // Extra bytes
ipAddress = Ip6Address.valueOf(value, 6);
}
/**
* Tests valueOf() converter for IPv6 InetAddress.
*/
@Test
public void testValueOfInetAddressIPv6() {
Ip6Address ipAddress;
InetAddress inetAddress;
inetAddress =
InetAddresses.forString("1111:2222:3333:4444:5555:6666:7777:8888");
ipAddress = Ip6Address.valueOf(inetAddress);
assertThat(ipAddress.toString(),
is("1111:2222:3333:4444:5555:6666:7777:8888"));
inetAddress = InetAddresses.forString("::");
ipAddress = Ip6Address.valueOf(inetAddress);
assertThat(ipAddress.toString(), is("::"));
inetAddress =
InetAddresses.forString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
ipAddress = Ip6Address.valueOf(inetAddress);
assertThat(ipAddress.toString(),
is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"));
}
/**
* Tests valueOf() converter for IPv6 string.
*/
@Test
public void testValueOfStringIPv6() {
Ip6Address ipAddress;
ipAddress =
Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888");
assertThat(ipAddress.toString(),
is("1111:2222:3333:4444:5555:6666:7777:8888"));
ipAddress = Ip6Address.valueOf("::");
assertThat(ipAddress.toString(), is("::"));
ipAddress =
Ip6Address.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
assertThat(ipAddress.toString(),
is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"));
}
/**
* Tests invalid valueOf() converter for a null string.
*/
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullString() {
Ip6Address ipAddress;
String fromString = null;
ipAddress = Ip6Address.valueOf(fromString);
}
/**
* Tests invalid valueOf() converter for an empty string.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfEmptyString() {
Ip6Address ipAddress;
String fromString = "";
ipAddress = Ip6Address.valueOf(fromString);
}
/**
* Tests invalid valueOf() converter for an incorrect string.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfIncorrectString() {
Ip6Address ipAddress;
String fromString = "NoSuchIpAddress";
ipAddress = Ip6Address.valueOf(fromString);
}
/**
* Tests making a mask prefix for a given prefix length for IPv6.
*/
@Test
public void testMakeMaskPrefixIPv6() {
Ip6Address ipAddress;
ipAddress = Ip6Address.makeMaskPrefix(8);
assertThat(ipAddress.toString(), is("ff00::"));
ipAddress = Ip6Address.makeMaskPrefix(120);
assertThat(ipAddress.toString(),
is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00"));
ipAddress = Ip6Address.makeMaskPrefix(0);
assertThat(ipAddress.toString(), is("::"));
ipAddress = Ip6Address.makeMaskPrefix(128);
assertThat(ipAddress.toString(),
is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"));
ipAddress = Ip6Address.makeMaskPrefix(64);
assertThat(ipAddress.toString(), is("ffff:ffff:ffff:ffff::"));
}
/**
* Tests making a mask prefix for an invalid prefix length for IPv6:
* negative prefix length.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidMakeNegativeMaskPrefixIPv6() {
Ip6Address ipAddress;
ipAddress = Ip6Address.makeMaskPrefix(-1);
}
/**
* Tests making a mask prefix for an invalid prefix length for IPv6:
* too long prefix length.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidMakeTooLongMaskPrefixIPv6() {
Ip6Address ipAddress;
ipAddress = Ip6Address.makeMaskPrefix(129);
}
/**
* Tests making of a masked address for IPv6.
*/
@Test
public void testMakeMaskedAddressIPv6() {
Ip6Address ipAddress =
Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8885");
Ip6Address ipAddressMasked;
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, 8);
assertThat(ipAddressMasked.toString(), is("1100::"));
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, 120);
assertThat(ipAddressMasked.toString(),
is("1111:2222:3333:4444:5555:6666:7777:8800"));
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, 0);
assertThat(ipAddressMasked.toString(), is("::"));
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, 128);
assertThat(ipAddressMasked.toString(),
is("1111:2222:3333:4444:5555:6666:7777:8885"));
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, 64);
assertThat(ipAddressMasked.toString(), is("1111:2222:3333:4444::"));
}
/**
* Tests making of a masked address for invalid prefix length for IPv6:
* negative prefix length.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidMakeNegativeMaskedAddressIPv6() {
Ip6Address ipAddress =
Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8885");
Ip6Address ipAddressMasked;
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, -1);
}
/**
* Tests making of a masked address for an invalid prefix length for IPv6:
* too long prefix length.
*/
@Test(expected = IllegalArgumentException.class)
public void testInvalidMakeTooLongMaskedAddressIPv6() {
Ip6Address ipAddress =
Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8885");
Ip6Address ipAddressMasked;
ipAddressMasked = Ip6Address.makeMaskedAddress(ipAddress, 129);
}
/**
* Tests comparison of {@link Ip6Address} for IPv6.
*/
@Test
public void testComparisonIPv6() {
Ip6Address addr1, addr2, addr3, addr4;
addr1 = Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888");
addr2 = Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888");
addr3 = Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8887");
addr4 = Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8889");
assertTrue(addr1.compareTo(addr2) == 0);
assertTrue(addr1.compareTo(addr3) > 0);
assertTrue(addr1.compareTo(addr4) < 0);
addr1 = Ip6Address.valueOf("ffff:2222:3333:4444:5555:6666:7777:8888");
addr2 = Ip6Address.valueOf("ffff:2222:3333:4444:5555:6666:7777:8888");
addr3 = Ip6Address.valueOf("ffff:2222:3333:4444:5555:6666:7777:8887");
addr4 = Ip6Address.valueOf("ffff:2222:3333:4444:5555:6666:7777:8889");
assertTrue(addr1.compareTo(addr2) == 0);
assertTrue(addr1.compareTo(addr3) > 0);
assertTrue(addr1.compareTo(addr4) < 0);
addr1 = Ip6Address.valueOf("ffff:2222:3333:4444:5555:6666:7777:8888");
addr2 = Ip6Address.valueOf("ffff:2222:3333:4444:5555:6666:7777:8888");
addr3 = Ip6Address.valueOf("ffff:2222:3333:4443:5555:6666:7777:8888");
addr4 = Ip6Address.valueOf("ffff:2222:3333:4445:5555:6666:7777:8888");
assertTrue(addr1.compareTo(addr2) == 0);
assertTrue(addr1.compareTo(addr3) > 0);
assertTrue(addr1.compareTo(addr4) < 0);
}
/**
 * Tests equality of {@link Ip6Address} for IPv6.
 */
@Test
public void testEqualityIPv6() {
    // Each addEqualityGroup() pair must be equal to each other and unequal
    // to every member of the other groups; EqualsTester also checks the
    // equals()/hashCode() contract for each group.
    new EqualsTester()
        .addEqualityGroup(
            Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888"),
            Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888"))
        .addEqualityGroup(
            Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:888a"),
            Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:888a"))
        .addEqualityGroup(
            Ip6Address.valueOf("::"),
            Ip6Address.valueOf("::"))
        .addEqualityGroup(
            Ip6Address.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"),
            Ip6Address.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"))
        .testEquals();
}
/**
 * Tests object string representation for IPv6.
 */
@Test
public void testToStringIPv6() {
    // Every input below is already in canonical form, so parsing it with
    // valueOf() and calling toString() must round-trip to the same string.
    final String[] canonicalForms = {
        "1111:2222:3333:4444:5555:6666:7777:8888",
        "1111::8888",
        "1111::",
        "::8888",
        "::",
        "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
    };
    for (String canonical : canonicalForms) {
        assertThat(Ip6Address.valueOf(canonical).toString(), is(canonical));
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.lucene;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.INDEX_NAME;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.REGION_NAME;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import java.util.Set;
import java.util.stream.IntStream;
import junitparams.Parameters;
import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.control.RebalanceFactory;
import org.apache.geode.cache.control.RebalanceOperation;
import org.apache.geode.cache.control.RebalanceResults;
import org.apache.geode.cache.control.ResourceManager;
import org.apache.geode.cache.lucene.internal.LuceneIndexFactoryImpl;
import org.apache.geode.cache.lucene.internal.LuceneServiceImpl;
import org.apache.geode.cache.partition.PartitionMemberInfo;
import org.apache.geode.cache.partition.PartitionRebalanceInfo;
import org.apache.geode.cache.partition.PartitionRegionInfo;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.junit.categories.LuceneTest;
import org.apache.geode.test.junit.runners.GeodeParamsRunner;
@Category({LuceneTest.class})
@RunWith(GeodeParamsRunner.class)
public class RebalanceWithRedundancyWithRegionCreatedBeforeReindexDUnitTest
    extends LuceneQueriesAccessorBase {

  // Distributed (DUnit) test: the partitioned region is created and populated
  // first, then a Lucene index is built by reindexing the stored data while a
  // rebalance and concurrent puts are in flight; query results are verified at
  // the end.

  private static final Logger logger = LogService.getLogger();

  // Two extra datastore VMs on top of dataStore1/dataStore2 provided by the
  // base class (presumably — the base class is not visible here; confirm).
  protected VM dataStore3;
  protected VM dataStore4;

  /** Acquires the two additional VMs after the base-class setup runs. */
  @Override
  public void postSetUp() throws Exception {
    super.postSetUp();
    dataStore3 = Host.getHost(0).getVM(2);
    dataStore4 = Host.getHost(0).getVM(3);
  }

  /** Uses 113 buckets so the rebalance has many buckets to move. */
  @Before
  public void setNumBuckets() {
    NUM_BUCKETS = 113;
  }

  /** Enables the Lucene reindex feature flag on every datastore VM. */
  @Before
  public void setLuceneReindexFlag() {
    dataStore1.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = true);
    dataStore2.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = true);
    dataStore3.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = true);
    dataStore4.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = true);
  }

  /** Region types exercised by the parameterized test below. */
  @Override
  protected RegionTestableType[] getListOfRegionTestTypes() {
    return new RegionTestableType[] {RegionTestableType.PARTITION,
        RegionTestableType.PARTITION_REDUNDANT};
  }

  /** Clears the Lucene reindex feature flag on every datastore VM. */
  @After
  public void clearLuceneReindexFlag() {
    dataStore1.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = false);
    dataStore2.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = false);
    dataStore3.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = false);
    dataStore4.invoke(() -> LuceneServiceImpl.LUCENE_REINDEX = false);
  }

  // Creates the Lucene index on the already-existing region, honoring the
  // LUCENE_REINDEX flag set in the @Before hook.
  protected SerializableRunnable createIndex = new SerializableRunnable("createIndex") {
    @Override
    public void run() {
      LuceneService luceneService = LuceneServiceProvider.get(getCache());
      ((LuceneIndexFactoryImpl) luceneService.createIndexFactory()).addField("text")
          .create(INDEX_NAME, REGION_NAME, LuceneServiceImpl.LUCENE_REINDEX);
    }
  };

  // Starts a cluster rebalance and logs a human-readable summary when done.
  protected SerializableRunnable rebalance = new SerializableRunnable("rebalance") {
    @Override
    public void run() throws InterruptedException {
      Cache cache = getCache();
      cache.getRegion(REGION_NAME);
      ResourceManager resMan = cache.getResourceManager();
      RebalanceFactory factory = resMan.createRebalanceFactory();
      logger.info("Starting rebalance");
      RebalanceResults rebalanceResults = null;
      RebalanceOperation rebalanceOp = factory.start();
      rebalanceResults = rebalanceOp.getResults();
      // NOTE(review): getResults() blocks until the operation completes, so
      // this await is presumably redundant — confirm before removing.
      await().until(rebalanceOp::isDone);
      logger.info("Rebalance completed: "
          + RebalanceResultsToString(rebalanceResults, "Rebalance completed"));
    }
  };

  // Concurrent workload run alongside the rebalance: one put per bucket.
  protected SerializableRunnable doConcOps = new SerializableRunnable("doConcOps") {
    @Override
    public void run() {
      putEntryInEachBucket(113);
    }
  };

  /**
   * Adds a fourth datastore (giving the rebalance work to do), starts the
   * rebalance asynchronously, optionally runs concurrent puts, then reindexes
   * on all four VMs in parallel and joins/checks every async invocation.
   */
  protected void createIndexAndRebalance(RegionTestableType regionTestType,
      SerializableRunnableIF createIndex, boolean doOps) throws Exception {
    // give rebalance some work to do by adding another vm
    // dataStore4.invoke(() -> (createIndex));
    dataStore4.invoke(() -> initDataStore(regionTestType));
    AsyncInvocation aiRebalancer = dataStore1.invokeAsync(rebalance);
    if (doOps) {
      // The puts complete before reindexing starts; only the rebalance
      // remains concurrent with index creation below.
      AsyncInvocation aiConcOps = dataStore1.invokeAsync(doConcOps);
      aiConcOps.join();
      aiConcOps.checkException();
    }
    // re-index stored data
    AsyncInvocation ai1 = dataStore1.invokeAsync(createIndex);
    AsyncInvocation ai2 = dataStore2.invokeAsync(createIndex);
    AsyncInvocation ai3 = dataStore3.invokeAsync(createIndex);
    AsyncInvocation ai4 = dataStore4.invokeAsync(createIndex);
    aiRebalancer.join();
    aiRebalancer.checkException();
    ai1.join();
    ai2.join();
    ai3.join();
    ai4.join();
    ai1.checkException();
    ai2.checkException();
    ai3.checkException();
    ai4.checkException();
  }

  /**
   * End-to-end check: populate half the buckets, reindex while rebalancing
   * plus concurrent puts, then query and expect a hit per bucket.
   */
  @Test
  @Parameters(method = "getListOfRegionTestTypes")
  public void returnCorrectResultsWithConcurrentOpsAndRebalance(RegionTestableType regionTestType)
      throws Exception {
    createAndPopulateRegion(regionTestType, NUM_BUCKETS / 2);
    createIndexAndRebalance(regionTestType, createIndex, true);
    waitForFlushBeforeExecuteTextSearch(dataStore3, 60000);
    executeTextSearch(dataStore3, "world", "text", NUM_BUCKETS);
  }

  /** Creates the region on three datastores and seeds the first numEntries keys. */
  private void createAndPopulateRegion(RegionTestableType regionTestType, int numEntries) {
    dataStore1.invoke(() -> initDataStore(regionTestType));
    dataStore2.invoke(() -> initDataStore(regionTestType));
    dataStore3.invoke(() -> initDataStore(regionTestType));
    putEntryInEachBucket(numEntries);
  }

  /** Puts one "hello world" entry per integer key [0, numBuckets) from dataStore3. */
  protected void putEntryInEachBucket(int numBuckets) {
    dataStore3.invoke(() -> {
      final Cache cache = getCache();
      Region<Object, Object> region = cache.getRegion(REGION_NAME);
      IntStream.range(0, numBuckets).forEach(i -> region.put(i, new TestObject("hello world")));
    });
  }

  /**
   * Renders a RebalanceResults as a multi-line summary string: totals for
   * bucket creates/transfers and primary transfers, followed by per-region
   * details and the total elapsed time.
   */
  public static String RebalanceResultsToString(RebalanceResults results, String title) {
    if (results == null) {
      return "null";
    }
    StringBuilder aStr = new StringBuilder();
    aStr.append("Rebalance results (" + title + ") totalTime: "
        + valueToString(results.getTotalTime()) + "\n");
    // bucketCreates
    aStr.append(
        "totalBucketCreatesCompleted: " + valueToString(results.getTotalBucketCreatesCompleted()));
    aStr.append(" totalBucketCreateBytes: " + valueToString(results.getTotalBucketCreateBytes()));
    aStr.append(
        " totalBucketCreateTime: " + valueToString(results.getTotalBucketCreateTime()) + "\n");
    // bucketTransfers
    aStr.append("totalBucketTransfersCompleted: "
        + valueToString(results.getTotalBucketTransfersCompleted()));
    aStr.append(
        " totalBucketTransferBytes: " + valueToString(results.getTotalBucketTransferBytes()));
    aStr.append(
        " totalBucketTransferTime: " + valueToString(results.getTotalBucketTransferTime()) + "\n");
    // primaryTransfers
    aStr.append("totalPrimaryTransfersCompleted: "
        + valueToString(results.getTotalPrimaryTransfersCompleted()));
    aStr.append(" totalPrimaryTransferTime: " + valueToString(results.getTotalPrimaryTransferTime())
        + "\n");
    // PartitionRebalanceDetails (per region)
    Set<PartitionRebalanceInfo> prdSet = results.getPartitionRebalanceDetails();
    for (PartitionRebalanceInfo prd : prdSet) {
      aStr.append(partitionRebalanceDetailsToString(prd));
    }
    aStr.append("total time (ms): " + valueToString(results.getTotalTime()));
    String returnStr = aStr.toString();
    return returnStr;
  }

  /**
   * Renders the per-region rebalance details: create/transfer counters plus
   * member details captured before and after the rebalance.
   */
  private static String partitionRebalanceDetailsToString(PartitionRebalanceInfo details) {
    if (details == null) {
      return "null\n";
    }
    StringBuilder aStr = new StringBuilder();
    aStr.append("PartitionedRegionDetails for region named " + getRegionName(details) + " time: "
        + valueToString(details.getTime()) + "\n");
    // bucketCreates
    aStr.append("bucketCreatesCompleted: " + valueToString(details.getBucketCreatesCompleted()));
    aStr.append(" bucketCreateBytes: " + valueToString(details.getBucketCreateBytes()));
    aStr.append(" bucketCreateTime: " + valueToString(details.getBucketCreateTime()) + "\n");
    // bucketTransfers
    aStr.append(
        "bucketTransfersCompleted: " + valueToString(details.getBucketTransfersCompleted()));
    aStr.append(" bucketTransferBytes: " + valueToString(details.getBucketTransferBytes()));
    aStr.append(" bucketTransferTime: " + valueToString(details.getBucketTransferTime()) + "\n");
    // primaryTransfers
    aStr.append(
        "PrimaryTransfersCompleted: " + valueToString(details.getPrimaryTransfersCompleted()));
    aStr.append(" PrimaryTransferTime: " + valueToString(details.getPrimaryTransferTime()) + "\n");
    // PartitionMemberDetails (before)
    aStr.append("PartitionedMemberDetails (before)\n");
    Set<PartitionMemberInfo> pmdSet = details.getPartitionMemberDetailsBefore();
    for (PartitionMemberInfo pmd : pmdSet) {
      aStr.append(partitionMemberDetailsToString(pmd));
    }
    // PartitionMemberDetails (after)
    aStr.append("PartitionedMemberDetails (after)\n");
    pmdSet = details.getPartitionMemberDetailsAfter();
    for (PartitionMemberInfo pmd : pmdSet) {
      aStr.append(partitionMemberDetailsToString(pmd));
    }
    return aStr.toString();
  }

  /**
   * Renders a PartitionRegionInfo snapshot: bucket counts, redundancy
   * configuration, per-member details, and colocation target.
   */
  public static String partitionedRegionDetailsToString(PartitionRegionInfo prd) {
    if (prd == null) {
      return "null\n";
    }
    StringBuilder aStr = new StringBuilder();
    aStr.append("PartitionedRegionDetails for region named " + getRegionName(prd) + "\n");
    aStr.append(" configuredBucketCount: " + valueToString(prd.getConfiguredBucketCount()) + "\n");
    aStr.append(" createdBucketCount: " + valueToString(prd.getCreatedBucketCount()) + "\n");
    aStr.append(
        " lowRedundancyBucketCount: " + valueToString(prd.getLowRedundancyBucketCount()) + "\n");
    aStr.append(
        " configuredRedundantCopies: " + valueToString(prd.getConfiguredRedundantCopies()) + "\n");
    aStr.append(" actualRedundantCopies: " + valueToString(prd.getActualRedundantCopies()) + "\n");
    // memberDetails
    Set<PartitionMemberInfo> pmd = prd.getPartitionMemberInfo();
    for (PartitionMemberInfo memberDetails : pmd) {
      aStr.append(partitionMemberDetailsToString(memberDetails));
    }
    // colocatedWithDetails
    String colocatedWith = prd.getColocatedWith();
    aStr.append(" colocatedWith: " + colocatedWith + "\n");
    String returnStr = aStr.toString();
    return returnStr;
  }

  /**
   * Renders one member's stats: configured max memory, used size (with a
   * utilization percentage), bucket count and primary count.
   */
  private static String partitionMemberDetailsToString(PartitionMemberInfo pmd) {
    StringBuilder aStr = new StringBuilder();
    long localMaxMemory = pmd.getConfiguredMaxMemory();
    long size = pmd.getSize();
    aStr.append(" Member Details for: " + pmd.getDistributedMember() + "\n");
    aStr.append(" configuredMaxMemory: " + valueToString(localMaxMemory));
    // Double division: a zero localMaxMemory yields Infinity/NaN in the
    // percentage rather than an exception.
    double inUse = (double) size / localMaxMemory;
    double heapUtilization = inUse * 100;
    aStr.append(" size: " + size + " (" + valueToString(heapUtilization) + "%)");
    aStr.append(" bucketCount: " + valueToString(pmd.getBucketCount()));
    aStr.append(" primaryCount: " + valueToString(pmd.getPrimaryCount()) + "\n");
    return aStr.toString();
  }

  /**
   * Convert the given long to a String. (The previous doc claimed negative
   * values are flagged; the implementation does no such flagging.)
   */
  private static String valueToString(long value) {
    String returnStr = "" + value;
    return returnStr;
  }

  /**
   * Convert the given double to a String. (The previous doc claimed negative
   * values are flagged; the implementation does no such flagging.)
   */
  private static String valueToString(double value) {
    String returnStr = "" + value;
    return returnStr;
  }

  /** Strips the leading "/" from the region path of a PartitionRegionInfo. */
  public static String getRegionName(PartitionRegionInfo prd) {
    return prd.getRegionPath().substring(1);
  }

  /** Strips the leading "/" from the region path of a PartitionRebalanceInfo. */
  public static String getRegionName(PartitionRebalanceInfo prd) {
    return prd.getRegionPath().substring(1);
  }
}
| |
package org.twitpulse.sentiment.classifier.baseline;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
import org.twitpulse.sentiment.TwitterSentimentExample;
import org.twitpulse.sentiment.TwitterSentimentPrediction;
import org.twitpulse.sentiment.classifier.TwitterSentimentClassifier;
import org.twitpulse.sentiment.classifier.baseline.pipeline.TwitterSentimentExample2BaselineFeatureTokens;
import org.twitpulse.sentiment.classifier.baseline.pipeline.TwitterSentimentExampleIterator;
import cc.mallet.classify.Classification;
import cc.mallet.classify.Classifier;
import cc.mallet.classify.MaxEntTrainer;
import cc.mallet.classify.Trial;
import cc.mallet.pipe.FeatureSequence2FeatureVector;
import cc.mallet.pipe.Pipe;
import cc.mallet.pipe.SerialPipes;
import cc.mallet.pipe.Target2Label;
import cc.mallet.pipe.TokenSequence2FeatureSequence;
import cc.mallet.types.Alphabet;
import cc.mallet.types.InstanceList;
import cc.mallet.types.LabelAlphabet;
import cc.mallet.util.Randoms;
/**
 * Baseline twitter sentiment predictor backed by a Mallet maximum-entropy
 * classifier.
 *
 * @author kevd1337
 */
public class BaselineSentimentPredictor implements TwitterSentimentClassifier {

    /** Maximum number of MaxEnt training iterations. */
    private static final int DEFAULT_MAX_TRAINING_ROUNDS = 50;

    /**
     * Minimum confidence required for a definitive (non-neutral) prediction;
     * null disables thresholding.
     */
    private Double confidenceThreshold;

    /** Trained Mallet classifier; null until train() or loadModel() runs. */
    private Classifier underlyingClassifier;

    /**
     * Constructor with no confidence threshold (all predictions are kept as-is).
     */
    public BaselineSentimentPredictor() {
        this(null);
    }

    /**
     * Constructor
     *
     * @param confidenceThreshold
     *            minimum confidence required for a definitive sentiment prediction, otherwise prediction will be set to
     *            neutral
     */
    public BaselineSentimentPredictor(Double confidenceThreshold) {
        this.confidenceThreshold = confidenceThreshold;
    }

    @Override
    public void train(List<TwitterSentimentExample> trainingExamples) {
        InstanceList instances = new InstanceList(buildTrainPipe());
        instances.addThruPipe(new TwitterSentimentExampleIterator(trainingExamples));
        System.out.println("Setting up training and testing data set splits...");
        // 90/10 train/test split; the third bucket is intentionally empty.
        InstanceList[] instanceLists = instances.split(new Randoms(), new double[] { 0.90, 0.10, 0.0 });
        LabelAlphabet labelAlpha = instances.get(0).getLabeling().getLabelAlphabet();
        System.out.println("Starting Training...");
        MaxEntTrainer trainer = new MaxEntTrainer();
        Classifier classifier = trainer.train(instanceLists[0], DEFAULT_MAX_TRAINING_ROUNDS);
        // Report metrics on the training split, then on the held-out split.
        // (The duplicated reporting loops were folded into one helper; the
        // "evalution" typo in the console message is fixed.)
        reportTrial("Training evaluation:", new Trial(classifier, instanceLists[0]), labelAlpha);
        reportTrial("Testing evaluation:", new Trial(classifier, instanceLists[1]), labelAlpha);
        System.out.println("\nTraining Done.");
        this.underlyingClassifier = classifier;
    }

    /**
     * Helper that prints overall accuracy plus per-label f1/recall/precision for a trial.
     *
     * @param title
     *            heading printed above the metrics
     * @param trial
     *            evaluated trial to report on
     * @param labelAlpha
     *            label vocabulary whose entries are reported individually
     */
    private static void reportTrial(String title, Trial trial, LabelAlphabet labelAlpha) {
        System.out.println("\n" + title);
        System.out.println("============================");
        System.out.println("Overall accuracy:" + trial.getAccuracy());
        for (Object labelEntry : labelAlpha.toArray()) {
            System.out.println("f1(" + labelEntry + "): " + trial.getF1(labelEntry));
            System.out.println("recall(" + labelEntry + "): " + trial.getRecall(labelEntry));
            System.out.println("precision(" + labelEntry + "): " + trial.getPrecision(labelEntry));
        }
    }

    @Override
    public List<TwitterSentimentPrediction> classify(List<TwitterSentimentExample> examples) {
        if (underlyingClassifier == null) {
            throw new IllegalStateException("Predictor has not been trained yet; cannot classify.");
        }
        synchronized (underlyingClassifier) {
            InstanceList instances = new InstanceList(buildClassifyPipe(underlyingClassifier.getAlphabet()));
            instances.addThruPipe(new TwitterSentimentExampleIterator(examples));
            ArrayList<Classification> classificationResults = underlyingClassifier.classify(instances);
            List<TwitterSentimentPrediction> predictions = new ArrayList<TwitterSentimentPrediction>();
            int i = 0;
            for (Classification classification : classificationResults) {
                Long tweetId = examples.get(i).getTweetId();
                String tweet = examples.get(i).getTweet();
                Integer sentiment = Integer.parseInt(classification.getLabelVector().getBestLabel().toString());
                Double confidence = classification.getLabelVector().getBestValue();
                TwitterSentimentPrediction prediction = null;
                // Below-threshold non-neutral predictions are forced to
                // neutral (sentiment 2) with full confidence.
                if (sentiment != 2 && confidenceThreshold != null && confidence < confidenceThreshold) {
                    sentiment = 2;
                    confidence = 1.0;
                }
                prediction = new TwitterSentimentPrediction(tweetId, tweet, sentiment, confidence);
                predictions.add(prediction);
                i++;
            }
            return predictions;
        }
    }

    /**
     * Saves model for predictor to file; note that threshold isn't saved
     *
     * @param targetFilePath
     *            target file path for model
     *
     * @throws FileNotFoundException
     * @throws IOException
     */
    public void saveModel(String targetFilePath) throws FileNotFoundException, IOException {
        if (underlyingClassifier == null) {
            throw new IllegalStateException("Predictor has not been trained yet; cannot be saved.");
        }
        synchronized (underlyingClassifier) {
            saveClassifier(underlyingClassifier, targetFilePath);
        }
    }

    /**
     * Loads model for predictor; note that threshold isn't set in the model
     *
     * @param modelFilePath
     *            path to model file
     *
     * @throws FileNotFoundException
     * @throws ClassNotFoundException
     * @throws IOException
     */
    public void loadModel(String modelFilePath) throws FileNotFoundException, ClassNotFoundException, IOException {
        // NOTE(review): synchronizing on the object being replaced does not
        // protect concurrent classify() callers that grab the new reference;
        // kept as-is to avoid changing lock semantics, but worth revisiting.
        if (underlyingClassifier != null) {
            synchronized (underlyingClassifier) {
                this.underlyingClassifier = loadClassifier(modelFilePath);
            }
        } else {
            this.underlyingClassifier = loadClassifier(modelFilePath);
        }
    }

    /**
     * Helper that builds training mallet pipeline
     *
     * @return mallet pipeline
     */
    private static Pipe buildTrainPipe() {
        List<Pipe> pipeList = new ArrayList<Pipe>();
        pipeList.add(new TwitterSentimentExample2BaselineFeatureTokens());
        pipeList.add(new TokenSequence2FeatureSequence());
        pipeList.add(new Target2Label());
        pipeList.add(new FeatureSequence2FeatureVector());
        // pipeList.add(new PrintInputAndTarget());
        return new SerialPipes(pipeList);
    }

    /**
     * Helper that builds classifying mallet pipeline
     *
     * @param dataAlphabet
     *            feature vocabulary for training data set
     *
     * @return mallet pipeline
     */
    private static Pipe buildClassifyPipe(Alphabet dataAlphabet) {
        List<Pipe> pipeList = new ArrayList<Pipe>();
        pipeList.add(new TwitterSentimentExample2BaselineFeatureTokens());
        pipeList.add(new TokenSequence2FeatureSequence(dataAlphabet));
        pipeList.add(new FeatureSequence2FeatureVector());
        return new SerialPipes(pipeList);
    }

    /**
     * Helper that saves underlying classifier
     *
     * @param classifier
     *            classifier to save
     * @param modelFilePath
     *            target model file path
     *
     * @throws FileNotFoundException
     * @throws IOException
     */
    private static void saveClassifier(Classifier classifier, String modelFilePath) throws FileNotFoundException,
            IOException {
        // try-with-resources ensures the stream is closed even when
        // serialization fails (the previous version leaked it on exception).
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File(modelFilePath)))) {
            oos.writeObject(classifier);
        }
    }

    /**
     * Helper that loads a new underlying classifier
     *
     * @param modelFilePath
     *            model file path
     *
     * @return classifier requested
     *
     * @throws FileNotFoundException
     * @throws IOException
     * @throws ClassNotFoundException
     */
    private static Classifier loadClassifier(String modelFilePath) throws FileNotFoundException, IOException,
            ClassNotFoundException {
        // try-with-resources ensures the stream is closed even when
        // deserialization fails (the previous version leaked it on exception).
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(modelFilePath)))) {
            return (Classifier) ois.readObject();
        }
    }
}
| |
/*
* Licensed to the University Corporation for Advanced Internet Development,
* Inc. (UCAID) under one or more contributor license agreements. See the
* NOTICE file distributed with this work for additional information regarding
* copyright ownership. The UCAID licenses this file to You under the Apache
* License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opensaml.saml1.binding.decoding;
import java.util.List;
import javax.xml.namespace.QName;
import org.opensaml.common.SAMLObject;
import org.opensaml.common.binding.SAMLMessageContext;
import org.opensaml.common.binding.artifact.SAMLArtifactMap;
import org.opensaml.common.xml.SAMLConstants;
import org.opensaml.ws.message.MessageContext;
import org.opensaml.ws.message.decoder.MessageDecodingException;
import org.opensaml.ws.soap.soap11.Envelope;
import org.opensaml.ws.soap.soap11.Header;
import org.opensaml.ws.transport.http.HTTPInTransport;
import org.opensaml.xml.AttributeExtensibleXMLObject;
import org.opensaml.xml.XMLObject;
import org.opensaml.xml.parse.ParserPool;
import org.opensaml.xml.util.DatatypeHelper;
import org.opensaml.xml.util.LazyList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * SAML 1.1 HTTP SOAP 1.1 binding decoder.
 */
public class HTTPSOAP11Decoder extends BaseSAML1MessageDecoder {

    /** Class logger. */
    private final Logger log = LoggerFactory.getLogger(HTTPSOAP11Decoder.class);

    /** QNames of understood SOAP headers. */
    private List<QName> understoodHeaders;

    /** QName of SOAP mustUnderstand header attribute. */
    private final QName soapMustUnderstand = new QName(SAMLConstants.SOAP11ENV_NS, "mustUnderstand");

    /** Constructor. */
    public HTTPSOAP11Decoder() {
        super();
        understoodHeaders = new LazyList<QName>();
    }

    /**
     * Constructor.
     *
     * @param pool parser pool used to deserialize messages
     */
    public HTTPSOAP11Decoder(ParserPool pool) {
        super(pool);
        understoodHeaders = new LazyList<QName>();
    }

    /**
     * Constructor.
     *
     * @param map Artifact to SAML map
     *
     * @deprecated
     */
    public HTTPSOAP11Decoder(SAMLArtifactMap map) {
        super(map);
        understoodHeaders = new LazyList<QName>();
    }

    /**
     * Constructor.
     *
     * @param map used to map artifacts to SAML
     * @param pool parser pool used to deserialize messages
     *
     * @deprecated
     */
    public HTTPSOAP11Decoder(SAMLArtifactMap map, ParserPool pool) {
        super(map, pool);
        understoodHeaders = new LazyList<QName>();
    }

    /** {@inheritDoc} */
    public String getBindingURI() {
        return SAMLConstants.SAML1_SOAP11_BINDING_URI;
    }

    /**
     * Gets the SOAP header names that are understood by the application.
     *
     * @return SOAP header names that are understood by the application
     */
    public List<QName> getUnderstoodHeaders() {
        return understoodHeaders;
    }

    /**
     * Sets the SOAP header names that are understood by the application.
     *
     * @param headerNames SOAP header names that are understood by the application
     */
    public void setUnderstoodHeaders(List<QName> headerNames) {
        understoodHeaders.clear();
        if (headerNames != null) {
            understoodHeaders.addAll(headerNames);
        }
    }

    /** {@inheritDoc} */
    protected void doDecode(MessageContext messageContext) throws MessageDecodingException {
        // Validate context and transport types before casting.
        if (!(messageContext instanceof SAMLMessageContext)) {
            log.error("Invalid message context type, this decoder only support SAMLMessageContext");
            throw new MessageDecodingException(
                    "Invalid message context type, this decoder only support SAMLMessageContext");
        }

        if (!(messageContext.getInboundMessageTransport() instanceof HTTPInTransport)) {
            log.error("Invalid inbound message transport type, this decoder only support HTTPInTransport");
            throw new MessageDecodingException(
                    "Invalid inbound message transport type, this decoder only support HTTPInTransport");
        }

        SAMLMessageContext samlMsgCtx = (SAMLMessageContext) messageContext;

        HTTPInTransport inTransport = (HTTPInTransport) samlMsgCtx.getInboundMessageTransport();
        // SOAP 1.1 binding requires HTTP POST. (Typo "deocoder" fixed in the
        // error message.)
        if (!inTransport.getHTTPMethod().equalsIgnoreCase("POST")) {
            throw new MessageDecodingException("This message decoder only supports the HTTP POST method");
        }

        log.debug("Unmarshalling SOAP message");
        Envelope soapMessage = (Envelope) unmarshallMessage(inTransport.getIncomingStream());
        samlMsgCtx.setInboundMessage(soapMessage);

        // Enforce mustUnderstand semantics on any present SOAP headers.
        Header messageHeader = soapMessage.getHeader();
        if (messageHeader != null) {
            checkUnderstoodSOAPHeaders(messageHeader.getUnknownXMLObjects());
        }

        // The SOAP body must carry exactly one child: the SAML message.
        List<XMLObject> soapBodyChildren = soapMessage.getBody().getUnknownXMLObjects();
        if (soapBodyChildren.size() != 1) {
            log.error("Unexpected number of children in the SOAP body, " + soapBodyChildren.size()
                    + ". Unable to extract SAML message");
            throw new MessageDecodingException(
                    "Unexpected number of children in the SOAP body, unable to extract SAML message");
        }

        XMLObject incomingMessage = soapBodyChildren.get(0);
        if (!(incomingMessage instanceof SAMLObject)) {
            log.error("Unexpected SOAP body content. Expected a SAML request but received {}", incomingMessage
                    .getElementQName());
            throw new MessageDecodingException("Unexpected SOAP body content. Expected a SAML request but received "
                    + incomingMessage.getElementQName());
        }
        SAMLObject samlMessage = (SAMLObject) incomingMessage;

        log.debug("Decoded SOAP message which included SAML message of type {}", samlMessage.getElementQName());
        samlMsgCtx.setInboundSAMLMessage(samlMessage);

        populateMessageContext(samlMsgCtx);
    }

    /**
     * Checks that, if any SOAP headers, require understand that they are in the understood header list.
     *
     * @param headers SOAP headers to check
     *
     * @throws MessageDecodingException thrown if a SOAP header requires understanding but is not understood by the
     *             decoder
     */
    protected void checkUnderstoodSOAPHeaders(List<XMLObject> headers) throws MessageDecodingException {
        if (headers == null || headers.isEmpty()) {
            return;
        }

        AttributeExtensibleXMLObject attribExtensObject;
        for (XMLObject header : headers) {
            if (header instanceof AttributeExtensibleXMLObject) {
                attribExtensObject = (AttributeExtensibleXMLObject) header;
                // mustUnderstand="1" obliges the receiver to process the
                // header; fail if it is not in the understood list.
                if (DatatypeHelper.safeEquals("1", attribExtensObject.getUnknownAttributes().get(soapMustUnderstand))) {
                    if (!understoodHeaders.contains(header.getElementQName())) {
                        throw new MessageDecodingException("SOAP decoder encountered a header, "
                                + header.getElementQName()
                                + ", that requires understanding however this decoder does not understand that header");
                    }
                }
            }
        }
    }

    /** {@inheritDoc} */
    protected boolean isIntendedDestinationEndpointURIRequired(SAMLMessageContext samlMsgCtx) {
        return false;
    }
}
| |
package mobi.anoda.archinamon.kernel.persefone.ui.dialog;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentManager;
import android.view.ContextThemeWrapper;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ListView;
import org.intellij.lang.annotations.MagicConstant;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import mobi.anoda.archinamon.kernel.persefone.R;
import mobi.anoda.archinamon.kernel.persefone.annotation.Implement;
import mobi.anoda.archinamon.kernel.persefone.signal.broadcast.BroadcastFilter;
import mobi.anoda.archinamon.kernel.persefone.signal.broadcast.Broadcastable;
import mobi.anoda.archinamon.kernel.persefone.ui.TaggedView;
import mobi.anoda.archinamon.kernel.persefone.ui.adapter.AbstractAdapter;
import mobi.anoda.archinamon.kernel.persefone.ui.async.binder.UiAffectionChain;
import mobi.anoda.archinamon.kernel.persefone.ui.context.StableContext;
import mobi.anoda.archinamon.kernel.persefone.ui.delegate.ActivityLauncher;
import mobi.anoda.archinamon.kernel.persefone.ui.delegate.BroadcastBus;
import mobi.anoda.archinamon.kernel.persefone.ui.delegate.DbLoader;
import mobi.anoda.archinamon.kernel.persefone.ui.delegate.SoftKeyboard;
import mobi.anoda.archinamon.kernel.persefone.utils.LogHelper;
/**
* author: Archinamon project: FavorMe
*/
public abstract class AbstractDialog extends DialogFragment implements TaggedView {
/**
 * Callback that lets a caller inject custom state into the dialog's
 * inflated body view before it is shown. (The redundant {@code static}
 * modifier was removed: member interfaces are implicitly static.)
 */
public interface ICustomInjection {

    /**
     * Performs the injection on the dialog body.
     *
     * @param body the dialog's content view; may be {@code null}
     * @throws IllegalAccessException if the implementation cannot access a required member
     */
    void makeInjection(@Nullable final View body) throws IllegalAccessException;
}
// Dialog flavors supported by this fragment; the constant names indicate the
// intended presentation (alert, progress, list, prompt, or a fully custom
// layout). The concrete build logic per flavor is not visible in this chunk.
protected enum Popup {
    ALERT,
    PROGRESS_SIMPLE,
    PROGRESS_CANCELABLE,
    LIST_SELECTOR,
    LIST_MULTICHECK,
    PROMPT_YES_NO,
    PROMPT_EXTENDED,
    CUSTOM
}
protected static enum Theme {
LIGHT(android.R.style.Theme_Holo_Light_Dialog_NoActionBar),
DARK(android.R.style.Theme_Holo_Dialog_NoActionBar);
private final int mThemeId;
private Theme(int themeId) {
mThemeId = themeId;
}
public int theme() {
return mThemeId;
}
}
/**
 * Simple id/label holder backing dialog list adapters; {@link #toString()}
 * returns the label so default adapter rendering displays it.
 */
protected static final class ListElem {

    /** Optional numeric id; remains null until {@link #setId(int)} is called. */
    private Integer mElementId;

    /** Display label returned by {@link #toString()}. */
    private String mLabel;

    public ListElem() {
    }

    public void setId(int id) {
        mElementId = id;
    }

    public Integer getId() {
        return mElementId;
    }

    public void setString(String str) {
        mLabel = str;
    }

    @Override
    public String toString() {
        return mLabel;
    }
}
/**
 * Adapter specialization over {@link ListElem} items used by this dialog.
 * Each row view is created from the adapter's item resource and then has
 * fonts applied via the inherited {@code applyFonts} hook.
 */
protected final class ShadowAdapter extends AbstractAdapter<ListElem> {

    public ShadowAdapter(int resource, int textViewResourceId, List<ListElem> objects) {
        super(resource, textViewResourceId, objects);
        // Auto-refresh attached views whenever the backing data changes.
        setNotifyOnChange(true);
    }

    // @Implement is a project annotation marking implementation of an
    // inherited contract (analogous to @Override).
    @Implement
    public View getView(int position, View convertView, ViewGroup parent) {
        convertView = createViewFromResource(position, convertView, parent, mItemResource);
        // applyFonts is inherited; presumably applies custom typefaces to
        // the row's text views — confirm against AbstractAdapter.
        applyFonts(convertView);
        return convertView;
    }
}
public static final String TAG = AbstractDialog.class.getSimpleName();

// Bundle keys for common dialog parameters (title, message, theme).
public static final String IEXTRA_TITLE = ":popup_title";
public static final String IEXTRA_MESSAGE = ":popup_message";
public static final String IEXTRA_THEME = ":popup_theme";

// Broadcast actions this dialog listens for; populated via listenFor().
protected final BroadcastFilter fActionsFilter = new BroadcastFilter();
// Static registry shared by all dialog instances; presumably tracks which
// popups are currently shown, keyed by tag — confirm against usage.
protected static transient final Map<String, Boolean> svWorkingPopups = new Hashtable<>();
// Sys mapping
protected Bundle mDialogParams;
protected ContextThemeWrapper mThemeWrapper;
protected AlertDialog.Builder mBuilder;
protected LayoutInflater mInflater;
private volatile transient String mCurrentTag;
// Common dialog data
private Dialog mDialog;
private Popup mDialogType;
private String mDialogTitle;
private String mDialogMessage;
private String[] mDialogListOptions;
private ShadowAdapter mListAdapter;
// Callbacks
private OnItemClickListener mOnListItemClickCallback;
private OnClickListener mOnClickOkCallback;
private OnClickListener mOnClickCancelCallback;
private OnClickListener mOnClickNeutralCallback;
// Customization
private int mDialogTheme;
private int mProgressStyle;
private String mButtonOkTitle;
private String mButtonCancelTitle;
private String mButtonNeutralTitle;
private ICustomInjection mCustomModelInjector;
private View mCustomLayout;
private View mCustomTitleLayout;
private int mDialogThemeDark = android.R.style.Theme_Holo_DialogWhenLarge;
private int mDialogThemeLight = android.R.style.Theme_Holo_Light_DialogWhenLarge;
private boolean mIsReady = false;
private volatile boolean isAsyncChained;
// Delegates: mStableContext and mUiActivityLauncher are wired up in
// newInstance(); the others are initialized elsewhere — not visible here.
private StableContext mStableContext;
private SoftKeyboard mKeyboardManagerDelegate;
private BroadcastBus mBroadcastBusDelegate;
private ActivityLauncher mUiActivityLauncher;
private UiAffectionChain mUiAsyncChainBinder;
private DbLoader mAsyncDbLoader;
public static AbstractDialog newInstance(Class<? extends AbstractDialog> klass, Bundle params) {
final StableContext stableContext = StableContext.Impl.obtain();
AbstractDialog instance = null;
try {
instance = klass.newInstance();
instance.mDialogParams = params;
instance.mStableContext = stableContext;
instance.mUiActivityLauncher = new ActivityLauncher(stableContext);
instance.mUiActivityLauncher.setDialog(instance);
} catch (Exception e) {
LogHelper.println_error(TAG, e);
}
return instance;
}
public void listenFor(Broadcastable action) {
fActionsFilter.addAction(action);
}
public void connectAsyncChainBinder() {
this.isAsyncChained = true;
}
/* Simple Throwable processor */
protected final void logError(Throwable e) {
LogHelper.println_error(TAG, e);
}
public final boolean isShowing(String tag) {
return svWorkingPopups.containsKey(tag) && svWorkingPopups.get(tag);
}
@Override
public void show(@NonNull FragmentManager manager, String tag) {
mCurrentTag = tag;
super.show(manager, tag);
svWorkingPopups.put(tag, true);
}
@Override
public void onDismiss(DialogInterface dialog) {
super.onDismiss(dialog);
svWorkingPopups.put(mCurrentTag, false);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mInflater = (LayoutInflater) mStableContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
if (mDialogParams != null) {
if (mDialogParams.containsKey(IEXTRA_THEME))
setTheme(mDialogParams.getInt(IEXTRA_THEME));
if (mDialogParams.containsKey(IEXTRA_MESSAGE))
setMessage(mDialogParams.getString(IEXTRA_MESSAGE));
if (mDialogParams.containsKey(IEXTRA_TITLE))
setTitle(mDialogParams.getString(IEXTRA_TITLE));
}
mIsReady = setup();
mThemeWrapper = new ContextThemeWrapper(mStableContext.obtainAppContext(), mDialogTheme);
}
@NonNull
@Override
public final Dialog onCreateDialog(Bundle savedInstanceState) {
if (mIsReady) {
setupDialog();
return mDialog;
} else {
return super.onCreateDialog(savedInstanceState);
}
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
applyFonts(view);
}
@Override
public void onResume() {
if (mAsyncDbLoader != null)
mAsyncDbLoader.onResume();
super.onResume();
if (mBroadcastBusDelegate != null) {
mBroadcastBusDelegate.registerNetworkEventsForCurrentUiContext();
mBroadcastBusDelegate.register(fActionsFilter);
}
}
@Override
public void onPause() {
if (isAsyncChained)
mUiAsyncChainBinder.doUnbindService();
if (mBroadcastBusDelegate != null) {
mBroadcastBusDelegate.unregisterNetworkEventsForCurrentUiContext();
mBroadcastBusDelegate.unregister();
}
super.onPause();
}
public UiAffectionChain getUiAsyncChainBinder() {
if (mUiAsyncChainBinder == null) {
mUiAsyncChainBinder = new UiAffectionChain(mStableContext);
}
return this.mUiAsyncChainBinder;
}
public SoftKeyboard getKeyboardDelegate() {
if (mKeyboardManagerDelegate == null) {
mKeyboardManagerDelegate = new SoftKeyboard(mStableContext);
}
return this.mKeyboardManagerDelegate;
}
public BroadcastBus getBroadcastBusDelegate() {
if (mBroadcastBusDelegate == null) {
mBroadcastBusDelegate = new BroadcastBus(mStableContext);
}
return this.mBroadcastBusDelegate;
}
public ActivityLauncher getUiActivityLauncher() {
return this.mUiActivityLauncher;
}
public DbLoader getAsyncDbLoader() {
if (mAsyncDbLoader == null)
mAsyncDbLoader = new DbLoader(mStableContext);
return this.mAsyncDbLoader;
}
// CUSTOMIZERS
protected abstract boolean setup();
protected void applyFonts(View view) {
}
protected final ShadowAdapter getAdapter() {
return mListAdapter;
}
protected final void setDialogType(int num) {
Popup[] types = Popup.values();
if (num < types.length) {
mDialogType = types[num];
}
}
protected final void setDialogType(Popup type) {
mDialogType = type;
}
protected final void setProgressStyle(@MagicConstant(flagsFromClass = ProgressDialog.class) int style) {
mProgressStyle = style;
}
protected final void setTheme(int style) {
mDialogTheme = style;
}
protected final void setTheme(Theme style) {
mDialogTheme = style.theme();
}
protected final void setTitle(int resId) {
mDialogTitle = getString(resId);
}
protected final void setTitle(String title) {
mDialogTitle = title;
}
protected final void setMessage(int resId) {
mDialogMessage = getString(resId);
}
protected final void setMessage(String title) {
mDialogMessage = title;
}
protected final void setListOptions(int resId) {
mDialogListOptions = getResources().getStringArray(resId);
}
protected final void setListOptions(String[] options) {
mDialogListOptions = options;
}
protected final void setListItemCallback(OnItemClickListener listener) {
mOnListItemClickCallback = listener;
}
protected final void setOkButton(@Nullable Integer resId, OnClickListener callback) {
mButtonOkTitle = getString(resId != null ? resId : android.R.string.ok);
mOnClickOkCallback = callback;
}
protected final void setOkButton(@Nullable String title, OnClickListener callback) {
mButtonOkTitle = title != null ? title : getString(android.R.string.ok);
mOnClickOkCallback = callback;
}
protected final void setCancelButton(@Nullable Integer resId, OnClickListener callback) {
mButtonCancelTitle = getString(resId != null ? resId : android.R.string.no);
mOnClickCancelCallback = callback;
}
protected final void setCancelButton(@Nullable String title, OnClickListener callback) {
mButtonCancelTitle = title != null ? title : getString(android.R.string.no);
mOnClickCancelCallback = callback;
}
protected final void setNeutralButton(@Nullable Integer resId, OnClickListener callback) {
mButtonNeutralTitle = getString(resId != null ? resId : android.R.string.cancel);
mOnClickNeutralCallback = callback;
}
protected final void setNeutralButton(@Nullable String title, OnClickListener callback) {
mButtonNeutralTitle = title != null ? title : getString(android.R.string.cancel);
mOnClickNeutralCallback = callback;
}
protected final void setCustomizer(ICustomInjection injector) {
mCustomModelInjector = injector;
}
protected final void setCustomLayoutId(int redId) {
mCustomLayout = mInflater.inflate(redId, null);
}
protected final void setCustomView(View v) {
mCustomLayout = v;
}
protected final void setCustomTitleLayoutId(int resId) {
mCustomTitleLayout = mInflater.inflate(resId, null);
}
protected final void setCustomTitleLayout(View v) {
mCustomTitleLayout = v;
}
// PRIVATE BUILDERS
private void setupDialog() {
switch (mDialogType) {
case ALERT:
buildAlert();
break;
case PROGRESS_SIMPLE:
buildProgress();
setCancelable(false);
break;
case PROGRESS_CANCELABLE:
buildProgress();
setCancelable(true);
break;
case LIST_SELECTOR:
ListView v = buildList();
v.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
break;
case LIST_MULTICHECK:
ListView lv = buildList();
lv.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
break;
case PROMPT_YES_NO:
buildPrompt();
break;
case PROMPT_EXTENDED:
buildPromptExt();
break;
case CUSTOM:
buildCustom();
break;
default:
throw new IllegalStateException("You need to specify dialog type to point internal builder");
}
//injection of customDialog code
if (mCustomModelInjector != null) {
try {
mCustomModelInjector.makeInjection(mCustomLayout);
} catch (Exception accessExp) {
accessExp.printStackTrace();//print error and silently pass next
}
}
if (mDialog == null && mBuilder != null) {
mDialog = mBuilder.create();
}
}
private void buildAlert() {
mBuilder = new AlertDialog.Builder(mThemeWrapper).setTitle(mDialogTitle)
.setMessage(mDialogMessage)
.setPositiveButton(mButtonOkTitle, mOnClickOkCallback);
mBuilder.setCancelable(false);
setCancelable(false);
}
private void buildProgress() {
final int style = mDialogTheme == mDialogThemeLight ? R.style.ProgressPopupLight : R.style.ProgressPopupDark;
mDialog = new android.app.ProgressDialog(mStableContext.obtainAppContext(), style);
((android.app.ProgressDialog) mDialog).setMessage(mDialogMessage);
((android.app.ProgressDialog) mDialog).setProgressStyle(mProgressStyle);
}
private ListView buildList() {
View view = mInflater.inflate(R.layout.popup_list, null);
assert view != null;
mListAdapter = new ShadowAdapter( R.layout.item_popup_list, android.R.id.text1, buildListOptions());
ListView listView = (ListView) view.findViewById(android.R.id.list);
listView.setAdapter(mListAdapter);
listView.setOnItemClickListener(mOnListItemClickCallback);
mBuilder = new AlertDialog.Builder(mThemeWrapper).setView(view)
.setTitle(mDialogTitle);
mBuilder.setCancelable(true);
setCancelable(true);
return listView;
}
private void buildPrompt() {
mBuilder = new AlertDialog.Builder(mThemeWrapper).setTitle(mDialogTitle)
.setMessage(mDialogMessage)
.setPositiveButton(mButtonOkTitle, mOnClickOkCallback)
.setNegativeButton(mButtonCancelTitle, mOnClickCancelCallback);
}
private void buildPromptExt() {
buildPrompt();
mBuilder.setNeutralButton(mButtonNeutralTitle, mOnClickNeutralCallback);
}
/**
* now we can modify options list via ShadowAdapter instance and get real disposition according to initial list by getItem adapter's method
*/
private ArrayList<ListElem> buildListOptions() {
ArrayList<ListElem> options = new ArrayList<>();
if (mDialogListOptions != null) {
final int size = mDialogListOptions.length;
for (int i = 0; i < size; i++) {
ListElem item = new ListElem();
item.setId(i);
item.setString(mDialogListOptions[i]);
options.add(i, item);
}
}
return options;
}
private void buildCustom() {
Context context = mStableContext.obtainAppContext();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mBuilder = new AlertDialog.Builder(context, mDialogTheme);
} else {
mBuilder = new AlertDialog.Builder(context);
mBuilder.setInverseBackgroundForced(true);
}
if (mDialogTitle != null) {
mBuilder.setTitle(mDialogTitle);
}
if (mDialogMessage != null) {
mBuilder.setMessage(mDialogMessage);
}
if (mCustomLayout != null) {
mBuilder.setView(mCustomLayout);
}
if (mCustomTitleLayout != null) {
mBuilder.setCustomTitle(mCustomTitleLayout);
}
if (mButtonOkTitle != null || mOnClickOkCallback != null) {
mBuilder.setPositiveButton(mButtonOkTitle, mOnClickOkCallback);
}
if (mButtonCancelTitle != null || mOnClickCancelCallback != null) {
mBuilder.setNegativeButton(mButtonCancelTitle, mOnClickCancelCallback);
}
if (mButtonNeutralTitle != null || mOnClickNeutralCallback != null) {
mBuilder.setNeutralButton(mButtonNeutralTitle, mOnClickNeutralCallback);
}
}
}
| |
/*
* Copyright 2002-2005 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.xpathView.support.jaxen;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.xml.*;
import com.intellij.xml.XmlAttributeDescriptor;
import org.intellij.plugins.xpathView.util.MyPsiUtil;
import org.jaxen.DefaultNavigator;
import org.jaxen.FunctionCallException;
import org.jaxen.UnsupportedAxisException;
import org.jaxen.XPath;
import org.jaxen.saxpath.SAXPathException;
import javax.annotation.Nonnull;
import java.util.Collections;
import java.util.Iterator;
/**
 * <p>Adapter class for IDEA's PSI-tree to Jaxen.</p>
 * Not all of the required functionality is implemented yet. See the TODO comments...
 */
public class PsiDocumentNavigator extends DefaultNavigator {

    private static final Logger LOG = Logger.getInstance("org.intellij.plugins.xpathView.support.jaxen.PsiDocumentNavigator");

    // Root XML file; used to resolve document() references and to parse XPaths.
    private final XmlFile file;

    public PsiDocumentNavigator(XmlFile file) {
        this.file = file;
    }

    /** Child axis over direct PSI children; empty for non-XML context nodes. */
    public Iterator getChildAxisIterator(Object contextNode) throws UnsupportedAxisException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getChildAxisIterator " + contextNode);
        }
        if (!(contextNode instanceof XmlElement)) {
            return Collections.emptyList().iterator();
        }
        return new PsiChildAxisIterator(contextNode);
    }

    /** Parent axis: yields at most the nearest enclosing XmlTag. */
    public Iterator getParentAxisIterator(Object contextNode) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getParentAxisIterator " + contextNode);
        }
        if (!(contextNode instanceof XmlElement)) {
            return Collections.emptyList().iterator();
        }
        return new NodeIterator((XmlElement)contextNode) {
            protected PsiElement getFirstNode(PsiElement n) {
                // Walk up until the first tag ancestor (skips text, attributes, etc.).
                while (n != null) {
                    n = n.getParent();
                    if (n instanceof XmlTag) {
                        return n;
                    }
                }
                return null;
            }

            protected PsiElement getNextNode(PsiElement n) {
                // A node has exactly one parent, so the axis has at most one element.
                return null;
            }
        };
    }

    public Iterator getNamespaceAxisIterator(Object contextNode) throws UnsupportedAxisException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getNamespaceAxisIterator()");
        }
        // TODO
        return super.getNamespaceAxisIterator(contextNode);
    }

    /** Walks up the PSI tree to the owning XmlDocument, or null when detached. */
    public Object getDocumentNode(Object contextNode) {
        LOG.debug("enter: getDocumentNode");
        if (contextNode instanceof XmlDocument) {
            return contextNode;
        }
        while (contextNode instanceof PsiElement) {
            if (contextNode instanceof XmlDocument) {
                return contextNode;
            }
            contextNode = ((PsiElement)contextNode).getParent();
        }
        return null;
    }

    public String translateNamespacePrefixToUri(String prefix, Object element) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: translateNamespacePrefixToUri()");
        }
        if (isElement(element)) {
            return ((XmlTag)element).getNamespaceByPrefix(prefix);
        }
        return super.translateNamespacePrefixToUri(prefix, element);
    }

    public String getProcessingInstructionTarget(Object obj) {
        LOG.debug("enter: getProcessingInstructionTarget");
        LOG.assertTrue(obj instanceof XmlProcessingInstruction);
        XmlProcessingInstruction pi = (XmlProcessingInstruction)obj;
        return getProcessingInstructionTarget(pi);
    }

    /**
     * Extracts the PI target name from the second PSI child (an XML_NAME token),
     * stripping leading spaces and anything after the first interior space.
     */
    public static String getProcessingInstructionTarget(XmlProcessingInstruction pi) {
        final PsiElement[] children = pi.getChildren();
        LOG.assertTrue(children[1] instanceof XmlToken && ((XmlToken)children[1]).getTokenType() == XmlTokenType.XML_NAME, "Unknown PI structure");

        String text = children[1].getText();
        // Skip leading spaces, then cut at the next space (if any).
        int i = 0;
        while (i < text.length() && text.charAt(i) == ' ') {
            i++;
        }
        final int pos = text.indexOf(' ', i);
        if (pos != -1) {
            text = text.substring(i, pos);
        } else {
            text = text.substring(i);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Target: " + text);
        }
        return text;
    }

    /**
     * Returns the PI data: full text minus the "&lt;?" + target prefix and the
     * trailing "?&gt;", trimmed.
     */
    @Nonnull
    public String getProcessingInstructionData(Object obj) {
        LOG.debug("enter: getProcessingInstructionData");
        LOG.assertTrue(obj instanceof XmlProcessingInstruction);
        XmlProcessingInstruction pi = (XmlProcessingInstruction)obj;
        int targetLength = getProcessingInstructionTarget(obj).length();
        int piLength = pi.getText().length();
        final String s = pi.getText().substring(2 + targetLength, piLength - 2).trim();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Data: " + s);
        }
        return s;
    }

    public Object getParentNode(Object contextNode) throws UnsupportedAxisException {
        return ((PsiElement)contextNode).getParent();
    }

    /**
     * Resolves a document() reference relative to the navigator's root file.
     *
     * @return the referenced XmlDocument, or null if unresolved / not an XML file
     */
    public Object getDocument(String url) throws FunctionCallException {
        LOG.debug("enter: getDocument: " + url);
        final VirtualFile virtualFile = VfsUtil.findRelativeFile(url, file.getVirtualFile());
        if (virtualFile != null) {
            LOG.debug("document() -> VirtualFile = " + virtualFile.getPath());
            // Named psiFile to avoid shadowing the 'file' field used just above.
            final PsiFile psiFile = this.file.getManager().findFile(virtualFile);
            if (psiFile instanceof XmlFile) {
                return ((XmlFile)psiFile).getDocument();
            }
        }
        return null;
    }

    public Iterator getAttributeAxisIterator(Object contextNode) {
        if (isElement(contextNode)) {
            return new AttributeIterator((XmlElement)contextNode);
        } else {
            return Collections.emptyList().iterator();
        }
    }

    /**
     * Element namespace URI; implicit (undeclared) namespaces are reported as "",
     * matching XPath semantics for elements without an in-scope declaration.
     */
    public String getElementNamespaceUri(Object element) {
        LOG.assertTrue(element instanceof XmlTag);

        final XmlTag context = (XmlTag)element;
        final String namespaceUri = context.getNamespace();
        if (!MyPsiUtil.isInDeclaredNamespace(context, namespaceUri, context.getNamespacePrefix())) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("getElementNamespaceUri: not returning implicit namespace uri: " + namespaceUri);
            }
            return "";
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getElementNamespaceUri: " + namespaceUri);
        }
        return namespaceUri;
    }

    public String getElementName(Object element) {
        LOG.assertTrue(element instanceof XmlTag);
        final String name = ((XmlTag)element).getLocalName();
        if (LOG.isDebugEnabled()) {
            LOG.debug("getElementName: " + name);
        }
        return name;
    }

    public String getElementQName(Object element) {
        LOG.assertTrue(element instanceof XmlTag);
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getElementQName: " + ((XmlTag)element).getName());
        }
        return ((XmlTag)element).getName();
    }

    /**
     * Attribute namespace URI; unprefixed attributes have no namespace per the
     * XML namespaces spec, and undeclared (implicit) namespaces map to "".
     */
    public String getAttributeNamespaceUri(Object attr) {
        LOG.assertTrue(attr instanceof XmlAttribute);

        final XmlAttribute attribute = ((XmlAttribute)attr);
        final String name = attribute.getName();
        if (name.indexOf(':') == -1) return "";

        final String uri = attribute.getNamespace();
        if (!MyPsiUtil.isInDeclaredNamespace(attribute.getParent(), uri, MyPsiUtil.getAttributePrefix(attribute))) {
            LOG.info("getElementNamespaceUri: not returning implicit attribute-namespace uri: " + uri);
            return "";
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("getAttributeNamespaceUri: " + uri);
        }
        return uri;
    }

    public String getAttributeName(Object attr) {
        LOG.assertTrue(attr instanceof XmlAttribute);
        final String name = ((XmlAttribute)attr).getLocalName();
        if (LOG.isDebugEnabled()) {
            LOG.debug("getAttributeName: " + name);
        }
        return name;
    }

    public String getAttributeQName(Object attr) {
        LOG.assertTrue(attr instanceof XmlAttribute);
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getAttributeQName");
        }
        return ((XmlAttribute)attr).getName();
    }

    public boolean isDocument(Object object) {
        final boolean b = object instanceof XmlDocument;
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isDocument(): " + object + " = " + b);
        }
        return b;
    }

    public boolean isElement(Object object) {
        final boolean b = object instanceof XmlTag;
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isElement(): " + object + " = " + b);
        }
        return b && isSupportedElement((XmlTag)object);
    }

    private static boolean isSupportedElement(XmlTag object) {
        // optimization: all tags from XML language are supported, but some from other languages (JSP, see IDEADEV-37939) are not
        return object.getLanguage() == XMLLanguage.INSTANCE || MyPsiUtil.findNameElement(object) != null;
    }

    public boolean isAttribute(Object object) {
        final boolean b = object instanceof XmlAttribute;
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isAttribute(): " + object + " = " + b);
        }
        return b;
    }

    public boolean isNamespace(Object object) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isNamespace(): " + object);
        }
        // TODO: implement when namespace axis is supported
        return false;
    }

    public boolean isComment(Object object) {
        final boolean b = object instanceof XmlComment;
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isComment():" + object + " = " + b);
        }
        return b;
    }

    /** Text node test; whitespace counts as text only when it lives inside XmlText. */
    public boolean isText(Object object) {
        final boolean b;
        if (object instanceof PsiWhiteSpace) {
            b = ((PsiWhiteSpace)object).getParent() instanceof XmlText;
        } else {
            b = object instanceof XmlText;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isText():" + object + " = " + b);
        }
        return b;
    }

    public boolean isProcessingInstruction(Object object) {
        final boolean b = object instanceof XmlProcessingInstruction;
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: isProcessingInstruction(): " + object + " = " + b);
        }
        return b;
    }

    /** Returns the comment payload (XML_COMMENT_CHARACTERS token), or "" if absent. */
    @Nonnull
    public String getCommentStringValue(Object comment) {
        LOG.assertTrue(comment instanceof XmlComment);
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getCommentStringValue()");
        }
        PsiElement c = (PsiElement)comment;
        final PsiElement[] children = c.getChildren();
        for (PsiElement child : children) {
            if (child instanceof XmlToken && ((XmlToken)child).getTokenType() == XmlTokenType.XML_COMMENT_CHARACTERS) {
                return child.getText();
            }
        }
        return "";
    }

    /** Concatenated text content of the element subtree (XPath string-value). */
    @Nonnull
    public String getElementStringValue(Object element) {
        LOG.assertTrue(element instanceof XmlTag);
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getElementStringValue()");
        }
        final TextCollector collector = new TextCollector();
        ((XmlTag)element).accept(collector);
        return collector.getText();
    }

    @Nonnull
    public String getAttributeStringValue(Object attr) {
        LOG.assertTrue(attr instanceof XmlAttribute);
        return StringUtil.notNullize(((XmlAttribute)attr).getValue());
    }

    public String getNamespaceStringValue(Object ns) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getNamespaceStringValue");
            LOG.debug("ns = " + ns);
        }
        // TODO: implement when namespace axis is supported
        return null;
    }

    public String getNamespacePrefix(Object ns) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getNamespacePrefix");
            LOG.debug("ns = " + ns);
        }
        // TODO: implement when namespace axis is supported
        return null;
    }

    @Nonnull
    public String getTextStringValue(Object txt) {
        LOG.debug("enter: getTextStringValue");
        if (txt instanceof XmlText) {
            return ((XmlText)txt).getValue();
        }
        return txt instanceof PsiElement ? ((PsiElement)txt).getText() : txt.toString();
    }

    public XPath parseXPath(String xpath) throws SAXPathException {
        return new PsiXPath(file, xpath);
    }

    /**
     * id() support: scans the root tag's subtree for an attribute whose descriptor
     * declares an ID type and whose value equals elementId.
     *
     * @return the owning XmlTag, or null when no match exists
     */
    public Object getElementById(Object object, final String elementId) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("enter: getElementById: " + object + " -- " + elementId);
        }
        final XmlTag rootTag = ((XmlFile)((XmlElement)object).getContainingFile()).getRootTag();
        if (rootTag == null) {
            return null;
        }

        final Ref<XmlTag> ref = new Ref<XmlTag>();
        rootTag.accept(new XmlRecursiveElementVisitor() {
            @Override
            public void visitElement(PsiElement element) {
                // Stop descending once a match has been found.
                if (ref.get() == null) {
                    super.visitElement(element);
                }
            }

            @Override
            public void visitXmlAttribute(XmlAttribute attribute) {
                final XmlAttributeDescriptor descriptor = attribute.getDescriptor();
                final String value = attribute.getValue();
                if ((value != null &&
                        (descriptor != null && descriptor.hasIdType()))) {
                    if (elementId.equals(value)) {
                        ref.set(attribute.getParent());
                    }
                }
            }
        });
        return ref.get();
    }

    /** Recursive visitor that accumulates the text of every XmlText node. */
    static class TextCollector extends XmlRecursiveElementVisitor {
        // Single-threaded accumulation: StringBuilder avoids StringBuffer's locking.
        private final StringBuilder builder = new StringBuilder();

        @Override
        public void visitXmlText(XmlText text) {
            builder.append(text.getValue());
        }

        public String getText() {
            return builder.toString();
        }
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v4.app;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.content.Loader;
import android.support.v4.util.DebugUtils;
import android.support.v4.util.SparseArrayCompat;
import android.util.Log;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.lang.reflect.Modifier;
/**
 * Static library support version of the framework's {@link android.app.LoaderManager}.
 * Used to write apps that run on platforms prior to Android 3.0.  When running
 * on Android 3.0 or above, this implementation is still used; it does not try
 * to switch to the framework's implementation.  See the framework SDK
 * documentation for a class overview.
 *
 * <p>Your activity must derive from {@link FragmentActivity} to use this.
 */
public abstract class LoaderManager {
    /**
     * Callback interface for a client to interact with the manager.
     */
    public interface LoaderCallbacks<D> {
        /**
         * Instantiate and return a new Loader for the given ID.
         *
         * @param id The ID whose loader is to be created.
         * @param args Any arguments supplied by the caller.
         * @return Return a new Loader instance that is ready to start loading.
         */
        public Loader<D> onCreateLoader(int id, Bundle args);

        /**
         * Called when a previously created loader has finished its load.  Note
         * that normally an application is <em>not</em> allowed to commit fragment
         * transactions while in this call, since it can happen after an
         * activity's state is saved.  See {@link FragmentManager#beginTransaction()
         * FragmentManager.openTransaction()} for further discussion on this.
         *
         * <p>This function is guaranteed to be called prior to the release of
         * the last data that was supplied for this Loader.  At this point
         * you should remove all use of the old data (since it will be released
         * soon), but should not do your own release of the data since its Loader
         * owns it and will take care of that.  The Loader will take care of
         * management of its data so you don't have to.  In particular:
         *
         * <ul>
         * <li> <p>The Loader will monitor for changes to the data, and report
         * them to you through new calls here.  You should not monitor the
         * data yourself.  For example, if the data is a {@link android.database.Cursor}
         * and you place it in a {@link android.widget.CursorAdapter}, use
         * the {@link android.widget.CursorAdapter#CursorAdapter(android.content.Context,
         * android.database.Cursor, int)} constructor <em>without</em> passing
         * in either {@link android.widget.CursorAdapter#FLAG_AUTO_REQUERY}
         * or {@link android.widget.CursorAdapter#FLAG_REGISTER_CONTENT_OBSERVER}
         * (that is, use 0 for the flags argument).  This prevents the CursorAdapter
         * from doing its own observing of the Cursor, which is not needed since
         * when a change happens you will get a new Cursor through another call
         * here.
         * <li> The Loader will release the data once it knows the application
         * is no longer using it.  For example, if the data is
         * a {@link android.database.Cursor} from a {@link android.content.CursorLoader},
         * you should not call close() on it yourself.  If the Cursor is being placed in a
         * {@link android.widget.CursorAdapter}, you should use the
         * {@link android.widget.CursorAdapter#swapCursor(android.database.Cursor)}
         * method so that the old Cursor is not closed.
         * </ul>
         *
         * @param loader The Loader that has finished.
         * @param data The data generated by the Loader.
         */
        public void onLoadFinished(Loader<D> loader, D data);

        /**
         * Called when a previously created loader is being reset, and thus
         * making its data unavailable.  The application should at this point
         * remove any references it has to the Loader's data.
         *
         * @param loader The Loader that is being reset.
         */
        public void onLoaderReset(Loader<D> loader);
    }

    /**
     * Ensures a loader is initialized and active.  If the loader doesn't
     * already exist, one is created and (if the activity/fragment is currently
     * started) starts the loader.  Otherwise the last created
     * loader is re-used.
     *
     * <p>In either case, the given callback is associated with the loader, and
     * will be called as the loader state changes.  If at the point of call
     * the caller is in its started state, and the requested loader
     * already exists and has generated its data, then
     * callback {@link LoaderCallbacks#onLoadFinished} will
     * be called immediately (inside of this function), so you must be prepared
     * for this to happen.
     *
     * @param id A unique identifier for this loader.  Can be whatever you want.
     * Identifiers are scoped to a particular LoaderManager instance.
     * @param args Optional arguments to supply to the loader at construction.
     * If a loader already exists (a new one does not need to be created), this
     * parameter will be ignored and the last arguments continue to be used.
     * @param callback Interface the LoaderManager will call to report about
     * changes in the state of the loader.  Required.
     */
    public abstract <D> Loader<D> initLoader(int id, Bundle args,
            LoaderManager.LoaderCallbacks<D> callback);

    /**
     * Starts a new or restarts an existing {@link android.content.Loader} in
     * this manager, registers the callbacks to it,
     * and (if the activity/fragment is currently started) starts loading it.
     * If a loader with the same id has previously been
     * started it will automatically be destroyed when the new loader completes
     * its work. The callback will be delivered before the old loader
     * is destroyed.
     *
     * @param id A unique identifier for this loader.  Can be whatever you want.
     * Identifiers are scoped to a particular LoaderManager instance.
     * @param args Optional arguments to supply to the loader at construction.
     * @param callback Interface the LoaderManager will call to report about
     * changes in the state of the loader.  Required.
     */
    public abstract <D> Loader<D> restartLoader(int id, Bundle args,
            LoaderManager.LoaderCallbacks<D> callback);

    /**
     * Stops and removes the loader with the given ID.  If this loader
     * had previously reported data to the client through
     * {@link LoaderCallbacks#onLoadFinished(Loader, Object)}, a call
     * will be made to {@link LoaderCallbacks#onLoaderReset(Loader)}.
     */
    public abstract void destroyLoader(int id);

    /**
     * Return the Loader with the given id or null if no matching Loader
     * is found.
     */
    public abstract <D> Loader<D> getLoader(int id);

    /**
     * Print the LoaderManager's state into the given stream.
     *
     * @param prefix Text to print at the front of each line.
     * @param fd The raw file descriptor that the dump is being sent to.
     * @param writer A PrintWriter to which the dump is to be sent.
     * @param args Additional arguments to the dump request.
     */
    public abstract void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args);

    /**
     * Control whether the framework's internal loader manager debugging
     * logs are turned on.  If enabled, you will see output in logcat as
     * the framework performs loader operations.
     */
    public static void enableDebugLogging(boolean enabled) {
        // Toggles the shared flag read by every LoaderManagerImpl instance.
        LoaderManagerImpl.DEBUG = enabled;
    }

    /**
     * Returns true if any loaders managed are currently running and have not
     * returned data to the application yet.
     */
    // Base default; LoaderManagerImpl overrides this with the real check.
    public boolean hasRunningLoaders() { return false; }
}
class LoaderManagerImpl extends LoaderManager {
static final String TAG = "LoaderManager";
static boolean DEBUG = false;
// These are the currently active loaders. A loader is here
// from the time its load is started until it has been explicitly
// stopped or restarted by the application.
final SparseArrayCompat<LoaderInfo> mLoaders = new SparseArrayCompat<LoaderInfo>();
// These are previously run loaders. This list is maintained internally
// to avoid destroying a loader while an application is still using it.
// It allows an application to restart a loader, but continue using its
// previously run loader until the new loader's data is available.
final SparseArrayCompat<LoaderInfo> mInactiveLoaders = new SparseArrayCompat<LoaderInfo>();
final String mWho;
FragmentActivity mActivity;
boolean mStarted;
boolean mRetaining;
boolean mRetainingStarted;
boolean mCreatingLoader;
final class LoaderInfo implements Loader.OnLoadCompleteListener<Object> {
final int mId;
final Bundle mArgs;
LoaderManager.LoaderCallbacks<Object> mCallbacks;
Loader<Object> mLoader;
boolean mHaveData;
boolean mDeliveredData;
Object mData;
boolean mStarted;
boolean mRetaining;
boolean mRetainingStarted;
boolean mReportNextStart;
boolean mDestroyed;
boolean mListenerRegistered;
LoaderInfo mPendingLoader;
public LoaderInfo(int id, Bundle args, LoaderManager.LoaderCallbacks<Object> callbacks) {
mId = id;
mArgs = args;
mCallbacks = callbacks;
}
void start() {
if (mRetaining && mRetainingStarted) {
// Our owner is started, but we were being retained from a
// previous instance in the started state... so there is really
// nothing to do here, since the loaders are still started.
mStarted = true;
return;
}
if (mStarted) {
// If loader already started, don't restart.
return;
}
mStarted = true;
if (DEBUG) Log.v(TAG, " Starting: " + this);
if (mLoader == null && mCallbacks != null) {
mLoader = mCallbacks.onCreateLoader(mId, mArgs);
}
if (mLoader != null) {
if (mLoader.getClass().isMemberClass()
&& !Modifier.isStatic(mLoader.getClass().getModifiers())) {
throw new IllegalArgumentException(
"Object returned from onCreateLoader must not be a non-static inner member class: "
+ mLoader);
}
if (!mListenerRegistered) {
mLoader.registerListener(mId, this);
mListenerRegistered = true;
}
mLoader.startLoading();
}
}
void retain() {
if (DEBUG) Log.v(TAG, " Retaining: " + this);
mRetaining = true;
mRetainingStarted = mStarted;
mStarted = false;
mCallbacks = null;
}
void finishRetain() {
if (mRetaining) {
if (DEBUG) Log.v(TAG, " Finished Retaining: " + this);
mRetaining = false;
if (mStarted != mRetainingStarted) {
if (!mStarted) {
// This loader was retained in a started state, but
// at the end of retaining everything our owner is
// no longer started... so make it stop.
stop();
}
}
}
if (mStarted && mHaveData && !mReportNextStart) {
// This loader has retained its data, either completely across
// a configuration change or just whatever the last data set
// was after being restarted from a stop, and now at the point of
// finishing the retain we find we remain started, have
// our data, and the owner has a new callback... so
// let's deliver the data now.
callOnLoadFinished(mLoader, mData);
}
}
/**
 * Delivers the cached data to the client once, if a deferred
 * "report on next start" was requested while this loader is started.
 */
void reportStart() {
    // Guard clauses replace the original nested ifs: the flag is only
    // consulted and cleared when the loader is started, exactly as before.
    if (!mStarted || !mReportNextStart) {
        return;
    }
    mReportNextStart = false;
    if (mHaveData) {
        callOnLoadFinished(mLoader, mData);
    }
}
void stop() {
if (DEBUG) Log.v(TAG, " Stopping: " + this);
mStarted = false;
if (!mRetaining) {
if (mLoader != null && mListenerRegistered) {
// Let the loader know we're done with it
mListenerRegistered = false;
mLoader.unregisterListener(this);
mLoader.stopLoading();
}
}
}
/**
 * Fully tears down this loader entry: optionally tells the client to
 * forget delivered data via onLoaderReset, clears retained state, resets
 * the Loader object, and cascades destruction to a queued pending loader.
 */
void destroy() {
if (DEBUG) Log.v(TAG, " Destroying: " + this);
mDestroyed = true;
// Only invoke onLoaderReset if data was actually delivered to the client.
boolean needReset = mDeliveredData;
mDeliveredData = false;
if (mCallbacks != null && mLoader != null && mHaveData && needReset) {
if (DEBUG) Log.v(TAG, " Reseting: " + this);
// Fragment transactions are disallowed while the client is inside
// onLoaderReset; save and restore whatever restriction was in force.
String lastBecause = null;
if (mActivity != null) {
lastBecause = mActivity.mFragments.mNoTransactionsBecause;
mActivity.mFragments.mNoTransactionsBecause = "onLoaderReset";
}
try {
mCallbacks.onLoaderReset(mLoader);
} finally {
// Restore the previous restriction even if the callback throws.
if (mActivity != null) {
mActivity.mFragments.mNoTransactionsBecause = lastBecause;
}
}
}
// Drop everything held on behalf of the client.
mCallbacks = null;
mData = null;
mHaveData = false;
if (mLoader != null) {
if (mListenerRegistered) {
mListenerRegistered = false;
mLoader.unregisterListener(this);
}
mLoader.reset();
}
// NOTE(review): mPendingLoader is destroyed but not nulled out here --
// callers appear to discard this LoaderInfo afterwards; confirm.
if (mPendingLoader != null) {
mPendingLoader.destroy();
}
}
/**
 * Called by the Loader when a load finishes. Ignores results from
 * destroyed or superseded loaders, switches to a queued pending loader
 * when one exists, otherwise caches and delivers the new data and retires
 * the inactive loader this one replaced.
 */
@Override public void onLoadComplete(Loader<Object> loader, Object data) {
if (DEBUG) Log.v(TAG, "onLoadComplete: " + this);
if (mDestroyed) {
if (DEBUG) Log.v(TAG, " Ignoring load complete -- destroyed");
return;
}
if (mLoaders.get(mId) != this) {
// This data is not coming from the current active loader.
// We don't care about it.
if (DEBUG) Log.v(TAG, " Ignoring load complete -- not active");
return;
}
LoaderInfo pending = mPendingLoader;
if (pending != null) {
// There is a new request pending and we were just
// waiting for the old one to complete before starting
// it. So now it is time, switch over to the new loader.
if (DEBUG) Log.v(TAG, " Switching to pending loader: " + pending);
mPendingLoader = null;
// NOTE(review): the id is mapped to null rather than removed before
// destroy() runs; presumably so this entry no longer reads as active
// during teardown -- confirm against SparseArrayCompat semantics.
mLoaders.put(mId, null);
destroy();
installLoader(pending);
return;
}
// Notify of the new data so the app can switch out the old data before
// we try to destroy it.
if (mData != data || !mHaveData) {
mData = data;
mHaveData = true;
if (mStarted) {
callOnLoadFinished(loader, data);
}
}
//if (DEBUG) Log.v(TAG, " onLoadFinished returned: " + this);
// We have now given the application the new loader with its
// loaded data, so it should have stopped using the previous
// loader. If there is a previous loader on the inactive list,
// clean it up.
LoaderInfo info = mInactiveLoaders.get(mId);
if (info != null && info != this) {
// Clearing mDeliveredData makes destroy() skip the onLoaderReset call.
info.mDeliveredData = false;
info.destroy();
mInactiveLoaders.remove(mId);
}
// Once nothing is running, deferred fragment transactions may resume.
if (mActivity != null && !hasRunningLoaders()) {
mActivity.mFragments.startPendingDeferredFragments();
}
}
/**
 * Invokes the client's onLoadFinished callback with the given data,
 * blocking fragment transactions for the duration of the call, and then
 * records that data has been delivered.
 */
void callOnLoadFinished(Loader<Object> loader, Object data) {
if (mCallbacks != null) {
String lastBecause = null;
if (mActivity != null) {
// Fragment transactions are disallowed while the client is inside
// onLoadFinished; remember any restriction already in force.
lastBecause = mActivity.mFragments.mNoTransactionsBecause;
mActivity.mFragments.mNoTransactionsBecause = "onLoadFinished";
}
try {
if (DEBUG) Log.v(TAG, " onLoadFinished in " + loader + ": "
+ loader.dataToString(data));
mCallbacks.onLoadFinished(loader, data);
} finally {
// Restore the previous restriction even if the callback throws.
if (mActivity != null) {
mActivity.mFragments.mNoTransactionsBecause = lastBecause;
}
}
// Set only after the callback returns normally; a throwing callback
// leaves mDeliveredData unchanged.
mDeliveredData = true;
}
}
/** Debug identity string of the form "LoaderInfo{<hash> #<id> : <loader>}}". */
@Override
public String toString() {
    StringBuilder out = new StringBuilder(64)
            .append("LoaderInfo{")
            .append(Integer.toHexString(System.identityHashCode(this)))
            .append(" #")
            .append(mId)
            .append(" : ");
    DebugUtils.buildShortClassTag(mLoader, out);
    return out.append("}}").toString();
}
public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
writer.print(prefix); writer.print("mId="); writer.print(mId);
writer.print(" mArgs="); writer.println(mArgs);
writer.print(prefix); writer.print("mCallbacks="); writer.println(mCallbacks);
writer.print(prefix); writer.print("mLoader="); writer.println(mLoader);
if (mLoader != null) {
mLoader.dump(prefix + " ", fd, writer, args);
}
if (mHaveData || mDeliveredData) {
writer.print(prefix); writer.print("mHaveData="); writer.print(mHaveData);
writer.print(" mDeliveredData="); writer.println(mDeliveredData);
writer.print(prefix); writer.print("mData="); writer.println(mData);
}
writer.print(prefix); writer.print("mStarted="); writer.print(mStarted);
writer.print(" mReportNextStart="); writer.print(mReportNextStart);
writer.print(" mDestroyed="); writer.println(mDestroyed);
writer.print(prefix); writer.print("mRetaining="); writer.print(mRetaining);
writer.print(" mRetainingStarted="); writer.print(mRetainingStarted);
writer.print(" mListenerRegistered="); writer.println(mListenerRegistered);
if (mPendingLoader != null) {
writer.print(prefix); writer.println("Pending Loader ");
writer.print(mPendingLoader); writer.println(":");
mPendingLoader.dump(prefix + " ", fd, writer, args);
}
}
}
/**
 * @param who Owner identifier stored in mWho (not read within this chunk;
 * presumably used for debugging/association -- confirm).
 * @param activity Host activity, used for fragment-transaction guards.
 * @param started Whether the owner is already in the started state.
 */
LoaderManagerImpl(String who, FragmentActivity activity, boolean started) {
mWho = who;
mActivity = activity;
mStarted = started;
}
/** Re-points this manager at a (re-created) host activity instance. */
void updateActivity(FragmentActivity activity) {
mActivity = activity;
}
/**
 * Builds a new LoaderInfo record for the given id/args and asks the client
 * callback to instantiate the Loader it will manage. The loader is not
 * installed or started here.
 */
private LoaderInfo createLoader(int id, Bundle args,
        LoaderManager.LoaderCallbacks<Object> callback) {
    // The casts present in the original were no-ops: callback is already
    // declared as LoaderCallbacks<Object>, and onCreateLoader already
    // returns Loader<Object>.
    LoaderInfo info = new LoaderInfo(id, args, callback);
    info.mLoader = callback.onCreateLoader(id, args);
    return info;
}
private LoaderInfo createAndInstallLoader(int id, Bundle args,
LoaderManager.LoaderCallbacks<Object> callback) {
try {
mCreatingLoader = true;
LoaderInfo info = createLoader(id, args, callback);
installLoader(info);
return info;
} finally {
mCreatingLoader = false;
}
}
/**
 * Registers the LoaderInfo as the active loader for its id and, if this
 * LoaderManager has already been started, starts it immediately.
 */
void installLoader(LoaderInfo info) {
mLoaders.put(info.mId, info);
if (mStarted) {
// The activity will start all existing loaders in its onStart(),
// so only start them here if we're past that point of the activity's
// life cycle
info.start();
}
}
/**
* Call to initialize a particular ID with a Loader. If this ID already
* has a Loader associated with it, it is left unchanged and any previous
* callbacks replaced with the newly provided ones. If there is not currently
* a Loader for the ID, a new one is created and started.
*
* <p>This function should generally be used when a component is initializing,
* to ensure that a Loader it relies on is created. This allows it to re-use
* an existing Loader's data if there already is one, so that for example
* when an {@link Activity} is re-created after a configuration change it
* does not need to re-create its loaders.
*
* <p>Note that in the case where an existing Loader is re-used, the
* <var>args</var> given here <em>will be ignored</em> because you will
* continue using the previous Loader.
*
* @param id A unique (to this LoaderManager instance) identifier under
* which to manage the new Loader.
* @param args Optional arguments that will be propagated to
* {@link LoaderCallbacks#onCreateLoader(int, Bundle) LoaderCallbacks.onCreateLoader()}.
* @param callback Interface implementing management of this Loader. Required.
* Its onCreateLoader() method will be called while inside of the function to
* instantiate the Loader object.
*/
@SuppressWarnings("unchecked")
public <D> Loader<D> initLoader(int id, Bundle args, LoaderManager.LoaderCallbacks<D> callback) {
if (mCreatingLoader) {
throw new IllegalStateException("Called while creating a loader");
}
LoaderInfo info = mLoaders.get(id);
if (DEBUG) Log.v(TAG, "initLoader in " + this + ": args=" + args);
if (info == null) {
// Loader doesn't already exist; create.
info = createAndInstallLoader(id, args, (LoaderManager.LoaderCallbacks<Object>)callback);
if (DEBUG) Log.v(TAG, " Created new loader " + info);
} else {
if (DEBUG) Log.v(TAG, " Re-using existing loader " + info);
info.mCallbacks = (LoaderManager.LoaderCallbacks<Object>)callback;
}
if (info.mHaveData && mStarted) {
// If the loader has already generated its data, report it now.
info.callOnLoadFinished(info.mLoader, info.mData);
}
return (Loader<D>)info.mLoader;
}
/**
* Call to re-create the Loader associated with a particular ID. If there
* is currently a Loader associated with this ID, it will be
* canceled/stopped/destroyed as appropriate. A new Loader with the given
* arguments will be created and its data delivered to you once available.
*
* <p>This function does some throttling of Loaders. If too many Loaders
* have been created for the given ID but not yet generated their data,
* new calls to this function will create and return a new Loader but not
* actually start it until some previous loaders have completed.
*
* <p>After calling this function, any previous Loaders associated with
* this ID will be considered invalid, and you will receive no further
* data updates from them.
*
* @param id A unique (to this LoaderManager instance) identifier under
* which to manage the new Loader.
* @param args Optional arguments that will be propagated to
* {@link LoaderCallbacks#onCreateLoader(int, Bundle) LoaderCallbacks.onCreateLoader()}.
* @param callback Interface implementing management of this Loader. Required.
* Its onCreateLoader() method will be called while inside of the function to
* instantiate the Loader object.
*/
@SuppressWarnings("unchecked")
public <D> Loader<D> restartLoader(int id, Bundle args, LoaderManager.LoaderCallbacks<D> callback) {
if (mCreatingLoader) {
throw new IllegalStateException("Called while creating a loader");
}
LoaderInfo info = mLoaders.get(id);
if (DEBUG) Log.v(TAG, "restartLoader in " + this + ": args=" + args);
if (info != null) {
LoaderInfo inactive = mInactiveLoaders.get(id);
if (inactive != null) {
if (info.mHaveData) {
// This loader now has data... we are probably being
// called from within onLoadComplete, where we haven't
// yet destroyed the last inactive loader. So just do
// that now.
if (DEBUG) Log.v(TAG, " Removing last inactive loader: " + info);
inactive.mDeliveredData = false;
inactive.destroy();
info.mLoader.abandon();
mInactiveLoaders.put(id, info);
} else {
// We already have an inactive loader for this ID that we are
// waiting for! What to do, what to do...
if (!info.mStarted) {
// The current Loader has not been started... we thus
// have no reason to keep it around, so bam, slam,
// thank-you-ma'am.
if (DEBUG) Log.v(TAG, " Current loader is stopped; replacing");
mLoaders.put(id, null);
info.destroy();
} else {
// Now we have three active loaders... we'll queue
// up this request to be processed once one of the other loaders
// finishes.
if (info.mPendingLoader != null) {
if (DEBUG) Log.v(TAG, " Removing pending loader: " + info.mPendingLoader);
info.mPendingLoader.destroy();
info.mPendingLoader = null;
}
if (DEBUG) Log.v(TAG, " Enqueuing as new pending loader");
info.mPendingLoader = createLoader(id, args,
(LoaderManager.LoaderCallbacks<Object>)callback);
return (Loader<D>)info.mPendingLoader.mLoader;
}
}
} else {
// Keep track of the previous instance of this loader so we can destroy
// it when the new one completes.
if (DEBUG) Log.v(TAG, " Making last loader inactive: " + info);
info.mLoader.abandon();
mInactiveLoaders.put(id, info);
}
}
info = createAndInstallLoader(id, args, (LoaderManager.LoaderCallbacks<Object>)callback);
return (Loader<D>)info.mLoader;
}
/**
 * Rip down, tear apart, shred to pieces a current Loader ID. After returning
 * from this function, any Loader objects associated with this ID are
 * destroyed. Any data associated with them is destroyed. You better not
 * be using it when you do this.
 * @param id Identifier of the Loader to be destroyed.
 */
public void destroyLoader(int id) {
    if (mCreatingLoader) {
        throw new IllegalStateException("Called while creating a loader");
    }
    if (DEBUG) Log.v(TAG, "destroyLoader in " + this + " of " + id);
    // The same id may have both an active and an inactive entry; both are
    // torn down. The duplicated remove-and-destroy sequence from the
    // original is factored into a helper.
    removeAndDestroy(mLoaders, id);
    removeAndDestroy(mInactiveLoaders, id);
    if (mActivity != null && !hasRunningLoaders()) {
        // No loaders left running: deferred fragment transactions may resume.
        mActivity.mFragments.startPendingDeferredFragments();
    }
}

/**
 * Removes the entry stored under {@code id} from {@code loaders}, if
 * present, and destroys it.
 */
private void removeAndDestroy(SparseArrayCompat<LoaderInfo> loaders, int id) {
    int idx = loaders.indexOfKey(id);
    if (idx >= 0) {
        LoaderInfo info = loaders.valueAt(idx);
        loaders.removeAt(idx);
        info.destroy();
    }
}
/**
 * Return the most recent Loader object associated with the given ID: the
 * pending (queued) loader when a restart has been queued, otherwise the
 * active one, or null when nothing is registered under the ID.
 */
@SuppressWarnings("unchecked")
public <D> Loader<D> getLoader(int id) {
    if (mCreatingLoader) {
        throw new IllegalStateException("Called while creating a loader");
    }
    LoaderInfo info = mLoaders.get(id);
    if (info == null) {
        return null;
    }
    // A queued restart supersedes the currently installed loader.
    LoaderInfo pending = info.mPendingLoader;
    return (Loader<D>) (pending != null ? pending.mLoader : info.mLoader);
}
void doStart() {
if (DEBUG) Log.v(TAG, "Starting in " + this);
if (mStarted) {
RuntimeException e = new RuntimeException("here");
e.fillInStackTrace();
Log.w(TAG, "Called doStart when already started: " + this, e);
return;
}
mStarted = true;
// Call out to sub classes so they can start their loaders
// Let the existing loaders know that we want to be notified when a load is complete
for (int i = mLoaders.size()-1; i >= 0; i--) {
mLoaders.valueAt(i).start();
}
}
void doStop() {
if (DEBUG) Log.v(TAG, "Stopping in " + this);
if (!mStarted) {
RuntimeException e = new RuntimeException("here");
e.fillInStackTrace();
Log.w(TAG, "Called doStop when not started: " + this, e);
return;
}
for (int i = mLoaders.size()-1; i >= 0; i--) {
mLoaders.valueAt(i).stop();
}
mStarted = false;
}
void doRetain() {
if (DEBUG) Log.v(TAG, "Retaining in " + this);
if (!mStarted) {
RuntimeException e = new RuntimeException("here");
e.fillInStackTrace();
Log.w(TAG, "Called doRetain when not started: " + this, e);
return;
}
mRetaining = true;
mStarted = false;
for (int i = mLoaders.size()-1; i >= 0; i--) {
mLoaders.valueAt(i).retain();
}
}
void finishRetain() {
if (mRetaining) {
if (DEBUG) Log.v(TAG, "Finished Retaining in " + this);
mRetaining = false;
for (int i = mLoaders.size()-1; i >= 0; i--) {
mLoaders.valueAt(i).finishRetain();
}
}
}
/**
 * Marks every active loader so that its next start will re-deliver its
 * data to the client. Pure flag assignment, so iteration order is
 * irrelevant; a forward loop is used here.
 */
void doReportNextStart() {
    final int count = mLoaders.size();
    for (int i = 0; i < count; i++) {
        mLoaders.valueAt(i).mReportNextStart = true;
    }
}
/**
 * Asks each active loader to deliver any deferred data report. Keeps the
 * original reverse iteration order, since reportStart() may invoke client
 * callbacks.
 */
void doReportStart() {
    int i = mLoaders.size();
    while (--i >= 0) {
        mLoaders.valueAt(i).reportStart();
    }
}
void doDestroy() {
if (!mRetaining) {
if (DEBUG) Log.v(TAG, "Destroying Active in " + this);
for (int i = mLoaders.size()-1; i >= 0; i--) {
mLoaders.valueAt(i).destroy();
}
}
if (DEBUG) Log.v(TAG, "Destroying Inactive in " + this);
for (int i = mInactiveLoaders.size()-1; i >= 0; i--) {
mInactiveLoaders.valueAt(i).destroy();
}
mInactiveLoaders.clear();
}
/** Debug identity string of the form "LoaderManager{<hash> in <activity>}}". */
@Override
public String toString() {
    StringBuilder out = new StringBuilder(128)
            .append("LoaderManager{")
            .append(Integer.toHexString(System.identityHashCode(this)))
            .append(" in ");
    DebugUtils.buildShortClassTag(mActivity, out);
    return out.append("}}").toString();
}
@Override
public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
if (mLoaders.size() > 0) {
writer.print(prefix); writer.println("Active Loaders:");
String innerPrefix = prefix + " ";
for (int i=0; i < mLoaders.size(); i++) {
LoaderInfo li = mLoaders.valueAt(i);
writer.print(prefix); writer.print(" #"); writer.print(mLoaders.keyAt(i));
writer.print(": "); writer.println(li.toString());
li.dump(innerPrefix, fd, writer, args);
}
}
if (mInactiveLoaders.size() > 0) {
writer.print(prefix); writer.println("Inactive Loaders:");
String innerPrefix = prefix + " ";
for (int i=0; i < mInactiveLoaders.size(); i++) {
LoaderInfo li = mInactiveLoaders.valueAt(i);
writer.print(prefix); writer.print(" #"); writer.print(mInactiveLoaders.keyAt(i));
writer.print(": "); writer.println(li.toString());
li.dump(innerPrefix, fd, writer, args);
}
}
}
/**
 * Reports whether any active loader has been started but has not yet
 * delivered its data to the client.
 */
@Override
public boolean hasRunningLoaders() {
    // Early-exit form of the original flag-accumulating loop; the loop
    // body only reads state, so bailing out early is observably identical.
    final int count = mLoaders.size();
    for (int i = 0; i < count; i++) {
        final LoaderInfo li = mLoaders.valueAt(i);
        if (li.mStarted && !li.mDeliveredData) {
            return true;
        }
    }
    return false;
}
}
| |
package model;
import java.util.ArrayList;
import java.util.List;
/**
* Possible profile fields can be claimed to server.
*/
public class Profile {
// Field names are snake_case and mirror OpenID Connect profile claim names
// (sub, given_name, preferred_username, birthdate, ...); presumably a
// serializer maps them by field name, so do not rename -- TODO confirm
// against the (de)serialization framework in use.
// Each claim is modeled as a list of string values, defaulting to empty.
private List<String> zoneinfo = new ArrayList<String>();
private List<String> sub = new ArrayList<String>();
private List<String> phone_number = new ArrayList<String>();
private List<String> nickname = new ArrayList<String>();
private List<String> website = new ArrayList<String>();
private List<String> middle_name = new ArrayList<String>();
private List<String> email_verified = new ArrayList<String>();
private List<String> locale = new ArrayList<String>();
private List<String> phone_number_verified = new ArrayList<String>();
private List<String> preferred_username = new ArrayList<String>();
private List<String> given_name = new ArrayList<String>();
private List<String> picture = new ArrayList<String>();
private List<String> updated_at = new ArrayList<String>();
private List<String> inum = new ArrayList<String>();
private List<String> email = new ArrayList<String>();
private List<String> address = new ArrayList<String>();
private List<String> name = new ArrayList<String>();
private List<String> birthdate = new ArrayList<String>();
private List<String> family_name = new ArrayList<String>();
private List<String> gender = new ArrayList<String>();
private List<String> phone_mobile_number = new ArrayList<String>();
private List<String> profile = new ArrayList<String>();
// Plain accessors. NOTE(review): getters return the live internal list and
// setters store the caller's list directly (no defensive copies); callers
// can mutate this object's state through the returned references.
public List<String> getZoneinfo() {
return zoneinfo;
}
public void setZoneinfo(List<String> zoneinfo) {
this.zoneinfo = zoneinfo;
}
public List<String> getSub() {
return sub;
}
public void setSub(List<String> sub) {
this.sub = sub;
}
public List<String> getPhone_number() {
return phone_number;
}
public void setPhone_number(List<String> phone_number) {
this.phone_number = phone_number;
}
public List<String> getNickname() {
return nickname;
}
public void setNickname(List<String> nickname) {
this.nickname = nickname;
}
public List<String> getWebsite() {
return website;
}
public void setWebsite(List<String> website) {
this.website = website;
}
public List<String> getMiddle_name() {
return middle_name;
}
public void setMiddle_name(List<String> middle_name) {
this.middle_name = middle_name;
}
public List<String> getEmail_verified() {
return email_verified;
}
public void setEmail_verified(List<String> email_verified) {
this.email_verified = email_verified;
}
public List<String> getLocale() {
return locale;
}
public void setLocale(List<String> locale) {
this.locale = locale;
}
public List<String> getPhone_number_verified() {
return phone_number_verified;
}
public void setPhone_number_verified(List<String> phone_number_verified) {
this.phone_number_verified = phone_number_verified;
}
public List<String> getPreferred_username() {
return preferred_username;
}
public void setPreferred_username(List<String> preferred_username) {
this.preferred_username = preferred_username;
}
public List<String> getGiven_name() {
return given_name;
}
public void setGiven_name(List<String> given_name) {
this.given_name = given_name;
}
public List<String> getPicture() {
return picture;
}
public void setPicture(List<String> picture) {
this.picture = picture;
}
public List<String> getUpdated_at() {
return updated_at;
}
public void setUpdated_at(List<String> updated_at) {
this.updated_at = updated_at;
}
public List<String> getInum() {
return inum;
}
public void setInum(List<String> inum) {
this.inum = inum;
}
public List<String> getEmail() {
return email;
}
public void setEmail(List<String> email) {
this.email = email;
}
public List<String> getAddress() {
return address;
}
public void setAddress(List<String> address) {
this.address = address;
}
public List<String> getName() {
return name;
}
public void setName(List<String> name) {
this.name = name;
}
public List<String> getBirthdate() {
return birthdate;
}
public void setBirthdate(List<String> birthdate) {
this.birthdate = birthdate;
}
public List<String> getFamily_name() {
return family_name;
}
public void setFamily_name(List<String> family_name) {
this.family_name = family_name;
}
public List<String> getGender() {
return gender;
}
public void setGender(List<String> gender) {
this.gender = gender;
}
public List<String> getPhone_mobile_number() {
return phone_mobile_number;
}
public void setPhone_mobile_number(List<String> phone_mobile_number) {
this.phone_mobile_number = phone_mobile_number;
}
public List<String> getProfile() {
return profile;
}
public void setProfile(List<String> profile) {
this.profile = profile;
}
// Debug representation listing every claim. The "ClassPojo" label appears
// to come from the code generator that produced this class.
@Override
public String toString() {
return "ClassPojo [zoneinfo = " + zoneinfo + ", sub = " + sub + ", phone_number = " + phone_number + ", nickname = " + nickname + ", website = " + website + ", middle_name = " + middle_name + ", email_verified = " + email_verified + ", locale = " + locale + ", phone_number_verified = " + phone_number_verified + ", preferred_username = " + preferred_username + ", given_name = " + given_name + ", picture = " + picture + ", updated_at = " + updated_at + ", inum = " + inum + ", email = " + email + ", address = " + address + ", name = " + name + ", birthdate = " + birthdate + ", family_name = " + family_name + ", gender = " + gender + ", phone_mobile_number = " + phone_mobile_number + ", profile = " + profile + "]";
}
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.typography.font.sfntly.table;
import com.google.typography.font.sfntly.Tag;
import java.util.Comparator;
/**
* The header entry for a table in the OffsetTable for the font.
*
* For equality purposes the only property of the header that is considered is
* the tag - the name of the table that is referred to by this header. There can
* only be one table with each tag in the font and it doesn't matter what the
* other properties of that header are for that purpose.
*
* @author Stuart Gill
*
*/
public final class Header {
  private final int tag;
  private final int offset;
  private final boolean offsetValid;
  private final int length;
  private final boolean lengthValid;
  private final long checksum;
  private final boolean checksumValid;

  /**
   * Orders headers by offset, breaking ties by length.
   *
   * The original implementation compared via int subtraction
   * ("h1.offset - h2.offset"), which overflows and yields the wrong sign
   * when the two values are more than Integer.MAX_VALUE apart. Explicit
   * relational comparison is overflow-safe for all int inputs.
   */
  public static final Comparator<Header> COMPARATOR_BY_OFFSET = new Comparator<Header>() {
    @Override
    public int compare(Header h1, Header h2) {
      if (h1.offset != h2.offset) {
        return h1.offset < h2.offset ? -1 : 1;
      }
      // Tie-break on length, also overflow-safe.
      if (h1.length != h2.length) {
        return h1.length < h2.length ? -1 : 1;
      }
      return 0;
    }
  };

  /**
   * Orders headers by table tag. Overflow-safe comparison (see
   * COMPARATOR_BY_OFFSET for why subtraction is avoided).
   */
  public static final Comparator<Header> COMPARATOR_BY_TAG = new Comparator<Header>() {
    @Override
    public int compare(Header h1, Header h2) {
      if (h1.tag != h2.tag) {
        return h1.tag < h2.tag ? -1 : 1;
      }
      return 0;
    }
  };

  /**
   * Constructor.
   *
   * Make a full header as read from an existing font.
   *
   * @param tag the table tag
   * @param checksum the checksum recorded for the table
   * @param offset the table offset from the start of the font file
   * @param length the table length in bytes
   */
  public Header(int tag, long checksum, int offset, int length) {
    this.tag = tag;
    this.checksum = checksum;
    this.checksumValid = true;
    this.offset = offset;
    this.offsetValid = true;
    this.length = length;
    this.lengthValid = true;
  }

  /**
   * Constructor.
   *
   * Make a partial header with only the basic info for a new table. The
   * checksum and offset are unset and marked invalid.
   *
   * @param tag the table tag
   * @param length the table length in bytes
   */
  public Header(int tag, int length) {
    this.tag = tag;
    this.checksum = 0;
    this.checksumValid = false;
    this.offset = 0;
    this.offsetValid = false;
    this.length = length;
    this.lengthValid = true;
  }

  /**
   * Constructor.
   *
   * Make a partial header with only the basic info for an empty new table.
   * The checksum and offset are unset and marked invalid; length is zero.
   *
   * @param tag the table tag
   */
  public Header(int tag) {
    this.tag = tag;
    this.checksum = 0;
    this.checksumValid = false;
    this.offset = 0;
    this.offsetValid = false;
    this.length = 0;
    this.lengthValid = true;
  }

  /**
   * Get the table tag.
   *
   * @return the tag
   */
  public int tag() {
    return tag;
  }

  /**
   * Get the table offset. The offset is from the start of the font file. This
   * offset value is what was read from the font file during construction of the
   * font. It may not be meaningful if the font was manipulated through the
   * builders.
   *
   * @return the offset
   */
  public int offset() {
    return offset;
  }

  /**
   * Is the offset in the header valid. The offset will not be valid if the
   * table was constructed during building and has no physical location in a
   * font file.
   *
   * @return true if the offset is valid; false otherwise
   */
  public boolean offsetValid() {
    return offsetValid;
  }

  /**
   * Get the length of the table as recorded in the table record header. During
   * building the header length will reflect the length that was initially read
   * from the font file. This may not be consistent with the current state of
   * the data.
   *
   * @return the length
   */
  public int length() {
    return length;
  }

  /**
   * Is the length in the header valid. The length will not be valid if the
   * table was constructed during building and has no physical location in a
   * font file until the table is built from the builder.
   *
   * @return true if the length is valid; false otherwise
   */
  public boolean lengthValid() {
    return lengthValid;
  }

  /**
   * Get the checksum for the table as recorded in the table record header.
   *
   * @return the checksum
   */
  public long checksum() {
    return checksum;
  }

  /**
   * Is the checksum valid. The checksum will not be valid if the table was
   * constructed during building and has no physical location in a font file.
   * Note that this does <b>not</b> check the validity of the checksum against
   * the calculated checksum for the table data.
   *
   * @return true if the checksum is valid; false otherwise
   */
  public boolean checksumValid() {
    return checksumValid;
  }

  /**
   * Checks equality of this Header against another object. The only property of
   * the Header object that is considered is the tag.
   */
  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof Header)) {
      return false;
    }
    return ((Header) obj).tag == this.tag;
  }

  /**
   * Computes the hashcode for this Header. The only property of the Header
   * object that is considered is the tag, consistent with equals().
   */
  @Override
  public int hashCode() {
    return this.tag;
  }

  @Override
  public String toString() {
    // Format: [tag, checksum-hex, offset-hex, length-hex]
    StringBuilder builder = new StringBuilder();
    builder.append("[");
    builder.append(Tag.stringValue(this.tag));
    builder.append(", ");
    builder.append(Long.toHexString(this.checksum));
    builder.append(", ");
    builder.append(Integer.toHexString(this.offset));
    builder.append(", ");
    builder.append(Integer.toHexString(this.length));
    builder.append("]");
    return builder.toString();
  }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.06.03 at 10:20:18 AM CEST
//
package com.tibco.xmlns.bw.process._2003;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="description" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="x" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="y" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="width" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="height" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="fontColor" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="color" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="type" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="thickness" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="fade" type="{http://www.w3.org/2001/XMLSchema}string"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "name",
    "description",
    "x",
    "y",
    "width",
    "height",
    "fontColor",
    "color",
    "type",
    "thickness",
    "fade"
})
@XmlRootElement(name = "label")
public class Label {

    // JAXB field-mapped properties; every value is carried as a raw XML string,
    // so numeric fields (x, y, width, height, thickness, fade) are NOT parsed here.
    @XmlElement(required = true)
    protected String name;
    @XmlElement(required = true)
    protected String description;
    @XmlElement(required = true)
    protected String x;
    @XmlElement(required = true)
    protected String y;
    @XmlElement(required = true)
    protected String width;
    @XmlElement(required = true)
    protected String height;
    @XmlElement(required = true)
    protected String fontColor;
    @XmlElement(required = true)
    protected String color;
    @XmlElement(required = true)
    protected String type;
    @XmlElement(required = true)
    protected String thickness;
    @XmlElement(required = true)
    protected String fade;

    /**
     * Returns the {@code name} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getName() {
        return name;
    }

    /**
     * Replaces the {@code name} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setName(String value) {
        name = value;
    }

    /**
     * Returns the {@code description} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getDescription() {
        return description;
    }

    /**
     * Replaces the {@code description} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setDescription(String value) {
        description = value;
    }

    /**
     * Returns the {@code x} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getX() {
        return x;
    }

    /**
     * Replaces the {@code x} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setX(String value) {
        x = value;
    }

    /**
     * Returns the {@code y} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getY() {
        return y;
    }

    /**
     * Replaces the {@code y} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setY(String value) {
        y = value;
    }

    /**
     * Returns the {@code width} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getWidth() {
        return width;
    }

    /**
     * Replaces the {@code width} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setWidth(String value) {
        width = value;
    }

    /**
     * Returns the {@code height} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getHeight() {
        return height;
    }

    /**
     * Replaces the {@code height} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setHeight(String value) {
        height = value;
    }

    /**
     * Returns the {@code fontColor} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getFontColor() {
        return fontColor;
    }

    /**
     * Replaces the {@code fontColor} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setFontColor(String value) {
        fontColor = value;
    }

    /**
     * Returns the {@code color} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getColor() {
        return color;
    }

    /**
     * Replaces the {@code color} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setColor(String value) {
        color = value;
    }

    /**
     * Returns the {@code type} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getType() {
        return type;
    }

    /**
     * Replaces the {@code type} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setType(String value) {
        type = value;
    }

    /**
     * Returns the {@code thickness} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getThickness() {
        return thickness;
    }

    /**
     * Replaces the {@code thickness} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setThickness(String value) {
        thickness = value;
    }

    /**
     * Returns the {@code fade} element value.
     *
     * @return possible object is {@link String }, may be {@code null} if unset
     */
    public String getFade() {
        return fade;
    }

    /**
     * Replaces the {@code fade} element value.
     *
     * @param value allowed object is {@link String }
     */
    public void setFade(String value) {
        fade = value;
    }
}
| |
/*
* Written by Doug Lea and Martin Buchholz with assistance from
* members of JCP JSR-166 Expert Group and released to the public
* domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
/*
* Source:
* http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/extra/AtomicDouble.java?revision=1.13
* (Modified to adapt to guava coding conventions and
* to use AtomicLong instead of sun.misc.Unsafe)
*/
package com.google.common.util.concurrent;
import static java.lang.Double.doubleToRawLongBits;
import static java.lang.Double.longBitsToDouble;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.concurrent.atomic.AtomicLong;
/**
* A {@code double} value that may be updated atomically. See the {@link
* java.util.concurrent.atomic} package specification for description of the properties of atomic
* variables. An {@code AtomicDouble} is used in applications such as atomic accumulation, and
* cannot be used as a replacement for a {@link Double}. However, this class does extend {@code
* Number} to allow uniform access by tools and utilities that deal with numerically-based classes.
*
* <p><a id="bitEquals"></a>This class compares primitive {@code double} values in methods such as
* {@link #compareAndSet} by comparing their bitwise representation using {@link
* Double#doubleToRawLongBits}, which differs from both the primitive double {@code ==} operator and
* from {@link Double#equals}, as if implemented by:
*
* <pre>{@code
* static boolean bitEquals(double x, double y) {
* long xBits = Double.doubleToRawLongBits(x);
* long yBits = Double.doubleToRawLongBits(y);
* return xBits == yBits;
* }
* }</pre>
*
* <p>It is possible to write a more scalable updater, at the cost of giving up strict atomicity.
* See for example <a
* href="http://gee.cs.oswego.edu/dl/jsr166/dist/docs/java.base/java/util/concurrent/atomic/DoubleAdder.html">
* DoubleAdder</a>.
*
* @author Doug Lea
* @author Martin Buchholz
* @since 11.0
*/
@ElementTypesAreNonnullByDefault
public class AtomicDouble extends Number implements java.io.Serializable {
  private static final long serialVersionUID = 0L;

  // We would use AtomicLongFieldUpdater, but it has issues on some Android devices.
  // The double is stored as its raw long bits; transient because serialization
  // writes the double value itself (see writeObject/readObject below).
  private transient AtomicLong value;

  /**
   * Creates a new {@code AtomicDouble} with the given initial value.
   *
   * @param initialValue the initial value
   */
  public AtomicDouble(double initialValue) {
    value = new AtomicLong(doubleToRawLongBits(initialValue));
  }

  /** Creates a new {@code AtomicDouble} with initial value {@code 0.0}. */
  public AtomicDouble() {
    this(0.0);
  }

  /**
   * Gets the current value.
   *
   * @return the current value
   */
  public final double get() {
    return longBitsToDouble(value.get());
  }

  /**
   * Sets to the given value.
   *
   * @param newValue the new value
   */
  public final void set(double newValue) {
    long next = doubleToRawLongBits(newValue);
    value.set(next);
  }

  /**
   * Eventually sets to the given value.
   *
   * @param newValue the new value
   */
  public final void lazySet(double newValue) {
    long next = doubleToRawLongBits(newValue);
    value.lazySet(next);
  }

  /**
   * Atomically sets to the given value and returns the old value.
   *
   * @param newValue the new value
   * @return the previous value
   */
  public final double getAndSet(double newValue) {
    long next = doubleToRawLongBits(newValue);
    return longBitsToDouble(value.getAndSet(next));
  }

  /**
   * Atomically sets the value to the given updated value if the current value is <a
   * href="#bitEquals">bitwise equal</a> to the expected value.
   *
   * @param expect the expected value
   * @param update the new value
   * @return {@code true} if successful. False return indicates that the actual value was not
   *     bitwise equal to the expected value.
   */
  public final boolean compareAndSet(double expect, double update) {
    return value.compareAndSet(doubleToRawLongBits(expect), doubleToRawLongBits(update));
  }

  /**
   * Atomically sets the value to the given updated value if the current value is <a
   * href="#bitEquals">bitwise equal</a> to the expected value.
   *
   * <p>May <a
   * href="http://download.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/package-summary.html#Spurious">
   * fail spuriously</a> and does not provide ordering guarantees, so is only rarely an appropriate
   * alternative to {@code compareAndSet}.
   *
   * @param expect the expected value
   * @param update the new value
   * @return {@code true} if successful
   */
  public final boolean weakCompareAndSet(double expect, double update) {
    return value.weakCompareAndSet(doubleToRawLongBits(expect), doubleToRawLongBits(update));
  }

  /**
   * Atomically adds the given value to the current value.
   *
   * @param delta the value to add
   * @return the previous value
   */
  @CanIgnoreReturnValue
  public final double getAndAdd(double delta) {
    // Delegate the CAS retry loop to AtomicLong.getAndUpdate instead of hand-rolling it;
    // we only supply the bit-level update function.
    return longBitsToDouble(
        value.getAndUpdate(current -> doubleToRawLongBits(longBitsToDouble(current) + delta)));
  }

  /**
   * Atomically adds the given value to the current value.
   *
   * @param delta the value to add
   * @return the updated value
   */
  @CanIgnoreReturnValue
  public final double addAndGet(double delta) {
    // Same as getAndAdd, but returns the updated value (updateAndGet vs getAndUpdate).
    return longBitsToDouble(
        value.updateAndGet(current -> doubleToRawLongBits(longBitsToDouble(current) + delta)));
  }

  /**
   * Returns the String representation of the current value.
   *
   * @return the String representation of the current value
   */
  @Override
  public String toString() {
    return Double.toString(get());
  }

  /**
   * Returns the value of this {@code AtomicDouble} as an {@code int} after a narrowing primitive
   * conversion.
   */
  @Override
  public int intValue() {
    return (int) get();
  }

  /**
   * Returns the value of this {@code AtomicDouble} as a {@code long} after a narrowing primitive
   * conversion.
   */
  @Override
  public long longValue() {
    return (long) get();
  }

  /**
   * Returns the value of this {@code AtomicDouble} as a {@code float} after a narrowing primitive
   * conversion.
   */
  @Override
  public float floatValue() {
    return (float) get();
  }

  /** Returns the value of this {@code AtomicDouble} as a {@code double}. */
  @Override
  public double doubleValue() {
    return get();
  }

  /**
   * Saves the state to a stream (that is, serializes it).
   *
   * @serialData The current value is emitted (a {@code double}).
   */
  private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
    s.defaultWriteObject();
    s.writeDouble(get());
  }

  /** Reconstitutes the instance from a stream (that is, deserializes it). */
  private void readObject(java.io.ObjectInputStream s)
      throws java.io.IOException, ClassNotFoundException {
    s.defaultReadObject();
    // the transient AtomicLong must be rebuilt before the value can be restored
    value = new AtomicLong();
    set(s.readDouble());
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteRequestBuilder;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainRequestBuilder;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.ClearScrollRequestBuilder;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchRequestBuilder;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.SearchScrollRequestBuilder;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequestBuilder;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import java.util.Map;
/**
* A client provides a one stop interface for performing actions/operations against the cluster.
* <p>
* All operations performed are asynchronous by nature. Each action/operation has two flavors, the first
* simply returns an {@link org.elasticsearch.action.ActionFuture}, while the second accepts an
* {@link org.elasticsearch.action.ActionListener}.
* <p>
* A client can be retrieved from a started {@link org.elasticsearch.node.Node}.
*
* @see org.elasticsearch.node.Node#client()
*/
public interface Client extends ElasticsearchClient, Releasable {

    /**
     * Validated setting naming the client flavor; only {@code "node"} (the default) and
     * {@code "transport"} are accepted, any other value fails parsing.
     */
    Setting<String> CLIENT_TYPE_SETTING_S = new Setting<>("client.type", "node", (s) -> {
        switch (s) {
            case "node":
            case "transport":
                return s;
            default:
                throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]");
        }
    }, Property.NodeScope);

    /**
     * The admin client that can be used to perform administrative operations.
     *
     * @return the admin client
     */
    AdminClient admin();

    /**
     * Index a JSON source associated with a given index.
     * <p>
     * The id is optional, if it is not provided, one will be generated automatically.
     *
     * @param request The index request
     * @return The result future
     * @see Requests#indexRequest(String)
     */
    ActionFuture<IndexResponse> index(IndexRequest request);

    /**
     * Index a document associated with a given index.
     * <p>
     * The id is optional, if it is not provided, one will be generated automatically.
     *
     * @param request  The index request
     * @param listener A listener to be notified with a result
     * @see Requests#indexRequest(String)
     */
    void index(IndexRequest request, ActionListener<IndexResponse> listener);

    /**
     * Index a document associated with a given index.
     * <p>
     * The id is optional, if it is not provided, one will be generated automatically.
     */
    IndexRequestBuilder prepareIndex();

    /**
     * Index a document associated with a given index.
     * <p>
     * The id is optional, if it is not provided, one will be generated automatically.
     *
     * @param index The index to index the document to
     */
    IndexRequestBuilder prepareIndex(String index);

    /**
     * Updates a document based on a script.
     *
     * @param request The update request
     * @return The result future
     */
    ActionFuture<UpdateResponse> update(UpdateRequest request);

    /**
     * Updates a document based on a script.
     *
     * @param request  The update request
     * @param listener A listener to be notified with a result
     */
    void update(UpdateRequest request, ActionListener<UpdateResponse> listener);

    /**
     * Updates a document based on a script.
     */
    UpdateRequestBuilder prepareUpdate();

    /**
     * Updates a document based on a script.
     *
     * @param index The index the document to update lives in
     * @param id    The id of the document to update
     */
    UpdateRequestBuilder prepareUpdate(String index, String id);

    /**
     * Deletes a document from the index based on the index and id.
     *
     * @param request The delete request
     * @return The result future
     * @see Requests#deleteRequest(String)
     */
    ActionFuture<DeleteResponse> delete(DeleteRequest request);

    /**
     * Deletes a document from the index based on the index and id.
     *
     * @param request  The delete request
     * @param listener A listener to be notified with a result
     * @see Requests#deleteRequest(String)
     */
    void delete(DeleteRequest request, ActionListener<DeleteResponse> listener);

    /**
     * Deletes a document from the index based on the index and id.
     */
    DeleteRequestBuilder prepareDelete();

    /**
     * Deletes a document from the index based on the index and id.
     *
     * @param index The index to delete the document from
     * @param id    The id of the document to delete
     */
    DeleteRequestBuilder prepareDelete(String index, String id);

    /**
     * Executes a bulk of index / delete operations.
     *
     * @param request The bulk request
     * @return The result future
     * @see org.elasticsearch.client.Requests#bulkRequest()
     */
    ActionFuture<BulkResponse> bulk(BulkRequest request);

    /**
     * Executes a bulk of index / delete operations.
     *
     * @param request  The bulk request
     * @param listener A listener to be notified with a result
     * @see org.elasticsearch.client.Requests#bulkRequest()
     */
    void bulk(BulkRequest request, ActionListener<BulkResponse> listener);

    /**
     * Executes a bulk of index / delete operations.
     */
    BulkRequestBuilder prepareBulk();

    /**
     * Executes a bulk of index / delete operations with a default index;
     * {@code globalIndex} may be {@code null}.
     */
    BulkRequestBuilder prepareBulk(@Nullable String globalIndex);

    /**
     * Gets the document that was indexed from an index with an id.
     *
     * @param request The get request
     * @return The result future
     * @see Requests#getRequest(String)
     */
    ActionFuture<GetResponse> get(GetRequest request);

    /**
     * Gets the document that was indexed from an index with an id.
     *
     * @param request  The get request
     * @param listener A listener to be notified with a result
     * @see Requests#getRequest(String)
     */
    void get(GetRequest request, ActionListener<GetResponse> listener);

    /**
     * Gets the document that was indexed from an index with an id.
     */
    GetRequestBuilder prepareGet();

    /**
     * Gets the document that was indexed from an index with an id.
     *
     * @param index The index the document lives in
     * @param id    The id of the document
     */
    GetRequestBuilder prepareGet(String index, String id);

    /**
     * Multi get documents.
     *
     * @param request The multi get request
     * @return The result future
     */
    ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request);

    /**
     * Multi get documents.
     *
     * @param request  The multi get request
     * @param listener A listener to be notified with a result
     */
    void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener);

    /**
     * Multi get documents.
     */
    MultiGetRequestBuilder prepareMultiGet();

    /**
     * Search across one or more indices with a query.
     *
     * @param request The search request
     * @return The result future
     * @see Requests#searchRequest(String...)
     */
    ActionFuture<SearchResponse> search(SearchRequest request);

    /**
     * Search across one or more indices with a query.
     *
     * @param request  The search request
     * @param listener A listener to be notified of the result
     * @see Requests#searchRequest(String...)
     */
    void search(SearchRequest request, ActionListener<SearchResponse> listener);

    /**
     * Search across one or more indices with a query.
     *
     * @param indices The indices to search against; an empty array targets all indices
     */
    SearchRequestBuilder prepareSearch(String... indices);

    /**
     * A search scroll request to continue searching a previous scrollable search request.
     *
     * @param request The search scroll request
     * @return The result future
     * @see Requests#searchScrollRequest(String)
     */
    ActionFuture<SearchResponse> searchScroll(SearchScrollRequest request);

    /**
     * A search scroll request to continue searching a previous scrollable search request.
     *
     * @param request  The search scroll request
     * @param listener A listener to be notified of the result
     * @see Requests#searchScrollRequest(String)
     */
    void searchScroll(SearchScrollRequest request, ActionListener<SearchResponse> listener);

    /**
     * A search scroll request to continue searching a previous scrollable search request.
     *
     * @param scrollId The scroll id identifying the scrollable search to continue
     */
    SearchScrollRequestBuilder prepareSearchScroll(String scrollId);

    /**
     * Performs multiple search requests.
     *
     * @param request The multi search request
     * @return The result future
     */
    ActionFuture<MultiSearchResponse> multiSearch(MultiSearchRequest request);

    /**
     * Performs multiple search requests.
     *
     * @param request  The multi search request
     * @param listener A listener to be notified of the result
     */
    void multiSearch(MultiSearchRequest request, ActionListener<MultiSearchResponse> listener);

    /**
     * Performs multiple search requests.
     */
    MultiSearchRequestBuilder prepareMultiSearch();

    /**
     * An action that returns the term vectors for a specific document.
     *
     * @param request The term vector request
     * @return The response future
     */
    ActionFuture<TermVectorsResponse> termVectors(TermVectorsRequest request);

    /**
     * An action that returns the term vectors for a specific document.
     *
     * @param request  The term vector request
     * @param listener A listener to be notified of the result
     */
    void termVectors(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener);

    /**
     * Builder for the term vector request.
     */
    TermVectorsRequestBuilder prepareTermVectors();

    /**
     * Builder for the term vector request.
     *
     * @param index The index to load the document from
     * @param id    The id of the document
     */
    TermVectorsRequestBuilder prepareTermVectors(String index, String id);

    /**
     * Multi get term vectors.
     *
     * @param request The multi term vectors request
     * @return The result future
     */
    ActionFuture<MultiTermVectorsResponse> multiTermVectors(MultiTermVectorsRequest request);

    /**
     * Multi get term vectors.
     *
     * @param request  The multi term vectors request
     * @param listener A listener to be notified of the result
     */
    void multiTermVectors(MultiTermVectorsRequest request, ActionListener<MultiTermVectorsResponse> listener);

    /**
     * Multi get term vectors.
     */
    MultiTermVectorsRequestBuilder prepareMultiTermVectors();

    /**
     * Computes a score explanation for the specified request.
     *
     * @param index The index this explain is targeted for
     * @param id    The document identifier this explain is targeted for
     */
    ExplainRequestBuilder prepareExplain(String index, String id);

    /**
     * Computes a score explanation for the specified request.
     *
     * @param request The request encapsulating the query and document identifier to compute a score explanation for
     */
    ActionFuture<ExplainResponse> explain(ExplainRequest request);

    /**
     * Computes a score explanation for the specified request.
     *
     * @param request  The request encapsulating the query and document identifier to compute a score explanation for
     * @param listener A listener to be notified of the result
     */
    void explain(ExplainRequest request, ActionListener<ExplainResponse> listener);

    /**
     * Clears the search contexts associated with specified scroll ids.
     */
    ClearScrollRequestBuilder prepareClearScroll();

    /**
     * Clears the search contexts associated with specified scroll ids.
     *
     * @param request The clear scroll request
     * @return The result future
     */
    ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request);

    /**
     * Clears the search contexts associated with specified scroll ids.
     *
     * @param request  The clear scroll request
     * @param listener A listener to be notified of the result
     */
    void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener);

    /**
     * Builder for the field capabilities request.
     *
     * @param indices The indices to query field capabilities for
     */
    FieldCapabilitiesRequestBuilder prepareFieldCaps(String... indices);

    /**
     * An action that returns the field capabilities from the provided request.
     *
     * @param request The field capabilities request
     * @return The result future
     */
    ActionFuture<FieldCapabilitiesResponse> fieldCaps(FieldCapabilitiesRequest request);

    /**
     * An action that returns the field capabilities from the provided request.
     *
     * @param request  The field capabilities request
     * @param listener A listener to be notified of the result
     */
    void fieldCaps(FieldCapabilitiesRequest request, ActionListener<FieldCapabilitiesResponse> listener);

    /**
     * Returns this client's settings.
     */
    Settings settings();

    /**
     * Returns a new lightweight Client that applies all given headers to each of the requests
     * issued from it.
     */
    Client filterWithHeader(Map<String, String> headers);

    /**
     * Returns a client to a remote cluster with the given cluster alias.
     *
     * @throws IllegalArgumentException if the given clusterAlias doesn't exist
     * @throws UnsupportedOperationException if this functionality is not available on this client.
     */
    default Client getRemoteClusterClient(String clusterAlias) {
        throw new UnsupportedOperationException("this client doesn't support remote cluster connections");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.InvalidOffsetException;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.LockException;
import org.apache.kafka.streams.errors.ShutdownException;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.StateRestoreListener;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.internals.ThreadCache;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static org.apache.kafka.streams.processor.internals.ConsumerUtils.poll;
import static org.apache.kafka.streams.processor.internals.GlobalStreamThread.State.DEAD;
import static org.apache.kafka.streams.processor.internals.GlobalStreamThread.State.PENDING_SHUTDOWN;
/**
* This is the thread responsible for keeping all Global State Stores updated.
* It delegates most of the responsibility to the internal class StateConsumer
*/
public class GlobalStreamThread extends Thread {
private final Logger log;
private final LogContext logContext;
private final StreamsConfig config;
private final Consumer<byte[], byte[]> globalConsumer;
private final StateDirectory stateDirectory;
private final Time time;
private final ThreadCache cache;
private final StreamsMetricsImpl streamsMetrics;
private final ProcessorTopology topology;
private volatile StreamsException startupException;
/**
* The states that the global stream thread can be in
*
* <pre>
* +-------------+
* +<--- | Created (0) |
* | +-----+-------+
* | |
* | v
* | +-----+-------+
* +<--- | Running (1) |
* | +-----+-------+
* | |
* | v
* | +-----+-------+
* +---> | Pending |
* | Shutdown (2)|
* +-----+-------+
* |
* v
* +-----+-------+
* | Dead (3) |
* +-------------+
* </pre>
*
* Note the following:
* <ul>
* <li>Any state can go to PENDING_SHUTDOWN. That is because streams can be closed at any time.</li>
* <li>State PENDING_SHUTDOWN may want to transit itself. In this case we will forbid the transition but will not treat as an error.</li>
* </ul>
*/
public enum State implements ThreadStateTransitionValidator {
CREATED(1, 2), RUNNING(2), PENDING_SHUTDOWN(3), DEAD;
private final Set<Integer> validTransitions = new HashSet<>();
State(final Integer... validTransitions) {
this.validTransitions.addAll(Arrays.asList(validTransitions));
}
public boolean isRunning() {
return equals(RUNNING);
}
public boolean isStarting() {
return equals(CREATED);
}
@Override
public boolean isValidTransition(final ThreadStateTransitionValidator newState) {
final State tmpState = (State) newState;
return validTransitions.contains(tmpState.ordinal());
}
}
private volatile State state = State.CREATED;
private final Object stateLock = new Object();
private StreamThread.StateListener stateListener = null;
private final String logPrefix;
private final StateRestoreListener stateRestoreListener;
/**
* Set the {@link StreamThread.StateListener} to be notified when state changes. Note this API is internal to
* Kafka Streams and is not intended to be used by an external application.
*/
public void setStateListener(final StreamThread.StateListener listener) {
stateListener = listener;
}
/**
* @return The state this instance is in
*/
public State state() {
// we do not need to use the stat lock since the variable is volatile
return state;
}
    /**
     * Sets the state, enforcing the transition diagram documented on {@link State}.
     * <p>
     * Transition validation and the write to {@code state} happen under {@code stateLock};
     * a PENDING_SHUTDOWN self-transition and any transition out of the terminal DEAD state
     * are silently ignored, while other invalid transitions throw. The listener callback
     * runs after the synchronized block, i.e. outside the lock.
     *
     * @param newState New state
     * @throws StreamsException if {@code newState} is not a valid transition from the current state
     */
    private void setState(final State newState) {
        final State oldState = state;
        synchronized (stateLock) {
            if (state == State.PENDING_SHUTDOWN && newState == State.PENDING_SHUTDOWN) {
                // when the state is already in PENDING_SHUTDOWN, its transition to itself
                // will be refused but we do not throw exception here
                return;
            } else if (state == State.DEAD) {
                // when the state is already in DEAD (the terminal state), all its transitions
                // will be refused but we do not throw exception here
                return;
            } else if (!state.isValidTransition(newState)) {
                log.error("Unexpected state transition from {} to {}", oldState, newState);
                throw new StreamsException(logPrefix + "Unexpected state transition from " + oldState + " to " + newState);
            } else {
                log.info("State transition from {} to {}", oldState, newState);
            }
            state = newState;
        }
        if (stateListener != null) {
            // `state` is volatile, so the listener observes the value written above
            stateListener.onChange(this, state, oldState);
        }
    }
public boolean stillRunning() {
synchronized (stateLock) {
return state.isRunning();
}
}
private boolean stillStarting() {
synchronized (stateLock) {
return state.isStarting();
}
}
public GlobalStreamThread(final ProcessorTopology topology,
final StreamsConfig config,
final Consumer<byte[], byte[]> globalConsumer,
final StateDirectory stateDirectory,
final long cacheSizeBytes,
final Metrics metrics,
final Time time,
final String threadClientId,
final StateRestoreListener stateRestoreListener) {
super(threadClientId);
this.time = time;
this.config = config;
this.topology = topology;
this.globalConsumer = globalConsumer;
this.stateDirectory = stateDirectory;
this.streamsMetrics = new StreamsMetricsImpl(metrics, threadClientId);
this.logPrefix = String.format("global-stream-thread [%s] ", threadClientId);
this.logContext = new LogContext(logPrefix);
this.log = logContext.logger(getClass());
this.cache = new ThreadCache(logContext, cacheSizeBytes, streamsMetrics);
this.stateRestoreListener = stateRestoreListener;
}
    /**
     * Drives the global consumer: after {@link #initialize()} has assigned and seeked the
     * partitions, {@link #pollAndUpdate()} is called in a loop to feed records into the
     * {@code GlobalStateMaintainer} and flush its state at the configured interval.
     */
    static class StateConsumer {
        private final Consumer<byte[], byte[]> globalConsumer;
        private final GlobalStateMaintainer stateMaintainer;
        private final Time time;
        // maximum time to block in a single poll, in milliseconds
        private final long pollMs;
        // flush the state maintainer at most once per this many milliseconds; negative disables flushing
        private final long flushInterval;
        private final Logger log;
        // wall-clock time of the last flush, used to rate-limit flushState()
        private long lastFlush;
        StateConsumer(final LogContext logContext,
                      final Consumer<byte[], byte[]> globalConsumer,
                      final GlobalStateMaintainer stateMaintainer,
                      final Time time,
                      final long pollMs,
                      final long flushInterval) {
            this.log = logContext.logger(getClass());
            this.globalConsumer = globalConsumer;
            this.stateMaintainer = stateMaintainer;
            this.time = time;
            this.pollMs = pollMs;
            this.flushInterval = flushInterval;
        }
        /**
         * Assigns the consumer to the partitions reported by the state maintainer and seeks
         * each one to its checkpointed offset, then starts the flush-interval clock.
         *
         * @throws IllegalStateException If store gets registered after initialized is already finished
         * @throws StreamsException if the store's change log does not contain the partition
         */
        void initialize() {
            final Map<TopicPartition, Long> partitionOffsets = stateMaintainer.initialize();
            globalConsumer.assign(partitionOffsets.keySet());
            for (final Map.Entry<TopicPartition, Long> entry : partitionOffsets.entrySet()) {
                globalConsumer.seek(entry.getKey(), entry.getValue());
            }
            lastFlush = time.milliseconds();
        }
        /**
         * Polls one batch of records, applies each to the state maintainer, and flushes
         * when the flush interval has elapsed. An {@code InvalidOffsetException} (e.g. the
         * changelog was truncated) is rethrown as a fatal {@code StreamsException}.
         */
        void pollAndUpdate() {
            try {
                // `poll` is a static helper defined elsewhere in this file — presumably a
                // thin wrapper over Consumer#poll with the given timeout; confirm there.
                final ConsumerRecords<byte[], byte[]> received = poll(globalConsumer, pollMs);
                for (final ConsumerRecord<byte[], byte[]> record : received) {
                    stateMaintainer.update(record);
                }
                final long now = time.milliseconds();
                if (flushInterval >= 0 && now >= lastFlush + flushInterval) {
                    stateMaintainer.flushState();
                    lastFlush = now;
                }
            } catch (final InvalidOffsetException recoverableException) {
                log.error("Updating global state failed. You can restart KafkaStreams to recover from this error.", recoverableException);
                throw new StreamsException("Updating global state failed. " +
                    "You can restart KafkaStreams to recover from this error.", recoverableException);
            }
        }
        /**
         * Closes the consumer and then the state maintainer. A consumer-close failure is
         * logged but deliberately not rethrown so the state stores are always closed.
         */
        public void close() throws IOException {
            try {
                globalConsumer.close();
            } catch (final RuntimeException e) {
                // just log an error if the consumer throws an exception during close
                // so we can always attempt to close the state stores.
                log.error("Failed to close consumer due to the following error:", e);
            }
            stateMaintainer.close();
        }
    }
    /**
     * Main loop of the global stream thread. Initializes the state consumer (transitioning
     * to DEAD on failure), then polls/updates until {@code stillRunning()} turns false,
     * and finally tears everything down, always ending in the DEAD state.
     */
    @Override
    public void run() {
        final StateConsumer stateConsumer;
        try {
            stateConsumer = initialize();
        } catch (final ShutdownException e) {
            log.info("Shutting down from initialization");
            // Almost certainly, we arrived here because the state is already PENDING_SHUTDOWN, but it's harmless to
            // just make sure
            setState(State.PENDING_SHUTDOWN);
            setState(State.DEAD);
            streamsMetrics.removeAllThreadLevelSensors();
            log.info("Shutdown complete");
            return;
        }
        if (stateConsumer == null) {
            // during initialization, the caller thread would wait for the state consumer
            // to restore the global state store before transiting to RUNNING state and return;
            // if an error happens during the restoration process, the stateConsumer will be null
            // and in this case we will transit the state to PENDING_SHUTDOWN and DEAD immediately.
            // the exception will be thrown in the caller thread during start() function.
            setState(State.PENDING_SHUTDOWN);
            setState(State.DEAD);
            log.warn("Error happened during initialization of the global state store; this thread has shutdown");
            streamsMetrics.removeAllThreadLevelSensors();
            return;
        }
        // initialization succeeded: the caller blocked in start() is released by this transition
        setState(State.RUNNING);
        try {
            while (stillRunning()) {
                stateConsumer.pollAndUpdate();
            }
        } finally {
            // set the state to pending shutdown first as it may be called due to error;
            // its state may already be PENDING_SHUTDOWN so it will return false but we
            // intentionally do not check the returned flag
            setState(State.PENDING_SHUTDOWN);
            log.info("Shutting down");
            try {
                stateConsumer.close();
            } catch (final IOException e) {
                log.error("Failed to close state maintainer due to the following error:", e);
            }
            streamsMetrics.removeAllThreadLevelSensors();
            setState(DEAD);
            log.info("Shutdown complete");
        }
    }
    /**
     * Builds the global state manager, processor context and state consumer, and restores
     * the global stores via {@link StateConsumer#initialize()}.
     *
     * On any failure the exception is recorded in {@code startupException} (so the caller
     * blocked in {@link #start()} can rethrow it) and {@code null} is returned; {@code run()}
     * treats a null return as a failed startup.
     *
     * @return the initialized state consumer, or null if initialization failed
     */
    private StateConsumer initialize() {
        try {
            final GlobalStateManager stateMgr = new GlobalStateManagerImpl(
                logContext,
                topology,
                globalConsumer,
                stateDirectory,
                stateRestoreListener,
                config,
                new GlobalStateManagerImpl.IsRunning() {
                    @Override
                    public boolean check() {
                        // keep restoring only while the thread is starting up or running
                        return stillStarting() || stillRunning();
                    }
                }
            );
            final GlobalProcessorContextImpl globalProcessorContext = new GlobalProcessorContextImpl(
                config,
                stateMgr,
                streamsMetrics,
                cache);
            stateMgr.setGlobalProcessorContext(globalProcessorContext);
            final StateConsumer stateConsumer = new StateConsumer(
                logContext,
                globalConsumer,
                new GlobalStateUpdateTask(
                    topology,
                    globalProcessorContext,
                    stateMgr,
                    config.defaultDeserializationExceptionHandler(),
                    logContext
                ),
                time,
                config.getLong(StreamsConfig.POLL_MS_CONFIG),
                config.getLong(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG));
            // performs the (potentially long) restoration of all global stores
            stateConsumer.initialize();
            return stateConsumer;
        } catch (final LockException fatalException) {
            final String errorMsg = "Could not lock global state directory. This could happen if multiple KafkaStreams " +
                "instances are running on the same host using the same state directory.";
            log.error(errorMsg, fatalException);
            startupException = new StreamsException(errorMsg, fatalException);
        } catch (final StreamsException fatalException) {
            startupException = fatalException;
        } catch (final Exception fatalException) {
            startupException = new StreamsException("Exception caught during initialization of GlobalStreamThread", fatalException);
        }
        return null;
    }
@Override
public synchronized void start() {
super.start();
while (!stillRunning()) {
Utils.sleep(1);
if (startupException != null) {
throw startupException;
}
}
}
    /**
     * Requests an orderly shutdown: flags the state machine and wakes the consumer so a
     * blocking poll returns promptly. Safe to call multiple times and from any thread.
     */
    public void shutdown() {
        // one could call shutdown() multiple times, so ignore subsequent calls
        // if already shutting down or dead
        setState(PENDING_SHUTDOWN);
        // interrupt a poll that may be blocking inside the run loop
        globalConsumer.wakeup();
    }
public Map<MetricName, Metric> consumerMetrics() {
return Collections.unmodifiableMap(globalConsumer.metrics());
}
}
| |
package ru.dega;
import ru.dega.models.User;
import ru.dega.models.UserRole;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
/**
 * Simple JDBC data-access layer for the USERS table. The pooled {@link DataSource}
 * is resolved once from JNDI ({@code java:comp/env/jdbc/mvc}) in the static
 * initializer; every public method opens and closes its own connection.
 *
 * SQLExceptions are logged and swallowed by design: read methods return an empty
 * result and write methods report failure through their boolean return value.
 *
 * @author Denis
 * @since 11.08.2017
 */
@SuppressWarnings({"Duplicates", "SqlNoDataSourceInspection", "SqlDialectInspection"})
public class DBManager {
    /**
     * Data source, looked up from JNDI at class-load time.
     */
    private static final DataSource DATA_SOURCE;
    /**
     * Shared instance.
     */
    public static final DBManager INSTANCE = new DBManager();
    /**
     * Constructor kept public for backward compatibility; prefer
     * {@link #getInstance()}. (A strict singleton would make this private.)
     */
    public DBManager() {
    }
    /**
     * Get instance.
     *
     * @return the shared {@link DBManager} instance
     */
    public static DBManager getInstance() {
        return INSTANCE;
    }
    static {
        try {
            InitialContext initContext = new InitialContext();
            DATA_SOURCE = (DataSource) initContext.lookup(
                    "java:comp/env/jdbc/mvc");
        } catch (NamingException e) {
            e.printStackTrace();
            throw new ExceptionInInitializerError("dataSource not initialized");
        }
    }
    /**
     * Get all entries in database.
     *
     * @return all users; empty list if the table is empty or an SQL error occurred
     */
    public List<User> getAllEntries() {
        List<User> result = new ArrayList<>();
        try (Connection con = DATA_SOURCE.getConnection();
             Statement statement = con.createStatement();
             ResultSet rs = statement.executeQuery("SELECT * FROM USERS")) {
            while (rs.next()) {
                result.add(mapUser(rs));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return result;
    }
    /**
     * Add entry in database.
     *
     * @param user user
     * @return true if added successfully
     */
    public boolean addEntry(User user) {
        boolean result = true;
        String sql = "INSERT INTO USERS (LOGIN, PASSWORD, NAME, EMAIL, CREATEDATE, COUNTRY, TOWN, ROLE) "
                + "VALUES (?, ?, ?, ?, ?, ?, ?, ?);";
        // Prepare the statement directly inside try-with-resources so that an
        // SQLException propagates to the catch below. (The old private helper
        // swallowed the exception and returned null, which caused an NPE on the
        // first setString call and masked the real error.)
        try (Connection con = DATA_SOURCE.getConnection();
             PreparedStatement preparedStatement = con.prepareStatement(sql)) {
            preparedStatement.setString(1, user.getLogin());
            preparedStatement.setString(2, user.getPassword());
            preparedStatement.setString(3, user.getName());
            preparedStatement.setString(4, user.getEmail());
            preparedStatement.setTimestamp(5,
                    Timestamp.valueOf(user.getCreateDate()));
            preparedStatement.setString(6, user.getCountry());
            preparedStatement.setString(7, user.getTown());
            preparedStatement.setString(8, user.getRole().toString());
            preparedStatement.executeUpdate();
        } catch (SQLException e) {
            result = false;
            e.printStackTrace();
        }
        return result;
    }
    /**
     * Delete entry in database.
     *
     * @param login login
     * @return true if no SQL error occurred (also true when no row matched)
     */
    public boolean deleteEntry(String login) {
        boolean result = true;
        String sql = "DELETE FROM USERS WHERE login=?;";
        try (Connection con = DATA_SOURCE.getConnection();
             PreparedStatement preparedStatement = con.prepareStatement(sql)) {
            preparedStatement.setString(1, login);
            preparedStatement.executeUpdate();
        } catch (SQLException e) {
            result = false;
            e.printStackTrace();
        }
        return result;
    }
    /**
     * Edit entry in database.
     *
     * @param login   login
     * @param name    new name
     * @param email   new email
     * @param country country
     * @param town    town
     * @return true if at least one row was updated
     */
    public boolean editEntry(String login, String name, String email, String country, String town) {
        boolean result = false;
        String sql = "UPDATE USERS SET NAME = ?, EMAIL = ?, COUNTRY = ?, TOWN = ? WHERE LOGIN=?;";
        try (Connection con = DATA_SOURCE.getConnection();
             PreparedStatement preparedStatement = con.prepareStatement(sql)) {
            preparedStatement.setString(1, name);
            preparedStatement.setString(2, email);
            preparedStatement.setString(3, country);
            preparedStatement.setString(4, town);
            preparedStatement.setString(5, login);
            if (preparedStatement.executeUpdate() != 0) {
                result = true;
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return result;
    }
    /**
     * Get entry from database.
     *
     * @param login login
     * @return the matching user, or null if not found or an SQL error occurred
     */
    public User getEntry(String login) {
        User result = null;
        String sql = "SELECT * FROM USERS WHERE LOGIN=?;";
        try (Connection con = DATA_SOURCE.getConnection();
             PreparedStatement preparedStatement = con.prepareStatement(sql)) {
            preparedStatement.setString(1, login);
            try (ResultSet rs = preparedStatement.executeQuery()) {
                if (rs.next()) {
                    // Previously this method hard-coded a null create date while
                    // getAllEntries() populated it; use the shared mapper so both
                    // read paths return identically shaped User objects.
                    result = mapUser(rs);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return result;
    }
    /**
     * Check login and password.
     *
     * Looks up the single matching row instead of loading the whole USERS table
     * into memory; the password comparison still happens in Java so its
     * case-sensitivity is unchanged.
     *
     * NOTE(review): passwords are stored and compared in plain text — they
     * should be hashed (e.g. bcrypt). Flagged here, not changed.
     *
     * @param login    login
     * @param password password
     * @return true if credential
     */
    public boolean isCredential(String login, String password) {
        User user = getEntry(login);
        return user != null
                && user.getLogin().equals(login)
                && user.getPassword().equals(password);
    }
    /**
     * Maps the current row of a USERS result set to a {@link User}.
     * A NULL CREATEDATE column maps to a null create date (the previous inline
     * mapping would have thrown a NullPointerException on such rows).
     *
     * @param rs result set positioned on a USERS row
     * @return the mapped user
     * @throws SQLException if a column cannot be read
     */
    private static User mapUser(ResultSet rs) throws SQLException {
        Timestamp created = rs.getTimestamp("CREATEDATE");
        return new User(rs.getString("LOGIN"),
                rs.getString("PASSWORD"),
                rs.getString("NAME"),
                rs.getString("EMAIL"),
                created == null ? null : created.toLocalDateTime(),
                rs.getString("COUNTRY"),
                rs.getString("TOWN"),
                UserRole.valueOf(rs.getString("ROLE")));
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.resourcegroups;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.prestosql.spi.memory.MemoryPoolInfo;
import io.prestosql.spi.resourcegroups.ResourceGroup;
import io.prestosql.spi.resourcegroups.ResourceGroupId;
import io.prestosql.spi.resourcegroups.SelectionContext;
import io.prestosql.spi.resourcegroups.SelectionCriteria;
import io.prestosql.spi.session.ResourceEstimates;
import org.testng.annotations.Test;
import java.time.Duration;
import java.util.Optional;
import java.util.regex.Pattern;
import static com.google.common.io.Resources.getResource;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static io.prestosql.memory.LocalMemoryManager.GENERAL_POOL;
import static io.prestosql.plugin.resourcegroups.TestingResourceGroups.groupIdTemplate;
import static io.prestosql.plugin.resourcegroups.TestingResourceGroups.managerSpec;
import static io.prestosql.plugin.resourcegroups.TestingResourceGroups.resourceGroupSpec;
import static io.prestosql.plugin.resourcegroups.TestingResourceGroups.selectorSpec;
import static io.prestosql.spi.resourcegroups.SchedulingPolicy.WEIGHTED;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
 * Tests for {@code FileResourceGroupConfigurationManager}: selector matching by query
 * type, user and user groups; configuration of resource-group properties (limits,
 * scheduling policy, weights); and error reporting for invalid JSON config files
 * loaded from the test resources.
 */
public class TestFileResourceGroupConfigurationManager
{
    // shared "no CPU/memory/time estimates supplied" value for the criteria helpers below
    private static final ResourceEstimates EMPTY_RESOURCE_ESTIMATES = new ResourceEstimates(Optional.empty(), Optional.empty(), Optional.empty());

    // Each invalid resource file must fail to parse with the given message pattern.
    @Test
    public void testInvalid()
    {
        assertFails("resource_groups_config_bad_root.json", "Duplicated root group: global");
        assertFails("resource_groups_config_bad_sub_group.json", "Duplicated sub group: sub");
        assertFails("resource_groups_config_bad_group_id.json", "Invalid resource group name. 'glo.bal' contains a '.'");
        assertFails("resource_groups_config_bad_weighted_scheduling_policy.json", "Must specify scheduling weight for all sub-groups of 'requests' or none of them");
        assertFails("resource_groups_config_unused_field.json", "Unknown property at line 8:6: maxFoo");
        assertFails("resource_groups_config_bad_query_priority_scheduling_policy.json",
                "Must use 'weighted' or 'weighted_fair' scheduling policy if specifying scheduling weight for 'requests'");
        assertFails("resource_groups_config_bad_extract_variable.json", "Invalid resource group name.*");
        assertFails("resource_groups_config_bad_query_type.json", "Selector specifies an invalid query type: invalid_query_type");
        assertFails("resource_groups_config_bad_selector.json", "Selector refers to nonexistent group: a.b.c.X");
    }

    // Every known query type routes to its dedicated group; anything else falls through to "other".
    @Test
    public void testQueryTypeConfiguration()
    {
        FileResourceGroupConfigurationManager manager = parse("resource_groups_config_query_type.json");
        assertMatch(manager, queryTypeSelectionCriteria("select"), "global.select");
        assertMatch(manager, queryTypeSelectionCriteria("explain"), "global.explain");
        assertMatch(manager, queryTypeSelectionCriteria("insert"), "global.insert");
        assertMatch(manager, queryTypeSelectionCriteria("delete"), "global.delete");
        assertMatch(manager, queryTypeSelectionCriteria("describe"), "global.describe");
        assertMatch(manager, queryTypeSelectionCriteria("data_definition"), "global.data_definition");
        assertMatch(manager, queryTypeSelectionCriteria("sth_else"), "global.other");
    }

    // A selector with several allowed user groups matches if any of them is present.
    @Test
    public void testMatchByUserGroups()
    {
        ManagerSpec managerSpec = managerSpec(
                resourceGroupSpec("group"),
                ImmutableList.of(selectorSpec(groupIdTemplate("group"))
                        .userGroups("first matching", "second matching")));
        FileResourceGroupConfigurationManager groupManager = new FileResourceGroupConfigurationManager((poolId, listener) -> {}, managerSpec);
        assertThat(groupManager.match(userGroupsSelectionCriteria("not matching"))).isEmpty();
        assertThat(groupManager.match(userGroupsSelectionCriteria("first matching")))
                .map(SelectionContext::getContext)
                .isEqualTo(Optional.of(groupIdTemplate("group")));
    }

    @Test
    public void testMatchByUsers()
    {
        ManagerSpec managerSpec = managerSpec(
                resourceGroupSpec("group"),
                ImmutableList.of(selectorSpec(groupIdTemplate("group"))
                        .users("First matching user", "Second matching user")));
        FileResourceGroupConfigurationManager groupManager = new FileResourceGroupConfigurationManager((poolId, listener) -> {}, managerSpec);
        assertThat(groupManager.match(userSelectionCriteria("Not matching user"))).isEmpty();
        assertThat(groupManager.match(userSelectionCriteria("First matching user")))
                .map(SelectionContext::getContext)
                .isEqualTo(Optional.of(groupIdTemplate("group")));
    }

    // When a selector constrains both user and user group, BOTH must match.
    @Test
    public void testMatchByUsersAndGroups()
    {
        ManagerSpec managerSpec = managerSpec(
                resourceGroupSpec("group"),
                ImmutableList.of(selectorSpec(groupIdTemplate("group"))
                        .userGroups("Matching group")
                        .users("Matching user")));
        FileResourceGroupConfigurationManager groupManager = new FileResourceGroupConfigurationManager((poolId, listener) -> {}, managerSpec);
        assertThat(groupManager.match(userAndUserGroupsSelectionCriteria("Matching user", "Not matching group"))).isEmpty();
        assertThat(groupManager.match(userAndUserGroupsSelectionCriteria("Not matching user", "Matching group"))).isEmpty();
        assertThat(groupManager.match(userAndUserGroupsSelectionCriteria("Matching user", "Matching group")))
                .map(SelectionContext::getContext)
                .isEqualTo(Optional.of(groupIdTemplate("group")));
    }

    @Test
    public void testUserGroupsConfiguration()
    {
        ManagerSpec spec = parseManagerSpec("resource_groups_config_user_groups.json");
        assertThat(spec.getSelectors()
                .stream()
                .map(SelectorSpec::getUserGroupRegex)
                .map(pattern -> pattern.map(Pattern::pattern)))
                .containsOnly(Optional.of("groupA"));
    }

    // Full round-trip: parse a config file, apply it to root and sub groups, and
    // verify every configured property lands on the ResourceGroup.
    @Test
    public void testConfiguration()
    {
        FileResourceGroupConfigurationManager manager = parse("resource_groups_config.json");
        ResourceGroupId globalId = new ResourceGroupId("global");
        ResourceGroup global = new TestingResourceGroup(globalId);
        manager.configure(global, new SelectionContext<>(globalId, new ResourceGroupIdTemplate("global")));
        assertEquals(global.getSoftMemoryLimitBytes(), DataSize.of(1, MEGABYTE).toBytes());
        assertEquals(global.getSoftCpuLimit(), Duration.ofHours(1));
        assertEquals(global.getHardCpuLimit(), Duration.ofDays(1));
        assertEquals(global.getCpuQuotaGenerationMillisPerSecond(), 1000 * 24);
        assertEquals(global.getMaxQueuedQueries(), 1000);
        assertEquals(global.getHardConcurrencyLimit(), 100);
        assertEquals(global.getSchedulingPolicy(), WEIGHTED);
        assertEquals(global.getSchedulingWeight(), 0);
        assertTrue(global.getJmxExport());
        ResourceGroupId subId = new ResourceGroupId(globalId, "sub");
        ResourceGroup sub = new TestingResourceGroup(subId);
        manager.configure(sub, new SelectionContext<>(subId, new ResourceGroupIdTemplate("global.sub")));
        assertEquals(sub.getSoftMemoryLimitBytes(), DataSize.of(2, MEGABYTE).toBytes());
        assertEquals(sub.getHardConcurrencyLimit(), 3);
        assertEquals(sub.getMaxQueuedQueries(), 4);
        assertNull(sub.getSchedulingPolicy());
        assertEquals(sub.getSchedulingWeight(), 5);
        assertFalse(sub.getJmxExport());
    }

    // Variables extracted from user/source regexes must be substituted into the group id.
    @Test
    public void testExtractVariableConfiguration()
    {
        FileResourceGroupConfigurationManager manager = parse("resource_groups_config_extract_variable.json");
        SelectionContext<ResourceGroupIdTemplate> selectionContext = match(manager, userAndSourceSelectionCriteria("someuser@presto.io", "scheduler.us_east.12"));
        assertEquals(selectionContext.getResourceGroupId().toString(), "global.presto:us_east:12");
        TestingResourceGroup resourceGroup = new TestingResourceGroup(selectionContext.getResourceGroupId());
        manager.configure(resourceGroup, selectionContext);
        assertEquals(resourceGroup.getHardConcurrencyLimit(), 3);
        selectionContext = match(manager, userAndSourceSelectionCriteria("nobody", "rg-abcdefghijkl"));
        assertEquals(selectionContext.getResourceGroupId().toString(), "global.abcdefghijkl");
        resourceGroup = new TestingResourceGroup(selectionContext.getResourceGroupId());
        manager.configure(resourceGroup, selectionContext);
        assertEquals(resourceGroup.getHardConcurrencyLimit(), 115);
    }

    // Keeps the example config shipped with the docs in sync with the implementation.
    @Test
    public void testDocsExample()
    {
        long generalPoolSize = 31415926535900L; // arbitrary uneven value for testing
        FileResourceGroupConfigurationManager manager = new FileResourceGroupConfigurationManager(
                (poolId, listener) -> {
                    if (poolId.equals(GENERAL_POOL)) {
                        listener.accept(new MemoryPoolInfo(generalPoolSize, 0, 0, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of()));
                    }
                },
                new FileResourceGroupConfig()
                        // TODO: figure out a better way to validate documentation
                        .setConfigFile("../presto-docs/src/main/sphinx/admin/resource-groups-example.json"));
        SelectionContext<ResourceGroupIdTemplate> selectionContext = match(manager, new SelectionCriteria(
                true,
                "Alice",
                ImmutableSet.of(),
                Optional.of("jdbc#powerfulbi"),
                ImmutableSet.of("hipri"),
                EMPTY_RESOURCE_ESTIMATES,
                Optional.of("select")));
        assertEquals(selectionContext.getResourceGroupId().toString(), "global.adhoc.bi-powerfulbi.Alice");
        TestingResourceGroup resourceGroup = new TestingResourceGroup(selectionContext.getResourceGroupId());
        manager.configure(resourceGroup, selectionContext);
        assertEquals(resourceGroup.getHardConcurrencyLimit(), 3);
        assertEquals(resourceGroup.getMaxQueuedQueries(), 10);
        assertEquals(resourceGroup.getSoftMemoryLimitBytes(), generalPoolSize / 10);
    }

    @Test
    public void testLegacyConfiguration()
    {
        FileResourceGroupConfigurationManager manager = parse("resource_groups_config_legacy.json");
        ResourceGroupId globalId = new ResourceGroupId("global");
        ResourceGroup global = new TestingResourceGroup(globalId);
        manager.configure(global, new SelectionContext<>(globalId, new ResourceGroupIdTemplate("global")));
        assertEquals(global.getSoftMemoryLimitBytes(), DataSize.of(3, MEGABYTE).toBytes());
        assertEquals(global.getMaxQueuedQueries(), 99);
        assertEquals(global.getHardConcurrencyLimit(), 42);
    }

    // ---- assertion helpers ----

    private static void assertMatch(FileResourceGroupConfigurationManager manager, SelectionCriteria criteria, String expectedResourceGroup)
    {
        ResourceGroupId resourceGroupId = match(manager, criteria).getResourceGroupId();
        assertEquals(resourceGroupId.toString(), expectedResourceGroup, format("Expected: '%s' resource group, found: %s", expectedResourceGroup, resourceGroupId));
    }

    private static SelectionContext<ResourceGroupIdTemplate> match(FileResourceGroupConfigurationManager manager, SelectionCriteria criteria)
    {
        return manager.match(criteria)
                .orElseThrow(() -> new IllegalStateException("No match"));
    }

    private static void assertFails(String fileName, String expectedPattern)
    {
        assertThatThrownBy(() -> parse(fileName)).hasMessageMatching(expectedPattern);
    }

    // ---- config loading helpers (files come from the test classpath) ----

    private static FileResourceGroupConfigurationManager parse(String fileName)
    {
        FileResourceGroupConfig config = new FileResourceGroupConfig();
        config.setConfigFile(getResource(fileName).getPath());
        return new FileResourceGroupConfigurationManager((poolId, listener) -> {}, config);
    }

    private static ManagerSpec parseManagerSpec(String fileName)
    {
        FileResourceGroupConfig config = new FileResourceGroupConfig();
        config.setConfigFile(getResource(fileName).getPath());
        return FileResourceGroupConfigurationManager.parseManagerSpec(config);
    }

    // ---- SelectionCriteria factories ----

    private static SelectionCriteria userAndSourceSelectionCriteria(String user, String source)
    {
        return new SelectionCriteria(true, user, ImmutableSet.of(), Optional.of(source), ImmutableSet.of(), EMPTY_RESOURCE_ESTIMATES, Optional.empty());
    }

    private static SelectionCriteria userSelectionCriteria(String user)
    {
        return userAndSourceSelectionCriteria(user, "source");
    }

    private static SelectionCriteria queryTypeSelectionCriteria(String queryType)
    {
        return new SelectionCriteria(true, "test_user", ImmutableSet.of(), Optional.empty(), ImmutableSet.of(), EMPTY_RESOURCE_ESTIMATES, Optional.of(queryType));
    }

    private static SelectionCriteria userGroupsSelectionCriteria(String... groups)
    {
        return new SelectionCriteria(true, "test_user", ImmutableSet.copyOf(groups), Optional.empty(), ImmutableSet.of(), EMPTY_RESOURCE_ESTIMATES, Optional.empty());
    }

    private static SelectionCriteria userAndUserGroupsSelectionCriteria(String user, String group, String... groups)
    {
        return new SelectionCriteria(
                true,
                user,
                ImmutableSet.<String>builder()
                        .add(group)
                        .add(groups).build(),
                Optional.empty(),
                ImmutableSet.of(),
                EMPTY_RESOURCE_ESTIMATES,
                Optional.empty());
    }
}
| |
package org.apache.maven.plugin;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.ArtifactUtils;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.repository.ComponentDescriptor;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.WorkspaceRepository;
/**
 * Caches raw plugin descriptors. A raw plugin descriptor is a descriptor that has just been extracted from the plugin
 * artifact and does not contain any runtime specific data. The cache must not be used for descriptors that hold runtime
 * data like the plugin realm. Descriptors are defensively cloned both on the way in and on the way out, so callers can
 * never mutate a cached instance. <strong>Warning:</strong> This is an internal utility interface that is only public
 * for technical reasons, it is not part of the public API. In particular, this interface can be changed or deleted
 * without prior notice.
 *
 * @since 3.0
 * @author Benjamin Bentmann
 */
@Component( role = PluginDescriptorCache.class )
public class DefaultPluginDescriptorCache
    implements PluginDescriptorCache
{

    // NOTE(review): this cache is presumably reachable from several threads (e.g. parallel
    // builds), so use a ConcurrentHashMap rather than a plain HashMap — confirm against the
    // container's threading model. ConcurrentHashMap forbids null values; put() guards for that.
    private final Map<Key, PluginDescriptor> descriptors = new ConcurrentHashMap<>( 128 );

    /** Discards all cached descriptors. */
    public void flush()
    {
        descriptors.clear();
    }

    /** Builds a cache key covering the plugin GAV and the effective repository configuration. */
    public Key createKey( Plugin plugin, List<RemoteRepository> repositories, RepositorySystemSession session )
    {
        return new CacheKey( plugin, repositories, session );
    }

    /**
     * @return a fresh clone of the cached descriptor, or {@code null} on a cache miss
     */
    public PluginDescriptor get( Key cacheKey )
    {
        return clone( descriptors.get( cacheKey ) );
    }

    /** Stores a clone of the given descriptor; a {@code null} descriptor clears the entry. */
    public void put( Key cacheKey, PluginDescriptor pluginDescriptor )
    {
        PluginDescriptor copy = clone( pluginDescriptor );
        if ( copy != null )
        {
            descriptors.put( cacheKey, copy );
        }
        else
        {
            // ConcurrentHashMap rejects null values; removing gives the same observable
            // behavior as the old HashMap storing null (get() returns null either way).
            descriptors.remove( cacheKey );
        }
    }

    /**
     * Deep-copies a descriptor so runtime mutations (e.g. the plugin realm) never leak into
     * the cache. Returns {@code null} for {@code null} input.
     */
    protected static PluginDescriptor clone( PluginDescriptor original )
    {
        PluginDescriptor clone = null;
        if ( original != null )
        {
            clone = new PluginDescriptor();
            clone.setGroupId( original.getGroupId() );
            clone.setArtifactId( original.getArtifactId() );
            clone.setVersion( original.getVersion() );
            clone.setGoalPrefix( original.getGoalPrefix() );
            clone.setInheritedByDefault( original.isInheritedByDefault() );
            clone.setName( original.getName() );
            clone.setDescription( original.getDescription() );
            clone.setRequiredMavenVersion( original.getRequiredMavenVersion() );
            clone.setPluginArtifact( ArtifactUtils.copyArtifactSafe( original.getPluginArtifact() ) );
            clone.setComponents( clone( original.getMojos(), clone ) );
            clone.setId( original.getId() );
            clone.setIsolatedRealm( original.isIsolatedRealm() );
            clone.setSource( original.getSource() );
            clone.setDependencies( original.getDependencies() );
        }
        return clone;
    }

    /** Clones each mojo descriptor, re-pointing it at the cloned plugin descriptor. */
    private static List<ComponentDescriptor<?>> clone( List<MojoDescriptor> mojos, PluginDescriptor pluginDescriptor )
    {
        List<ComponentDescriptor<?>> clones = null;
        if ( mojos != null )
        {
            clones = new ArrayList<>( mojos.size() );
            for ( MojoDescriptor mojo : mojos )
            {
                MojoDescriptor clone = mojo.clone();
                clone.setPluginDescriptor( pluginDescriptor );
                clones.add( clone );
            }
        }
        return clones;
    }

    /**
     * Immutable cache key: plugin GAV + workspace + local repo + the <em>expanded</em> remote
     * repository list (repository managers are replaced by their mirrored repositories).
     */
    private static final class CacheKey
        implements Key
    {

        private final String groupId;

        private final String artifactId;

        private final String version;

        private final WorkspaceRepository workspace;

        private final LocalRepository localRepo;

        private final List<RemoteRepository> repositories;

        private final int hashCode;

        CacheKey( Plugin plugin, List<RemoteRepository> repositories, RepositorySystemSession session )
        {
            groupId = plugin.getGroupId();
            artifactId = plugin.getArtifactId();
            version = plugin.getVersion();
            workspace = RepositoryUtils.getWorkspace( session );
            localRepo = session.getLocalRepository();
            this.repositories = new ArrayList<>( repositories.size() );
            for ( RemoteRepository repository : repositories )
            {
                if ( repository.isRepositoryManager() )
                {
                    this.repositories.addAll( repository.getMirroredRepositories() );
                }
                else
                {
                    this.repositories.add( repository );
                }
            }

            int hash = 17;
            hash = hash * 31 + groupId.hashCode();
            hash = hash * 31 + artifactId.hashCode();
            hash = hash * 31 + version.hashCode();
            hash = hash * 31 + hash( workspace );
            hash = hash * 31 + localRepo.hashCode();
            // FIX: hash the expanded list (this.repositories), not the raw parameter.
            // equals() compares this.repositories, so hashing the parameter could give
            // different hash codes for equal keys, breaking the hashCode/equals contract.
            hash = hash * 31 + RepositoryUtils.repositoriesHashCode( this.repositories );
            this.hashCode = hash;
        }

        @Override
        public int hashCode()
        {
            return hashCode;
        }

        @Override
        public boolean equals( Object obj )
        {
            if ( this == obj )
            {
                return true;
            }
            if ( !( obj instanceof CacheKey ) )
            {
                return false;
            }
            CacheKey that = (CacheKey) obj;
            return eq( this.artifactId, that.artifactId ) && eq( this.groupId, that.groupId )
                && eq( this.version, that.version ) && eq( this.localRepo, that.localRepo )
                && eq( this.workspace, that.workspace )
                && RepositoryUtils.repositoriesEquals( this.repositories, that.repositories );
        }

        @Override
        public String toString()
        {
            return groupId + ':' + artifactId + ':' + version;
        }

        /** Null-safe hashCode. */
        private static int hash( Object obj )
        {
            return obj != null ? obj.hashCode() : 0;
        }

        /** Null-safe equals. */
        private static <T> boolean eq( T s1, T s2 )
        {
            return s1 != null ? s1.equals( s2 ) : s2 == null;
        }
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.layout.dlm;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.io.Serializable;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.regex.Pattern;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.constructs.blocking.CacheEntryFactory;
import net.sf.ehcache.constructs.blocking.SelfPopulatingCache;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apereo.portal.AuthorizationException;
import org.apereo.portal.IUserIdentityStore;
import org.apereo.portal.IUserProfile;
import org.apereo.portal.UserProfile;
import org.apereo.portal.i18n.LocaleManager;
import org.apereo.portal.layout.IUserLayoutStore;
import org.apereo.portal.properties.PropertiesManager;
import org.apereo.portal.security.IPerson;
import org.apereo.portal.security.provider.PersonImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/** @since 2.5 */
@Service
public class FragmentActivator {
    // Attribute name flagged on a fragment owner's person object when its layout was just created.
    private static final String NEWLY_CREATED_ATTR = "newlyCreated";
    private static final Log LOG = LogFactory.getLog(FragmentActivator.class);
    // Locales seen per fragment owner; the Guava loader lazily creates an empty,
    // thread-safe list on first access for each owner id.
    private final LoadingCache<String, List<Locale>> fragmentOwnerLocales =
            CacheBuilder.newBuilder()
                    .<String, List<Locale>>build(
                            new CacheLoader<String, List<Locale>>() {
                                @Override
                                public List<Locale> load(String key) throws Exception {
                                    return new CopyOnWriteArrayList<Locale>();
                                }
                            });
    // Cache of activated user views (wrapped in a SelfPopulatingCache by setUserViews).
    private Ehcache userViews;
    // Memoizes activation failures so repeated lookups rethrow instead of retrying.
    private Ehcache userViewErrors;
    private IUserIdentityStore identityStore;
    private IUserLayoutStore userLayoutStore;
    private ConfigurationLoader configurationLoader;
    // Property switch controlling whether non-"regular" (expanded) content is permitted.
    private static final String PROPERTY_ALLOW_EXPANDED_CONTENT =
            "org.apereo.portal.layout.dlm.allowExpandedContent";
    // Matches exactly the word "regular", case-insensitively.
    private static final Pattern STANDARD_PATTERN =
            Pattern.compile("\\A[Rr][Ee][Gg][Uu][Ll][Aa][Rr]\\z");
    // Matches anything — used when expanded content is allowed.
    private static final Pattern EXPANDED_PATTERN = Pattern.compile(".*");
@Autowired
public void setUserViewErrors(
@Qualifier("org.apereo.portal.layout.dlm.FragmentActivator.userViewErrors")
Ehcache userViewErrors) {
this.userViewErrors = userViewErrors;
}
@Autowired
public void setUserViews(
@Qualifier("org.apereo.portal.layout.dlm.FragmentActivator.userViews")
Ehcache userViews) {
this.userViews =
new SelfPopulatingCache(
userViews,
new CacheEntryFactory() {
@Override
public Object createEntry(Object key) throws Exception {
final UserViewKey userViewKey = (UserViewKey) key;
//Check if there was an exception the last time a load attempt was made and re-throw
final net.sf.ehcache.Element exceptionElement =
userViewErrors.get(userViewKey);
if (exceptionElement != null) {
throw (Exception) exceptionElement.getObjectValue();
}
try {
return activateFragment(userViewKey);
} catch (Exception e) {
userViewErrors.put(new net.sf.ehcache.Element(userViewKey, e));
throw e;
}
}
});
}
@Autowired
public void setConfigurationLoader(ConfigurationLoader configurationLoader) {
this.configurationLoader = configurationLoader;
}
@Autowired
public void setIdentityStore(IUserIdentityStore identityStore) {
this.identityStore = identityStore;
}
@Autowired
public void setUserLayoutStore(IUserLayoutStore userLayoutStore) {
this.userLayoutStore = userLayoutStore;
}
private static class UserViewKey implements Serializable {
private static final long serialVersionUID = 1L;
private final String ownerId;
private final Locale locale;
private final int hashCode;
public UserViewKey(String ownerId, Locale locale) {
this.ownerId = ownerId;
this.locale = locale;
this.hashCode = internalHashCode();
}
public String getOwnerId() {
return ownerId;
}
public Locale getLocale() {
return locale;
}
@Override
public int hashCode() {
return this.hashCode;
}
public int internalHashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((ownerId == null) ? 0 : ownerId.hashCode());
result = prime * result + ((locale == null) ? 0 : locale.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
UserViewKey other = (UserViewKey) obj;
if (ownerId == null) {
if (other.ownerId != null) return false;
} else if (!ownerId.equals(other.ownerId)) return false;
if (locale == null) {
if (other.locale != null) return false;
} else if (!locale.equals(other.locale)) return false;
return true;
}
@Override
public String toString() {
return "UserViewKey [ownerId=" + ownerId + ", locale=" + locale + "]";
}
}
private UserView activateFragment(final UserViewKey userViewKey) {
final String ownerId = userViewKey.getOwnerId();
final FragmentDefinition fd = configurationLoader.getFragmentByOwnerId(ownerId);
final Locale locale = userViewKey.getLocale();
fragmentOwnerLocales.getUnchecked(ownerId).add(locale);
if (fd.isNoAudienceIncluded()) {
if (LOG.isDebugEnabled()) {
LOG.debug(
"Skipping activation of FragmentDefinition "
+ fd.getName()
+ ", no evaluators found. "
+ fd);
}
return null;
}
if (LOG.isDebugEnabled()) {
LOG.debug("Activating FragmentDefinition " + fd.getName() + " with locale " + locale);
}
IPerson owner = bindToOwner(fd);
UserView view = new UserView(owner.getID());
loadLayout(view, fd, owner, locale);
// if owner just created we need to push the layout into
// the db so that our fragment template user is used and
// not the default template user as determined by
// the user identity store.
if (owner.getAttribute(NEWLY_CREATED_ATTR) != null) {
owner.setAttribute(Constants.PLF, view.getLayout());
try {
saveLayout(view, owner);
} catch (Exception e) {
throw new RuntimeException(
"Failed to save layout for newly created fragment owner "
+ owner.getUserName(),
e);
}
}
loadPreferences(view, fd);
fragmentizeLayout(view, fd);
if (LOG.isInfoEnabled()) {
LOG.info("Activated FragmentDefinition " + fd.getName() + " with locale " + locale);
}
return view;
}
public UserView getUserView(final FragmentDefinition fd, final Locale locale) {
final UserViewKey userViewKey = new UserViewKey(fd.getOwnerId(), locale);
final net.sf.ehcache.Element userViewElement = this.userViews.get(userViewKey);
return (UserView) userViewElement.getObjectValue();
}
/**
* Saves the loaded layout in the database for the user and profile.
*
* @param view
* @param owner
* @throws Exception
*/
private void saveLayout(UserView view, IPerson owner) throws Exception {
IUserProfile profile = new UserProfile();
profile.setProfileId(view.getProfileId());
userLayoutStore.setUserLayout(owner, profile, view.getLayout(), true, false);
}
private IPerson bindToOwner(FragmentDefinition fragment) {
IPerson owner = new PersonImpl();
owner.setAttribute("username", fragment.getOwnerId());
int userID = -1;
try {
userID = identityStore.getPortalUID(owner, false);
} catch (AuthorizationException ae) {
// current implementation of RDMBUserIdentityStore throws an
// auth exception if the user doesn't exist even if
// create data is false as we have it here. So this exception
// can be discarded since we check for the userID being -1
// meaning that the user wasn't found to trigger creating
// that user.
}
if (userID == -1) {
userID = createOwner(owner, fragment);
owner.setAttribute(NEWLY_CREATED_ATTR, "" + (userID != -1));
}
owner.setID(userID);
return owner;
}
private int createOwner(IPerson owner, FragmentDefinition fragment) {
String defaultUser = null;
int userID = -1;
if (fragment.defaultLayoutOwnerID != null) {
defaultUser = fragment.defaultLayoutOwnerID;
} else {
final String defaultLayoutOwner =
PropertiesManager.getProperty(
RDBMDistributedLayoutStore.DEFAULT_LAYOUT_OWNER_PROPERTY);
if (defaultLayoutOwner != null) {
defaultUser = defaultLayoutOwner;
} else {
try {
defaultUser =
PropertiesManager.getProperty(
RDBMDistributedLayoutStore.TEMPLATE_USER_NAME);
} catch (RuntimeException re) {
throw new RuntimeException(
"\n\n WARNING: defaultLayoutOwner is not specified"
+ " in portal.properties and no default user is "
+ "configured for the system. Owner '"
+ fragment.getOwnerId()
+ "' for fragment '"
+ fragment.getName()
+ "' can not be "
+ "created. The fragment will not be available for "
+ "inclusion into user layouts.\n",
re);
}
}
}
if (LOG.isDebugEnabled()) {
LOG.debug(
"\n\nOwner '"
+ fragment.getOwnerId()
+ "' of fragment '"
+ fragment.getName()
+ "' not found. Creating as copy of '"
+ defaultUser
+ "'\n");
}
if (defaultUser != null) {
owner.setAttribute("uPortalTemplateUserName", defaultUser);
}
try {
userID = identityStore.getPortalUID(owner, true);
} catch (AuthorizationException ae) {
throw new RuntimeException(
"\n\nWARNING: Anomaly occurred while creating owner '"
+ fragment.getOwnerId()
+ "' of fragment '"
+ fragment.getName()
+ "'. The fragment will not be "
+ "available for inclusion into user layouts.",
ae);
}
return userID;
}
private void loadLayout(
UserView view, FragmentDefinition fragment, IPerson owner, Locale locale) {
// if fragment not bound to user can't return any layouts.
if (view.getUserId() == -1) return;
// this area is hacked right now. Time won't permit how to handle
// matching up multiple profiles for a fragment with an appropriate
// one for incorporating into a user's layout based on their profile
// when they log in with a certain user agent. The challenge is
// being able to match up profiles for a user with those of a
// fragment. Until this is resolved only one profile will be supported
// and will have a hard coded id of 1 which is the default for profiles.
// If anyone changes this user all heck could break loose for dlm. :-(
Document layout = null;
try {
// fix hard coded 1 later for multiple profiles
IUserProfile profile = userLayoutStore.getUserProfileByFname(owner, "default");
profile.setLocaleManager(new LocaleManager(owner, new Locale[] {locale}));
// see if we have structure & theme stylesheets for this user yet.
// If not then fall back on system's selected stylesheets.
if (profile.getStructureStylesheetId() == 0 || profile.getThemeStylesheetId() == 0)
profile = userLayoutStore.getSystemProfileByFname(profile.getProfileFname());
view.setProfileId(profile.getProfileId());
view.setLayoutId(profile.getLayoutId());
layout = userLayoutStore.getFragmentLayout(owner, profile);
Element root = layout.getDocumentElement();
root.setAttribute(
Constants.ATT_ID,
Constants.FRAGMENT_ID_USER_PREFIX
+ view.getUserId()
+ Constants.FRAGMENT_ID_LAYOUT_PREFIX
+ view.getLayoutId());
view.setLayout(layout);
} catch (Exception e) {
LOG.error(
"Anomaly occurred while loading layout for fragment '"
+ fragment.getName()
+ "'. The fragment will not be "
+ "available for inclusion into user layouts.",
e);
}
}
private void loadPreferences(UserView view, FragmentDefinition fragment) {
// if fragment not bound to user can't return any preferences.
if (view.getUserId() == -1) return;
IPerson p = new PersonImpl();
p.setID(view.getUserId());
p.setAttribute("username", fragment.getOwnerId());
}
/**
* Removes unwanted and hidden folders, then changes all node ids to their globally safe
* incorporated version.
*/
private void fragmentizeLayout(UserView view, FragmentDefinition fragment) {
// if fragment not bound to user or layout empty due to error, return
if (view.getUserId() == -1 || view.getLayout() == null) {
return;
}
// Choose what types of content to apply from the fragment
Pattern contentPattern = STANDARD_PATTERN; // default
boolean allowExpandedContent =
Boolean.parseBoolean(
PropertiesManager.getProperty(PROPERTY_ALLOW_EXPANDED_CONTENT));
if (allowExpandedContent) {
contentPattern = EXPANDED_PATTERN;
}
// remove all non-regular or hidden top level folders
// skip root folder that is only child of top level layout element
Element layout = view.getLayout().getDocumentElement();
Element root = (Element) layout.getFirstChild();
NodeList children = root.getChildNodes();
// process the children backwards since as we delete some the indices
// shift around
for (int i = children.getLength() - 1; i >= 0; i--) {
Node node = children.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE && node.getNodeName().equals("folder")) {
Element folder = (Element) node;
// strip out folder types 'header', 'footer' and regular,
// hidden folder "User Preferences" since users have their own
boolean isApplicable =
contentPattern.matcher(folder.getAttribute("type")).matches();
if (!isApplicable || folder.getAttribute("hidden").equals("true")) {
try {
root.removeChild(folder);
} catch (Exception e) {
throw new RuntimeException(
"Anomaly occurred while stripping out "
+ " portions of layout for fragment '"
+ fragment.getName()
+ "'. The fragment will not be available for "
+ "inclusion into user layouts.",
e);
}
}
}
}
// now re-lable all remaining nodes below root to have a safe system
// wide id.
setIdsAndAttribs(
layout,
layout.getAttribute(Constants.ATT_ID),
"" + fragment.getIndex(),
"" + fragment.getPrecedence());
}
/**
* Recursive method that passes through a layout tree and changes all ids from the regular
* format of sXX or nXX to the globally safe incorporated id of form uXlXsXX or uXlXnXX
* indicating the user id and layout id from which this node came.
*/
private void setIdsAndAttribs(
Element parent, String labelBase, String index, String precedence) {
NodeList children = parent.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
if (children.item(i).getNodeType() == Node.ELEMENT_NODE) {
Element child = (Element) children.item(i);
String id = child.getAttribute(Constants.ATT_ID);
if (!id.equals("")) {
String newId = labelBase + id;
child.setAttribute(Constants.ATT_ID, newId);
child.setIdAttribute(Constants.ATT_ID, true);
child.setAttributeNS(Constants.NS_URI, Constants.ATT_FRAGMENT, index);
child.setAttributeNS(Constants.NS_URI, Constants.ATT_PRECEDENCE, precedence);
setIdsAndAttribs(child, labelBase, index, precedence);
}
}
}
}
public void clearChacheForOwner(final String ownerId) {
final List<Locale> locales = fragmentOwnerLocales.getIfPresent(ownerId);
if (locales == null) {
//Nothing to purge
return;
}
for (final Locale locale : locales) {
final UserViewKey userViewKey = new UserViewKey(ownerId, locale);
userViews.remove(userViewKey);
}
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.tools;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.distributed.internal.DistributionConfigImpl;
import com.gemstone.gemfire.internal.DistributionLocator;
import com.gemstone.gemfire.internal.cache.CacheServerLauncher;
import com.gemstone.gemfire.internal.lang.StringUtils;
import com.pivotal.gemfirexd.FabricLocator;
import com.pivotal.gemfirexd.FabricService;
import com.pivotal.gemfirexd.internal.engine.fabricservice.FabricServiceUtils;
import com.pivotal.gemfirexd.internal.iapi.tools.i18n.LocalizedResource;
import com.pivotal.gemfirexd.internal.shared.common.sanity.SanityManager;
import com.pivotal.gemfirexd.tools.internal.GfxdServerLauncher;
/**
 * An extension to {@link GfxdServerLauncher} for GemFireXD that starts a peer
 * client with an embedded locator and a GemFireXD Network Server by default. This
 * allows for running SQL DMLs by JDBC clients which would not work in normal
 * GFE locators.
 *
 * @author swale
 */
public class GfxdDistributionLocator extends GfxdServerLauncher {
  // Command-line argument names understood by this launcher.
  private static final String LOC_ADDRESS_ARG = "peer-discovery-address";
  private static final String LOC_PORT_ARG = "peer-discovery-port";
  private static final String LWC_PORT_ARG = "client-port";
  private static final String JMX_MANAGER_ARG = "jmx-manager";
  /** the bind address for the locator */
  private String bindAddress;
  /** the port for the locator to bind */
  private int port;
  /** Should the launch command be printed? */
  private static final boolean PRINT_LAUNCH_COMMAND = Boolean
      .getBoolean(GfxdDistributionLocator.class.getSimpleName()
          + ".PRINT_LAUNCH_COMMAND");
  /**
   * Creates a locator launcher with the default bind address/port and data
   * hosting disabled.
   *
   * @param baseName display name used in status and log messages
   */
  public GfxdDistributionLocator(String baseName) {
    super(baseName);
    this.bindAddress = FabricLocator.LOCATOR_DEFAULT_BIND_ADDRESS;
    this.port = FabricLocator.LOCATOR_DEFAULT_PORT;
    // don't wait for DD initialization on locators by default
    this.waitForData = false;
  }
  @Override
  protected void initKnownOptions() {
    super.initKnownOptions();
    // locator-specific options accepted on the command line
    knownOptions.add(LOC_ADDRESS_ARG);
    knownOptions.add(LOC_PORT_ARG);
    knownOptions.add(JMX_MANAGER_ARG);
  }
  /** Locators never host table data. */
  @Override
  public boolean hostData() {
    return false;
  }
  /**
   * Prints usage information of this program.
   */
  @Override
  protected void usage() throws IOException {
    final String script = LocalizedResource.getMessage("LOC_SCRIPT");
    final String name = LocalizedResource.getMessage("LOC_NAME");
    printUsage(LocalizedResource.getMessage("SERVER_HELP", script, name,
        LocalizedResource.getMessage("LOC_ADDRESS_ARG"),
        LocalizedResource.getMessage("LOC_EXTRA_HELP")),
        SanityManager.DEFAULT_MAX_OUT_LINES);
  }
  /**
   * Reflectively obtains the singleton FabricLocator instance; reflection is
   * used to avoid a compile-time dependency on FabricServiceManager.
   */
  @Override
  protected FabricService getFabricServiceInstance() throws Exception {
    return (FabricService)Class
        .forName("com.pivotal.gemfirexd.FabricServiceManager")
        .getMethod("getFabricLocatorInstance").invoke(null);
  }
  /**
   * Starts the embedded locator on the configured bind address and port.
   *
   * @see GfxdServerLauncher#startServerVM(Properties)
   */
  @Override
  protected void startServerVM(Properties props) throws Exception {
    ((FabricLocator)getFabricServiceInstance()).start(this.bindAddress,
        this.port, props);
    this.bootProps = props;
  }
  /** Default max heap for a locator VM (smaller than a data node). */
  @Override
  protected long getDefaultHeapSizeMB(boolean hostData) {
    return 1024L;
  }
  @Override
  protected long getDefaultSmallHeapSizeMB(boolean hostData) {
    return 512L;
  }
  /**
   * Handles the locator-specific "start" options; the peer-discovery port is
   * validated to be in the 1-65535 range, everything else falls through to the
   * base launcher.
   */
  @Override
  protected void processStartOption(String key, String value,
      Map<String, Object> m, List<String> vmArgs, Map<String, String> envArgs,
      Properties props) throws Exception {
    if (LOC_ADDRESS_ARG.equals(key)) {
      m.put(LOC_ADDRESS_ARG, value);
    }
    else if(JMX_MANAGER_ARG.equals(key)){
      m.put(JMX_MANAGER_ARG, value);
    }
    else if (LOC_PORT_ARG.equals(key)) {
      try {
        final int locPort = Integer.parseInt(value);
        if (locPort < 1 || locPort > 65535) {
          String msg = LocalizedResource.getMessage("SERVER_INVALID_PORT",
              value);
          throw new IllegalArgumentException(msg);
        }
        m.put(LOC_PORT_ARG, value);
      } catch (NumberFormatException nfe) {
        String msg = LocalizedResource.getMessage("SERVER_INVALID_PORT", value);
        throw new IllegalArgumentException(msg, nfe);
      }
    }
    else {
      super.processStartOption(key, value, m, vmArgs, envArgs, props);
    }
  }
  /**
   * Resolves the discovery address/port (falling back to defaults), sets the
   * "start-locator" property, filters GemFire properties and prints the
   * startup banner before building the distribution config.
   */
  @Override
  protected DistributionConfig printDiscoverySettings(
      final Map<String, Object> options, Properties props) throws SQLException {
    final Object locAddressObj = options.get(LOC_ADDRESS_ARG);
    final Object locPortObj = options.get(LOC_PORT_ARG);
    final String locators = props.getProperty(DistributionConfig.LOCATORS_NAME);
    String locAddress, locPort;
    if (locAddressObj == null
        || (locAddress = (String)locAddressObj).length() == 0) {
      locAddress = FabricLocator.LOCATOR_DEFAULT_BIND_ADDRESS;
    }
    if (locPortObj == null || (locPort = (String)locPortObj).length() == 0) {
      locPort = String.valueOf(DistributionLocator.DEFAULT_LOCATOR_PORT);
    }
    // perform GemFireXD specific customizations
    props.setProperty(DistributionConfig.START_LOCATOR_NAME, locAddress + '['
        + locPort + ']');
    props = FabricServiceUtils.preprocessProperties(props, null, null, true);
    // TODO: KN: Soubhik, why hardcoded log-file name below??
    props = FabricServiceUtils.filterGemFireProperties(props,
        "gemfirexdlocatortemp.log");
    if (locators != null && locators.length() > 0) {
      System.out.println(LocalizedResource.getMessage(
          "LOC_START_MESSAGE_WITH_LOCATORS", this.baseName, locAddress + '['
              + locPort + ']', locators));
    }
    else {
      System.out.println(LocalizedResource.getMessage("LOC_START_MESSAGE",
          this.baseName, locAddress + '[' + locPort + ']'));
    }
    return new DistributionConfigImpl(props);
  }
  @Override
  protected String getNetworkPortArgName() {
    return LWC_PORT_ARG;
  }
  /**
   * Main method that parses the command line and will start, stop,
   * or get the status of a GemFireXD locator. This main method is also the main
   * method of the launched GemFireXD locator VM.
   */
  public static void main(String[] args) {
    final GfxdDistributionLocator launcher = new GfxdDistributionLocator(
        "SnappyData Locator");
    launcher.run(args);
  }
  /** Chooses the script base name depending on SnappyData vs. plain GemFireXD install. */
  @Override
  protected String getBaseName(final String name) {
    if (!StringUtils.isBlank(System.getenv("SNAPPY_HOME")))
      return "snappylocator";
    else
      return "gfxdlocator";
  }
  /**
   * Appends locator-specific arguments to the launched-VM command line; the
   * JMX manager is enabled by default when not explicitly specified.
   *
   * NOTE(review): unlike the other options, the jmx-manager argument is
   * appended without a leading '-' — confirm the launched VM parses it that
   * way, otherwise this looks like a bug.
   *
   * @see CacheServerLauncher#addToServerCommand
   */
  @Override
  protected void addToServerCommand(List<String> cmds,
      Map<String, Object> options) {
    super.addToServerCommand(cmds, options);
    final StringBuilder locOption = new StringBuilder();
    String locAddress = (String)options.get(LOC_ADDRESS_ARG);
    if (locAddress != null) {
      locOption.append('-').append(LOC_ADDRESS_ARG).append('=')
          .append(locAddress);
      cmds.add(locOption.toString());
    }
    String locPort = (String)options.get(LOC_PORT_ARG);
    if (locPort != null) {
      locOption.setLength(0);
      locOption.append('-').append(LOC_PORT_ARG).append('=')
          .append(Integer.parseInt(locPort));
      cmds.add(locOption.toString());
    }
    String manager = (String) options.get(JMX_MANAGER_ARG);
    if (null == manager) {
      locOption.setLength(0);
      locOption.append(JMX_MANAGER_ARG).append('=').append("true");
      cmds.add(locOption.toString());
    }
  }
  /**
   * Parses the server options, capturing the locator bind address/port and
   * enabling auto-reconnect unless explicitly disabled by the user.
   *
   * @see CacheServerLauncher#getServerOptions(String[])
   */
  @Override
  protected Map<String, Object> getServerOptions(String[] args) throws Exception {
    final Map<String, Object> options = super.getServerOptions(args);
    this.bindAddress = (String)options.get(LOC_ADDRESS_ARG);
    if (this.bindAddress == null) {
      this.bindAddress = FabricLocator.LOCATOR_DEFAULT_BIND_ADDRESS;
    }
    Properties prop = (Properties) options.get(PROPERTIES);
    // default auto-reconnect to enabled (disable-auto-reconnect=false) when
    // neither the plain nor the gemfire-prefixed property was supplied
    if (prop.getProperty(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME) == null &&
        prop.getProperty(DistributionConfig.GEMFIRE_PREFIX
            + DistributionConfig.DISABLE_AUTO_RECONNECT_NAME) == null) {
      prop.put(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME, "false");
    }
    final String locPort = (String)options.get(LOC_PORT_ARG);
    if (locPort != null) {
      this.port = Integer.parseInt(locPort);
    }
    else {
      this.port = DistributionLocator.DEFAULT_LOCATOR_PORT;
    }
    return options;
  }
  @Override
  protected boolean printLaunchCommand() {
    return PRINT_LAUNCH_COMMAND;
  }
  /** Lists the locator-specific add-on arguments for help output. */
  @Override
  protected void listAddOnArgs(boolean startsWithGemfire,
      boolean startsWithGemfirexd, boolean isPrefixHyphen) {
    super.listAddOnArgs(startsWithGemfire, startsWithGemfirexd, isPrefixHyphen);
    // only applicable to start and if filter is not gemfire. or gemfirexd.
    if (!startsWithGemfire && !startsWithGemfirexd && isPrefixHyphen) {
      System.out.println("-" + LOC_ADDRESS_ARG);
      System.out.println("-" + LOC_PORT_ARG);
      System.out.println("-" + LWC_PORT_ARG);
    }
  }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.compiler.commons.utils;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.ConstructorDeclaration;
import com.github.javaparser.ast.body.InitializerDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.AssignExpr;
import com.github.javaparser.ast.expr.BooleanLiteralExpr;
import com.github.javaparser.ast.expr.DoubleLiteralExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.IntegerLiteralExpr;
import com.github.javaparser.ast.expr.LambdaExpr;
import com.github.javaparser.ast.expr.LongLiteralExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.MethodReferenceExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.NullLiteralExpr;
import com.github.javaparser.ast.expr.ObjectCreationExpr;
import com.github.javaparser.ast.expr.SimpleName;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.expr.ThisExpr;
import com.github.javaparser.ast.expr.TypeExpr;
import com.github.javaparser.ast.expr.VariableDeclarationExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.ExplicitConstructorInvocationStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.ReturnStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.github.javaparser.ast.type.Type;
import org.dmg.pmml.DataType;
import org.dmg.pmml.OpType;
import org.kie.pmml.api.enums.DATA_TYPE;
import org.kie.pmml.api.enums.OP_TYPE;
import org.kie.pmml.api.exceptions.KiePMMLException;
import org.kie.pmml.api.exceptions.KiePMMLInternalException;
import org.kie.pmml.commons.model.tuples.KiePMMLNameValue;
import static com.github.javaparser.StaticJavaParser.parseClassOrInterfaceType;
import static org.kie.pmml.commons.Constants.MISSING_BODY_IN_METHOD;
import static org.kie.pmml.commons.Constants.MISSING_BODY_TEMPLATE;
import static org.kie.pmml.commons.Constants.MISSING_CHAINED_METHOD_DECLARATION_TEMPLATE;
import static org.kie.pmml.commons.Constants.MISSING_CONSTRUCTOR_IN_BODY;
import static org.kie.pmml.commons.Constants.MISSING_PARAMETER_IN_CONSTRUCTOR_INVOCATION;
import static org.kie.pmml.commons.Constants.MISSING_STATIC_INITIALIZER;
import static org.kie.pmml.commons.Constants.MISSING_VARIABLE_INITIALIZER_TEMPLATE;
import static org.kie.pmml.commons.Constants.MISSING_VARIABLE_IN_BODY;
/**
* Class meant to provide <i>helper</i> methods to all <i>code-generating</i> classes
*/
public class CommonCodegenUtils {
    // Name used for the lambda parameter in generated filter expressions.
    static final String LAMBDA_PARAMETER_NAME = "lmbdParam";
    // Variable name of the generated Optional<KiePMMLNameValue> declaration.
    // NOTE(review): public mutable static — should likely be final; confirm no caller reassigns it.
    public static String OPTIONAL_FILTERED_KIEPMMLNAMEVALUE_NAME = "kiePMMLNameValue";
    private CommonCodegenUtils() {
        // Avoid instantiation
    }
/**
* Populate the <code>ClassOrInterfaceDeclaration</code> with the provided <code>MethodDeclaration</code>s
* @param toPopulate
* @param methodDeclarations
*/
public static void populateMethodDeclarations(final ClassOrInterfaceDeclaration toPopulate,
final Collection<MethodDeclaration> methodDeclarations) {
methodDeclarations.forEach(toPopulate::addMember);
}
/**
* Returns
* <pre>
* Optional<KiePMMLNameValue> kiePMMLNameValue = (<i>kiePMMLNameValueListParam</i>)
* .stream()
* .filter((KiePMMLNameValue kpmmlnv) -> Objects.equals("(<i>fieldNameToRef</i>)", kpmmlnv.getName()))
* .findFirst();
* </pre>
* <p>
* expression, where <b>kiePMMLNameValueListParam</b> is the name of the
* <code>List<KiePMMLNameValue></code> parameter, and
* <b>fieldNameToRef</b> is the name of the field to find, in the containing method
* @param kiePMMLNameValueListParam
* @param fieldNameToRef
* @param stringLiteralComparison if <code>true</code>, equals comparison is made on the String, e.g Objects
* .equals("(<i>fieldNameToRef</i>)", kpmmlnv.getName())),
* otherwise, is done on object reference, e.g Objects.equals((<i>fieldNameToRef</i>), kpmmlnv.getName())). In
* this latter case, a <i>fieldNameToRef</i> variable is
* expected to exists
* @return
*/
public static ExpressionStmt getFilteredKiePMMLNameValueExpression(final String kiePMMLNameValueListParam,
final String fieldNameToRef,
boolean stringLiteralComparison) {
// kpmmlnv.getName()
MethodCallExpr argumentBodyExpressionArgument2 = new MethodCallExpr("getName");
argumentBodyExpressionArgument2.setScope(new NameExpr(LAMBDA_PARAMETER_NAME));
// Objects.equals(fieldNameToRef, kpmmlnv.getName())
MethodCallExpr argumentBodyExpression = new MethodCallExpr("equals");
Expression equalsComparisonExpression;
if (stringLiteralComparison) {
equalsComparisonExpression = new StringLiteralExpr(fieldNameToRef);
} else {
equalsComparisonExpression = new NameExpr(fieldNameToRef);
}
argumentBodyExpression.setArguments(NodeList.nodeList(equalsComparisonExpression,
argumentBodyExpressionArgument2));
argumentBodyExpression.setScope(new NameExpr(Objects.class.getName()));
ExpressionStmt argumentBody = new ExpressionStmt(argumentBodyExpression);
// (KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef, kpmmlnv.getName())
Parameter argumentParameter = new Parameter(parseClassOrInterfaceType(KiePMMLNameValue.class.getName()),
LAMBDA_PARAMETER_NAME);
LambdaExpr argument = new LambdaExpr();
argument.setEnclosingParameters(true).setParameters(NodeList.nodeList(argumentParameter)); //
// (KiePMMLNameValue kpmmlnv) ->
argument.setBody(argumentBody); // Objects.equals(fieldNameToRef, kpmmlnv.getName())
// kiePMMLNameValueListParam.stream()
MethodCallExpr initializerScopeScope = new MethodCallExpr("stream");
initializerScopeScope.setScope(new NameExpr(kiePMMLNameValueListParam));
// kiePMMLNameValueListParam.stream().filter((KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef,
// kpmmlnv.getName()))
MethodCallExpr initializerScope = new MethodCallExpr("filter");
initializerScope.setScope(initializerScopeScope);
initializerScope.setArguments(NodeList.nodeList(argument));
// kiePMMLNameValueListParam.stream().filter((KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef,
// kpmmlnv.getName())).findFirst()
MethodCallExpr initializer = new MethodCallExpr("findFirst");
initializer.setScope(initializerScope);
// Optional<KiePMMLNameValue> kiePMMLNameValue
VariableDeclarator variableDeclarator =
new VariableDeclarator(getTypedClassOrInterfaceTypeByTypeNames(Optional.class.getName(),
Collections.singletonList(KiePMMLNameValue.class.getName())),
OPTIONAL_FILTERED_KIEPMMLNAMEVALUE_NAME);
// Optional<KiePMMLNameValue> kiePMMLNameValue = kiePMMLNameValueListParam.stream().filter((KiePMMLNameValue
// kpmmlnv) -> Objects.equals(fieldNameToRef, kpmmlnv.getName())).findFirst()
variableDeclarator.setInitializer(initializer);
//
VariableDeclarationExpr variableDeclarationExpr =
new VariableDeclarationExpr(NodeList.nodeList(variableDeclarator));
ExpressionStmt toReturn = new ExpressionStmt();
toReturn.setExpression(variableDeclarationExpr);
return toReturn;
}
/**
* For every entry in the given map, add
* <pre>
* (<i>mapName</i>).put(<i>entry_key<i/>, this::<i>entry_value_ref</i>>);
* </pre>
* e.g.
* <pre>
* MAP_NAME.put("KEY_0", this::METHOD_015);
* MAP_NAME.put("KEY_3", this::METHOD_33);
* MAP_NAME.put("KEY_2", this::METHOD_219);
* MAP_NAME.put("KEY_4", this::METHOD_46);
* </pre>
* inside the given <code>BlockStmt</code>
* @param toAdd
* @param body
* @param mapName
*/
public static void addMapPopulation(final Map<String, MethodDeclaration> toAdd,
final BlockStmt body,
final String mapName) {
Map<String, Expression> toAddExpr = toAdd.entrySet().stream().collect(Collectors.toMap(
Map.Entry::getKey,
entry -> {
MethodReferenceExpr methodReferenceExpr = new MethodReferenceExpr();
methodReferenceExpr.setScope(new ThisExpr());
methodReferenceExpr.setIdentifier(entry.getValue().getNameAsString());
return methodReferenceExpr;
}
));
addMapPopulationExpressions(toAddExpr, body, mapName);
}
/**
* For every entry in the given map, add a "put" statement to the provided {@link BlockStmt} body.
* @param toAdd the map containing the input values to process
* @param body the destination body
* @param mapName the name of the map to populate in the codegenerated statements
*/
public static void addMapPopulationExpressions(Map<String, Expression> toAdd, BlockStmt body, String mapName) {
toAdd.forEach((key, value) -> {
NodeList<Expression> expressions = NodeList.nodeList(new StringLiteralExpr(key), value);
body.addStatement(new MethodCallExpr(new NameExpr(mapName), "put", expressions));
});
}
/**
* For every entry in the given list, add
* <pre>
* (<i>listName</i>).add(new <i>ObjectCreationExpr</i>>);
* </pre>
* e.g.
* <pre>
* LIST_NAME.add(new OBJA());
* LIST_NAME.add(new OBJB());
* LIST_NAME.add(new OBJC());
* LIST_NAME.add(new OBJD());
* </pre>
* inside the given <code>BlockStmt</code>
* @param toAdd
* @param body
* @param listName
*/
public static void addListPopulationByObjectCreationExpr(final List<ObjectCreationExpr> toAdd,
final BlockStmt body,
final String listName) {
toAdd.forEach(objectCreationExpr -> {
NodeList<Expression> arguments = NodeList.nodeList(objectCreationExpr);
MethodCallExpr methodCallExpr = new MethodCallExpr();
methodCallExpr.setScope(new NameExpr(listName));
methodCallExpr.setName("add");
methodCallExpr.setArguments(arguments);
ExpressionStmt expressionStmt = new ExpressionStmt();
expressionStmt.setExpression(methodCallExpr);
body.addStatement(expressionStmt);
});
}
/**
* For every entry in the given list, add
* <pre>
* (<i>listName</i>).add(<i>MethodCallExpr</i>>);
* </pre>
* e.g.
* <pre>
* LIST_NAME.add(ObjectA.builder().build());
* LIST_NAME.add(ObjectB.builder().build());
* LIST_NAME.add(ObjectC.builder().build());
* LIST_NAME.add(ObjectD.builder().build());
* </pre>
* inside the given <code>BlockStmt</code>
* @param toAdd
* @param body
* @param listName
*/
public static void addListPopulationByMethodCallExpr(final List<MethodCallExpr> toAdd,
final BlockStmt body,
final String listName) {
toAdd.forEach(methodCallExpr1 -> {
NodeList<Expression> arguments = NodeList.nodeList(methodCallExpr1);
MethodCallExpr methodCallExpr = new MethodCallExpr();
methodCallExpr.setScope(new NameExpr(listName));
methodCallExpr.setName("add");
methodCallExpr.setArguments(arguments);
ExpressionStmt expressionStmt = new ExpressionStmt();
expressionStmt.setExpression(methodCallExpr);
body.addStatement(expressionStmt);
});
}
/**
* Create an empty <b>Arrays.asList()</b> <code>ExpressionStmt</code>
* @return
*/
public static ExpressionStmt createArraysAsListExpression() {
ExpressionStmt toReturn = new ExpressionStmt();
MethodCallExpr arraysCallExpression = new MethodCallExpr();
SimpleName arraysName = new SimpleName(Arrays.class.getName());
arraysCallExpression.setScope(new NameExpr(arraysName));
arraysCallExpression.setName(new SimpleName("asList"));
toReturn.setExpression(arraysCallExpression);
return toReturn;
}
/**
* Create a populated <b>Arrays.asList(?... a)</b> <code>ExpressionStmt</code>
* @param source
* @return
*/
public static ExpressionStmt createArraysAsListFromList(List<?> source) {
ExpressionStmt toReturn = createArraysAsListExpression();
MethodCallExpr arraysCallExpression = toReturn.getExpression().asMethodCallExpr();
NodeList<Expression> arguments = new NodeList<>();
source.forEach(value -> arguments.add(getExpressionForObject(value)));
arraysCallExpression.setArguments(arguments);
toReturn.setExpression(arraysCallExpression);
return toReturn;
}
/**
* Returns
* <pre>
* empty (<i>methodName</i>)((list of <i>parameterType</i> <i>parameter name</i>)) {
* }
* </pre>
* <p>
* <p>
* a <b>multi-parameters</b> <code>MethodDeclaration</code> whose names are the <b>key</b>s of the given
* <code>Map</code>
* and <b>methodArity</b>, and whose parameters types are the <b>value</b>s
*
* <b>The </b>
* @param methodName
* @param parameterNameTypeMap expecting an <b>ordered</b> map here, since parameters order matter for
* <i>caller</i> code
* @return
*/
public static MethodDeclaration getMethodDeclaration(final String methodName,
final Map<String, ClassOrInterfaceType> parameterNameTypeMap) {
MethodDeclaration toReturn = getMethodDeclaration(methodName);
NodeList<Parameter> typeParameters = new NodeList<>();
parameterNameTypeMap.forEach((parameterName, classOrInterfaceType) -> {
Parameter toAdd = new Parameter();
toAdd.setName(parameterName);
toAdd.setType(classOrInterfaceType);
typeParameters.add(toAdd);
});
toReturn.setParameters(typeParameters);
return toReturn;
}
/**
* Returns
* <pre>
* empty (<i>methodName</i>)() {
* }
* </pre>
* <p>
* A <b>no-parameter</b> <code>MethodDeclaration</code> whose name is derived from given <b>methodName</b>
* and <b>methodArity</b>
* @param methodName
* @return
*/
public static MethodDeclaration getMethodDeclaration(final String methodName) {
MethodDeclaration toReturn = new MethodDeclaration();
toReturn.setName(methodName);
return toReturn;
}
/**
* Returns
* <pre>
* return (<i>returnedVariableName</i>);
* </pre>
* <p>
* e.g
* <pre>
* return varOne;
* </pre>
* @param returnedVariableName
* @return
*/
public static ReturnStmt getReturnStmt(final String returnedVariableName) {
ReturnStmt toReturn = new ReturnStmt();
toReturn.setExpression(new NameExpr(returnedVariableName));
return toReturn;
}
/**
* Returns
* <pre>
* (<i>className</i>)<(<i>comma-separated list of types</i>)>
* </pre>
* <p>
* e.g
* <pre>
* CLASS_NAME<TypeA, TypeB>
* </pre>
* a <b>typed</b> <code>ClassOrInterfaceType</code>
* @param className
* @param typesName
* @return
*/
public static ClassOrInterfaceType getTypedClassOrInterfaceTypeByTypeNames(final String className,
final List<String> typesName) {
List<Type> types = typesName.stream()
.map(StaticJavaParser::parseClassOrInterfaceType).collect(Collectors.toList());
return getTypedClassOrInterfaceTypeByTypes(className, types);
}
/**
* Returns
* <pre>
* (<i>className</i>)<(<i>comma-separated list of types</i>)>
* </pre>
* <p>
* e.g
* <pre>
* CLASS_NAME<TypeA, TypeB>
* </pre>
* a <b>typed</b> <code>ClassOrInterfaceType</code>
* @param className
* @param types
* @return
*/
public static ClassOrInterfaceType getTypedClassOrInterfaceTypeByTypes(final String className,
final List<Type> types) {
ClassOrInterfaceType toReturn = parseClassOrInterfaceType(className);
toReturn.setTypeArguments(NodeList.nodeList(types));
return toReturn;
}
/**
* Set the value of the variable with the given <b>assignExpressionName</b> in the given <code>BlockStmt</code>
* It throws <code>KiePMMLException</code> if variable is not found
* @param body
* @param assignExpressionName
* @param value
* @throws <code>KiePMMLException</code> if <code>AssignExpr</code> with given <b>assignExpressionName</b> is not
* found
*/
public static void setAssignExpressionValue(final BlockStmt body, final String assignExpressionName,
final Expression value) {
AssignExpr assignExpr = getAssignExpression(body, assignExpressionName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, assignExpressionName,
body)));
assignExpr.setValue(value);
}
/**
* Return an <code>Optional<AssignExpr></code> with the given <b>assignExpressionName</b> from the given
* <code>BlockStmt</code>
* @param body
* @param assignExpressionName
* @return <code>Optional<AssignExpr></code> with the found <code>AssignExpr</code>, or <code>Optional
* .empty()</code> if no match
* has been found
*/
public static Optional<AssignExpr> getAssignExpression(final BlockStmt body, final String assignExpressionName) {
final List<AssignExpr> assignExprs = body.findAll(AssignExpr.class);
return assignExprs.stream()
.filter(assignExpr -> assignExpressionName.equals(assignExpr.getTarget().asNameExpr().getNameAsString()))
.findFirst();
}
/**
* Return an <code>Optional<ExplicitConstructorInvocationStmt></code> from the given <code>BlockStmt</code>
* @param body
* @return <code>Optional<ExplicitConstructorInvocationStmt></code> with the found
* <code>ExplicitConstructorInvocationStmt</code>, or <code>Optional.empty()</code> if none is found
*/
public static Optional<ExplicitConstructorInvocationStmt> getExplicitConstructorInvocationStmt(final BlockStmt body) {
return body.getStatements().stream()
.filter(ExplicitConstructorInvocationStmt.class::isInstance)
.map(ExplicitConstructorInvocationStmt.class::cast)
.findFirst();
}
/**
* Set the <b>value</b> of the given <b>parameterName</b> in the given <code>ConstructorDeclaration</code>
* @param constructorDeclaration
* @param parameterName
* @param value
* @throws KiePMMLException if the given parameter is not found
*/
public static void setConstructorDeclarationParameterArgument(final ConstructorDeclaration constructorDeclaration,
final String parameterName, final String value) {
final BlockStmt body = constructorDeclaration.getBody();
final ExplicitConstructorInvocationStmt superStatement =
CommonCodegenUtils.getExplicitConstructorInvocationStmt(body)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_CONSTRUCTOR_IN_BODY, body)));
final NameExpr parameterExpr = getExplicitConstructorInvocationParameter(superStatement, parameterName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_PARAMETER_IN_CONSTRUCTOR_INVOCATION,
parameterName, constructorDeclaration)));
if (value != null) {
parameterExpr.setName(value);
} else {
superStatement.getArguments().replace(parameterExpr, new NullLiteralExpr());
}
}
/**
* Set the <b>value</b> of the given <b>parameterName</b> in the given <code>ConstructorDeclaration</code>
* @param constructorDeclaration
* @param referenceName
* @param value
* @throws KiePMMLException if the given parameter is not found
*/
public static void setConstructorDeclarationReferenceArgument(final ConstructorDeclaration constructorDeclaration,
final String referenceName, final String value) {
final BlockStmt body = constructorDeclaration.getBody();
final ExplicitConstructorInvocationStmt superStatement =
CommonCodegenUtils.getExplicitConstructorInvocationStmt(body)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_CONSTRUCTOR_IN_BODY, body)));
final MethodReferenceExpr methodReferenceExpr = getExplicitConstructorInvocationMethodReference(superStatement,
referenceName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_PARAMETER_IN_CONSTRUCTOR_INVOCATION,
referenceName, constructorDeclaration)));
if (value != null) {
methodReferenceExpr.setScope(new TypeExpr(parseClassOrInterfaceType(value)));
} else {
superStatement.getArguments().replace(methodReferenceExpr, new NullLiteralExpr());
}
}
/**
* Set the <b>value</b> of the given <b>parameterName</b> in the given
* <code>ExplicitConstructorInvocationStmt</code>
* @param constructorInvocationStmt
* @param parameterName
* @param value
* @throws KiePMMLException if the given parameter is not found
*/
public static void setExplicitConstructorInvocationStmtArgument(final ExplicitConstructorInvocationStmt constructorInvocationStmt, final String parameterName, final String value) {
final NameExpr parameterExpr = getExplicitConstructorInvocationParameter(constructorInvocationStmt,
parameterName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_PARAMETER_IN_CONSTRUCTOR_INVOCATION,
parameterName, constructorInvocationStmt)));
parameterExpr.setName(value);
}
/**
* Return an <code>BlockStmt</code> from the given <code>ClassOrInterfaceDeclaration</code>
*
* @param classOrInterfaceDeclaration
*
* @throws KiePMMLException if none is found
*/
public static BlockStmt getInitializerBlockStmt(final ClassOrInterfaceDeclaration classOrInterfaceDeclaration) {
return getInitializerDeclaration(classOrInterfaceDeclaration).getBody();
}
/**
* Return an <code>InitializerDeclaration</code> from the given <code>ClassOrInterfaceDeclaration</code>
*
* @param classOrInterfaceDeclaration
*
* @throws KiePMMLException if none is found
*/
public static InitializerDeclaration getInitializerDeclaration(final ClassOrInterfaceDeclaration classOrInterfaceDeclaration) {
return classOrInterfaceDeclaration.getMembers()
.stream()
.filter(InitializerDeclaration.class::isInstance)
.map(InitializerDeclaration.class::cast)
.findFirst()
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_STATIC_INITIALIZER, classOrInterfaceDeclaration)));
}
/**
* Return an <code>Optional<NameExpr></code> from the given <code>ExplicitConstructorInvocationStmt</code>
* @param constructorInvocationStmt
* @param parameterName
* @return <code>Optional<NameExpr></code> with the found <code>NameExpr</code>, or <code>Optional.empty()
* </code> if none is found
*/
public static Optional<NameExpr> getExplicitConstructorInvocationParameter(final ExplicitConstructorInvocationStmt constructorInvocationStmt, final String parameterName) {
return constructorInvocationStmt.getArguments()
.stream()
.filter(expression -> expression instanceof NameExpr && ((NameExpr) expression).getName().asString().equals(parameterName))
.map(NameExpr.class::cast)
.findFirst();
}
/**
* Return an <code>Optional<MethodReferenceExpr></code> from the given
* <code>ExplicitConstructorInvocationStmt</code>
* @param constructorInvocationStmt
* @param typeName
* @return <code>Optional<MethodReferenceExpr></code> with the found <code>MethodReferenceExpr</code>, or
* <code>Optional.empty()</code> if none is found
*/
public static Optional<MethodReferenceExpr> getExplicitConstructorInvocationMethodReference(final ExplicitConstructorInvocationStmt constructorInvocationStmt, final String typeName) {
return constructorInvocationStmt.getArguments()
.stream()
.filter(expression -> expression instanceof MethodReferenceExpr && ((MethodReferenceExpr) expression).getScope().asTypeExpr().getType().asString().equals(typeName))
.map(MethodReferenceExpr.class::cast)
.findFirst();
}
/**
* Return an <code>BlockStmt</code> for the method <b>methodName</b> from the given <code>ClassOrInterfaceDeclaration</code>
*
* @param classOrInterfaceDeclaration
* @param methodName
*
* @throws KiePMMLException if none is found
*/
public static BlockStmt getMethodDeclarationBlockStmt(final ClassOrInterfaceDeclaration classOrInterfaceDeclaration, final String methodName) {
return getMethodDeclaration(classOrInterfaceDeclaration, methodName)
.map(MethodDeclaration::getBody)
.map(Optional::get)
.orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_IN_METHOD, methodName)));
}
/**
* Return an <code>Optional<MethodDeclaration></code> with the <b>first</b> method <b>methodName</b> from
* the given <code>ClassOrInterfaceDeclaration</code>
* @param classOrInterfaceDeclaration
* @param methodName
* @return <code>Optional<MethodDeclaration></code> with the first found <code>MethodDeclaration</code>, or
* <code>Optional.empty()</code> if no match
* has been found
*/
public static Optional<MethodDeclaration> getMethodDeclaration(final ClassOrInterfaceDeclaration classOrInterfaceDeclaration, final String methodName) {
final List<MethodDeclaration> methodDeclarations = classOrInterfaceDeclaration.getMethodsByName(methodName);
return methodDeclarations.isEmpty() ? Optional.empty() : Optional.of(methodDeclarations.get(0));
}
/**
* Add a <code>MethodDeclaration</code> to the class
* @param methodTemplate
* @param tableTemplate
* @param methodName
* @return
*/
public static MethodDeclaration addMethod(final MethodDeclaration methodTemplate,
final ClassOrInterfaceDeclaration tableTemplate,
final String methodName) {
final BlockStmt body =
methodTemplate.getBody().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_TEMPLATE, methodTemplate.getName())));
final MethodDeclaration toReturn = tableTemplate.addMethod(methodName).setBody(body);
toReturn.setModifiers(methodTemplate.getModifiers());
methodTemplate.getParameters().forEach(toReturn::addParameter);
toReturn.setType(methodTemplate.getType());
return toReturn;
}
/**
* Set the value of the variable with the given <b>variableDeclaratorName</b> in the given <code>BlockStmt</code>
* It throws <code>KiePMMLException</code> if variable is not found
* @param body
* @param variableDeclaratorName
* @param value
* @throws <code>KiePMMLException</code> if <code>VariableDeclarator</code> with given <b>variableDeclaratorName</b> is not
* found
*/
public static void setVariableDeclaratorValue(final BlockStmt body, final String variableDeclaratorName,
final Expression value) {
VariableDeclarator variableDeclarator = getVariableDeclarator(body, variableDeclaratorName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, variableDeclaratorName,
body)));
variableDeclarator.setInitializer(value);
}
/**
* Return an <code>Optional<VariableDeclarator></code> with the <b>first</b> variable <b>variableName</b>
* from the given <code>MethodDeclaration</code>
* @param methodDeclaration
* @param variableName
* @return <code>Optional<VariableDeclarator></code> with the first found <code>VariableDeclarator</code>,
* or <code>Optional.empty()</code> if no match
* has been found
*/
public static Optional<VariableDeclarator> getVariableDeclarator(final MethodDeclaration methodDeclaration, final String variableName) {
final BlockStmt body = methodDeclaration.getBody()
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
return getVariableDeclarator(body, variableName);
}
/**
* Return an <code>Optional<VariableDeclarator></code> with the <b>first</b> variable <b>variableName</b>
* from the given <code>BlockStmt</code>
* @param body
* @param variableName
* @return <code>Optional<VariableDeclarator></code> with the first found <code>VariableDeclarator</code>,
* or <code>Optional.empty()</code> if no match
* has been found
*/
public static Optional<VariableDeclarator> getVariableDeclarator(final BlockStmt body, final String variableName) {
return body.findAll(VariableDeclarator.class)
.stream()
.filter(variableDeclarator -> variableDeclarator.getName().asString().equals(variableName))
.findFirst();
}
public static Expression getExpressionForDataType(DataType dataTypeParam) {
final Expression toReturn;
if (dataTypeParam != null) {
final DATA_TYPE dataType = DATA_TYPE.byName(dataTypeParam.value());
toReturn = new NameExpr(DATA_TYPE.class.getName() + "." + dataType.name());
} else {
toReturn = new NullLiteralExpr();
}
return toReturn;
}
public static Expression getExpressionForOpType(OpType opTypeParam) {
final Expression toReturn;
if (opTypeParam != null) {
final OP_TYPE opType = OP_TYPE.byName(opTypeParam.value());
toReturn = new NameExpr(OP_TYPE.class.getName() + "." + opType.name());
} else {
toReturn = new NullLiteralExpr();
}
return toReturn;
}
public static Expression getExpressionForObject(Object source) {
if (source == null) {
return new NullLiteralExpr();
}
String className = source.getClass().getSimpleName();
switch (className) {
case "String":
return new StringLiteralExpr((String) source);
case "int":
case "Integer":
return new IntegerLiteralExpr((Integer) source);
case "double":
case "Double":
return new DoubleLiteralExpr((Double) source);
case "float":
case "Float":
return new DoubleLiteralExpr(((Float) source).doubleValue());
case "boolean":
case "Boolean":
return new BooleanLiteralExpr((Boolean) source);
default:
return new NameExpr(source.toString());
}
}
/**
* Return a <code>lit<NameExpr></code> with all the instances of the given <b>exprName</b>
* @param toRead
* @param exprName
* @return
*/
public static List<NameExpr> getNameExprsFromBlock(final BlockStmt toRead, final String exprName) {
return toRead.stream()
.filter(node -> node instanceof NameExpr &&
((NameExpr) node).getName().asString().equals(exprName))
.map(NameExpr.class::cast)
.collect(Collectors.toList());
}
/**
* Return a new {@link AssignExpr} from a target name and a generic {@link Expression}.
* @param target {@link String} containing the name to assign the expression to
* @param value the value to be assigned
* @return the new {@link AssignExpr}
*/
public static AssignExpr assignExprFrom(String target, Expression value) {
return new AssignExpr(new NameExpr(target), value, AssignExpr.Operator.ASSIGN);
}
/**
* Return a new {@link AssignExpr} from a target name and an enum literal.
* @param target {@link String} containing the name to assign the expression to
* @param value the enum value to be assigned
* @return the new {@link AssignExpr}
*/
public static AssignExpr assignExprFrom(String target, Enum<?> value) {
return assignExprFrom(target, literalExprFrom(value));
}
/**
* Return a new {@link AssignExpr} from a target name and {@link String} literal.
* @param target {@link String} containing the name to assign the expression to
* @param value the {@link String} value to be assigned
* @return the new {@link AssignExpr}
*/
public static AssignExpr assignExprFrom(String target, String value) {
return assignExprFrom(target, literalExprFrom(value));
}
/**
* Return a new {@link Expression} containing an enum literal.
* @param input the enum value to be assigned
* @return the new {@link Expression}
*/
public static Expression literalExprFrom(Enum<?> input) {
return input == null ? new NullLiteralExpr() : new NameExpr(input.getClass().getCanonicalName() + "." + input.name());
}
/**
* Return a new {@link Expression} containing an {@link String}.
* @param input the {@link String} value to be assigned
* @return the new {@link Expression}
*/
public static Expression literalExprFrom(String input) {
return input == null ? new NullLiteralExpr() : new StringLiteralExpr(input);
}
    /**
     * Return a new {@link Expression} containing an object with a specific value of a specific {@link DATA_TYPE}.
     * This can either be a new object (a {@code parse(...)} call for date and time types) or a literal.
     * @param type the {@link DATA_TYPE} of the specified value; must not be {@code null}
     * @param value the value represented as {@link String}; {@code null} yields a {@code null} literal
     * @return the new {@link Expression}
     * @throws IllegalArgumentException if {@code type} is {@code null} or has no literal representation
     */
    public static Expression literalExprFrom(DATA_TYPE type, String value) {
        if (type == null) {
            throw new IllegalArgumentException("Invalid \"null\" data type");
        }
        if (value == null) {
            return new NullLiteralExpr();
        }
        switch (type) {
            case STRING:
                return new StringLiteralExpr(value);
            case INTEGER:
                return new IntegerLiteralExpr(value);
            case DOUBLE:
            case FLOAT:
                return new DoubleLiteralExpr(value);
            case BOOLEAN:
                return new BooleanLiteralExpr(Boolean.parseBoolean(value));
            // Date/time types generate a fully-qualified java.time parse(...) call
            case DATE:
                return new MethodCallExpr(new NameExpr(LocalDate.class.getName()), "parse", NodeList.nodeList(new StringLiteralExpr(value)));
            case TIME:
                return new MethodCallExpr(new NameExpr(LocalTime.class.getName()), "parse", NodeList.nodeList(new StringLiteralExpr(value)));
            case DATE_TIME:
                return new MethodCallExpr(new NameExpr(LocalDateTime.class.getName()), "parse", NodeList.nodeList(new StringLiteralExpr(value)));
            // Epoch-offset types are all represented as long literals
            case DATE_DAYS_SINCE_0:
            case DATE_DAYS_SINCE_1960:
            case DATE_DAYS_SINCE_1970:
            case DATE_DAYS_SINCE_1980:
            case TIME_SECONDS:
            case DATE_TIME_SECONDS_SINCE_0:
            case DATE_TIME_SECONDS_SINCE_1960:
            case DATE_TIME_SECONDS_SINCE_1970:
            case DATE_TIME_SECONDS_SINCE_1980:
                return new LongLiteralExpr(value);
            default:
                throw new IllegalArgumentException("Can't create literal from " + type.getName() + " data type");
        }
    }
/**
* Return a new {@link MethodCallExpr} from scope, name and arguments.
* @param scope the scope of the method to call
* @param name the name of the method to call
* @param arguments vararg list of {@link Expression} arguments
* @return the new {@link MethodCallExpr}
*/
public static MethodCallExpr methodCallExprFrom(String scope, String name, Expression... arguments) {
return new MethodCallExpr(new NameExpr(scope), name, new NodeList<>(arguments));
}
/**
* Return a "chained" {@link MethodCallExpr} by name <b>parent</b> one.
* @param name the name of the method to call
* @param parent vararg list of {@link Expression} arguments
* @return the found {@link MethodCallExpr}
*/
public static MethodCallExpr getChainedMethodCallExprFrom(String name, MethodCallExpr parent) {
return parent.stream()
.filter(expr -> expr instanceof MethodCallExpr &&
((MethodCallExpr) expr).getName().toString().equals(name))
.map(MethodCallExpr.class::cast)
.findFirst()
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_CHAINED_METHOD_DECLARATION_TEMPLATE, name, parent)));
}
/**
* Replace <code>StringLiteralExpresion</code>s in the given <code>Statement</code>
* @param container
* @param toReplace
* @param replacement
*/
public static void replaceStringLiteralExpressionInStatement(final Statement container,
final String toReplace,
final String replacement) {
final StringLiteralExpr toReplaceExpr = new StringLiteralExpr(toReplace);
final StringLiteralExpr replacementExpr = new StringLiteralExpr(replacement);
container.walk(node -> {
if (node.equals(toReplaceExpr)) {
node.getParentNode()
.ifPresent(parentNode -> parentNode.replace(node, replacementExpr));
}
});
}
/**
* Replace <code>Node</code>s in the given <code>Statement</code>
* @param container
* @param replacementTuplas
*/
public static void replaceNodesInStatement(final Statement container,
final List<ReplacementTupla> replacementTuplas) {
replacementTuplas.forEach(replacementTupla -> replaceNodeInStatement(container, replacementTupla));
}
/**
* Replace <code>Node</code> in the given <code>Statement</code>
* @param container
* @param replacementTupla
*/
public static void replaceNodeInStatement(final Statement container,
final ReplacementTupla replacementTupla) {
container.walk(node -> {
if (node.equals(replacementTupla.toReplace)) {
node.getParentNode()
.ifPresent(parentNode -> parentNode.replace(replacementTupla.toReplace, replacementTupla.replacement));
}
});
}
/**
* Add a <code>MethodDeclaration</code>s to the given <code>ClassOrInterfaceDeclaration</code>
*
* @param classOrInterfaceDeclaration
* @param toAdd
*/
public static void addMethodDeclarationsToClass(final ClassOrInterfaceDeclaration classOrInterfaceDeclaration,
final List<MethodDeclaration> toAdd) {
toAdd.forEach(methodDeclaration -> addMethodDeclarationToClass(classOrInterfaceDeclaration, methodDeclaration));
}
/**
* Add a <code>MethodDeclaration</code> to the given <code>ClassOrInterfaceDeclaration</code>
*
* @param classOrInterfaceDeclaration
* @param toAdd
*/
public static void addMethodDeclarationToClass(final ClassOrInterfaceDeclaration classOrInterfaceDeclaration,
final MethodDeclaration toAdd) {
classOrInterfaceDeclaration.addMethod(toAdd.getName().asString())
.setModifiers(toAdd.getModifiers())
.setType(toAdd.getType())
.setParameters(toAdd.getParameters())
.setBody(toAdd.getBody().get());
}
/**
* Retrieve the <b>initializer</b> of the given <b>variableName</b> from the given <code>MethodDeclaration</code>
* @return
*/
public static Expression getVariableInitializer(final MethodDeclaration methodDeclaration, final String variableName) {
return getOptionalVariableInitializer(methodDeclaration, variableName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, variableName, methodDeclaration)));
}
/**
* Retrieve the <b>initializer</b> of the given <b>variableName</b> from the given <code>MethodDeclaration</code>
* @return
*/
public static Optional<Expression> getOptionalVariableInitializer(final MethodDeclaration methodDeclaration, final String variableName) {
final BlockStmt blockStmt = methodDeclaration.getBody()
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
return getVariableInitializer(blockStmt, variableName);
}
/**
* Retrieve the <b>initializer</b> of the given <b>variableName</b> from the given <code>MethodDeclaration</code>
* @return
*/
public static Optional<Expression> getVariableInitializer(final BlockStmt blockStmt, final String variableName) {
final VariableDeclarator variableDeclarator = getVariableDeclarator(blockStmt, variableName)
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, variableName, blockStmt)));
return variableDeclarator.getInitializer();
}
/**
* Replace the <code>List<NameExpr></code>s in the given <code>Statement</code> with <code>NullLiteralExpr</code>
* @param container
* @param toReplace
*/
public static void replaceNameExprWithNullInStatement(final Statement container,
final List<NameExpr> toReplace) {
final List<ReplacementTupla> replacementTuplas =
toReplace.stream()
.map(nameExpr -> {
NullLiteralExpr toAdd = new NullLiteralExpr();
return new ReplacementTupla(nameExpr, toAdd);
})
.collect(Collectors.toList());
replacementTuplas.forEach(replacementTupla -> replaceNodeInStatement(container, replacementTupla));
}
public static MethodCallExpr getArraysAsListInvocationMethodCall(NodeList<Expression> arguments) {
MethodCallExpr methodCallExpr = new MethodCallExpr();
methodCallExpr.setScope(new NameExpr(Arrays.class.getSimpleName()));
methodCallExpr.setName("asList");
methodCallExpr.setArguments(arguments);
return methodCallExpr;
}
public static NodeList<Expression> getArraysAsListInvocation(NodeList<Expression> arguments) {
return NodeList.nodeList(getArraysAsListInvocationMethodCall(arguments));
}
    /**
     * Immutable pair holding a node to locate (by equality) and the node to substitute for it;
     * consumed by {@code replaceNodeInStatement}/{@code replaceNodesInStatement}.
     */
    public static class ReplacementTupla {

        // Node matched (via equals) during the tree walk
        final Node toReplace;
        // Node installed in place of the match
        final Node replacement;

        public ReplacementTupla(Node toReplace, Node replacement) {
            this.toReplace = toReplace;
            this.replacement = replacement;
        }
    }
}
| |
package com.pushtorefresh.storio3.contentresolver.operations.get;
import android.database.Cursor;
import android.net.Uri;
import com.pushtorefresh.storio3.Optional;
import com.pushtorefresh.storio3.StorIOException;
import com.pushtorefresh.storio3.contentresolver.Changes;
import com.pushtorefresh.storio3.contentresolver.StorIOContentResolver;
import com.pushtorefresh.storio3.contentresolver.operations.SchedulerChecker;
import com.pushtorefresh.storio3.contentresolver.queries.Query;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.Maybe;
import io.reactivex.Single;
import io.reactivex.observers.TestObserver;
import io.reactivex.subscribers.TestSubscriber;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.assertj.core.api.Java6Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@RunWith(Enclosed.class)
public class PreparedGetObjectTest {
public static class WithoutTypeMapping {
@Test
public void shouldGetObjectWithoutTypeMappingBlocking() {
final GetObjectStub getStub = GetObjectStub.newStubWithoutTypeMapping();
final TestItem testItem = getStub.storIOContentResolver
.get()
.object(TestItem.class)
.withQuery(getStub.query)
.withGetResolver(getStub.getResolver)
.prepare()
.executeAsBlocking();
getStub.verifyBehavior(testItem);
}
@Test
public void shouldGetObjectWithoutTypeMappingAsFlowable() {
final GetObjectStub getStub = GetObjectStub.newStubWithoutTypeMapping();
final Flowable<Optional<TestItem>> testItemFlowable = getStub.storIOContentResolver
.get()
.object(TestItem.class)
.withQuery(getStub.query)
.withGetResolver(getStub.getResolver)
.prepare()
.asRxFlowable(BackpressureStrategy.MISSING)
.take(1);
getStub.verifyBehavior(testItemFlowable);
}
@Test
public void shouldGetObjectWithoutTypeMappingAsSingle() {
final GetObjectStub getStub = GetObjectStub.newStubWithoutTypeMapping();
final Single<Optional<TestItem>> testItemSingle = getStub.storIOContentResolver
.get()
.object(TestItem.class)
.withQuery(getStub.query)
.withGetResolver(getStub.getResolver)
.prepare()
.asRxSingle();
getStub.verifyBehavior(testItemSingle);
}
@Test
public void shouldGetObjectWithoutTypeMappingAsMaybe() {
final GetObjectStub getStub = GetObjectStub.newStubWithoutTypeMapping();
final Maybe<TestItem> testItemMaybe = getStub.storIOContentResolver
.get()
.object(TestItem.class)
.withQuery(getStub.query)
.withGetResolver(getStub.getResolver)
.prepare()
.asRxMaybe();
getStub.verifyBehavior(testItemMaybe);
}
}
/**
 * Tests the Get-object operation when a type mapping is registered,
 * so no explicit GetResolver has to be passed to the builder.
 */
public static class WithTypeMapping {

    @Test
    public void shouldGetObjectWithTypeMappingBlocking() {
        final GetObjectStub stub = GetObjectStub.newStubWithTypeMapping();

        final TestItem result = stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .prepare()
                .executeAsBlocking();

        stub.verifyBehavior(result);
    }

    @Test
    public void shouldGetObjectWithTypeMappingAsFlowable() {
        final GetObjectStub stub = GetObjectStub.newStubWithTypeMapping();

        final Flowable<Optional<TestItem>> result = stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .prepare()
                .asRxFlowable(BackpressureStrategy.MISSING)
                .take(1);

        stub.verifyBehavior(result);
    }

    @Test
    public void shouldGetObjectWithTypeMappingAsSingle() {
        final GetObjectStub stub = GetObjectStub.newStubWithTypeMapping();

        final Single<Optional<TestItem>> result = stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .prepare()
                .asRxSingle();

        stub.verifyBehavior(result);
    }

    @Test
    public void shouldGetObjectWithTypeMappingAsMaybe() {
        final GetObjectStub stub = GetObjectStub.newStubWithTypeMapping();

        final Maybe<TestItem> result = stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .prepare()
                .asRxMaybe();

        stub.verifyBehavior(result);
    }
}
/**
 * Tests that a missing type mapping fails fast with a StorIOException
 * (caused by IllegalStateException) WITHOUT ever querying the ContentProvider.
 */
public static class NoTypeMappingError {

    @Test
    public void shouldThrowExceptionIfNoTypeMappingWasFoundWithoutAccessingContentProviderBlocking() {
        final StorIOContentResolver contentResolver = mock(StorIOContentResolver.class);
        final StorIOContentResolver.LowLevel lowLevel = mock(StorIOContentResolver.LowLevel.class);

        when(contentResolver.lowLevel()).thenReturn(lowLevel);
        when(contentResolver.get()).thenReturn(new PreparedGet.Builder(contentResolver));

        final PreparedGetObject<TestItem> preparedOperation = contentResolver
                .get()
                .object(TestItem.class)
                .withQuery(Query.builder().uri(mock(Uri.class)).build())
                .prepare();

        try {
            preparedOperation.executeAsBlocking();
            failBecauseExceptionWasNotThrown(StorIOException.class);
        } catch (StorIOException expected) {
            // it's okay, no type mapping was found
            assertThat(expected).hasCauseInstanceOf(IllegalStateException.class);
            assertThat(expected.getCause()).hasMessage("This type does not have type mapping: " +
                    "type = " + TestItem.class + "," +
                    "ContentProvider was not touched by this operation, please add type mapping for this type");
        }

        // The ContentProvider itself must never have been queried.
        verify(contentResolver).get();
        verify(contentResolver).lowLevel();
        verify(contentResolver).interceptors();
        verify(lowLevel).typeMapping(TestItem.class);
        verify(lowLevel, never()).query(any(Query.class));
        verifyNoMoreInteractions(contentResolver, lowLevel);
    }

    @Test
    public void shouldThrowExceptionIfNoTypeMappingWasFoundWithoutAccessingContentProviderAsFlowable() {
        final StorIOContentResolver contentResolver = mock(StorIOContentResolver.class);
        final StorIOContentResolver.LowLevel lowLevel = mock(StorIOContentResolver.LowLevel.class);

        when(contentResolver.lowLevel()).thenReturn(lowLevel);
        when(contentResolver.get()).thenReturn(new PreparedGet.Builder(contentResolver));
        when(contentResolver.observeChangesOfUri(any(Uri.class), eq(BackpressureStrategy.MISSING)))
                .thenReturn(Flowable.<Changes>empty());

        final TestSubscriber<Optional<TestItem>> subscriber = new TestSubscriber<Optional<TestItem>>();

        contentResolver
                .get()
                .object(TestItem.class)
                .withQuery(Query.builder().uri(mock(Uri.class)).build())
                .prepare()
                .asRxFlowable(BackpressureStrategy.MISSING)
                .subscribe(subscriber);

        subscriber.awaitTerminalEvent();
        subscriber.assertNoValues();
        // The error must surface through the stream, not be thrown synchronously.
        assertThat(subscriber.errors().get(0))
                .isInstanceOf(StorIOException.class)
                .hasCauseInstanceOf(IllegalStateException.class);

        verify(contentResolver).get();
        verify(contentResolver).lowLevel();
        verify(contentResolver).interceptors();
        verify(contentResolver).defaultRxScheduler();
        verify(lowLevel).typeMapping(TestItem.class);
        verify(lowLevel, never()).query(any(Query.class));
        verify(contentResolver).observeChangesOfUri(any(Uri.class), eq(BackpressureStrategy.MISSING));
        verifyNoMoreInteractions(contentResolver, lowLevel);
    }

    @Test
    public void shouldThrowExceptionIfNoTypeMappingWasFoundWithoutAccessingContentProviderAsSingle() {
        final StorIOContentResolver contentResolver = mock(StorIOContentResolver.class);
        final StorIOContentResolver.LowLevel lowLevel = mock(StorIOContentResolver.LowLevel.class);

        when(contentResolver.lowLevel()).thenReturn(lowLevel);
        when(contentResolver.get()).thenReturn(new PreparedGet.Builder(contentResolver));

        final TestObserver<Optional<TestItem>> observer = new TestObserver<Optional<TestItem>>();

        contentResolver
                .get()
                .object(TestItem.class)
                .withQuery(Query.builder().uri(mock(Uri.class)).build())
                .prepare()
                .asRxSingle()
                .subscribe(observer);

        observer.awaitTerminalEvent();
        observer.assertNoValues();
        assertThat(observer.errors().get(0))
                .isInstanceOf(StorIOException.class)
                .hasCauseInstanceOf(IllegalStateException.class);

        verify(contentResolver).get();
        verify(contentResolver).lowLevel();
        verify(contentResolver).interceptors();
        verify(contentResolver).defaultRxScheduler();
        verify(lowLevel).typeMapping(TestItem.class);
        verify(lowLevel, never()).query(any(Query.class));
        verifyNoMoreInteractions(contentResolver, lowLevel);
    }

    @Test
    public void shouldThrowExceptionIfNoTypeMappingWasFoundWithoutAccessingContentProviderAsMaybe() {
        final StorIOContentResolver contentResolver = mock(StorIOContentResolver.class);
        final StorIOContentResolver.LowLevel lowLevel = mock(StorIOContentResolver.LowLevel.class);

        when(contentResolver.lowLevel()).thenReturn(lowLevel);
        when(contentResolver.get()).thenReturn(new PreparedGet.Builder(contentResolver));

        final TestObserver<TestItem> observer = new TestObserver<TestItem>();

        contentResolver
                .get()
                .object(TestItem.class)
                .withQuery(Query.builder().uri(mock(Uri.class)).build())
                .prepare()
                .asRxMaybe()
                .subscribe(observer);

        observer.awaitTerminalEvent();
        observer.assertNoValues();
        assertThat(observer.errors().get(0))
                .isInstanceOf(StorIOException.class)
                .hasCauseInstanceOf(IllegalStateException.class);

        verify(contentResolver).get();
        verify(contentResolver).lowLevel();
        verify(contentResolver).interceptors();
        verify(contentResolver).defaultRxScheduler();
        verify(lowLevel).typeMapping(TestItem.class);
        verify(lowLevel, never()).query(any(Query.class));
        verifyNoMoreInteractions(contentResolver, lowLevel);
    }
}
// With Enclosed runner we can not have tests in root class
/** Miscellaneous checks that do not fit the mapping-specific suites above. */
public static class OtherTests {

    @Test
    public void shouldReturnQueryInGetData() {
        final Query query = Query.builder()
                .uri(mock(Uri.class))
                .build();
        final StorIOContentResolver contentResolver = mock(StorIOContentResolver.class);
        //noinspection unchecked
        final GetResolver<Object> getResolver = mock(GetResolver.class);

        final PreparedGetObject<Object> operation =
                new PreparedGetObject.Builder<Object>(contentResolver, Object.class)
                        .withQuery(query)
                        .withGetResolver(getResolver)
                        .prepare();

        // The prepared operation must expose exactly the query it was built with.
        assertThat(operation.getData()).isEqualTo(query);
    }

    @Test
    public void shouldCloseCursorInCaseOfException() {
        final StorIOContentResolver contentResolver = mock(StorIOContentResolver.class);
        final Query query = Query.builder()
                .uri(mock(Uri.class))
                .build();
        //noinspection unchecked
        final GetResolver<Object> getResolver = mock(GetResolver.class);
        final Cursor cursor = mock(Cursor.class);

        when(getResolver.performGet(contentResolver, query))
                .thenReturn(cursor);
        // Make the mapping step blow up after the cursor has been opened.
        when(getResolver.mapFromCursor(contentResolver, cursor))
                .thenThrow(new IllegalStateException("Breaking execution"));
        when(cursor.getCount()).thenReturn(1);
        when(cursor.moveToFirst()).thenReturn(true);

        try {
            new PreparedGetObject.Builder<Object>(contentResolver, Object.class)
                    .withQuery(query)
                    .withGetResolver(getResolver)
                    .prepare()
                    .executeAsBlocking();
            failBecauseExceptionWasNotThrown(StorIOException.class);
        } catch (StorIOException expected) {
            assertThat(expected.getCause())
                    .isInstanceOf(IllegalStateException.class)
                    .hasMessage("Breaking execution");

            // Main check: in case of exception cursor must be closed
            verify(cursor).close();
            verify(cursor).getCount();
            verify(cursor).moveToFirst();
            verify(contentResolver).interceptors();
            verifyNoMoreInteractions(contentResolver, cursor);
        }
    }

    @Test
    public void getObjectFlowableExecutesOnSpecifiedScheduler() {
        final GetObjectStub stub = GetObjectStub.newStubWithoutTypeMapping();
        final SchedulerChecker checker = SchedulerChecker.create(stub.storIOContentResolver);

        final PreparedGetObject<TestItem> operation = stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .withGetResolver(stub.getResolver)
                .prepare();

        checker.checkAsFlowable(operation);
    }

    @Test
    public void getObjectSingleExecutesOnSpecifiedScheduler() {
        final GetObjectStub stub = GetObjectStub.newStubWithoutTypeMapping();
        final SchedulerChecker checker = SchedulerChecker.create(stub.storIOContentResolver);

        final PreparedGetObject<TestItem> operation = stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .withGetResolver(stub.getResolver)
                .prepare();

        checker.checkAsSingle(operation);
    }

    @Test
    public void shouldPassStorIOContentResolverToGetResolver() {
        final GetObjectStub stub = GetObjectStub.newStubWithoutTypeMapping();

        stub.storIOContentResolver
                .get()
                .object(TestItem.class)
                .withQuery(stub.query)
                .withGetResolver(stub.getResolver)
                .prepare()
                .executeAsBlocking();

        // The resolver must be handed the same StorIOContentResolver instance.
        verify(stub.getResolver).mapFromCursor(eq(stub.storIOContentResolver), any(Cursor.class));
    }
}
}
| |
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.jpa.hibernate.sql;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.hibernate.Query;
import org.hibernate.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Sets;
import com.mysema.commons.lang.CloseableIterator;
import com.querydsl.core.*;
import com.querydsl.core.NonUniqueResultException;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.FactoryExpression;
import com.querydsl.jpa.AbstractSQLQuery;
import com.querydsl.jpa.FactoryExpressionTransformer;
import com.querydsl.jpa.NativeSQLSerializer;
import com.querydsl.jpa.ScrollableResultsIterator;
import com.querydsl.jpa.hibernate.DefaultSessionHolder;
import com.querydsl.jpa.hibernate.HibernateUtil;
import com.querydsl.jpa.hibernate.SessionHolder;
import com.querydsl.jpa.hibernate.StatelessSessionHolder;
import com.querydsl.sql.Configuration;
import com.querydsl.sql.SQLSerializer;
/**
* {@code AbstractHibernateSQLQuery} is the base class for Hibernate Native SQL queries
*
* @param <T> result type
* @param <Q> concrete subtype
*
* @author tiwe
*/
public abstract class AbstractHibernateSQLQuery<T, Q extends AbstractHibernateSQLQuery<T, Q>> extends AbstractSQLQuery<T, Q> {

    private static final Logger logger = LoggerFactory.getLogger(AbstractHibernateSQLQuery.class);

    // Hibernate query hints; null means "not set, keep Hibernate's default".
    protected Boolean cacheable, readOnly;

    protected String cacheRegion;

    // 0 means "not set" for both fetchSize and timeout.
    protected int fetchSize = 0;

    private final SessionHolder session;

    protected int timeout = 0;

    public AbstractHibernateSQLQuery(Session session, Configuration conf) {
        this(new DefaultSessionHolder(session), conf, new DefaultQueryMetadata());
    }

    public AbstractHibernateSQLQuery(StatelessSession session, Configuration conf) {
        this(new StatelessSessionHolder(session), conf, new DefaultQueryMetadata());
    }

    public AbstractHibernateSQLQuery(SessionHolder session, Configuration conf, QueryMetadata metadata) {
        super(metadata, conf);
        this.session = session;
    }

    /**
     * Creates the Hibernate native SQL query for the projection of this query.
     *
     * @return the Hibernate query for the full (non-count) projection
     */
    public Query createQuery() {
        return createQuery(false);
    }

    /**
     * Serializes this query into SQL, creates the Hibernate SQLQuery, binds
     * constants, registers entity/scalar result mappings (unless this is the
     * count variant) and applies the configured query hints.
     *
     * @param forCount whether to build the count variant (no projections)
     */
    private Query createQuery(boolean forCount) {
        NativeSQLSerializer serializer = (NativeSQLSerializer) serialize(forCount);
        String queryString = serializer.toString();
        logQuery(queryString, serializer.getConstantToAllLabels());
        org.hibernate.SQLQuery query = session.createSQLQuery(queryString);
        // set constants
        HibernateUtil.setConstants(query, serializer.getConstantToNamedLabel(), serializer.getConstantToNumberedLabel(),
                queryMixin.getMetadata().getParams());

        if (!forCount) {
            ListMultimap<Expression<?>, String> aliases = serializer.getAliases();
            // tracks scalar aliases already registered, to avoid duplicates
            Set<String> used = Sets.newHashSet();

            // set entity paths
            Expression<?> projection = queryMixin.getMetadata().getProjection();
            if (projection instanceof FactoryExpression) {
                for (Expression<?> expr : ((FactoryExpression<?>) projection).getArgs()) {
                    if (isEntityExpression(expr)) {
                        query.addEntity(extractEntityExpression(expr).toString(), expr.getType());
                    } else if (aliases.containsKey(expr)) {
                        // register the first alias of this expression not yet taken
                        for (String scalar : aliases.get(expr)) {
                            if (!used.contains(scalar)) {
                                query.addScalar(scalar);
                                used.add(scalar);
                                break;
                            }
                        }
                    }
                }
            } else if (isEntityExpression(projection)) {
                query.addEntity(extractEntityExpression(projection).toString(), projection.getType());
            } else if (aliases.containsKey(projection)) {
                for (String scalar : aliases.get(projection)) {
                    if (!used.contains(scalar)) {
                        query.addScalar(scalar);
                        used.add(scalar);
                        break;
                    }
                }
            }

            // set result transformer, if projection is a FactoryExpression instance
            if (projection instanceof FactoryExpression) {
                query.setResultTransformer(new FactoryExpressionTransformer((FactoryExpression<?>) projection));
            }
        }

        if (fetchSize > 0) {
            query.setFetchSize(fetchSize);
        }
        if (timeout > 0) {
            query.setTimeout(timeout);
        }
        if (cacheable != null) {
            query.setCacheable(cacheable);
        }
        if (cacheRegion != null) {
            query.setCacheRegion(cacheRegion);
        }
        if (readOnly != null) {
            query.setReadOnly(readOnly);
        }
        return query;
    }

    @Override
    protected SQLSerializer createSerializer() {
        return new NativeSQLSerializer(configuration, true);
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<T> fetch() {
        try {
            return createQuery().list();
        } finally {
            reset();
        }
    }

    /**
     * Iterates the results via a forward-only scrollable cursor.
     * The returned iterator closes the underlying ScrollableResults.
     */
    @Override
    public CloseableIterator<T> iterate() {
        try {
            Query query = createQuery();
            ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
            return new ScrollableResultsIterator<T>(results);
        } finally {
            reset();
        }
    }

    @Override
    public QueryResults<T> fetchResults() {
        // TODO : handle entity projections as well
        try {
            Query countQuery = createQuery(true);
            Number count = (Number) countQuery.uniqueResult();
            // FIX: guard against a null count row — previously this threw a
            // NullPointerException; now it yields empty results instead.
            long total = count != null ? count.longValue() : 0L;
            if (total > 0) {
                QueryModifiers modifiers = queryMixin.getMetadata().getModifiers();
                Query query = createQuery(false);
                @SuppressWarnings("unchecked")
                List<T> list = query.list();
                return new QueryResults<T>(list, modifiers, total);
            } else {
                return QueryResults.emptyResults();
            }
        } finally {
            reset();
        }
    }

    /**
     * Logs the query (newlines flattened) at debug level and exposes the
     * query text and parameters through the MDC for log correlation.
     */
    protected void logQuery(String queryString, Map<Object, String> parameters) {
        if (logger.isDebugEnabled()) {
            String normalizedQuery = queryString.replace('\n', ' ');
            MDC.put(MDC_QUERY, normalizedQuery);
            MDC.put(MDC_PARAMETERS, String.valueOf(parameters));
            logger.debug(normalizedQuery);
        }
    }

    protected void cleanupMDC() {
        MDC.remove(MDC_QUERY);
        MDC.remove(MDC_PARAMETERS);
    }

    protected void reset() {
        cleanupMDC();
    }

    @SuppressWarnings("unchecked")
    @Override
    public T fetchOne() throws NonUniqueResultException {
        try {
            Query query = createQuery();
            return (T) uniqueResult(query);
        } finally {
            reset();
        }
    }

    /**
     * Executes the query, translating Hibernate's NonUniqueResultException
     * into the querydsl equivalent.
     */
    @Nullable
    private Object uniqueResult(Query query) {
        try {
            return query.uniqueResult();
        } catch (org.hibernate.NonUniqueResultException e) {
            throw new NonUniqueResultException(e);
        }
    }

    /**
     * Enable caching of this query result set.
     * @param cacheable Should the query results be cacheable?
     */
    @SuppressWarnings("unchecked")
    public Q setCacheable(boolean cacheable) {
        this.cacheable = cacheable;
        return (Q) this;
    }

    /**
     * Set the name of the cache region.
     * @param cacheRegion the name of a query cache region, or <tt>null</tt>
     * for the default query cache
     */
    @SuppressWarnings("unchecked")
    public Q setCacheRegion(String cacheRegion) {
        this.cacheRegion = cacheRegion;
        return (Q) this;
    }

    /**
     * Set a fetchJoin size for the underlying JDBC query.
     * @param fetchSize the fetchJoin size
     */
    @SuppressWarnings("unchecked")
    public Q setFetchSize(int fetchSize) {
        this.fetchSize = fetchSize;
        return (Q) this;
    }

    /**
     * Entities retrieved by this query will be loaded in
     * a read-only mode where Hibernate will never dirty-check
     * them or make changes persistent.
     */
    @SuppressWarnings("unchecked")
    public Q setReadOnly(boolean readOnly) {
        this.readOnly = readOnly;
        return (Q) this;
    }

    /**
     * Set a timeout for the underlying JDBC query.
     * @param timeout the timeout in seconds
     */
    @SuppressWarnings("unchecked")
    public Q setTimeout(int timeout) {
        this.timeout = timeout;
        return (Q) this;
    }

    /**
     * Copies the Hibernate-specific hints from the given query into this one.
     */
    @Override
    protected void clone(Q query) {
        super.clone(query);
        cacheable = query.cacheable;
        cacheRegion = query.cacheRegion;
        fetchSize = query.fetchSize;
        readOnly = query.readOnly;
        timeout = query.timeout;
    }

    protected abstract Q clone(SessionHolder session);

    public Q clone(Session session) {
        return this.clone(new DefaultSessionHolder(session));
    }

    public Q clone(StatelessSession statelessSession) {
        return this.clone(new StatelessSessionHolder(statelessSession));
    }

    @Override
    public Q clone() {
        return this.clone(this.session);
    }
}
| |
package com.michaelfotiadis.advuncedpeasants.ui.base.recyclerview.animation;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorCompat;
import android.support.v4.view.ViewPropertyAnimatorListener;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.support.v7.widget.SimpleItemAnimator;
import android.view.View;
import java.util.ArrayList;
import java.util.List;
/**
* Cloned from android.support.v7.widget.DefaultItemAnimator and hacked^H^H^H^H^H^Himproved to allow animations to be changed
*
* @see android.support.v7.widget.RecyclerView#setItemAnimator(android.support.v7.widget.RecyclerView.ItemAnimator)
* @see android.support.v7.widget.DefaultItemAnimator
*/
/*package*/ class CustomItemAnimator extends SimpleItemAnimator {
private static final boolean DEBUG = false;
private final ArrayList<ViewHolder> mAddAnimations = new ArrayList<>();
private final ArrayList<ArrayList<ViewHolder>> mAdditionsList = new ArrayList<>();
private final ArrayList<ViewHolder> mChangeAnimations = new ArrayList<>();
private final ArrayList<ArrayList<ChangeInfo>> mChangesList = new ArrayList<>();
private final ArrayList<ViewHolder> mMoveAnimations = new ArrayList<>();
private final ArrayList<ArrayList<MoveInfo>> mMovesList = new ArrayList<>();
private final ArrayList<ViewHolder> mPendingAdditions = new ArrayList<>();
private final ArrayList<ChangeInfo> mPendingChanges = new ArrayList<>();
private final ArrayList<MoveInfo> mPendingMoves = new ArrayList<>();
private final ArrayList<ViewHolder> mPendingRemovals = new ArrayList<>();
private final ArrayList<ViewHolder> mRemoveAnimations = new ArrayList<>();
private ItemAnimationCustomizer itemAddCustomizer = PredefinedAnimations.FADE_IN;
private ItemAnimationCustomizer itemRemoveCustomizer = PredefinedAnimations.FADE_OUT;
@Override
public boolean animateRemove(final ViewHolder holder) {
    // Cancel anything already running on this holder, then queue the removal
    // for the next runPendingAnimations() pass.
    endAnimation(holder);
    mPendingRemovals.add(holder);
    return true;
}
@Override
public boolean animateAdd(final ViewHolder holder) {
    endAnimation(holder);
    // Start fully transparent; the queued add animation brings the view in.
    ViewCompat.setAlpha(holder.itemView, 0);
    mPendingAdditions.add(holder);
    return true;
}
@Override
public boolean animateMove(final ViewHolder holder, int fromX, int fromY,
                           final int toX, final int toY) {
    final View view = holder.itemView;
    // Fold any translation left over from an interrupted move into the start
    // position (read BEFORE endAnimation(), which may reset it).
    fromX += ViewCompat.getTranslationX(holder.itemView);
    fromY += ViewCompat.getTranslationY(holder.itemView);
    endAnimation(holder);

    final int dx = toX - fromX;
    final int dy = toY - fromY;
    if (dx == 0 && dy == 0) {
        // Already in place — nothing to animate.
        dispatchMoveFinished(holder);
        return false;
    }

    // Pre-position the view at its previous location relative to the new layout.
    if (dx != 0) {
        ViewCompat.setTranslationX(view, -dx);
    }
    if (dy != 0) {
        ViewCompat.setTranslationY(view, -dy);
    }
    mPendingMoves.add(new MoveInfo(holder, fromX, fromY, toX, toY));
    return true;
}
/**
 * Queues a change animation: the old holder cross-fades/slides out while the
 * new holder slides in. Actual animation starts in runPendingAnimations().
 */
@Override
public boolean animateChange(final ViewHolder oldHolder, final ViewHolder newHolder,
        final int fromX, final int fromY, final int toX, final int toY) {
    // Capture the old view's current transform BEFORE endAnimation(), which
    // may reset translation/alpha while cancelling a running animation.
    final float prevTranslationX = ViewCompat.getTranslationX(oldHolder.itemView);
    final float prevTranslationY = ViewCompat.getTranslationY(oldHolder.itemView);
    final float prevAlpha = ViewCompat.getAlpha(oldHolder.itemView);
    endAnimation(oldHolder);
    final int deltaX = (int) (toX - fromX - prevTranslationX);
    final int deltaY = (int) (toY - fromY - prevTranslationY);
    // recover prev translation state after ending animation
    ViewCompat.setTranslationX(oldHolder.itemView, prevTranslationX);
    ViewCompat.setTranslationY(oldHolder.itemView, prevTranslationY);
    ViewCompat.setAlpha(oldHolder.itemView, prevAlpha);
    if (newHolder != null && newHolder.itemView != null) {
        // carry over translation values
        endAnimation(newHolder);
        // Pre-position the new view offset from its final spot, fully
        // transparent; animateChangeImpl() slides/fades it into place.
        ViewCompat.setTranslationX(newHolder.itemView, -deltaX);
        ViewCompat.setTranslationY(newHolder.itemView, -deltaY);
        ViewCompat.setAlpha(newHolder.itemView, 0);
    }
    mPendingChanges.add(new ChangeInfo(oldHolder, newHolder, fromX, fromY, toX, toY));
    return true;
}
/**
 * Flushes every animation queued since the last layout pass.
 * Phase order: removals first; then moves and changes in parallel (delayed
 * until removals finish); then additions (delayed until the longest of the
 * earlier phases finishes).
 */
@Override
public void runPendingAnimations() {
    final boolean removalsPending = !mPendingRemovals.isEmpty();
    final boolean movesPending = !mPendingMoves.isEmpty();
    final boolean changesPending = !mPendingChanges.isEmpty();
    final boolean additionsPending = !mPendingAdditions.isEmpty();
    if (!removalsPending && !movesPending && !additionsPending && !changesPending) {
        // nothing to animate
        return;
    }
    // First, remove stuff
    for (final ViewHolder holder : mPendingRemovals) {
        animateRemoveImpl(holder);
    }
    mPendingRemovals.clear();
    // Next, move stuff
    if (movesPending) {
        // Snapshot the pending moves into a batch; keeping the batch in
        // mMovesList lets endAnimation()/endAnimations() cancel it while the
        // runnable below is still waiting on the remove phase.
        final ArrayList<MoveInfo> moves = new ArrayList<>();
        moves.addAll(mPendingMoves);
        mMovesList.add(moves);
        mPendingMoves.clear();
        final Runnable mover = new Runnable() {
            @Override
            public void run() {
                for (final MoveInfo moveInfo : moves) {
                    animateMoveImpl(moveInfo.holder, moveInfo.fromX, moveInfo.fromY,
                            moveInfo.toX, moveInfo.toY);
                }
                moves.clear();
                mMovesList.remove(moves);
            }
        };
        if (removalsPending) {
            // Defer the move batch until the removals have played out.
            final View view = moves.get(0).holder.itemView;
            ViewCompat.postOnAnimationDelayed(view, mover, getRemoveDuration());
        } else {
            mover.run();
        }
    }
    // Next, change stuff, to run in parallel with move animations
    if (changesPending) {
        final ArrayList<ChangeInfo> changes = new ArrayList<>();
        changes.addAll(mPendingChanges);
        mChangesList.add(changes);
        mPendingChanges.clear();
        final Runnable changer = new Runnable() {
            @Override
            public void run() {
                for (final ChangeInfo change : changes) {
                    animateChangeImpl(change);
                }
                changes.clear();
                mChangesList.remove(changes);
            }
        };
        if (removalsPending) {
            final ViewHolder holder = changes.get(0).oldHolder;
            ViewCompat.postOnAnimationDelayed(holder.itemView, changer, getRemoveDuration());
        } else {
            changer.run();
        }
    }
    // Next, add stuff
    if (additionsPending) {
        final ArrayList<ViewHolder> additions = new ArrayList<>();
        additions.addAll(mPendingAdditions);
        mAdditionsList.add(additions);
        mPendingAdditions.clear();
        final Runnable adder = new Runnable() {
            public void run() {
                for (final ViewHolder holder : additions) {
                    animateAddImpl(holder);
                }
                additions.clear();
                mAdditionsList.remove(additions);
            }
        };
        if (removalsPending || movesPending || changesPending) {
            // Additions wait for removals plus the slower of moves/changes.
            final long removeDuration = removalsPending ? getRemoveDuration() : 0;
            final long moveDuration = movesPending ? getMoveDuration() : 0;
            final long changeDuration = changesPending ? getChangeDuration() : 0;
            final long totalDelay = removeDuration + Math.max(moveDuration, changeDuration);
            final View view = additions.get(0).itemView;
            ViewCompat.postOnAnimationDelayed(view, adder, totalDelay);
        } else {
            adder.run();
        }
    }
}
/**
 * Immediately terminates every pending and running animation for one holder:
 * cancels the in-flight ViewPropertyAnimator, purges the holder from each
 * pending queue and batched list, restores the view's resting properties and
 * dispatches the matching *Finished callbacks.
 */
@Override
public void endAnimation(final ViewHolder item) {
    final View view = item.itemView;
    // this will trigger end callback which should set properties to their target values.
    ViewCompat.animate(view).cancel();
    // TODO if some other animations are chained to end, how do we cancel them as well?
    // Pending (not yet started) moves: snap back to zero translation.
    for (int i = mPendingMoves.size() - 1; i >= 0; i--) {
        final MoveInfo moveInfo = mPendingMoves.get(i);
        if (moveInfo.holder == item) {
            ViewCompat.setTranslationY(view, 0);
            ViewCompat.setTranslationX(view, 0);
            dispatchMoveFinished(item);
            mPendingMoves.remove(i);
        }
    }
    endChangeAnimation(mPendingChanges, item);
    if (mPendingRemovals.remove(item)) {
        ViewCompat.setAlpha(view, 1);
        dispatchRemoveFinished(item);
    }
    if (mPendingAdditions.remove(item)) {
        ViewCompat.setAlpha(view, 1);
        dispatchAddFinished(item);
    }
    // Batched (scheduled but possibly not yet started) lists, newest first.
    for (int i = mChangesList.size() - 1; i >= 0; i--) {
        final ArrayList<ChangeInfo> changes = mChangesList.get(i);
        endChangeAnimation(changes, item);
        if (changes.isEmpty()) {
            mChangesList.remove(i);
        }
    }
    for (int i = mMovesList.size() - 1; i >= 0; i--) {
        final ArrayList<MoveInfo> moves = mMovesList.get(i);
        for (int j = moves.size() - 1; j >= 0; j--) {
            final MoveInfo moveInfo = moves.get(j);
            if (moveInfo.holder == item) {
                ViewCompat.setTranslationY(view, 0);
                ViewCompat.setTranslationX(view, 0);
                dispatchMoveFinished(item);
                moves.remove(j);
                if (moves.isEmpty()) {
                    mMovesList.remove(i);
                }
                break;
            }
        }
    }
    for (int i = mAdditionsList.size() - 1; i >= 0; i--) {
        final ArrayList<ViewHolder> additions = mAdditionsList.get(i);
        if (additions.remove(item)) {
            ViewCompat.setAlpha(view, 1);
            dispatchAddFinished(item);
            if (additions.isEmpty()) {
                mAdditionsList.remove(i);
            }
        }
    }
    // animations should be ended by the cancel above.
    // In DEBUG builds, finding the holder still registered here means the
    // cancel() listener contract was violated.
    if (mRemoveAnimations.remove(item) && DEBUG) {
        throw new IllegalStateException("after animation is cancelled, item should not be in "
                + "mRemoveAnimations list");
    }
    if (mAddAnimations.remove(item) && DEBUG) {
        throw new IllegalStateException("after animation is cancelled, item should not be in "
                + "mAddAnimations list");
    }
    if (mChangeAnimations.remove(item) && DEBUG) {
        throw new IllegalStateException("after animation is cancelled, item should not be in "
                + "mChangeAnimations list");
    }
    if (mMoveAnimations.remove(item) && DEBUG) {
        throw new IllegalStateException("after animation is cancelled, item should not be in "
                + "mMoveAnimations list");
    }
    dispatchFinishedWhenDone();
}
/**
 * Terminates ALL animations: drains every pending queue (dispatching the
 * finished callbacks and restoring resting view properties), then cancels
 * any in-flight ViewPropertyAnimators and signals that the animator is idle.
 */
@Override
public void endAnimations() {
    // Pending (not yet started) moves.
    int count = mPendingMoves.size();
    for (int i = count - 1; i >= 0; i--) {
        final MoveInfo item = mPendingMoves.get(i);
        final View view = item.holder.itemView;
        ViewCompat.setTranslationY(view, 0);
        ViewCompat.setTranslationX(view, 0);
        dispatchMoveFinished(item.holder);
        mPendingMoves.remove(i);
    }
    count = mPendingRemovals.size();
    for (int i = count - 1; i >= 0; i--) {
        final ViewHolder item = mPendingRemovals.get(i);
        dispatchRemoveFinished(item);
        mPendingRemovals.remove(i);
    }
    count = mPendingAdditions.size();
    for (int i = count - 1; i >= 0; i--) {
        final ViewHolder item = mPendingAdditions.get(i);
        final View view = item.itemView;
        ViewCompat.setAlpha(view, 1);
        dispatchAddFinished(item);
        mPendingAdditions.remove(i);
    }
    count = mPendingChanges.size();
    for (int i = count - 1; i >= 0; i--) {
        endChangeAnimationIfNecessary(mPendingChanges.get(i));
    }
    mPendingChanges.clear();
    if (!isRunning()) {
        // Nothing batched or in flight — no finished event needed.
        return;
    }
    // Batched lists: animations scheduled by runPendingAnimations() but
    // possibly still waiting on an earlier phase.
    int listCount = mMovesList.size();
    for (int i = listCount - 1; i >= 0; i--) {
        final ArrayList<MoveInfo> moves = mMovesList.get(i);
        count = moves.size();
        for (int j = count - 1; j >= 0; j--) {
            final MoveInfo moveInfo = moves.get(j);
            final ViewHolder item = moveInfo.holder;
            final View view = item.itemView;
            ViewCompat.setTranslationY(view, 0);
            ViewCompat.setTranslationX(view, 0);
            dispatchMoveFinished(moveInfo.holder);
            moves.remove(j);
            if (moves.isEmpty()) {
                mMovesList.remove(moves);
            }
        }
    }
    listCount = mAdditionsList.size();
    for (int i = listCount - 1; i >= 0; i--) {
        final ArrayList<ViewHolder> additions = mAdditionsList.get(i);
        count = additions.size();
        for (int j = count - 1; j >= 0; j--) {
            final ViewHolder item = additions.get(j);
            final View view = item.itemView;
            ViewCompat.setAlpha(view, 1);
            dispatchAddFinished(item);
            additions.remove(j);
            if (additions.isEmpty()) {
                mAdditionsList.remove(additions);
            }
        }
    }
    listCount = mChangesList.size();
    for (int i = listCount - 1; i >= 0; i--) {
        final ArrayList<ChangeInfo> changes = mChangesList.get(i);
        count = changes.size();
        for (int j = count - 1; j >= 0; j--) {
            endChangeAnimationIfNecessary(changes.get(j));
            if (changes.isEmpty()) {
                mChangesList.remove(changes);
            }
        }
    }
    // Cancel in-flight animations; their end listeners do the cleanup.
    cancelAll(mRemoveAnimations);
    cancelAll(mMoveAnimations);
    cancelAll(mAddAnimations);
    cancelAll(mChangeAnimations);
    dispatchAnimationsFinished();
}
/** Returns true while any animation is pending, batched, or in flight. */
@Override
public boolean isRunning() {
    // Idle only when every queue — pending, batched, and running — is empty.
    return !(mPendingAdditions.isEmpty()
            && mPendingChanges.isEmpty()
            && mPendingMoves.isEmpty()
            && mPendingRemovals.isEmpty()
            && mMoveAnimations.isEmpty()
            && mRemoveAnimations.isEmpty()
            && mAddAnimations.isEmpty()
            && mChangeAnimations.isEmpty()
            && mMovesList.isEmpty()
            && mAdditionsList.isEmpty()
            && mChangesList.isEmpty());
}
/** Replaces the effect used for item additions (default: fade in). */
public void setItemAddCustomizer(final ItemAnimationCustomizer customizer) {
    this.itemAddCustomizer = customizer;
}
/** Replaces the effect used for item removals (default: fade out). */
public void setItemRemoveCustomizer(final ItemAnimationCustomizer removeCustomizer) {
    this.itemRemoveCustomizer = removeCustomizer;
}
/*package*/ void cancelAll(final List<ViewHolder> viewHolders) {
    // Walk backwards: cancel() fires listeners that may remove entries
    // from this same list.
    for (int index = viewHolders.size() - 1; index >= 0; index--) {
        ViewCompat.animate(viewHolders.get(index).itemView).cancel();
    }
}
/**
 * Starts the add animation for one holder, delegating the visual effect to
 * the pluggable {@link ItemAnimationCustomizer} (default: fade in).
 */
private void animateAddImpl(final ViewHolder holder) {
    final View view = holder.itemView;
    // Register before start() so isRunning()/endAnimations() see this holder.
    mAddAnimations.add(holder);
    final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
    // animation.alpha(1);
    itemAddCustomizer.onPrepare(animation, holder);
    animation.setDuration(getAddDuration());
    animation.setListener(new VpaListenerAdapter() {
        @Override
        public void onAnimationCancel(final View view) {
            // Let the customizer restore the view to its resting state.
            itemAddCustomizer.onCancel(holder);
            // ViewCompat.setAlpha(view, 1);
        }

        @Override
        public void onAnimationEnd(final View view) {
            itemAddCustomizer.onEnd(holder);
            // Detach the listener so a reused animator cannot re-fire it.
            animation.setListener(null);
            dispatchAddFinished(holder);
            mAddAnimations.remove(holder);
            dispatchFinishedWhenDone();
        }

        @Override
        public void onAnimationStart(final View view) {
            dispatchAddStarting(holder);
        }
    });
    animation.start();
}
/**
 * Runs a change animation: the old view slides to its new position while
 * fading out; the new view (pre-offset and transparent, see animateChange)
 * slides into place while fading in.
 */
private void animateChangeImpl(final ChangeInfo changeInfo) {
    final ViewHolder holder = changeInfo.oldHolder;
    final View view = holder == null ? null : holder.itemView;
    final ViewHolder newHolder = changeInfo.newHolder;
    final View newView = newHolder != null ? newHolder.itemView : null;
    if (view != null) {
        mChangeAnimations.add(changeInfo.oldHolder);
        final ViewPropertyAnimatorCompat oldViewAnim = ViewCompat.animate(view).setDuration(
                getChangeDuration());
        oldViewAnim.translationX(changeInfo.toX - changeInfo.fromX);
        oldViewAnim.translationY(changeInfo.toY - changeInfo.fromY);
        oldViewAnim.alpha(0).setListener(new VpaListenerAdapter() {
            @Override
            public void onAnimationEnd(final View view) {
                oldViewAnim.setListener(null);
                // Reset to resting state — the old view may be recycled.
                ViewCompat.setAlpha(view, 1);
                ViewCompat.setTranslationX(view, 0);
                ViewCompat.setTranslationY(view, 0);
                dispatchChangeFinished(changeInfo.oldHolder, true);
                mChangeAnimations.remove(changeInfo.oldHolder);
                dispatchFinishedWhenDone();
            }

            @Override
            public void onAnimationStart(final View view) {
                dispatchChangeStarting(changeInfo.oldHolder, true);
            }
        }).start();
    }
    if (newView != null) {
        mChangeAnimations.add(changeInfo.newHolder);
        final ViewPropertyAnimatorCompat newViewAnimation = ViewCompat.animate(newView);
        newViewAnimation.translationX(0).translationY(0).setDuration(getChangeDuration()).
                alpha(1).setListener(new VpaListenerAdapter() {
            @Override
            public void onAnimationEnd(final View view) {
                newViewAnimation.setListener(null);
                ViewCompat.setAlpha(newView, 1);
                ViewCompat.setTranslationX(newView, 0);
                ViewCompat.setTranslationY(newView, 0);
                dispatchChangeFinished(changeInfo.newHolder, false);
                mChangeAnimations.remove(changeInfo.newHolder);
                dispatchFinishedWhenDone();
            }

            @Override
            public void onAnimationStart(final View view) {
                dispatchChangeStarting(changeInfo.newHolder, false);
            }
        }).start();
    }
}
/**
 * Runs a "move" animation over {@link #getMoveDuration()}: the view is
 * animated back to translation (0,0).
 * NOTE(review): this assumes the view was pre-offset by the pending-move
 * setup before this call - confirm against the caller.
 */
private void animateMoveImpl(final ViewHolder holder, final int fromX, final int fromY, final int toX, final int toY) {
final View view = holder.itemView;
final int deltaX = toX - fromX;
final int deltaY = toY - fromY;
// Only animate the axes that actually changed.
if (deltaX != 0) {
ViewCompat.animate(view).translationX(0);
}
if (deltaY != 0) {
ViewCompat.animate(view).translationY(0);
}
// TODO: make EndActions end listeners instead, since end actions aren't called when
// vpas are canceled (and can't end them. why?)
// need listener functionality in VPACompat for this. Ick.
mMoveAnimations.add(holder);
final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
animation.setDuration(getMoveDuration()).setListener(new VpaListenerAdapter() {
@Override
public void onAnimationCancel(final View view) {
// Cancel skips the end values; snap the translation home manually.
if (deltaX != 0) {
ViewCompat.setTranslationX(view, 0);
}
if (deltaY != 0) {
ViewCompat.setTranslationY(view, 0);
}
}
@Override
public void onAnimationEnd(final View view) {
animation.setListener(null);
dispatchMoveFinished(holder);
mMoveAnimations.remove(holder);
dispatchFinishedWhenDone();
}
@Override
public void onAnimationStart(final View view) {
dispatchMoveStarting(holder);
}
}).start();
}
/**
 * Starts the disappearance animation for {@code holder} over
 * {@link #getRemoveDuration()}. The actual visual effect is delegated to
 * {@code itemRemoveCustomizer}; this method only wires up duration,
 * lifecycle dispatch and bookkeeping in {@code mRemoveAnimations}.
 */
private void animateRemoveImpl(final ViewHolder holder) {
    final View view = holder.itemView;
    final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
    animation.setDuration(getRemoveDuration());
    // Let the customizer configure the disappearance effect on the animator.
    itemRemoveCustomizer.onPrepare(animation, holder);
    // Register the holder BEFORE starting the animation: every other
    // animate*Impl adds to its tracking set before start(), and if the end
    // callback ever runs during start() (e.g. zero duration) the remove()
    // in onAnimationEnd must find the holder already present, otherwise it
    // would be added afterwards and leak in mRemoveAnimations.
    mRemoveAnimations.add(holder);
    animation.setListener(new VpaListenerAdapter() {
        @Override
        public void onAnimationCancel(final View view) {
            itemRemoveCustomizer.onCancel(holder);
        }
        @Override
        public void onAnimationEnd(final View view) {
            animation.setListener(null);
            itemRemoveCustomizer.onEnd(holder);
            dispatchRemoveFinished(holder);
            mRemoveAnimations.remove(holder);
            dispatchFinishedWhenDone();
        }
        @Override
        public void onAnimationStart(final View view) {
            dispatchRemoveStarting(holder);
        }
    }).start();
}
/**
 * Checks whether any animations are still pending or running; if none are,
 * notifies listeners via {@link #dispatchAnimationsFinished()}. Called at the
 * end of every per-item animation so the last one to finish fires the event.
 */
private void dispatchFinishedWhenDone() {
    if (isRunning()) {
        return; // work still in flight - nothing to report yet
    }
    dispatchAnimationsFinished();
}
/**
 * Force-ends any change animation in {@code infoList} that involves
 * {@code item}, pruning entries once both of their holders are detached.
 */
private void endChangeAnimation(final List<ChangeInfo> infoList, final ViewHolder item) {
    // Walk backwards so removals cannot shift indices we have yet to visit.
    int index = infoList.size();
    while (--index >= 0) {
        final ChangeInfo info = infoList.get(index);
        if (!endChangeAnimationIfNecessary(info, item)) {
            continue;
        }
        // Drop the record once neither endpoint remains attached.
        if (info.oldHolder == null && info.newHolder == null) {
            infoList.remove(info);
        }
    }
}
/**
 * Force-ends the change animation for whichever endpoints of
 * {@code changeInfo} are still attached: the old holder first, then the new.
 */
private void endChangeAnimationIfNecessary(final ChangeInfo changeInfo) {
    final ViewHolder before = changeInfo.oldHolder;
    if (before != null) {
        endChangeAnimationIfNecessary(changeInfo, before);
    }
    // Re-read after the call above: ending the old endpoint may already have
    // cleared newHolder when both slots referenced the same holder.
    final ViewHolder after = changeInfo.newHolder;
    if (after != null) {
        endChangeAnimationIfNecessary(changeInfo, after);
    }
}
/**
 * Detaches {@code item} from {@code changeInfo} if it is one of its
 * endpoints, restores the view's alpha/translation to their resting values,
 * and reports the change as finished.
 *
 * @return {@code true} if {@code item} belonged to {@code changeInfo}
 */
private boolean endChangeAnimationIfNecessary(final ChangeInfo changeInfo, final ViewHolder item) {
    final boolean wasOldItem;
    // The "new" endpoint is checked first, preserving the original matching
    // order for the case where both slots hold the same holder.
    if (item == changeInfo.newHolder) {
        changeInfo.newHolder = null;
        wasOldItem = false;
    } else if (item == changeInfo.oldHolder) {
        changeInfo.oldHolder = null;
        wasOldItem = true;
    } else {
        return false;
    }
    final View itemView = item.itemView;
    ViewCompat.setAlpha(itemView, 1);
    ViewCompat.setTranslationX(itemView, 0);
    ViewCompat.setTranslationY(itemView, 0);
    dispatchChangeFinished(item, wasOldItem);
    return true;
}
/**
 * Bookkeeping for a single "change" animation: the two holders involved
 * (old fades out while translating, new fades in - see animateChangeImpl)
 * and the start/end coordinates of the move component.
 */
private static final class ChangeInfo {
    public ViewHolder oldHolder;
    public ViewHolder newHolder;
    public int fromX;
    public int fromY;
    public int toX;
    public int toY;

    private ChangeInfo(final ViewHolder oldHolder, final ViewHolder newHolder) {
        this.oldHolder = oldHolder;
        this.newHolder = newHolder;
    }

    private ChangeInfo(final ViewHolder oldHolder, final ViewHolder newHolder,
            final int fromX, final int fromY, final int toX, final int toY) {
        this(oldHolder, newHolder);
        this.fromX = fromX;
        this.fromY = fromY;
        this.toX = toX;
        this.toY = toY;
    }

    @Override
    public String toString() {
        // Same textual form as before, assembled with a builder.
        return new StringBuilder("ChangeInfo{")
                .append("oldHolder=").append(oldHolder)
                .append(", newHolder=").append(newHolder)
                .append(", fromX=").append(fromX)
                .append(", fromY=").append(fromY)
                .append(", toX=").append(toX)
                .append(", toY=").append(toY)
                .append('}')
                .toString();
    }
}
/**
 * Immutable record of a pending "move" animation: the holder plus its start
 * and end coordinates.
 */
private static final class MoveInfo {
    public final ViewHolder holder;
    public final int fromX;
    public final int fromY;
    public final int toX;
    public final int toY;

    private MoveInfo(final ViewHolder holder, final int startX, final int startY,
            final int endX, final int endY) {
        this.holder = holder;
        this.fromX = startX;
        this.fromY = startY;
        this.toX = endX;
        this.toY = endY;
    }
}
/**
 * No-op implementation of {@code ViewPropertyAnimatorListener} so anonymous
 * subclasses only need to override the callbacks they care about.
 */
private static class VpaListenerAdapter implements ViewPropertyAnimatorListener {
    @Override
    public void onAnimationStart(final View view) {
        // intentionally empty
    }

    @Override
    public void onAnimationCancel(final View view) {
        // intentionally empty
    }

    @Override
    public void onAnimationEnd(final View view) {
        // intentionally empty
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.s3control;
import javax.annotation.Generated;
import com.amazonaws.services.s3control.model.*;
/**
* Abstract implementation of {@code AWSS3ControlAsync}. Convenient method forms pass through to the corresponding
* overload that takes a request object and an {@code AsyncHandler}, which throws an
* {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSS3ControlAsync extends AbstractAWSS3Control implements AWSS3ControlAsync {
// No-op constructor; this abstract base carries no state and is only
// instantiated through subclasses.
protected AbstractAWSS3ControlAsync() {
}
// --- Create* operations ---
// Generated pass-through stubs: the request-only overload delegates to the
// (request, asyncHandler) overload with a null handler, and that overload
// throws UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<CreateAccessPointResult> createAccessPointAsync(CreateAccessPointRequest request) {
return createAccessPointAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateAccessPointResult> createAccessPointAsync(CreateAccessPointRequest request,
com.amazonaws.handlers.AsyncHandler<CreateAccessPointRequest, CreateAccessPointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateAccessPointForObjectLambdaResult> createAccessPointForObjectLambdaAsync(
CreateAccessPointForObjectLambdaRequest request) {
return createAccessPointForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateAccessPointForObjectLambdaResult> createAccessPointForObjectLambdaAsync(
CreateAccessPointForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<CreateAccessPointForObjectLambdaRequest, CreateAccessPointForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateBucketResult> createBucketAsync(CreateBucketRequest request) {
return createBucketAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateBucketResult> createBucketAsync(CreateBucketRequest request,
com.amazonaws.handlers.AsyncHandler<CreateBucketRequest, CreateBucketResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateJobResult> createJobAsync(CreateJobRequest request) {
return createJobAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateJobResult> createJobAsync(CreateJobRequest request,
com.amazonaws.handlers.AsyncHandler<CreateJobRequest, CreateJobResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateMultiRegionAccessPointResult> createMultiRegionAccessPointAsync(CreateMultiRegionAccessPointRequest request) {
return createMultiRegionAccessPointAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateMultiRegionAccessPointResult> createMultiRegionAccessPointAsync(CreateMultiRegionAccessPointRequest request,
com.amazonaws.handlers.AsyncHandler<CreateMultiRegionAccessPointRequest, CreateMultiRegionAccessPointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// --- Delete* operations ---
// Generated pass-through stubs: request-only overload delegates to the
// (request, asyncHandler) overload with a null handler, which throws
// UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<DeleteAccessPointResult> deleteAccessPointAsync(DeleteAccessPointRequest request) {
return deleteAccessPointAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteAccessPointResult> deleteAccessPointAsync(DeleteAccessPointRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteAccessPointRequest, DeleteAccessPointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteAccessPointForObjectLambdaResult> deleteAccessPointForObjectLambdaAsync(
DeleteAccessPointForObjectLambdaRequest request) {
return deleteAccessPointForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteAccessPointForObjectLambdaResult> deleteAccessPointForObjectLambdaAsync(
DeleteAccessPointForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteAccessPointForObjectLambdaRequest, DeleteAccessPointForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteAccessPointPolicyResult> deleteAccessPointPolicyAsync(DeleteAccessPointPolicyRequest request) {
return deleteAccessPointPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteAccessPointPolicyResult> deleteAccessPointPolicyAsync(DeleteAccessPointPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteAccessPointPolicyRequest, DeleteAccessPointPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteAccessPointPolicyForObjectLambdaResult> deleteAccessPointPolicyForObjectLambdaAsync(
DeleteAccessPointPolicyForObjectLambdaRequest request) {
return deleteAccessPointPolicyForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteAccessPointPolicyForObjectLambdaResult> deleteAccessPointPolicyForObjectLambdaAsync(
DeleteAccessPointPolicyForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteAccessPointPolicyForObjectLambdaRequest, DeleteAccessPointPolicyForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteBucketResult> deleteBucketAsync(DeleteBucketRequest request) {
return deleteBucketAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteBucketResult> deleteBucketAsync(DeleteBucketRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteBucketRequest, DeleteBucketResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteBucketLifecycleConfigurationResult> deleteBucketLifecycleConfigurationAsync(
DeleteBucketLifecycleConfigurationRequest request) {
return deleteBucketLifecycleConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteBucketLifecycleConfigurationResult> deleteBucketLifecycleConfigurationAsync(
DeleteBucketLifecycleConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteBucketLifecycleConfigurationRequest, DeleteBucketLifecycleConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteBucketPolicyResult> deleteBucketPolicyAsync(DeleteBucketPolicyRequest request) {
return deleteBucketPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteBucketPolicyResult> deleteBucketPolicyAsync(DeleteBucketPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteBucketPolicyRequest, DeleteBucketPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteBucketTaggingResult> deleteBucketTaggingAsync(DeleteBucketTaggingRequest request) {
return deleteBucketTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteBucketTaggingResult> deleteBucketTaggingAsync(DeleteBucketTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteBucketTaggingRequest, DeleteBucketTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteJobTaggingResult> deleteJobTaggingAsync(DeleteJobTaggingRequest request) {
return deleteJobTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteJobTaggingResult> deleteJobTaggingAsync(DeleteJobTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteJobTaggingRequest, DeleteJobTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteMultiRegionAccessPointResult> deleteMultiRegionAccessPointAsync(DeleteMultiRegionAccessPointRequest request) {
return deleteMultiRegionAccessPointAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteMultiRegionAccessPointResult> deleteMultiRegionAccessPointAsync(DeleteMultiRegionAccessPointRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteMultiRegionAccessPointRequest, DeleteMultiRegionAccessPointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeletePublicAccessBlockResult> deletePublicAccessBlockAsync(DeletePublicAccessBlockRequest request) {
return deletePublicAccessBlockAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeletePublicAccessBlockResult> deletePublicAccessBlockAsync(DeletePublicAccessBlockRequest request,
com.amazonaws.handlers.AsyncHandler<DeletePublicAccessBlockRequest, DeletePublicAccessBlockResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteStorageLensConfigurationResult> deleteStorageLensConfigurationAsync(DeleteStorageLensConfigurationRequest request) {
return deleteStorageLensConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteStorageLensConfigurationResult> deleteStorageLensConfigurationAsync(DeleteStorageLensConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteStorageLensConfigurationRequest, DeleteStorageLensConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteStorageLensConfigurationTaggingResult> deleteStorageLensConfigurationTaggingAsync(
DeleteStorageLensConfigurationTaggingRequest request) {
return deleteStorageLensConfigurationTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteStorageLensConfigurationTaggingResult> deleteStorageLensConfigurationTaggingAsync(
DeleteStorageLensConfigurationTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteStorageLensConfigurationTaggingRequest, DeleteStorageLensConfigurationTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// --- Describe* operations ---
// Generated pass-through stubs: request-only overload delegates to the
// (request, asyncHandler) overload with a null handler, which throws
// UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<DescribeJobResult> describeJobAsync(DescribeJobRequest request) {
return describeJobAsync(request, null);
}
@Override
public java.util.concurrent.Future<DescribeJobResult> describeJobAsync(DescribeJobRequest request,
com.amazonaws.handlers.AsyncHandler<DescribeJobRequest, DescribeJobResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DescribeMultiRegionAccessPointOperationResult> describeMultiRegionAccessPointOperationAsync(
DescribeMultiRegionAccessPointOperationRequest request) {
return describeMultiRegionAccessPointOperationAsync(request, null);
}
@Override
public java.util.concurrent.Future<DescribeMultiRegionAccessPointOperationResult> describeMultiRegionAccessPointOperationAsync(
DescribeMultiRegionAccessPointOperationRequest request,
com.amazonaws.handlers.AsyncHandler<DescribeMultiRegionAccessPointOperationRequest, DescribeMultiRegionAccessPointOperationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// --- Get* operations ---
// Generated pass-through stubs: request-only overload delegates to the
// (request, asyncHandler) overload with a null handler, which throws
// UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<GetAccessPointResult> getAccessPointAsync(GetAccessPointRequest request) {
return getAccessPointAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointResult> getAccessPointAsync(GetAccessPointRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointRequest, GetAccessPointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetAccessPointConfigurationForObjectLambdaResult> getAccessPointConfigurationForObjectLambdaAsync(
GetAccessPointConfigurationForObjectLambdaRequest request) {
return getAccessPointConfigurationForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointConfigurationForObjectLambdaResult> getAccessPointConfigurationForObjectLambdaAsync(
GetAccessPointConfigurationForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointConfigurationForObjectLambdaRequest, GetAccessPointConfigurationForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetAccessPointForObjectLambdaResult> getAccessPointForObjectLambdaAsync(GetAccessPointForObjectLambdaRequest request) {
return getAccessPointForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointForObjectLambdaResult> getAccessPointForObjectLambdaAsync(GetAccessPointForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointForObjectLambdaRequest, GetAccessPointForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyResult> getAccessPointPolicyAsync(GetAccessPointPolicyRequest request) {
return getAccessPointPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyResult> getAccessPointPolicyAsync(GetAccessPointPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointPolicyRequest, GetAccessPointPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyForObjectLambdaResult> getAccessPointPolicyForObjectLambdaAsync(
GetAccessPointPolicyForObjectLambdaRequest request) {
return getAccessPointPolicyForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyForObjectLambdaResult> getAccessPointPolicyForObjectLambdaAsync(
GetAccessPointPolicyForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointPolicyForObjectLambdaRequest, GetAccessPointPolicyForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyStatusResult> getAccessPointPolicyStatusAsync(GetAccessPointPolicyStatusRequest request) {
return getAccessPointPolicyStatusAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyStatusResult> getAccessPointPolicyStatusAsync(GetAccessPointPolicyStatusRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointPolicyStatusRequest, GetAccessPointPolicyStatusResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyStatusForObjectLambdaResult> getAccessPointPolicyStatusForObjectLambdaAsync(
GetAccessPointPolicyStatusForObjectLambdaRequest request) {
return getAccessPointPolicyStatusForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetAccessPointPolicyStatusForObjectLambdaResult> getAccessPointPolicyStatusForObjectLambdaAsync(
GetAccessPointPolicyStatusForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<GetAccessPointPolicyStatusForObjectLambdaRequest, GetAccessPointPolicyStatusForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBucketResult> getBucketAsync(GetBucketRequest request) {
return getBucketAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBucketResult> getBucketAsync(GetBucketRequest request,
com.amazonaws.handlers.AsyncHandler<GetBucketRequest, GetBucketResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBucketLifecycleConfigurationResult> getBucketLifecycleConfigurationAsync(
GetBucketLifecycleConfigurationRequest request) {
return getBucketLifecycleConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBucketLifecycleConfigurationResult> getBucketLifecycleConfigurationAsync(
GetBucketLifecycleConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<GetBucketLifecycleConfigurationRequest, GetBucketLifecycleConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBucketPolicyResult> getBucketPolicyAsync(GetBucketPolicyRequest request) {
return getBucketPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBucketPolicyResult> getBucketPolicyAsync(GetBucketPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<GetBucketPolicyRequest, GetBucketPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBucketTaggingResult> getBucketTaggingAsync(GetBucketTaggingRequest request) {
return getBucketTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBucketTaggingResult> getBucketTaggingAsync(GetBucketTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<GetBucketTaggingRequest, GetBucketTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetJobTaggingResult> getJobTaggingAsync(GetJobTaggingRequest request) {
return getJobTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetJobTaggingResult> getJobTaggingAsync(GetJobTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<GetJobTaggingRequest, GetJobTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMultiRegionAccessPointResult> getMultiRegionAccessPointAsync(GetMultiRegionAccessPointRequest request) {
return getMultiRegionAccessPointAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMultiRegionAccessPointResult> getMultiRegionAccessPointAsync(GetMultiRegionAccessPointRequest request,
com.amazonaws.handlers.AsyncHandler<GetMultiRegionAccessPointRequest, GetMultiRegionAccessPointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMultiRegionAccessPointPolicyResult> getMultiRegionAccessPointPolicyAsync(
GetMultiRegionAccessPointPolicyRequest request) {
return getMultiRegionAccessPointPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMultiRegionAccessPointPolicyResult> getMultiRegionAccessPointPolicyAsync(
GetMultiRegionAccessPointPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<GetMultiRegionAccessPointPolicyRequest, GetMultiRegionAccessPointPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMultiRegionAccessPointPolicyStatusResult> getMultiRegionAccessPointPolicyStatusAsync(
GetMultiRegionAccessPointPolicyStatusRequest request) {
return getMultiRegionAccessPointPolicyStatusAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMultiRegionAccessPointPolicyStatusResult> getMultiRegionAccessPointPolicyStatusAsync(
GetMultiRegionAccessPointPolicyStatusRequest request,
com.amazonaws.handlers.AsyncHandler<GetMultiRegionAccessPointPolicyStatusRequest, GetMultiRegionAccessPointPolicyStatusResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetPublicAccessBlockResult> getPublicAccessBlockAsync(GetPublicAccessBlockRequest request) {
return getPublicAccessBlockAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetPublicAccessBlockResult> getPublicAccessBlockAsync(GetPublicAccessBlockRequest request,
com.amazonaws.handlers.AsyncHandler<GetPublicAccessBlockRequest, GetPublicAccessBlockResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetStorageLensConfigurationResult> getStorageLensConfigurationAsync(GetStorageLensConfigurationRequest request) {
return getStorageLensConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetStorageLensConfigurationResult> getStorageLensConfigurationAsync(GetStorageLensConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<GetStorageLensConfigurationRequest, GetStorageLensConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetStorageLensConfigurationTaggingResult> getStorageLensConfigurationTaggingAsync(
GetStorageLensConfigurationTaggingRequest request) {
return getStorageLensConfigurationTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetStorageLensConfigurationTaggingResult> getStorageLensConfigurationTaggingAsync(
GetStorageLensConfigurationTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<GetStorageLensConfigurationTaggingRequest, GetStorageLensConfigurationTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// --- List* operations ---
// Generated pass-through stubs: request-only overload delegates to the
// (request, asyncHandler) overload with a null handler, which throws
// UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<ListAccessPointsResult> listAccessPointsAsync(ListAccessPointsRequest request) {
return listAccessPointsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListAccessPointsResult> listAccessPointsAsync(ListAccessPointsRequest request,
com.amazonaws.handlers.AsyncHandler<ListAccessPointsRequest, ListAccessPointsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListAccessPointsForObjectLambdaResult> listAccessPointsForObjectLambdaAsync(
ListAccessPointsForObjectLambdaRequest request) {
return listAccessPointsForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListAccessPointsForObjectLambdaResult> listAccessPointsForObjectLambdaAsync(
ListAccessPointsForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<ListAccessPointsForObjectLambdaRequest, ListAccessPointsForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListJobsResult> listJobsAsync(ListJobsRequest request) {
return listJobsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListJobsResult> listJobsAsync(ListJobsRequest request,
com.amazonaws.handlers.AsyncHandler<ListJobsRequest, ListJobsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListMultiRegionAccessPointsResult> listMultiRegionAccessPointsAsync(ListMultiRegionAccessPointsRequest request) {
return listMultiRegionAccessPointsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListMultiRegionAccessPointsResult> listMultiRegionAccessPointsAsync(ListMultiRegionAccessPointsRequest request,
com.amazonaws.handlers.AsyncHandler<ListMultiRegionAccessPointsRequest, ListMultiRegionAccessPointsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListRegionalBucketsResult> listRegionalBucketsAsync(ListRegionalBucketsRequest request) {
return listRegionalBucketsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListRegionalBucketsResult> listRegionalBucketsAsync(ListRegionalBucketsRequest request,
com.amazonaws.handlers.AsyncHandler<ListRegionalBucketsRequest, ListRegionalBucketsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListStorageLensConfigurationsResult> listStorageLensConfigurationsAsync(ListStorageLensConfigurationsRequest request) {
return listStorageLensConfigurationsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListStorageLensConfigurationsResult> listStorageLensConfigurationsAsync(ListStorageLensConfigurationsRequest request,
com.amazonaws.handlers.AsyncHandler<ListStorageLensConfigurationsRequest, ListStorageLensConfigurationsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// --- Put* operations ---
// Generated pass-through stubs: request-only overload delegates to the
// (request, asyncHandler) overload with a null handler, which throws
// UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<PutAccessPointConfigurationForObjectLambdaResult> putAccessPointConfigurationForObjectLambdaAsync(
PutAccessPointConfigurationForObjectLambdaRequest request) {
return putAccessPointConfigurationForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutAccessPointConfigurationForObjectLambdaResult> putAccessPointConfigurationForObjectLambdaAsync(
PutAccessPointConfigurationForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<PutAccessPointConfigurationForObjectLambdaRequest, PutAccessPointConfigurationForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutAccessPointPolicyResult> putAccessPointPolicyAsync(PutAccessPointPolicyRequest request) {
return putAccessPointPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutAccessPointPolicyResult> putAccessPointPolicyAsync(PutAccessPointPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<PutAccessPointPolicyRequest, PutAccessPointPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutAccessPointPolicyForObjectLambdaResult> putAccessPointPolicyForObjectLambdaAsync(
PutAccessPointPolicyForObjectLambdaRequest request) {
return putAccessPointPolicyForObjectLambdaAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutAccessPointPolicyForObjectLambdaResult> putAccessPointPolicyForObjectLambdaAsync(
PutAccessPointPolicyForObjectLambdaRequest request,
com.amazonaws.handlers.AsyncHandler<PutAccessPointPolicyForObjectLambdaRequest, PutAccessPointPolicyForObjectLambdaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutBucketLifecycleConfigurationResult> putBucketLifecycleConfigurationAsync(
PutBucketLifecycleConfigurationRequest request) {
return putBucketLifecycleConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutBucketLifecycleConfigurationResult> putBucketLifecycleConfigurationAsync(
PutBucketLifecycleConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<PutBucketLifecycleConfigurationRequest, PutBucketLifecycleConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutBucketPolicyResult> putBucketPolicyAsync(PutBucketPolicyRequest request) {
return putBucketPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutBucketPolicyResult> putBucketPolicyAsync(PutBucketPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<PutBucketPolicyRequest, PutBucketPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutBucketTaggingResult> putBucketTaggingAsync(PutBucketTaggingRequest request) {
return putBucketTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutBucketTaggingResult> putBucketTaggingAsync(PutBucketTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<PutBucketTaggingRequest, PutBucketTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutJobTaggingResult> putJobTaggingAsync(PutJobTaggingRequest request) {
return putJobTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutJobTaggingResult> putJobTaggingAsync(PutJobTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<PutJobTaggingRequest, PutJobTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutMultiRegionAccessPointPolicyResult> putMultiRegionAccessPointPolicyAsync(
PutMultiRegionAccessPointPolicyRequest request) {
return putMultiRegionAccessPointPolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutMultiRegionAccessPointPolicyResult> putMultiRegionAccessPointPolicyAsync(
PutMultiRegionAccessPointPolicyRequest request,
com.amazonaws.handlers.AsyncHandler<PutMultiRegionAccessPointPolicyRequest, PutMultiRegionAccessPointPolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutPublicAccessBlockResult> putPublicAccessBlockAsync(PutPublicAccessBlockRequest request) {
return putPublicAccessBlockAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutPublicAccessBlockResult> putPublicAccessBlockAsync(PutPublicAccessBlockRequest request,
com.amazonaws.handlers.AsyncHandler<PutPublicAccessBlockRequest, PutPublicAccessBlockResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutStorageLensConfigurationResult> putStorageLensConfigurationAsync(PutStorageLensConfigurationRequest request) {
return putStorageLensConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutStorageLensConfigurationResult> putStorageLensConfigurationAsync(PutStorageLensConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<PutStorageLensConfigurationRequest, PutStorageLensConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutStorageLensConfigurationTaggingResult> putStorageLensConfigurationTaggingAsync(
PutStorageLensConfigurationTaggingRequest request) {
return putStorageLensConfigurationTaggingAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutStorageLensConfigurationTaggingResult> putStorageLensConfigurationTaggingAsync(
PutStorageLensConfigurationTaggingRequest request,
com.amazonaws.handlers.AsyncHandler<PutStorageLensConfigurationTaggingRequest, PutStorageLensConfigurationTaggingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UpdateJobPriorityResult> updateJobPriorityAsync(UpdateJobPriorityRequest request) {
return updateJobPriorityAsync(request, null);
}
@Override
public java.util.concurrent.Future<UpdateJobPriorityResult> updateJobPriorityAsync(UpdateJobPriorityRequest request,
com.amazonaws.handlers.AsyncHandler<UpdateJobPriorityRequest, UpdateJobPriorityResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UpdateJobStatusResult> updateJobStatusAsync(UpdateJobStatusRequest request) {
return updateJobStatusAsync(request, null);
}
@Override
public java.util.concurrent.Future<UpdateJobStatusResult> updateJobStatusAsync(UpdateJobStatusRequest request,
com.amazonaws.handlers.AsyncHandler<UpdateJobStatusRequest, UpdateJobStatusResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.ofbiz.content.content;
import java.sql.Timestamp;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javolution.util.FastList;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilGenerics;
import org.ofbiz.base.util.UtilHttp;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.content.content.ContentSearch.ResultSortOrder;
import org.ofbiz.content.content.ContentSearch.SortKeywordRelevancy;
import org.ofbiz.content.content.ContentSearch.ContentSearchConstraint;
public class ContentSearchSession {

    public static final String module = ContentSearchSession.class.getName();

    /**
     * Holds the current content-search criteria for a user session: the list of
     * search constraints, the result sort order, and the paging state
     * (view index / view size). An instance is stored in the HttpSession under
     * the key {@code _CONTENT_SEARCH_OPTIONS_CURRENT_}.
     */
    @SuppressWarnings("serial")
    public static class ContentSearchOptions implements java.io.Serializable {
        protected List<ContentSearchConstraint> constraintList = null;
        protected ResultSortOrder resultSortOrder = null;
        protected Integer viewIndex = null;
        protected Integer viewSize = null;
        // Set whenever constraints or sort order change; consumed by
        // checkSaveSearchOptionsHistory to decide whether to snapshot options.
        protected boolean changed = false;

        public ContentSearchOptions() { }

        /** Basic copy constructor */
        public ContentSearchOptions(ContentSearchOptions contentSearchOptions) {
            this.constraintList = UtilMisc.makeListWritable(contentSearchOptions.constraintList);
            this.resultSortOrder = contentSearchOptions.resultSortOrder;
            this.viewIndex = contentSearchOptions.viewIndex;
            this.viewSize = contentSearchOptions.viewSize;
            this.changed = contentSearchOptions.changed;
        }

        /** Returns the live constraint list (may be null if no constraints set). */
        public List<ContentSearchConstraint> getConstraintList() {
            return this.constraintList;
        }

        /** Returns the constraint list of the session's current options (may be null). */
        public static List<ContentSearchConstraint> getConstraintList(HttpSession session) {
            return getContentSearchOptions(session).constraintList;
        }

        /**
         * Adds a constraint to the session's current options, ignoring duplicates.
         * Marks the options changed only when the constraint was actually added.
         */
        public static void addConstraint(ContentSearchConstraint contentSearchConstraint, HttpSession session) {
            ContentSearchOptions contentSearchOptions = getContentSearchOptions(session);
            if (contentSearchOptions.constraintList == null) {
                contentSearchOptions.constraintList = FastList.newInstance();
            }
            if (!contentSearchOptions.constraintList.contains(contentSearchConstraint)) {
                contentSearchOptions.constraintList.add(contentSearchConstraint);
                contentSearchOptions.changed = true;
            }
        }

        /**
         * Returns the sort order, lazily defaulting to keyword relevancy.
         * Never returns null; instantiating the default marks the options changed.
         */
        public ResultSortOrder getResultSortOrder() {
            if (this.resultSortOrder == null) {
                this.resultSortOrder = new SortKeywordRelevancy();
                this.changed = true;
            }
            return this.resultSortOrder;
        }

        /** Returns the sort order of the session's current options (never null). */
        public static ResultSortOrder getResultSortOrder(HttpServletRequest request) {
            ContentSearchOptions contentSearchOptions = getContentSearchOptions(request.getSession());
            return contentSearchOptions.getResultSortOrder();
        }

        /** Replaces the sort order on the session's current options. */
        public static void setResultSortOrder(ResultSortOrder resultSortOrder, HttpSession session) {
            ContentSearchOptions contentSearchOptions = getContentSearchOptions(session);
            contentSearchOptions.resultSortOrder = resultSortOrder;
            contentSearchOptions.changed = true;
        }

        /** Clears constraints and sort order; paging info is left untouched. */
        public static void clearSearchOptions(HttpSession session) {
            ContentSearchOptions contentSearchOptions = getContentSearchOptions(session);
            contentSearchOptions.constraintList = null;
            contentSearchOptions.resultSortOrder = null;
        }

        /** Resets paging state (view index and size). */
        public void clearViewInfo() {
            this.viewIndex = null;
            this.viewSize = null;
        }

        /**
         * @return Returns the viewIndex.
         */
        public Integer getViewIndex() {
            return viewIndex;
        }

        /**
         * @param viewIndex The viewIndex to set.
         */
        public void setViewIndex(Integer viewIndex) {
            this.viewIndex = viewIndex;
        }

        /**
         * @return Returns the viewSize.
         */
        public Integer getViewSize() {
            return viewSize;
        }

        /**
         * @param viewSize The viewSize to set.
         */
        public void setViewSize(Integer viewSize) {
            this.viewSize = viewSize;
        }

        /**
         * Pretty-prints each constraint for display.
         * Returns an empty list when there are no constraints; falls back to a
         * placeholder string when a constraint yields no description.
         */
        public List<String> searchGetConstraintStrings(boolean detailed, Delegator delegator, Locale locale) {
            List<ContentSearchConstraint> contentSearchConstraintList = this.getConstraintList();
            List<String> constraintStrings = FastList.newInstance();
            if (contentSearchConstraintList == null) {
                return constraintStrings;
            }
            for (ContentSearchConstraint contentSearchConstraint: contentSearchConstraintList) {
                if (contentSearchConstraint == null) continue;
                String constraintString = contentSearchConstraint.prettyPrintConstraint(delegator, detailed, locale);
                if (UtilValidate.isNotEmpty(constraintString)) {
                    constraintStrings.add(constraintString);
                } else {
                    constraintStrings.add("Description not available");
                }
            }
            return constraintStrings;
        }
    }

    /**
     * Returns the session's current search options, creating and storing a new
     * empty instance on first access.
     */
    public static ContentSearchOptions getContentSearchOptions(HttpSession session) {
        ContentSearchOptions contentSearchOptions = (ContentSearchOptions) session.getAttribute("_CONTENT_SEARCH_OPTIONS_CURRENT_");
        if (contentSearchOptions == null) {
            contentSearchOptions = new ContentSearchOptions();
            session.setAttribute("_CONTENT_SEARCH_OPTIONS_CURRENT_", contentSearchOptions);
        }
        return contentSearchOptions;
    }

    /**
     * Translates request parameters into search constraints, sort order and
     * paging state on the session. Idempotent per request (guarded by the
     * processSearchParametersAlreadyRun request attribute). Malformed numeric
     * and date parameters are logged and handled gracefully rather than
     * aborting the whole request.
     */
    public static void processSearchParameters(Map<String, Object> parameters, HttpServletRequest request) {
        Boolean alreadyRun = (Boolean) request.getAttribute("processSearchParametersAlreadyRun");
        if (Boolean.TRUE.equals(alreadyRun)) {
            return;
        } else {
            request.setAttribute("processSearchParametersAlreadyRun", Boolean.TRUE);
        }
        HttpSession session = request.getSession();
        boolean constraintsChanged = false;
        // clear search? by default yes, but if the clearSearch parameter is N then don't
        String clearSearchString = (String) parameters.get("clearSearch");
        if (!"N".equals(clearSearchString)) {
            searchClear(session);
            constraintsChanged = true;
        } else {
            String removeConstraint = (String) parameters.get("removeConstraint");
            if (UtilValidate.isNotEmpty(removeConstraint)) {
                try {
                    searchRemoveConstraint(Integer.parseInt(removeConstraint), session);
                    constraintsChanged = true;
                } catch (Exception e) {
                    Debug.logError(e, "Error removing constraint [" + removeConstraint + "]", module);
                }
            }
        }
        // add a Content Assoc Type to the search
        if (UtilValidate.isNotEmpty(parameters.get("SEARCH_CONTENT_ID"))) {
            String contentId = (String) parameters.get("SEARCH_CONTENT_ID");
            String contentAssocTypeId = (String) parameters.get("contentAssocTypeId");
            // defaults to true/Y, ie anything but N includes all sub contents
            boolean includeAllSubContents = !"N".equalsIgnoreCase((String) parameters.get("SEARCH_SUB_CONTENTS"));
            searchAddConstraint(new ContentSearch.ContentAssocConstraint(contentId, contentAssocTypeId, includeAllSubContents), session);
            constraintsChanged = true;
        }
        // add a Content fromDate thruDate to the search
        if (UtilValidate.isNotEmpty(parameters.get("fromDate")) || UtilValidate.isNotEmpty(parameters.get("thruDate"))) {
            // Timestamp.valueOf throws IllegalArgumentException on malformed input;
            // log and treat the bad value as unset, consistent with the other
            // parameter-parsing error handling in this method.
            Timestamp fromDate = null;
            if (UtilValidate.isNotEmpty(parameters.get("fromDate"))) {
                try {
                    fromDate = Timestamp.valueOf((String) parameters.get("fromDate"));
                } catch (Exception e) {
                    Debug.logError(e, "Error parsing fromDate [" + parameters.get("fromDate") + "], ignoring it", module);
                }
            }
            Timestamp thruDate = null;
            if (UtilValidate.isNotEmpty(parameters.get("thruDate"))) {
                try {
                    thruDate = Timestamp.valueOf((String) parameters.get("thruDate"));
                } catch (Exception e) {
                    Debug.logError(e, "Error parsing thruDate [" + parameters.get("thruDate") + "], ignoring it", module);
                }
            }
            searchAddConstraint(new ContentSearch.LastUpdatedRangeConstraint(fromDate, thruDate), session);
            constraintsChanged = true;
        }
        // if keywords were specified, add a constraint for them
        if (UtilValidate.isNotEmpty(parameters.get("SEARCH_STRING"))) {
            String keywordString = (String) parameters.get("SEARCH_STRING");
            String searchOperator = (String) parameters.get("SEARCH_OPERATOR");
            // defaults to true/Y, ie anything but N is true/Y
            boolean anyPrefixSuffix = !"N".equals(parameters.get("SEARCH_ANYPRESUF"));
            searchAddConstraint(new ContentSearch.KeywordConstraint(keywordString, anyPrefixSuffix, anyPrefixSuffix, null, "AND".equals(searchOperator)), session);
            constraintsChanged = true;
        }
        // set the sort order
        String sortOrder = (String) parameters.get("sortOrder");
        String sortAscending = (String) parameters.get("sortAscending");
        boolean ascending = !"N".equals(sortAscending);
        if (sortOrder != null) {
            if (sortOrder.equals("SortKeywordRelevancy")) {
                searchSetSortOrder(new ContentSearch.SortKeywordRelevancy(), session);
            } else if (sortOrder.startsWith("SortContentField:")) {
                String fieldName = sortOrder.substring("SortContentField:".length());
                searchSetSortOrder(new ContentSearch.SortContentField(fieldName, ascending), session);
            }
        }
        ContentSearchOptions contentSearchOptions = getContentSearchOptions(session);
        if (constraintsChanged) {
            // query changed, clear out the VIEW_INDEX & VIEW_SIZE
            contentSearchOptions.clearViewInfo();
        }
        String viewIndexStr = (String) parameters.get("VIEW_INDEX");
        if (UtilValidate.isNotEmpty(viewIndexStr)) {
            try {
                contentSearchOptions.setViewIndex(Integer.valueOf(viewIndexStr));
            } catch (Exception e) {
                Debug.logError(e, "Error formatting VIEW_INDEX, setting to 0", module);
                // we could just do nothing here, but we know something was specified so we don't want to use the previous value from the session
                contentSearchOptions.setViewIndex(Integer.valueOf(0));
            }
        }
        String viewSizeStr = (String) parameters.get("VIEW_SIZE");
        if (UtilValidate.isNotEmpty(viewSizeStr)) {
            try {
                contentSearchOptions.setViewSize(Integer.valueOf(viewSizeStr));
            } catch (Exception e) {
                Debug.logError(e, "Error formatting VIEW_SIZE, setting to 20", module);
                contentSearchOptions.setViewSize(Integer.valueOf(20));
            }
        }
    }

    /** Convenience delegate: adds a constraint to the session's current options. */
    public static void searchAddConstraint(ContentSearchConstraint contentSearchConstraint, HttpSession session) {
        ContentSearchOptions.addConstraint(contentSearchConstraint, session);
    }

    /** Convenience delegate: sets the sort order on the session's current options. */
    public static void searchSetSortOrder(ResultSortOrder resultSortOrder, HttpSession session) {
        ContentSearchOptions.setResultSortOrder(resultSortOrder, session);
    }

    /**
     * Returns the session's search-options history list, creating and storing
     * an empty list on first access (stored under _CONTENT_SEARCH_OPTIONS_HISTORY_).
     */
    public static List<ContentSearchOptions> getSearchOptionsHistoryList(HttpSession session) {
        List<ContentSearchOptions> optionsHistoryList = UtilGenerics.checkList(session.getAttribute("_CONTENT_SEARCH_OPTIONS_HISTORY_"));
        if (optionsHistoryList == null) {
            optionsHistoryList = FastList.newInstance();
            session.setAttribute("_CONTENT_SEARCH_OPTIONS_HISTORY_", optionsHistoryList);
        }
        return optionsHistoryList;
    }

    /** Pretty-prints the current constraints for display; never returns null. */
    public static List<String> searchGetConstraintStrings(boolean detailed, HttpSession session, Delegator delegator) {
        Locale locale = UtilHttp.getLocale(session);
        ContentSearchOptions contentSearchOptions = getContentSearchOptions(session);
        return contentSearchOptions.searchGetConstraintStrings(detailed, delegator, locale);
    }

    /** Pretty-prints the current sort order for display. */
    public static String searchGetSortOrderString(boolean detailed, HttpServletRequest request) {
        Locale locale = UtilHttp.getLocale(request);
        ResultSortOrder resultSortOrder = ContentSearchOptions.getResultSortOrder(request);
        if (resultSortOrder == null) return "";
        return resultSortOrder.prettyPrintSortOrder(detailed, locale);
    }

    /**
     * If the options changed since the last search, snapshots a copy at the
     * beginning of the history list and clears the changed flag.
     */
    public static void checkSaveSearchOptionsHistory(HttpSession session) {
        ContentSearchOptions contentSearchOptions = ContentSearchSession.getContentSearchOptions(session);
        // if the options have changed since the last search, add it to the beginning of the search options history
        if (contentSearchOptions.changed) {
            List<ContentSearchOptions> optionsHistoryList = ContentSearchSession.getSearchOptionsHistoryList(session);
            optionsHistoryList.add(0, new ContentSearchOptions(contentSearchOptions));
            contentSearchOptions.changed = false;
        }
    }

    /**
     * Removes the constraint at the given index; silently ignores a missing
     * list and out-of-range indexes (including negative ones, which would
     * otherwise throw IndexOutOfBoundsException from List.remove).
     */
    public static void searchRemoveConstraint(int index, HttpSession session) {
        List<ContentSearchConstraint> contentSearchConstraintList = ContentSearchOptions.getConstraintList(session);
        if (contentSearchConstraintList == null) {
            return;
        } else if (index < 0 || index >= contentSearchConstraintList.size()) {
            return;
        } else {
            contentSearchConstraintList.remove(index);
        }
    }

    /** Clears constraints and sort order for the session's current options. */
    public static void searchClear(HttpSession session) {
        ContentSearchOptions.clearSearchOptions(session);
    }
}
| |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Handler;
import android.os.Message;
import android.os.PowerManager;
import android.os.SystemClock;
import android.util.Slog;
import java.lang.Float;
/**
* Determines if the device has been set upon a stationary object.
*/
public class AnyMotionDetector {
    interface DeviceIdleCallback {
        public void onAnyMotionResult(int result);
    }
    private static final String TAG = "AnyMotionDetector";
    private static final boolean DEBUG = false;
    /** Stationary status is unknown due to insufficient orientation measurements. */
    public static final int RESULT_UNKNOWN = -1;
    /** Device is stationary, e.g. still on a table. */
    public static final int RESULT_STATIONARY = 0;
    /** Device has been moved. */
    public static final int RESULT_MOVED = 1;
    /** No orientation measurements are being performed or are planned. */
    private static final int STATE_INACTIVE = 0;
    /** Orientation measurements are being performed or are planned. */
    private static final int STATE_ACTIVE = 1;
    /** Current measurement state. */
    private int mState;
    /** Threshold angle in degrees beyond which the device is considered moving. */
    private final float THRESHOLD_ANGLE = 2f;
    /** Threshold energy above which the device is considered moving. */
    private final float THRESHOLD_ENERGY = 5f;
    /** The duration of the accelerometer orientation measurement. */
    private static final long ORIENTATION_MEASUREMENT_DURATION_MILLIS = 2500;
    /** The maximum duration we will collect accelerometer data. */
    private static final long ACCELEROMETER_DATA_TIMEOUT_MILLIS = 3000;
    /** The interval between accelerometer orientation measurements. */
    private static final long ORIENTATION_MEASUREMENT_INTERVAL_MILLIS = 5000;
    /**
     * The duration in milliseconds after which an orientation measurement is considered
     * too stale to be used.
     */
    private static final int STALE_MEASUREMENT_TIMEOUT_MILLIS = 2 * 60 * 1000;
    /** The accelerometer sampling interval. */
    private static final int SAMPLING_INTERVAL_MILLIS = 40;
    private final Handler mHandler;
    private final Object mLock = new Object();
    private Sensor mAccelSensor;
    private SensorManager mSensorManager;
    private PowerManager.WakeLock mWakeLock;
    /** The minimum number of samples required to detect AnyMotion. */
    private int mNumSufficientSamples;
    /** True if an orientation measurement is in progress. */
    private boolean mMeasurementInProgress;
    /** The most recent gravity vector. */
    private Vector3 mCurrentGravityVector = null;
    /** The second most recent gravity vector. */
    private Vector3 mPreviousGravityVector = null;
    /** Running sum of squared errors. */
    private RunningSignalStats mRunningStats;
    private DeviceIdleCallback mCallback = null;
    public AnyMotionDetector(PowerManager pm, Handler handler, SensorManager sm,
            DeviceIdleCallback callback) {
        if (DEBUG) Slog.d(TAG, "AnyMotionDetector instantiated.");
        mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG);
        mWakeLock.setReferenceCounted(false);
        mHandler = handler;
        mSensorManager = sm;
        mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        mMeasurementInProgress = false;
        mState = STATE_INACTIVE;
        mCallback = callback;
        mRunningStats = new RunningSignalStats();
        // Enough samples to cover the whole measurement window at the chosen rate.
        mNumSufficientSamples = (int) Math.ceil(
                ((double)ORIENTATION_MEASUREMENT_DURATION_MILLIS / SAMPLING_INTERVAL_MILLIS));
        if (DEBUG) Slog.d(TAG, "mNumSufficientSamples = " + mNumSufficientSamples);
    }
    /*
     * Acquire accel data until we determine AnyMotion status.
     */
    public void checkForAnyMotion() {
        if (DEBUG) Slog.d(TAG, "checkForAnyMotion(). mState = " + mState);
        if (mState != STATE_ACTIVE) {
            mState = STATE_ACTIVE;
            if (DEBUG) Slog.d(TAG, "Moved from STATE_INACTIVE to STATE_ACTIVE.");
            mCurrentGravityVector = null;
            mPreviousGravityVector = null;
            startOrientationMeasurement();
        }
    }
    /** Cancels any in-flight measurement and pending restarts/timeouts. */
    public void stop() {
        if (mState == STATE_ACTIVE) {
            mState = STATE_INACTIVE;
            if (DEBUG) Slog.d(TAG, "Moved from STATE_ACTIVE to STATE_INACTIVE.");
            if (mMeasurementInProgress) {
                mMeasurementInProgress = false;
                mSensorManager.unregisterListener(mListener);
            }
            mHandler.removeCallbacks(mMeasurementTimeout);
            mHandler.removeCallbacks(mSensorRestart);
            // The wake lock is only acquired when sensor registration succeeds;
            // releasing an unheld, non-reference-counted lock would throw.
            if (mWakeLock.isHeld()) {
                mWakeLock.release();
            }
            mCurrentGravityVector = null;
            mPreviousGravityVector = null;
        }
    }
    private void startOrientationMeasurement() {
        if (DEBUG) Slog.d(TAG, "startOrientationMeasurement: mMeasurementInProgress=" +
            mMeasurementInProgress + ", (mAccelSensor != null)=" + (mAccelSensor != null));
        if (!mMeasurementInProgress && mAccelSensor != null) {
            if (mSensorManager.registerListener(mListener, mAccelSensor,
                    SAMPLING_INTERVAL_MILLIS * 1000)) {
                mWakeLock.acquire();
                mMeasurementInProgress = true;
                mRunningStats.reset();
            }
            // Schedule the data-collection timeout even if registration failed so
            // the state machine always makes progress.
            Message msg = Message.obtain(mHandler, mMeasurementTimeout);
            msg.setAsynchronous(true);
            mHandler.sendMessageDelayed(msg, ACCELEROMETER_DATA_TIMEOUT_MILLIS);
        }
    }
    private int stopOrientationMeasurementLocked() {
        if (DEBUG) Slog.d(TAG, "stopOrientationMeasurement. mMeasurementInProgress=" +
                mMeasurementInProgress);
        int status = RESULT_UNKNOWN;
        if (mMeasurementInProgress) {
            mSensorManager.unregisterListener(mListener);
            mHandler.removeCallbacks(mMeasurementTimeout);
            mWakeLock.release();
            long detectionEndTime = SystemClock.elapsedRealtime();
            mMeasurementInProgress = false;
            mPreviousGravityVector = mCurrentGravityVector;
            mCurrentGravityVector = mRunningStats.getRunningAverage();
            if (DEBUG) {
                Slog.d(TAG, "mRunningStats = " + mRunningStats.toString());
                String currentGravityVectorString = (mCurrentGravityVector == null) ?
                        "null" : mCurrentGravityVector.toString();
                String previousGravityVectorString = (mPreviousGravityVector == null) ?
                        "null" : mPreviousGravityVector.toString();
                Slog.d(TAG, "mCurrentGravityVector = " + currentGravityVectorString);
                Slog.d(TAG, "mPreviousGravityVector = " + previousGravityVectorString);
            }
            mRunningStats.reset();
            status = getStationaryStatus();
            if (DEBUG) Slog.d(TAG, "getStationaryStatus() returned " + status);
            if (status != RESULT_UNKNOWN) {
                if (DEBUG) Slog.d(TAG, "Moved from STATE_ACTIVE to STATE_INACTIVE. status = " +
                        status);
                mState = STATE_INACTIVE;
            } else {
                /*
                 * Unknown due to insufficient measurements. Schedule another orientation
                 * measurement.
                 */
                if (DEBUG) Slog.d(TAG, "stopOrientationMeasurementLocked(): another measurement" +
                        " scheduled in " + ORIENTATION_MEASUREMENT_INTERVAL_MILLIS +
                        " milliseconds.");
                Message msg = Message.obtain(mHandler, mSensorRestart);
                msg.setAsynchronous(true);
                mHandler.sendMessageDelayed(msg, ORIENTATION_MEASUREMENT_INTERVAL_MILLIS);
            }
        }
        return status;
    }
    /*
     * Updates mStatus to the current AnyMotion status.
     */
    public int getStationaryStatus() {
        if ((mPreviousGravityVector == null) || (mCurrentGravityVector == null)) {
            return RESULT_UNKNOWN;
        }
        Vector3 previousGravityVectorNormalized = mPreviousGravityVector.normalized();
        Vector3 currentGravityVectorNormalized = mCurrentGravityVector.normalized();
        float angle = previousGravityVectorNormalized.angleBetween(currentGravityVectorNormalized);
        if (DEBUG) Slog.d(TAG, "getStationaryStatus: angle = " + angle);
        if ((angle < THRESHOLD_ANGLE) && (mRunningStats.getEnergy() < THRESHOLD_ENERGY)) {
            return RESULT_STATIONARY;
        } else if (Float.isNaN(angle)) {
            /**
             * Floating point rounding errors have caused the angle calculation's dot product to
             * exceed 1.0. In such case, we report RESULT_MOVED to prevent devices from rapidly
             * retrying this measurement.
             */
            return RESULT_MOVED;
        }
        long diffTime = mCurrentGravityVector.timeMillisSinceBoot -
                mPreviousGravityVector.timeMillisSinceBoot;
        if (diffTime > STALE_MEASUREMENT_TIMEOUT_MILLIS) {
            if (DEBUG) Slog.d(TAG, "getStationaryStatus: mPreviousGravityVector is too stale at " +
                    diffTime + " ms ago. Returning RESULT_UNKNOWN.");
            return RESULT_UNKNOWN;
        }
        return RESULT_MOVED;
    }
    private final SensorEventListener mListener = new SensorEventListener() {
        @Override
        public void onSensorChanged(SensorEvent event) {
            int status = RESULT_UNKNOWN;
            synchronized (mLock) {
                Vector3 accelDatum = new Vector3(SystemClock.elapsedRealtime(), event.values[0],
                        event.values[1], event.values[2]);
                mRunningStats.accumulate(accelDatum);
                // If we have enough samples, stop accelerometer data acquisition.
                if (mRunningStats.getSampleCount() >= mNumSufficientSamples) {
                    status = stopOrientationMeasurementLocked();
                }
            }
            // Invoke the callback outside the lock to avoid calling alien code while held.
            if (status != RESULT_UNKNOWN) {
                mCallback.onAnyMotionResult(status);
            }
        }
        @Override
        public void onAccuracyChanged(Sensor sensor, int accuracy) {
        }
    };
    private final Runnable mSensorRestart = new Runnable() {
        @Override
        public void run() {
            synchronized (mLock) {
                startOrientationMeasurement();
            }
        }
    };
    private final Runnable mMeasurementTimeout = new Runnable() {
        @Override
        public void run() {
            int status = RESULT_UNKNOWN;
            synchronized (mLock) {
                if (DEBUG) Slog.i(TAG, "mMeasurementTimeout. Failed to collect sufficient accel " +
                      "data within " + ACCELEROMETER_DATA_TIMEOUT_MILLIS + " ms. Stopping " +
                      "orientation measurement.");
                status = stopOrientationMeasurementLocked();
            }
            if (status != RESULT_UNKNOWN) {
                mCallback.onAnyMotionResult(status);
            }
        }
    };
    /**
     * A timestamped three dimensional vector and some vector operations.
     */
    private static class Vector3 {
        public long timeMillisSinceBoot;
        public float x;
        public float y;
        public float z;
        public Vector3(long timeMillisSinceBoot, float x, float y, float z) {
            this.timeMillisSinceBoot = timeMillisSinceBoot;
            this.x = x;
            this.y = y;
            this.z = z;
        }
        private float norm() {
            return (float) Math.sqrt(dotProduct(this));
        }
        private Vector3 normalized() {
            float mag = norm();
            return new Vector3(timeMillisSinceBoot, x / mag, y / mag, z / mag);
        }
        /**
         * Returns the angle between this 3D vector and another given 3D vector.
         * Assumes both have already been normalized.
         *
         * @param other The other Vector3 vector.
         * @return angle between this vector and the other given one.
         */
        public float angleBetween(Vector3 other) {
            double degrees = Math.toDegrees(Math.acos(this.dotProduct(other)));
            float returnValue = (float) degrees;
            // Gate logging on DEBUG: this runs on the per-sample hot path and
            // every other log in this class is already guarded the same way.
            if (DEBUG) {
                Slog.d(TAG, "angleBetween: this = " + this.toString() +
                        ", other = " + other.toString());
                Slog.d(TAG, "  degrees = " + degrees + ", returnValue = " + returnValue);
            }
            return returnValue;
        }
        @Override
        public String toString() {
            String msg = "";
            msg += "timeMillisSinceBoot=" + timeMillisSinceBoot;
            msg += " | x=" + x;
            msg += ", y=" + y;
            msg += ", z=" + z;
            return msg;
        }
        public float dotProduct(Vector3 v) {
            return x * v.x + y * v.y + z * v.z;
        }
        public Vector3 times(float val) {
            return new Vector3(timeMillisSinceBoot, x * val, y * val, z * val);
        }
        public Vector3 plus(Vector3 v) {
            return new Vector3(v.timeMillisSinceBoot, x + v.x, y + v.y, z + v.z);
        }
        public Vector3 minus(Vector3 v) {
            return new Vector3(v.timeMillisSinceBoot, x - v.x, y - v.y, z - v.z);
        }
    }
    /**
     * Maintains running statistics on the signal relevant to AnyMotion detection, including:
     * <ul>
     * <li>running average.
     * <li>running sum-of-squared-errors as the energy of the signal derivative.
     * <ul>
     */
    private static class RunningSignalStats {
        Vector3 previousVector;
        Vector3 currentVector;
        Vector3 runningSum;
        float energy;
        int sampleCount;
        public RunningSignalStats() {
            reset();
        }
        public void reset() {
            previousVector = null;
            currentVector = null;
            runningSum = new Vector3(0, 0, 0, 0);
            energy = 0;
            sampleCount = 0;
        }
        /**
         * Apply a 3D vector v as the next element in the running SSE.
         */
        public void accumulate(Vector3 v) {
            if (v == null) {
                if (DEBUG) Slog.i(TAG, "Cannot accumulate a null vector.");
                return;
            }
            sampleCount++;
            runningSum = runningSum.plus(v);
            previousVector = currentVector;
            currentVector = v;
            if (previousVector != null) {
                Vector3 dv = currentVector.minus(previousVector);
                float incrementalEnergy = dv.x * dv.x + dv.y * dv.y + dv.z * dv.z;
                energy += incrementalEnergy;
                if (DEBUG) Slog.i(TAG, "Accumulated vector " + currentVector.toString() +
                        ", runningSum = " + runningSum.toString() +
                        ", incrementalEnergy = " + incrementalEnergy +
                        ", energy = " + energy);
            }
        }
        public Vector3 getRunningAverage() {
            if (sampleCount > 0) {
                return runningSum.times((float)(1.0f / sampleCount));
            }
            return null;
        }
        public float getEnergy() {
            return energy;
        }
        public int getSampleCount() {
            return sampleCount;
        }
        @Override
        public String toString() {
            String msg = "";
            String currentVectorString = (currentVector == null) ?
                "null" : currentVector.toString();
            String previousVectorString = (previousVector == null) ?
                "null" : previousVector.toString();
            msg += "previousVector = " + previousVectorString;
            msg += ", currentVector = " + currentVectorString;
            msg += ", sampleCount = " + sampleCount;
            msg += ", energy = " + energy;
            return msg;
        }
    }
}
| |
/*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao.cassandra;
import com.datastax.driver.core.Session;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.utils.JsonMapperProvider;
import com.netflix.conductor.config.TestConfiguration;
import com.netflix.conductor.core.execution.ApplicationException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.util.EmbeddedCassandra;
import com.netflix.conductor.util.Statements;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import static com.netflix.conductor.dao.cassandra.CassandraBaseDAO.WorkflowMetadata;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
public class CassandraExecutionDAOTest {
private final TestConfiguration testConfiguration = new TestConfiguration();
private final ObjectMapper objectMapper = new JsonMapperProvider().get();
private EmbeddedCassandra embeddedCassandra;
private CassandraExecutionDAO executionDAO;
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Before
public void setUp() throws Exception {
    // Boot an in-process Cassandra and build the DAO under test against it.
    embeddedCassandra = new EmbeddedCassandra();
    Session session = embeddedCassandra.getSession();
    Statements statements = new Statements(testConfiguration);
    executionDAO = new CassandraExecutionDAO(session, objectMapper, testConfiguration, statements);
}
@After
public void teardown() {
    // Wipe all rows between tests so cases stay independent;
    // the embedded Cassandra instance itself is kept running.
    embeddedCassandra.cleanupData();
}
@Test
public void testValidateTasks() {
    // validateTasks must accept a batch of tasks that all belong to the same
    // workflow instance, and reject a batch that mixes workflow instances.
    List<Task> tasks = new ArrayList<>();
    // create tasks for a workflow and add to list
    Task task1 = new Task();
    task1.setWorkflowInstanceId("uuid");
    task1.setTaskId("task1id");
    task1.setReferenceTaskName("task1");
    tasks.add(task1);
    Task task2 = new Task();
    task2.setWorkflowInstanceId("uuid");
    task2.setTaskId("task2id");
    task2.setReferenceTaskName("task2");
    tasks.add(task2);
    // same workflow instance id on both tasks: must not throw
    executionDAO.validateTasks(tasks);
    // add a task from a different workflow to the list
    Task task3 = new Task();
    task3.setWorkflowInstanceId("other-uuid");
    task3.setTaskId("task3id");
    task3.setReferenceTaskName("task3");
    tasks.add(task3);
    // mixed workflow instance ids: expect an ApplicationException
    expectedException.expect(ApplicationException.class);
    expectedException.expectMessage("Tasks of multiple workflows cannot be created/updated simultaneously");
    executionDAO.validateTasks(tasks);
}
@Test
public void testWorkflowCRUD() {
String workflowId = IDGenerator.generate();
Workflow workflow = new Workflow();
workflow.setWorkflowId(workflowId);
workflow.setInput(new HashMap<>());
workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
// create a new workflow in the datastore
String id = executionDAO.createWorkflow(workflow);
assertEquals(workflowId, id);
// read the workflow from the datastore
Workflow found = executionDAO.getWorkflow(workflowId);
assertEquals(workflow, found);
// update the workflow
workflow.setStatus(Workflow.WorkflowStatus.COMPLETED);
executionDAO.updateWorkflow(workflow);
found = executionDAO.getWorkflow(workflowId);
assertEquals(workflow, found);
// remove the workflow from datastore
executionDAO.removeWorkflow(workflowId);
// read workflow again
workflow = executionDAO.getWorkflow(workflowId, true);
assertNull(workflow);
}
@Test
public void testTasksCRUD() {
String workflowId = IDGenerator.generate();
// create a workflow
Workflow workflow = new Workflow();
workflow.setWorkflowId(workflowId);
workflow.setInput(new HashMap<>());
workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
// add it to the datastore
executionDAO.createWorkflow(workflow);
// create tasks for this workflow
Task task1 = new Task();
task1.setWorkflowInstanceId(workflowId);
task1.setTaskType("task1");
task1.setReferenceTaskName("task1");
task1.setStatus(Task.Status.SCHEDULED);
String task1Id = IDGenerator.generate();
task1.setTaskId(task1Id);
Task task2 = new Task();
task2.setWorkflowInstanceId(workflowId);
task2.setTaskType("task2");
task2.setReferenceTaskName("task2");
task1.setStatus(Task.Status.SCHEDULED);
String task2Id = IDGenerator.generate();
task2.setTaskId(task2Id);
Task task3 = new Task();
task3.setWorkflowInstanceId(workflowId);
task3.setTaskType("task3");
task3.setReferenceTaskName("task3");
task1.setStatus(Task.Status.SCHEDULED);
String task3Id = IDGenerator.generate();
task3.setTaskId(task3Id);
List<Task> taskList = new ArrayList<>(Arrays.asList(task1, task2, task3));
// add the tasks to the datastore
List<Task> tasks = executionDAO.createTasks(taskList);
assertNotNull(tasks);
assertEquals(taskList, tasks);
// read the tasks from the datastore
Task task = executionDAO.getTask(task1Id);
assertEquals(task1, task);
task = executionDAO.getTask(task2Id);
assertEquals(task2, task);
task = executionDAO.getTask(task3Id);
assertEquals(task3, task);
// check the task lookup table
String foundId = executionDAO.lookupWorkflowIdFromTaskId(task1Id);
assertEquals(foundId, workflowId);
foundId = executionDAO.lookupWorkflowIdFromTaskId(task2Id);
assertEquals(foundId, workflowId);
foundId = executionDAO.lookupWorkflowIdFromTaskId(task3Id);
assertEquals(foundId, workflowId);
WorkflowMetadata workflowMetadata = executionDAO.getWorkflowMetadata(workflowId);
assertEquals(3, workflowMetadata.getTotalTasks());
assertEquals(1, workflowMetadata.getTotalPartitions());
List<Task> fetchedTasks = executionDAO.getTasks(Arrays.asList(task1Id, task2Id, task3Id));
assertNotNull(fetchedTasks);
assertEquals(3, fetchedTasks.size());
fetchedTasks = executionDAO.getTasksForWorkflow(workflowId);
assertNotNull(fetchedTasks);
assertEquals(3, fetchedTasks.size());
// read workflow with tasks
Workflow found = executionDAO.getWorkflow(workflowId, true);
assertNotNull(found);
assertEquals(workflow.getWorkflowId(), found.getWorkflowId());
assertEquals(3, found.getTasks().size());
assertEquals(task1, found.getTaskByRefName("task1"));
assertEquals(task2, found.getTaskByRefName("task2"));
assertEquals(task3, found.getTaskByRefName("task3"));
// update a task
task1.setStatus(Task.Status.IN_PROGRESS);
executionDAO.updateTask(task1);
task = executionDAO.getTask(task1Id);
assertEquals(task1, task);
// update multiple tasks
task2.setStatus(Task.Status.COMPLETED);
task3.setStatus(Task.Status.FAILED);
executionDAO.updateTasks(Arrays.asList(task2, task3));
task = executionDAO.getTask(task2Id);
assertEquals(task2, task);
task = executionDAO.getTask(task3Id);
assertEquals(task3, task);
// get pending tasks for the workflow
List<Task> pendingTasks = executionDAO.getPendingTasksByWorkflow(task1.getTaskType(), workflowId);
assertNotNull(pendingTasks);
assertEquals(1, pendingTasks.size());
assertEquals(task1, pendingTasks.get(0));
// remove a task
executionDAO.removeTask(task3.getTaskId());
workflowMetadata = executionDAO.getWorkflowMetadata(workflowId);
assertEquals(2, workflowMetadata.getTotalTasks());
assertEquals(1, workflowMetadata.getTotalPartitions());
// read workflow with tasks again
found = executionDAO.getWorkflow(workflowId);
assertNotNull(found);
assertEquals(workflow.getWorkflowId(), found.getWorkflowId());
assertEquals(2, found.getTasks().size());
assertEquals(task1, found.getTaskByRefName("task1"));
assertEquals(task2, found.getTaskByRefName("task2"));
// check the task lookup table
foundId = executionDAO.lookupWorkflowIdFromTaskId(task1Id);
assertEquals(foundId, workflowId);
foundId = executionDAO.lookupWorkflowIdFromTaskId(task2Id);
assertEquals(foundId, workflowId);
foundId = executionDAO.lookupWorkflowIdFromTaskId(task3Id);
assertNull(foundId);
// try to read removed task
Task t = executionDAO.getTask(task3.getTaskId());
assertNull(t);
}
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Educational
* Community License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the
* License at:
*
* http://opensource.org/licenses/ecl2.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.signup.tool.jsf.organizer;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.faces.context.FacesContext;
import javax.faces.event.ActionEvent;
import javax.faces.event.ValueChangeEvent;
import javax.faces.model.SelectItem;
import org.apache.commons.lang3.StringUtils;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.signup.logic.SignupUser;
import org.sakaiproject.signup.logic.SignupUserActionException;
import org.sakaiproject.signup.model.SignupAttachment;
import org.sakaiproject.signup.model.SignupAttendee;
import org.sakaiproject.signup.model.SignupMeeting;
import org.sakaiproject.signup.model.SignupTimeslot;
import org.sakaiproject.signup.tool.jsf.SignupMeetingWrapper;
import org.sakaiproject.signup.tool.jsf.SignupSiteWrapper;
import org.sakaiproject.signup.tool.jsf.SignupUIBaseBean;
import org.sakaiproject.signup.tool.jsf.TimeslotWrapper;
import org.sakaiproject.signup.tool.jsf.organizer.action.CreateMeetings;
import org.sakaiproject.signup.tool.jsf.organizer.action.CreateSitesGroups;
import org.sakaiproject.signup.tool.util.Utilities;
import org.sakaiproject.util.DateFormatterUtil;
import lombok.extern.slf4j.Slf4j;
/**
* <p>
* This JSF UIBean class will handle information exchanges between Organizer's
* copy meeting page:<b>copyMeeting.jsp</b> and backbone system.
*
* @author Peter Liu
*
* </P>
*/
@Slf4j
public class CopyMeetingSignupMBean extends SignupUIBaseBean {
	// Working copy of the meeting being duplicated
	private SignupMeeting signupMeeting;

	// Whether attendees of the original meeting are carried over to the copy
	private boolean keepAttendees;

	private int maxNumOfAttendees;

	// True when time slots accept an unlimited number of attendees
	private boolean unlimited;

	private String signupBeginsType;

	/* signup can start before this many minutes/hours/days */
	private int signupBegins;

	private String deadlineTimeType;

	/* signup deadline before this many minutes/hours/days */
	private int deadlineTime;

	//Meeting title
	private String title;

	//Location selected from the dropdown
	private String selectedLocation;

	//Category selected from the dropdown
	private String selectedCategory;

	//creator/organiser chosen from the dropdown
	private String creatorUserId;

	private Date repeatUntil;

	private String repeatType;

	/* "0" for number of repeats, "1" for until-date choice */
	private String recurLengthChoice;

	private int occurrences;

	//private int timeSlotDuration;

	private int numberOfSlots;

	// Whether attendee names are displayed to others
	private boolean showAttendeeName;

	private SignupSiteWrapper currentSite;

	private List<SignupSiteWrapper> otherSites;

	// Users permitted to sign up; used to screen copied attendees
	private List<SignupUser> allowedUserList;

	// Warning state when the original meeting references sites/groups the
	// current organizer can no longer see
	private boolean missingSitGroupWarning;

	private List<String> missingSites;

	private List<String> missingGroups;

	private boolean assignParicitpantsToAllRecurEvents;

	// Set by validateCopyMeeting(); checked by processSaveCopy() before saving
	private boolean validationError;

	private boolean repeatTypeUnknown=true;

	private List<SelectItem> meetingTypeRadioBttns;

	private UserDefineTimeslotBean userDefineTimeslotBean;

	//discontinued time slots case (user-defined/custom time slots)
	private List<TimeslotWrapper> customTimeSlotWrpList;

	private boolean userDefinedTS=false;

	// True when configuration forces email notification for new meetings
	protected static boolean NEW_MEETING_SEND_EMAIL = "true".equalsIgnoreCase(Utilities.getSignupConfigParamVal(
			"signup.email.notification.mandatory.for.newMeeting", "true")) ? true : false;

	private boolean mandatorySendEmail = NEW_MEETING_SEND_EMAIL;

	// Lazily built dropdown models; see getAllCategories()/getAllLocations()
	private List<SelectItem> categories = null;

	private List<SelectItem> locations=null;

	private String startTimeString;

	private String endTimeString;

	private String repeatUntilString;

	// Names of hidden form fields carrying ISO-8601 date strings
	private static String HIDDEN_ISO_STARTTIME = "startTimeISO8601";
	private static String HIDDEN_ISO_ENDTIME = "endTimeISO8601";
	private static String HIDDEN_ISO_UNTILTIME = "untilISO8601";
	/**
	 * Resets this session-scoped UIBean for a fresh copy operation: restores
	 * default flags, reloads the meeting being copied from the service, and
	 * re-populates title, location/category selections, recurrence settings
	 * and time-slot data.
	 */
	public void reset() {
		unlimited = false;
		keepAttendees = false;
		assignParicitpantsToAllRecurEvents = false;
		sendEmail = DEFAULT_SEND_EMAIL;
		if(NEW_MEETING_SEND_EMAIL){
			//mandatory send email out
			sendEmail= true;
		}
		//sendEmailAttendeeOnly = false;
		sendEmailToSelectedPeopleOnly = DEFAULT_SEND_EMAIL_TO_SELECTED_PEOPLE_ONLY;
		publishToCalendar= DEFAULT_EXPORT_TO_CALENDAR_TOOL;
		/* default 'repeat until' to the top of the current hour */
		Calendar calendar = Calendar.getInstance();
		calendar.setTime(new Date());
		calendar.set(Calendar.MINUTE, 0);
		calendar.set(Calendar.SECOND, 0);
		repeatUntil = calendar.getTime();
		recurLengthChoice="1";//0 for num of repeat, 1 for date choice
		occurrences=0;
		repeatType = ONCE_ONLY;
		repeatTypeUnknown=true;
		showAttendeeName = false;
		missingSitGroupWarning = false;
		/*cleanup previously unused attachments in CHS*/
		if(this.signupMeeting !=null)
			cleanUpUnusedAttachmentCopies(this.signupMeeting.getSignupAttachments());
		/*refresh copy of original*/
		this.signupMeeting = signupMeetingService.loadSignupMeeting(meetingWrapper.getMeeting().getId(), sakaiFacade
				.getCurrentUserId(), sakaiFacade.getCurrentLocationId());
		/*get meeting title*/
		title = this.signupMeeting.getTitle();
		/*prepare new attachments*/
		assignMainAttachmentsCopyToSignupMeeting();
		//TODO not consider copy time slot attachment yet
		/* seed attendee-limit / slot-count fields from the first time slot */
		List<SignupTimeslot> signupTimeSlots = signupMeeting.getSignupTimeSlots();
		if (signupTimeSlots != null && !signupTimeSlots.isEmpty()) {
			SignupTimeslot ts = (SignupTimeslot) signupTimeSlots.get(0);
			maxNumOfAttendees = ts.getMaxNoOfAttendees();
			this.unlimited = ts.isUnlimitedAttendee();
			showAttendeeName = ts.isDisplayAttendees();
			this.numberOfSlots = signupTimeSlots.size();
		} else {// announcement meeting type
			setNumberOfSlots(1);
		}
		//populate location and category data for new meeting
		//since it's copymeeting, the dropdown selections should have it already there.
		this.selectedLocation=this.signupMeeting.getLocation();
		this.selectedCategory = this.signupMeeting.getCategory();
		this.customLocation="";
		this.customCategory="";
		/* force dropdown models to be rebuilt lazily */
		this.categories = null;
		this.locations = null;
		populateDataForBeginDeadline(this.signupMeeting);
		/*Case: recurrence events*/
		prepareRecurredEvents();
		/* Initialize site/groups for current organizer */
		initializeSitesGroups();
		/* custom-ts case */
		this.customTimeSlotWrpList = null;
		this.userDefinedTS = false;
		/*populate timeslot data*/
		updateTimeSlotWrappers(this.meetingWrapper);
		if(CUSTOM_TIMESLOTS.equals(this.signupMeeting.getMeetingType())){
			this.userDefinedTS=true;
			this.customTimeSlotWrpList= getTimeslotWrappers();
			markerTimeslots(this.customTimeSlotWrpList);
		}
		getUserDefineTimeslotBean().init(this.signupMeeting, COPTY_MEETING_PAGE_URL, this.customTimeSlotWrpList, UserDefineTimeslotBean.COPY_MEETING);
	}
	/**
	 * Derives the relative signup-begin and signup-deadline offsets (and their
	 * minutes/hours/days scale types) from the absolute dates stored on the
	 * meeting, so the copy page can present them as editable offsets.
	 *
	 * @param sMeeting the meeting whose begin/deadline dates are converted
	 */
	private void populateDataForBeginDeadline(SignupMeeting sMeeting) {
		/* fall back to 'now' when the original has no explicit dates */
		long signupBeginsTime = sMeeting.getSignupBegins() == null ? new Date().getTime() : sMeeting.getSignupBegins()
				.getTime();
		long signupDeadline = sMeeting.getSignupDeadline() == null ? new Date().getTime() : sMeeting
				.getSignupDeadline().getTime();
		/* get signup begin & deadline relative time in minutes */
		long signupBeginBeforMeeting = (sMeeting.getStartTime().getTime() - signupBeginsTime) / MINUTE_IN_MILLISEC;
		long signupDeadLineBeforMeetingEnd = (sMeeting.getEndTime().getTime() - signupDeadline) / MINUTE_IN_MILLISEC;
		this.signupBeginsType = Utilities.getTimeScaleType(signupBeginBeforMeeting);
		this.signupBegins = Utilities.getRelativeTimeValue(signupBeginsType, signupBeginBeforMeeting);
		this.deadlineTimeType = Utilities.getTimeScaleType(signupDeadLineBeforMeetingEnd);
		this.deadlineTime = Utilities.getRelativeTimeValue(deadlineTimeType, signupDeadLineBeforMeetingEnd);
		/*user readability case for big numbers of minutes*/
		if(MINUTES.equals(this.signupBeginsType) && sMeeting.getSignupBegins().before(new Date())
				&& this.signupBegins > 500){
			/*we assume it has to be 'start now' before and we convert it to round to days*/
			this.signupBeginsType=DAYS;
			this.signupBegins = Utilities.getRelativeTimeValue(DAYS, signupBeginBeforMeeting);
			if(this.signupBegins == 0)
				this.signupBegins = 1; //add a day
		}
	}
	/**
	 * Just to overwrite the parent one.
	 *
	 * @return the SignupMeetingWrapper of the meeting currently being copied
	 */
	public SignupMeetingWrapper getMeetingWrapper() {
		return meetingWrapper;
	}
	/**
	 * This is a JSF action call method by UI to copy the event/meeting into a
	 * new one. Prepares the working copy, computes the recurrence count, saves
	 * through {@link CreateMeetings}, and routes to the appropriate page on
	 * success or failure.
	 *
	 * @return an action outcome string
	 */
	// TODO: what to do if timeslot is locked or canceled
	public String processSaveCopy() {
		/* a previous validation phase flagged an error: stay on the copy page */
		if (validationError) {
			validationError = false;
			return COPTY_MEETING_PAGE_URL;
		}
		SignupMeeting sMeeting = getSignupMeeting();
		try {
			prepareCopy(sMeeting);
			sMeeting.setRepeatUntil(getRepeatUntil());
			int repeatNum = getOccurrences();
			/* "1" means the recurrence length was given as an until-date */
			if("1".equals(getRecurLengthChoice())){
				repeatNum = CreateMeetings.getNumOfRecurrence(getRepeatType(), sMeeting.getStartTime(),
						getRepeatUntil());
			}
			sMeeting.setRepeatNum(repeatNum);
			sMeeting.setRepeatType(getRepeatType());
			if(CUSTOM_TIMESLOTS.equals(this.signupMeeting.getMeetingType())){
				boolean multipleCalBlocks = getUserDefineTimeslotBean().getPutInMultipleCalendarBlocks();
				sMeeting.setInMultipleCalendarBlocks(multipleCalBlocks);
			}
			/*pass who are receiving emails*/
			sMeeting.setSendEmailToSelectedPeopleOnly(getSendEmailToSelectedPeopleOnly());
			CreateMeetings createMeeting = new CreateMeetings(sMeeting, sendEmail, keepAttendees
					&& !assignParicitpantsToAllRecurEvents, keepAttendees && assignParicitpantsToAllRecurEvents,
					getSignupBegins(), getSignupBeginsType(), getDeadlineTime(), getDeadlineTimeType(), getRecurLengthChoice(), sakaiFacade,
					signupMeetingService, getAttachmentHandler(), sakaiFacade.getCurrentUserId(), sakaiFacade.getCurrentLocationId(), true);
			createMeeting.setPublishToCalendar(isPublishToCalendar());
			createMeeting.processSaveMeetings();
			/*make sure that they don't get cleaned up in CHS when saved successfully*/
			this.signupMeeting.getSignupAttachments().clear();
		} catch (PermissionException e) {
			log.info(Utilities.rb.getString("no.permission_create_event") + " - " + e.getMessage());
			Utilities.addErrorMessage(Utilities.rb.getString("no.permission_create_event"));
			return ORGANIZER_MEETING_PAGE_URL;
		} catch (SignupUserActionException ue) {
			/* user-level problem (e.g. stale page state): report and stay on the copy page */
			Utilities.addErrorMessage(ue.getMessage());
			return COPTY_MEETING_PAGE_URL;
		} catch (Exception e) {
			log.error(Utilities.rb.getString("error.occurred_try_again") + " - " + e.getMessage());
			Utilities.addErrorMessage(Utilities.rb.getString("error.occurred_try_again"));
			return ORGANIZER_MEETING_PAGE_URL;
		}
		getUserDefineTimeslotBean().reset(UserDefineTimeslotBean.COPY_MEETING);
		return MAIN_EVENTS_LIST_PAGE_URL;
	}
	/**
	 * This is a validator to make sure that the event/meeting starting time is
	 * before ending time etc. It also transfers the posted hidden ISO-8601
	 * date fields and the title/location/category selections onto the meeting.
	 * On any failure it sets {@code validationError} and queues a user-facing
	 * message; {@code processSaveCopy()} checks the flag before saving.
	 *
	 * @param e
	 *            an ActionEvent object.
	 */
	public void validateCopyMeeting(ActionEvent e) {
		Map<String, String> params = FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap();
		/* copy valid hidden ISO date fields onto the meeting (user's timezone) */
		String isoStartTime = params.get(HIDDEN_ISO_STARTTIME);
		if(DateFormatterUtil.isValidISODate(isoStartTime)){
			this.signupMeeting.setStartTime(sakaiFacade.getTimeService().parseISODateInUserTimezone(isoStartTime));
		}
		String isoEndTime = params.get(HIDDEN_ISO_ENDTIME);
		if(DateFormatterUtil.isValidISODate(isoEndTime)){
			this.signupMeeting.setEndTime(sakaiFacade.getTimeService().parseISODateInUserTimezone(isoEndTime));
		}
		String isoUntilTime = params.get(HIDDEN_ISO_UNTILTIME);
		if(DateFormatterUtil.isValidISODate(isoUntilTime)){
			setRepeatUntil(sakaiFacade.getTimeService().parseISODateInUserTimezone(isoUntilTime));
		}
		Date eventEndTime = signupMeeting.getEndTime();
		Date eventStartTime = signupMeeting.getStartTime();
		//Set Title
		if (StringUtils.isNotBlank(title)){
			log.debug("title set: " + title);
			this.signupMeeting.setTitle(title);
		}else{
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("event.title_cannot_be_blank"));
			return;
		}
		/*user defined own TS case*/
		if(isUserDefinedTS()){
			if(getUserDefineTimeslotBean().getDestTSwrpList()==null || getUserDefineTimeslotBean().getDestTSwrpList().isEmpty()){
				validationError = true;
				Utilities.addErrorMessage(Utilities.rb.getString("event.create_custom_defined_TS_blocks"));
				return;
			} else {
				/* the custom slots define the effective event time window */
				eventEndTime = getUserDefineTimeslotBean().getEventEndTime();
				eventStartTime = getUserDefineTimeslotBean().getEventStartTime();
				/*pass the value since they may be null*/
				this.signupMeeting.setStartTime(eventStartTime);
				this.signupMeeting.setEndTime(eventEndTime);
			}
		}
		if (eventEndTime.before(eventStartTime) || eventStartTime.equals(eventEndTime)) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("event.endTime_should_after_startTime"));
			return;
		}
		/* recurrence sanity checks: the meeting must not span its repeat period */
		if (!(getRepeatType().equals(ONCE_ONLY))) {
			int repeatNum = getOccurrences();
			if("1".equals(getRecurLengthChoice())){
				repeatNum = CreateMeetings.getNumOfRecurrence(getRepeatType(), eventStartTime,
						getRepeatUntil());
			}
			if ((DAILY.equals(getRepeatType())|| WEEKDAYS.equals(getRepeatType())) && isMeetingOverRepeatPeriod(eventStartTime, eventEndTime, 1)) {
				validationError = true;
				Utilities.addErrorMessage(Utilities.rb.getString("crossDay.event.repeat.daily.problem"));
				return;
			}
			if (WEEKLY.equals(getRepeatType()) && isMeetingOverRepeatPeriod(eventStartTime, eventEndTime, 7)) {
				validationError = true;
				Utilities.addErrorMessage(Utilities.rb.getString("crossDay.event.repeat.weekly.problem"));
				return;
			}
			if (BIWEEKLY.equals(getRepeatType()) && isMeetingOverRepeatPeriod(eventStartTime, eventEndTime, 14)) {
				validationError = true;
				Utilities.addErrorMessage(Utilities.rb.getString("crossDay.event.repeat.biweekly.problem"));
				return;
			}
			if (repeatNum < 1) {
				validationError = true;
				if("1".equals(getRecurLengthChoice()))
					Utilities.addErrorMessage(Utilities.rb.getString("event.repeatbeforestart"));
				else
					Utilities.addErrorMessage(Utilities.rb.getString("event.repeatNnum.bigger.than.one"));
				return;
			}
		}
		if (!CreateSitesGroups.isAtleastASiteOrGroupSelected(this.getCurrentSite(), this.getOtherSites())) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("select.atleast.oneGroup.for.copyMeeting"));
		}
		/*for custom defined time slot case*/
		if(!validationError && isUserDefinedTS()){
			this.signupMeeting.setStartTime(eventStartTime);
			this.signupMeeting.setEndTime(eventEndTime);
			this.signupMeeting.setMeetingType(CUSTOM_TIMESLOTS);
		}
		//Set Location
		/* custom text-field value takes precedence over the dropdown */
		if (StringUtils.isBlank(getCustomLocation())){
			if (StringUtils.isBlank(selectedLocation) || selectedLocation.equals(Utilities.rb.getString("select_location"))){
				validationError = true;
				Utilities.addErrorMessage(Utilities.rb.getString("event.location_not_assigned"));
				return;
			}
			this.signupMeeting.setLocation(selectedLocation);
		}
		else{
			this.signupMeeting.setLocation(getCustomLocation());
		}
		//clear the location fields???
		this.selectedLocation="";
		//Set Category
		//if textfield is blank, check the dropdown
		if (StringUtils.isBlank(getCustomCategory())){
			//if dropdown is not the default, then use its value
			if(!StringUtils.equals(selectedCategory, Utilities.rb.getString("select_category"))) {
				this.signupMeeting.setCategory(selectedCategory);
			}
		}
		else{
			this.signupMeeting.setCategory(getCustomCategory());
		}
		//clear the category fields???
		this.selectedCategory="";
		//set the creator/organiser
		this.signupMeeting.setCreatorUserId(creatorUserId);
		this.creatorUserId="";
	}
	/**
	 * This method is called by JSP page for adding/removing attachments action.
	 * It redirects into the attachment-handling flow for the current meeting's
	 * attachments.
	 *
	 * @return null, so JSF navigation stays on the current view.
	 */
	public String addRemoveAttachments(){
		getAttachmentHandler().processAddAttachRedirect(this.signupMeeting.getSignupAttachments(),null,true);
		return null;
	}
	/**
	 * JSF action for the Cancel button: discards attachment copies made for
	 * this abandoned copy operation, resets the custom time-slot bean state
	 * and clears the dropdown selections.
	 *
	 * @return the organizer meeting page outcome string
	 */
	public String doCancelAction(){
		cleanUpUnusedAttachmentCopies(this.signupMeeting.getSignupAttachments());
		getUserDefineTimeslotBean().reset(UserDefineTimeslotBean.COPY_MEETING);
		this.selectedLocation=null; //Reset selected option
		this.selectedCategory=null; //Reset selected option
		return ORGANIZER_MEETING_PAGE_URL;
	}
/**
* This is a ValueChange Listener to watch changes on the selection of
* 'unlimited attendee' choice by user.
*
* @param vce
* a ValuechangeEvent object.
* @return a outcome string.
*/
public String processGroup(ValueChangeEvent vce) {
Boolean changeValue = (Boolean) vce.getNewValue();
if (changeValue != null) {
unlimited = changeValue.booleanValue();
if (unlimited)
maxNumOfAttendees = 10;
}
return "";
}
/**
* Modify the existing time slot blocks
* @return String object for next page url
*/
public String editUserDefTimeSlots(){
if(this.customTimeSlotWrpList == null){
/*initialize when it comes from other meeting type*/
this.customTimeSlotWrpList = getTimeslotWrappers();
/*Mark the time slot sequence for recurring events changes issues*/
markerTimeslots(this.customTimeSlotWrpList);
getUserDefineTimeslotBean().init(this.signupMeeting, COPTY_MEETING_PAGE_URL,this.customTimeSlotWrpList, UserDefineTimeslotBean.COPY_MEETING);
}else{
if(!Utilities.isDataIntegritySafe(isUserDefinedTS(),UserDefineTimeslotBean.COPY_MEETING,getUserDefineTimeslotBean())){
return ORGANIZER_MEETING_PAGE_URL;
}
this.customTimeSlotWrpList = getUserDefineTimeslotBean().getDestTSwrpList();
getUserDefineTimeslotBean().init(this.signupMeeting, COPTY_MEETING_PAGE_URL,this.customTimeSlotWrpList, UserDefineTimeslotBean.COPY_MEETING);
}
return CUSTOM_DEFINED_TIMESLOT_PAGE_URL;
}
	/**
	 * Prepares the in-memory meeting copy for persistence: clears database
	 * ids, assigns the selected sites/groups, rebuilds the time slots
	 * (individual/group or custom defined) while screening and, where needed,
	 * truncating copied attendees, then recomputes the meeting end time and
	 * the signup begin/deadline dates.
	 *
	 * @param meeting the working SignupMeeting copy to prepare
	 * @throws Exception when the custom time-slot state belongs to a different
	 *             page flow (multiple browser tabs) or another preparation
	 *             step fails
	 */
	private void prepareCopy(SignupMeeting meeting) throws Exception {
		meeting.setId(null);// to save as new meeting in db
		meeting.setRecurrenceId(null);
		meeting.setSignupSites(CreateSitesGroups.getSelectedSignupSites(getCurrentSite(), getOtherSites()));
		this.allowedUserList = LoadAllowedUsers(meeting);
		List<SignupTimeslot> timeslots = meeting.getSignupTimeSlots();
		boolean lockOrCanceledTimeslot = false;
		Calendar calendar = Calendar.getInstance();
		calendar.setTime(meeting.getStartTime());
		/* Announcement type */
		if (getAnnouncementType() || timeslots == null || timeslots.isEmpty()) {
			calendar.add(Calendar.MINUTE, getTimeSlotDuration());
			meeting.setMeetingType(ANNOUNCEMENT);
			meeting.setSignupTimeSlots(null);
		} else {
			List<SignupTimeslot> cpTimeslotList = new ArrayList<SignupTimeslot>();
			List<SignupTimeslot> origTsList=null;
			/* regular individual/group meeting: rebuild evenly sized slots */
			if (!isUserDefinedTS() && (meeting.getMeetingType().equals(INDIVIDUAL) || meeting.getMeetingType().equals(GROUP))){
				origTsList = meeting.getSignupTimeSlots();
				SignupTimeslot origTs = null;
				for (int i = 0; i < getNumberOfSlots(); i++) {
					SignupTimeslot cpTs = new SignupTimeslot();
					int maxAttendees = (unlimited) ? SignupTimeslot.UNLIMITED : maxNumOfAttendees;
					cpTs.setMaxNoOfAttendees(maxAttendees);
					cpTs.setDisplayAttendees(showAttendeeName);
					cpTs.setStartTime(calendar.getTime());
					calendar.add(Calendar.MINUTE, getTimeSlotDuration());
					cpTs.setEndTime(calendar.getTime());
					/* pass attendees */
					if (i < origTsList.size()) {
						origTs = origTsList.get(i);
						List<SignupAttendee> attList = origTs.getAttendees();
						/* screening attendees */
						removeNotAllowedAttedees(attList);
						if (!unlimited && attList != null && attList.size() > maxAttendees) {
							/* attendee may be truncated */
							//this.truncateAttendee = true; validate by javaScript
							for (int j = attList.size(); j > maxAttendees; j--)
								attList.remove(j - 1);
						}
						cpTs.setAttendees(attList);
						origTs.setAttendees(null);// cleanup,may not necessary
						cpTs.setLocked(origTs.isLocked());
						cpTs.setCanceled(origTs.isCanceled());
						if (origTs.isCanceled() || origTs.isLocked())
							lockOrCanceledTimeslot = true;
					}
					cpTimeslotList.add(cpTs);
				}
			}
			/*User defined time slots case*/
			if (meeting.getMeetingType().equals(CUSTOM_TIMESLOTS) || isUserDefinedTS()){
				UserDefineTimeslotBean uBean = getUserDefineTimeslotBean();
				/* guard against stale state from another browser tab */
				if(uBean ==null || !uBean.COPY_MEETING.equals(uBean.getPlaceOrderBean())){
					throw new SignupUserActionException(MessageFormat.format(Utilities.rb.getString("you.have.multiple.tabs.in.browser"),
							new Object[]{getSakaiFacade().getServerConfigurationService().getServerName()}));
				}
				List<TimeslotWrapper> tsWrpList = uBean.getDestTSwrpList();
				if (tsWrpList != null){
					for (TimeslotWrapper wrapper : tsWrpList) {
						SignupTimeslot slot = wrapper.getTimeSlot();
						List<SignupAttendee> attList = slot.getAttendees();
						/* screening attendees */
						removeNotAllowedAttedees(attList);
						if (attList != null && attList.size() > slot.getMaxNoOfAttendees()) {
							/* attendee may be truncated */
							for (int j = attList.size(); j > slot.getMaxNoOfAttendees(); j--)
								attList.remove(j - 1);
						}
						if(slot.isLocked() || slot.isCanceled())
							lockOrCanceledTimeslot = true;
						cpTimeslotList.add(slot);
					}
				}
				/*for end time purpose*/
				int duration = getUserDefineTimeslotBean().getEventDuration();
				calendar.add(Calendar.MINUTE, duration);
			}
			meeting.setSignupTimeSlots(cpTimeslotList);// pass over
			if (lockOrCanceledTimeslot)
				Utilities.addErrorMessage(Utilities.rb.getString("warning.some_timeslot_may_locked_canceled"));
		}
		meeting.setEndTime(calendar.getTime());
		/* setup signup begin / deadline */
		setSignupBeginDeadlineData(meeting, getSignupBegins(), getSignupBeginsType(), getDeadlineTime(),
				getDeadlineTimeType());
		// copySites(meeting);
		/*Remove the coordinators who are no longer in the meeting due to site/group changes.
		 * We simplify by copying the coordinators over; the user can change them via the modify meeting page.*/
		//TODO later we may add the coordinators ability in the copy page too and need ajax to do the trick.
		meeting.setCoordinatorIds(getValidatedMeetingCoordinators(meeting));
	}
/**
* This method is called to get all locations to populate the dropdown
*
* @return list of allLocations
*/
public List<SelectItem> getAllLocations(){
if(locations ==null){
locations= new ArrayList<SelectItem>();
locations.addAll(Utilities.getSignupMeetingsBean().getAllLocations());
locations.add(0, new SelectItem(Utilities.rb.getString("select_location")));
}
return locations;
}
/**
* This method is called to get all categories to populate the dropdown
*
* @return list of categories
*/
public List<SelectItem> getAllCategories(){
if(categories == null){
categories= new ArrayList<SelectItem>();
categories.addAll(Utilities.getSignupMeetingsBean().getAllCategories());
//remove option 'All'
categories.remove(0);
categories.add(0, new SelectItem(Utilities.rb.getString("select_category")));
}
return categories;
}
	/**
	 * check if the attendees in the event/meeting should be copied along with
	 * it
	 *
	 * @return true if the attendees in the event/meeting is copied along with
	 *         it
	 */
	public boolean isKeepAttendees() {
		return keepAttendees;
	}

	/**
	 * this is a setter for UI
	 *
	 * @param keepAttendees
	 *            whether the original attendees are carried over to the copy
	 */
	public void setKeepAttendees(boolean keepAttendees) {
		this.keepAttendees = keepAttendees;
	}

	/**
	 * this is a getter method
	 *
	 * @return the maximum number of attendees per time slot
	 */
	public int getMaxNumOfAttendees() {
		return maxNumOfAttendees;
	}

	/**
	 * this is a setter
	 *
	 * @param maxNumOfAttendees
	 *            an integer number
	 */
	public void setMaxNumOfAttendees(int maxNumOfAttendees) {
		this.maxNumOfAttendees = maxNumOfAttendees;
	}

	/**
	 * this is a getter method for UI
	 *
	 * @return the SignupMeeting working copy backing this page
	 */
	public SignupMeeting getSignupMeeting() {
		return signupMeeting;
	}

	/**
	 * this is a setter
	 *
	 * @param signupMeeting
	 *            a SignupMeeting object
	 */
	public void setSignupMeeting(SignupMeeting signupMeeting) {
		this.signupMeeting = signupMeeting;
	}

	/**
	 * check to see if the attendees are limited in the event/meeting
	 *
	 * @return true if the number of attendees is unlimited
	 */
	public boolean isUnlimited() {
		return unlimited;
	}

	/**
	 * this is a setter for UI
	 *
	 * @param unlimited
	 *            a boolean value
	 */
	public void setUnlimited(boolean unlimited) {
		this.unlimited = unlimited;
	}

	/**
	 * this is a getter method to provide a relative time
	 *
	 * @return the signup deadline offset before the meeting end, expressed in
	 *         units of {@link #getDeadlineTimeType()}
	 */
	public int getDeadlineTime() {
		return deadlineTime;
	}

	/**
	 * this is a setter
	 *
	 * @param deadlineTime
	 *            an integer number, which represents a relative time to meeting
	 *            starting time
	 */
	public void setDeadlineTime(int deadlineTime) {
		this.deadlineTime = deadlineTime;
	}

	/**
	 * this is a getter method for UI
	 *
	 * @return the time-scale type (minutes/hours/days) for the deadline value
	 */
	public String getDeadlineTimeType() {
		return deadlineTimeType;
	}

	/**
	 * this is a setter for UI
	 *
	 * @param deadlineTimeType
	 *            the time-scale type (minutes/hours/days) as a String
	 */
	public void setDeadlineTimeType(String deadlineTimeType) {
		this.deadlineTimeType = deadlineTimeType;
	}
	/** @return the meeting title entered on the copy page */
	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	/**
	 * This is a getter method to provide selected location.
	 * NOTE(review): the lower-case 'selected' in the accessor name appears to
	 * be required by existing JSF page bindings, so it is kept as-is.
	 *
	 * @return the location chosen in the dropdown
	 */
	public String getselectedLocation() {
		return selectedLocation;
	}

	/**
	 * This is a setter.
	 *
	 * @param selectedLocation
	 *            String that represents the selected location
	 */
	public void setselectedLocation(String selectedLocation) {
		this.selectedLocation = selectedLocation;
	}

	/** @return the category chosen in the dropdown (lower-case name kept for JSF) */
	public String getselectedCategory() {
		return selectedCategory;
	}

	public void setselectedCategory(String selectedCategory) {
		this.selectedCategory = selectedCategory;
	}

	/**
	 * @return the chosen organizer's user id, defaulting to the current user
	 *         when none has been selected
	 */
	public String getcreatorUserId() {
		if(this.creatorUserId ==null){
			//set current user as default meeting organizer in case people forget to select one
			return sakaiFacade.getCurrentUserId();
		}
		return creatorUserId;
	}

	public void setcreatorUserId(String creatorUserId) {
		this.creatorUserId=creatorUserId;
	}

	/**
	 * this is a getter method for UI
	 *
	 * @return how long before the meeting the signup begins, in units of
	 *         {@link #getSignupBeginsType()}
	 */
	public int getSignupBegins() {
		return signupBegins;
	}

	/**
	 * this is a setter for UI
	 *
	 * @param signupBegins
	 *            an integer number
	 */
	public void setSignupBegins(int signupBegins) {
		this.signupBegins = signupBegins;
	}

	/**
	 * this is a getter method for UI
	 *
	 * @return the time-scale type (minutes/hours/days) for the signup-begins value
	 */
	public String getSignupBeginsType() {
		return signupBeginsType;
	}

	/**
	 * this is a setter for UI
	 *
	 * @param signupBeginsType
	 *            the time-scale type (minutes/hours/days) as a String
	 */
	public void setSignupBeginsType(String signupBeginsType) {
		this.signupBeginsType = signupBeginsType;
	}

	/** @return the date until which the meeting recurs */
	public Date getRepeatUntil() {
		return repeatUntil;
	}

	public void setRepeatUntil(Date repeatUntil) {
		this.repeatUntil = repeatUntil;
	}

	// raw date strings posted by the page's date widgets
	public String getStartTimeString() {
		return startTimeString;
	}

	public String getEndTimeString() {
		return endTimeString;
	}

	public void setStartTimeString(String startTimeString) {
		this.startTimeString = startTimeString;
	}

	public void setEndTimeString(String endTimeString) {
		this.endTimeString = endTimeString;
	}

	public String getRepeatUntilString() {
		return repeatUntilString;
	}

	public void setRepeatUntilString(String repeatUntilString) {
		this.repeatUntilString = repeatUntilString;
	}

	/** @return the recurrence type (e.g. once-only, daily, weekly) */
	public String getRepeatType() {
		return repeatType;
	}

	public void setRepeatType(String repeatType) {
		this.repeatType = repeatType;
	}
/**
 * Getter for UI.
 *
 * Doc fix: this returns the duration of a single time slot in minutes (an
 * int), not a HtmlInputHidden object as previously documented.
 * NOTE(review): divides by getNumberOfSlots(); a zero slot count would throw
 * ArithmeticException — presumed prevented by UI validation, confirm.
 *
 * @return the per-slot duration in minutes
 */
public int getTimeSlotDuration() {
	long duration = (getSignupMeeting().getEndTime().getTime() - getSignupMeeting().getStartTime().getTime())
			/ (MINUTE_IN_MILLISEC * getNumberOfSlots());
	return (int) duration;
}

/**
 * Getter for UI.
 *
 * @return the number of time slots (doc fix: an int, not a HtmlInputHidden object)
 */
public int getNumberOfSlots() {
	return this.numberOfSlots;
}

/**
 * Setter for UI.
 *
 * @param numberOfSlots an int value
 */
public void setNumberOfSlots(int numberOfSlots) {
	this.numberOfSlots = numberOfSlots;
}

/**
 * Getter for UI.
 *
 * @return a list of SignupSiteWrapper objects for the other sites
 */
public List<SignupSiteWrapper> getOtherSites() {
	return this.otherSites;
}

/**
 * Setter for UI.
 *
 * @param signupSiteWrapperList a list of SignupSiteWrapper objects
 */
public void setOtherSites(List<SignupSiteWrapper> signupSiteWrapperList) {
	this.otherSites = signupSiteWrapperList;
}

/**
 * Getter for UI.
 *
 * @return the SignupSiteWrapper for the current site
 */
public SignupSiteWrapper getCurrentSite() {
	return this.currentSite;
}

/**
 * Setter for UI.
 *
 * @param currentSite a SignupSiteWrapper object
 */
public void setCurrentSite(SignupSiteWrapper currentSite) {
	this.currentSite = currentSite;
}
/**
 * Populates the current-site / other-sites wrappers and the
 * missing-site/group warning state from the shared CreateSitesGroups bean.
 */
private void initializeSitesGroups() {
	/*
	 * Temporary bug fix for AuthZ code ( isAllowed(..) ), which gives wrong
	 * permission for the first time at 'Create new or Copy meeting pages'.
	 * The bug will be gone by second time go into it. Once it's fixed,
	 * remove this below and other places and make it into a more clean way
	 * by not sharing the same CreateSitesGroups Object. new
	 * CreateSitesGroups(getSignupMeeting(),sakaiFacade,signupMeetingService);
	 */
	CreateSitesGroups createSiteGroups = Utilities.getSignupMeetingsBean().getCreateSitesGroups();
	// Clear state left over from a previous use of the shared bean.
	createSiteGroups.resetSiteGroupCheckboxMark();
	createSiteGroups.setSignupMeeting(this.getSignupMeeting());
	createSiteGroups.processSiteGroupSelectionMarks();
	setCurrentSite(createSiteGroups.getCurrentSite());
	setOtherSites(createSiteGroups.getOtherSites());
	// Record truncation/missing info so the UI can show a warning.
	setMissingSitGroupWarning(createSiteGroups.isSiteOrGroupTruncated());
	setMissingSites(createSiteGroups.getMissingSites());
	setMissingGroups(createSiteGroups.getMissingGroups());
}
/**
 * Loads all users permitted to sign up for the given meeting.
 *
 * @param meeting the meeting whose allowed users are loaded
 * @return a list of SignupUser objects with permission for the meeting
 */
private List<SignupUser> LoadAllowedUsers(SignupMeeting meeting) {
	// Bug fix: the passed-in meeting was previously ignored in favor of
	// getSignupMeeting(); honor the parameter the caller supplied.
	return sakaiFacade.getAllUsers(meeting);
}
/**
 * Removes from the given list every attendee whose user id is not present
 * in the allowed-user list. The list is modified in place.
 *
 * @param screenAttendeeList attendees currently shown on the screen; may be null
 */
private void removeNotAllowedAttedees(List<SignupAttendee> screenAttendeeList) {
	if (screenAttendeeList == null || screenAttendeeList.isEmpty()) {
		return;
	}
	// Walk backwards so index-based removal never skips an element.
	for (int index = screenAttendeeList.size() - 1; index >= 0; index--) {
		if (!isAllowedAttendee(screenAttendeeList.get(index))) {
			screenAttendeeList.remove(index);
		}
	}
}

/** Returns true when the attendee's user id appears in the allowed-user list. */
private boolean isAllowedAttendee(SignupAttendee attendee) {
	for (SignupUser allowedOne : allowedUserList) {
		if (allowedOne.getInternalUserId().equals(attendee.getAttendeeUserId())) {
			return true;
		}
	}
	return false;
}
/**
 * Builds a pipe-separated list of coordinator ids, keeping only those who
 * have permission to create the meeting.
 *
 * @param meeting the meeting whose coordinators are validated
 * @return a "id1|id2|..." string, or null when no coordinator qualifies
 */
private String getValidatedMeetingCoordinators(SignupMeeting meeting) {
	List<String> allCoordinatorIds = meeting.getCoordinatorIdsList();
	StringBuilder sb = new StringBuilder();
	boolean isFirst = true;
	for (String couId : allCoordinatorIds) {
		if (this.sakaiFacade.hasPermissionToCreate(meeting, couId)) {
			if (isFirst) {
				sb.append(couId);
				isFirst = false;
			} else if (sb.length() < 1000) {
				// Safeguard for the db column max size; a meeting hardly ever
				// has over 10 coordinators.
				// Idiom fix: chained append instead of string concatenation.
				sb.append('|').append(couId);
			}
		}
	}
	return sb.length() < 1 ? null : sb.toString();
}
/**
 * Getter for UI.
 *
 * @return true when some sites/groups were truncated from display
 */
public boolean isMissingSitGroupWarning() {
	return this.missingSitGroupWarning;
}

private void setMissingSitGroupWarning(boolean missingSitGroupWarning) {
	this.missingSitGroupWarning = missingSitGroupWarning;
}

/** @return names of the missing sites; may be null */
public List<String> getMissingSites() {
	return this.missingSites;
}

private void setMissingSites(List<String> missingSites) {
	this.missingSites = missingSites;
}

/**
 * Getter for UI.
 *
 * @return true when there is at least one missing site
 */
public boolean isMissingSitesThere() {
	// Idiom fix: single boolean expression instead of if/return-true/false.
	return this.missingSites != null && !this.missingSites.isEmpty();
}

/** @return names of the missing groups; may be null */
public List<String> getMissingGroups() {
	return this.missingGroups;
}

private void setMissingGroups(List<String> missingGroups) {
	this.missingGroups = missingGroups;
}

/**
 * Getter for UI.
 *
 * @return true when there is at least one missing group
 */
public boolean isMissingGroupsThere() {
	return this.missingGroups != null && !this.missingGroups.isEmpty();
}

/**
 * Getter for UI.
 *
 * @return true when participants should be assigned to all recurring events
 */
public boolean isAssignParicitpantsToAllRecurEvents() {
	return this.assignParicitpantsToAllRecurEvents;
}

/**
 * Setter for UI.
 *
 * @param assignParicitpantsToAllRecurEvents a boolean value
 */
public void setAssignParicitpantsToAllRecurEvents(boolean assignParicitpantsToAllRecurEvents) {
	this.assignParicitpantsToAllRecurEvents = assignParicitpantsToAllRecurEvents;
}

/**
 * Getter for UI. The list is refreshed on every call.
 *
 * @return a list of SelectItem objects for the meeting-type radio buttons
 */
public List<SelectItem> getMeetingTypeRadioBttns() {
	this.meetingTypeRadioBttns = Utilities.getMeetingTypeSelectItems(getSignupMeeting().getMeetingType(), true);
	return this.meetingTypeRadioBttns;
}
/**
 * If this meeting belongs to a recurrence (recurrenceId > 0), loads the
 * sibling recurring meetings and derives the repeat-until/repeat-type state
 * from them.
 */
private void prepareRecurredEvents(){
	Long recurrenceId = this.signupMeeting.getRecurrenceId();
	if (recurrenceId != null && recurrenceId.longValue() > 0 ) {
		Calendar cal = Calendar.getInstance();
		cal.setTime(this.signupMeeting.getStartTime());
		/* Look back one month (24h * 31 days) so we also catch earlier events
		 * in the same recurrence, if this is not the only one. */
		cal.add(Calendar.HOUR,-24*31);
		List<SignupMeeting> recurredMeetings = signupMeetingService.getRecurringSignupMeetings(getSakaiFacade().getCurrentLocationId(), getSakaiFacade().getCurrentUserId(), recurrenceId,
				cal.getTime());
		retrieveRecurrenceData(recurredMeetings);
	}
}
/*This method only provide a most possible repeatType, not with 100% accuracy*/
private void retrieveRecurrenceData(List<SignupMeeting> upTodateOrginMeetings) {
	/* If the recurring events already use the 'Start Now' signup type, adopt it. */
	if (Utilities.testSignupBeginStartNowType(upTodateOrginMeetings)) {
		setSignupBeginsType(START_NOW); // overwrite previous value
		setSignupBegins(6); // default value; not used
	}
	if (upTodateOrginMeetings == null || upTodateOrginMeetings.isEmpty()) {
		return;
	}
	/* Default 'repeat until' to this meeting's own start time (last-event case). */
	Calendar cal = Calendar.getInstance();
	cal.setTime(this.signupMeeting.getStartTime());
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	setRepeatUntil(cal.getTime());
	int listSize = upTodateOrginMeetings.size();
	if (listSize > 1) {
		/* Use the start time of the last recurrence as the 'repeat until' date. */
		Date lastDate = upTodateOrginMeetings.get(listSize - 1).getStartTime();
		cal.setTime(lastDate);
		cal.set(Calendar.MINUTE, 0);
		cal.set(Calendar.SECOND, 0);
		setRepeatUntil(cal.getTime());
		String repeatType = upTodateOrginMeetings.get(listSize - 1).getRepeatType();
		if (repeatType != null && !ONCE_ONLY.equals(repeatType)) {
			setRepeatType(repeatType);
			setRepeatTypeUnknown(false);
			return;
		}
		/*
		 * Backward compatibility for old data without a stored repeat type:
		 * approximate the type from the gap between the last two occurrences,
		 * assuming they are consecutive. This will be cleaned up after a while.
		 */
		Calendar calFirst = Calendar.getInstance();
		Calendar calSecond = Calendar.getInstance();
		calFirst.setTime(upTodateOrginMeetings.get(listSize - 2).getStartTime());
		calFirst.set(Calendar.SECOND, 0);
		calFirst.set(Calendar.MILLISECOND, 0);
		calSecond.setTime(upTodateOrginMeetings.get(listSize - 1).getStartTime());
		calSecond.set(Calendar.SECOND, 0);
		calSecond.set(Calendar.MILLISECOND, 0);
		/*
		 * Bug fix: divide in long arithmetic BEFORE narrowing to int. The old
		 * code cast the millisecond difference to int first, which overflows
		 * for gaps longer than ~24 days and produced a garbage day count.
		 */
		int daysDiff = (int) ((calSecond.getTimeInMillis() - calFirst.getTimeInMillis()) / DAY_IN_MILLISEC);
		setRepeatTypeUnknown(false);
		if (daysDiff == perDay) {
			// could have weekdays get into this one; not very accurate
			setRepeatType(DAILY);
		} else if (daysDiff == perWeek) {
			setRepeatType(WEEKLY);
		} else if (daysDiff == perBiweek) {
			setRepeatType(BIWEEKLY);
		} else if (daysDiff == 3 && calFirst.get(Calendar.DAY_OF_WEEK) == Calendar.FRIDAY) {
			setRepeatType(WEEKDAYS);
		} else {
			/* case: unknown repeatType */
			setRepeatTypeUnknown(true);
		}
	}
}
/**
 * Getter for UI; used for controlling the recurring-meeting warning message.
 *
 * @return true if the repeatType is unknown for a repeatable event.
 */
public boolean getRepeatTypeUnknown() {
	return this.repeatTypeUnknown;
}

public void setRepeatTypeUnknown(boolean repeatTypeUnknown) {
	this.repeatTypeUnknown = repeatTypeUnknown;
}

/**
 * Copies the meeting-level attachments (view-by-all, not bound to a time
 * slot) and replaces the meeting's attachment list with the copies.
 */
private void assignMainAttachmentsCopyToSignupMeeting() {
	List<SignupAttachment> attachList = new ArrayList<SignupAttachment>();
	// Bug fix: dropped the old 'attachList != null' guard, which was always
	// true immediately after construction.
	for (SignupAttachment attach : this.signupMeeting.getSignupAttachments()) {
		if (attach.getTimeslotId() == null && attach.getViewByAll()) {
			attachList.add(attach);
		}
		// TODO Later: how about time slot attachments?
	}
	List<SignupAttachment> cpList = new ArrayList<SignupAttachment>();
	for (SignupAttachment attach : attachList) {
		cpList.add(getAttachmentHandler().copySignupAttachment(this.signupMeeting, true, attach,
				ATTACH_COPY + this.signupMeeting.getId().toString()));
	}
	this.signupMeeting.setSignupAttachments(cpList);
}

/* Overwrites the default implementation. */
public boolean getSignupAttachmentEmpty() {
	// Idiom fix: collapse the if/else chain into one boolean expression.
	return this.signupMeeting == null
			|| this.signupMeeting.getSignupAttachments() == null
			|| this.signupMeeting.getSignupAttachments().isEmpty();
}
/** @return the injected UserDefineTimeslotBean */
public UserDefineTimeslotBean getUserDefineTimeslotBean() {
	return this.userDefineTimeslotBean;
}

/** @param userDefineTimeslotBean the bean managing user-defined time slots */
public void setUserDefineTimeslotBean(UserDefineTimeslotBean userDefineTimeslotBean) {
	this.userDefineTimeslotBean = userDefineTimeslotBean;
}

/** @return true when the meeting uses user-defined time slots */
public boolean isUserDefinedTS() {
	return this.userDefinedTS;
}

/** @param userDefinedTS whether the meeting uses user-defined time slots */
public void setUserDefinedTS(boolean userDefinedTS) {
	this.userDefinedTS = userDefinedTS;
}

/** @return the custom time-slot wrappers */
public List<TimeslotWrapper> getCustomTimeSlotWrpList() {
	return this.customTimeSlotWrpList;
}

/** @param customTimeSlotWrpList the custom time-slot wrappers */
public void setCustomTimeSlotWrpList(List<TimeslotWrapper> customTimeSlotWrpList) {
	this.customTimeSlotWrpList = customTimeSlotWrpList;
}

/**
 * UI-only check: whether the event/meeting is of the custom time-slot style
 * (many time slots) and requires signup.
 *
 * @return true when the meeting type is CUSTOM_TIMESLOTS
 */
public boolean getCustomTsType() {
	return CUSTOM_TIMESLOTS.equals(this.signupMeeting.getMeetingType());
}

/** @return the chosen recurrence-length option */
public String getRecurLengthChoice() {
	return this.recurLengthChoice;
}

/** @param recurLengthChoice the chosen recurrence-length option */
public void setRecurLengthChoice(String recurLengthChoice) {
	this.recurLengthChoice = recurLengthChoice;
}

/** @return the number of occurrences for the recurrence */
public int getOccurrences() {
	return this.occurrences;
}

/** @param occurrences the number of occurrences for the recurrence */
public void setOccurrences(int occurrences) {
	this.occurrences = occurrences;
}

/**
 * @return true if sakai property signup.enableAttendance is true, else false
 */
public boolean isAttendanceOn() {
	return Utilities.getSignupMeetingsBean().isAttendanceOn();
}

/**
 * Gets the users that have permission, formatted as SelectItems for the
 * dropdown. Since this is a new item there is no current instructor, so the
 * current user appears at the top of the list.
 *
 * @return SelectItem list of potential instructors
 */
public List<SelectItem> getInstructors() {
	return Utilities.getSignupMeetingsBean().getInstructors(this.signupMeeting);
}

/**
 * UI check: whether the email checkbox should be checked and disabled.
 *
 * @return true when sending email is mandatory
 */
public boolean isMandatorySendEmail() {
	return this.mandatorySendEmail;
}
}
| |
/**
* Copyright 2011 meltmedia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xchain.framework.digester;
import org.xchain.framework.sax.HandlerWrapper;
import org.xchain.framework.sax.util.XHtmlHandler;
import org.apache.xml.serializer.Serializer;
import org.apache.xml.serializer.SerializerFactory;
import org.apache.xml.serializer.OutputPropertiesFactory;
import org.apache.commons.digester.Rule;
import org.apache.commons.digester.Digester;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Writer;
import java.io.StringWriter;
import java.util.Iterator;
import java.util.Properties;
import java.util.Map;
import org.xml.sax.ContentHandler;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
/**
 * A digester rule that serializes the XML subtree it matches into a string
 * buffer, using the configured output method ("text", "xml", "html" or
 * "xhtml"). The buffer is pushed onto the digester stack in {@link #begin}
 * and popped in {@link #end}.
 *
 * @author Christian Trimble
 * @author Josh Kennedy
 */
public class SerializationRule
  extends Rule
{
  public static Logger log = LoggerFactory.getLogger( SerializationRule.class );

  /**
   * Handler that forwards the SAX events of the matched subtree to the
   * wrapped serializer, tracking element depth so it can detect the end of
   * the subtree and hand control back to the digester.
   */
  private class RuleSerializationHandler
    extends HandlerWrapper
  {
    /** Current element nesting depth relative to the matched element. */
    private int depth = 0;

    public void startElement( String namespaceUri, String localName, String qName, Attributes atts )
      throws SAXException
    {
      depth++;
      // Forward the event unless this is the containing element and we were asked to skip it.
      // (Parentheses added for clarity; && binds tighter than ||.)
      if( (depth == 1 && includeContainingElement) || depth > 1 ) {
        super.startElement( namespaceUri, localName, qName, atts );
      }
      // Defensive check: a non-positive depth here means start/end events are unbalanced.
      else if( depth <= 0 ) {
        throw new IllegalStateException("Unmatched start and end elements detected.");
      }
    }

    public void endElement( String namespaceUri, String localName, String qName )
      throws SAXException
    {
      // Forward the event unless this is the containing element and we were asked to skip it.
      if( (depth == 1 && includeContainingElement) || depth > 1 ) {
        super.endElement( namespaceUri, localName, qName );
      }

      depth--;

      // The matched subtree is finished; close the document and return control to the rule.
      if( depth <= 0 ) {
        Digester digester = getDigester();

        // Restore the handlers we displaced in begin().
        digester.setCustomContentHandler(oldCustomContentHandler);
        oldCustomContentHandler = null;
        if( digester instanceof ExtendedDigester ) {
          ((ExtendedDigester)digester).setCustomLexicalHandler(oldCustomLexicalHandler);
          oldCustomLexicalHandler = null;
        }

        // Close all of the namespace mappings.
        Iterator currentNamespaceIterator = digester.getCurrentNamespaces().entrySet().iterator();
        while( currentNamespaceIterator.hasNext() ) {
          Map.Entry currentNamespace = (Map.Entry)currentNamespaceIterator.next();
          handler.endPrefixMapping((String)currentNamespace.getKey());
        }

        // Terminate the document.
        super.endDocument();

        // Pass control back to the digester.
        digester.endElement( namespaceUri, localName, qName );
      }
    }
  }

  /** The handler that will create the serialized form of the nodes. */
  protected RuleSerializationHandler handler = null;

  /** The old custom content handler that we displaced. */
  protected ContentHandler oldCustomContentHandler;

  /** The old lexical handler that we displaced. */
  protected LexicalHandler oldCustomLexicalHandler;

  /** The method we will use to render the output: "text", "xml", "html" or "xhtml". */
  protected String method = "text";

  /** Whether the serializer should indent; null means "use the method's default". */
  protected Boolean indent = Boolean.TRUE;

  /** The buffer that we will be writing to. */
  protected StringBuffer buffer = null;

  /**
   * If true, the element that caused this rule to fire will be passed to the
   * serializer; otherwise serialization starts with the next element.
   */
  protected boolean includeContainingElement = false;

  /**
   * Starts capturing SAX events: displaces the digester's custom handlers,
   * opens a serialization document, replays the namespace mappings currently
   * in scope, and forwards the matched element.
   */
  public void begin(String namespaceUri, String name, Attributes attributes)
    throws Exception
  {
    Digester digester = getDigester();

    // Store the old handlers so endElement() can restore them.
    oldCustomContentHandler = digester.getCustomContentHandler();
    if( digester instanceof ExtendedDigester ) {
      oldCustomLexicalHandler = ((ExtendedDigester)digester).getCustomLexicalHandler();
    }

    // Create the handler chain that will do the serialization.
    // NOTE: newHandler() also creates the fresh buffer (via newStringWriter()),
    // so it must run before the push below.
    handler = new RuleSerializationHandler();
    handler.setWrappedHandler(newHandler());

    // Route digester events through our handler.
    digester.setCustomContentHandler(handler);
    if( digester instanceof ExtendedDigester ) {
      ((ExtendedDigester)digester).setCustomLexicalHandler(handler);
    }

    // Push the buffer onto the stack so end() can pop it.
    digester.push(buffer);

    // Start the document and replay all namespaces defined on the digester.
    handler.startDocument();
    Iterator currentNamespaceIterator = digester.getCurrentNamespaces().entrySet().iterator();
    while( currentNamespaceIterator.hasNext() ) {
      Map.Entry currentNamespace = (Map.Entry)currentNamespaceIterator.next();
      handler.startPrefixMapping((String)currentNamespace.getKey(), (String)currentNamespace.getValue());
    }

    // Send the current element to the handler.
    if( digester.getNamespaceAware() ) {
      handler.startElement(namespaceUri, name, digester.getCurrentElementName(), attributes);
    }
    else {
      handler.startElement(namespaceUri, name, name, attributes);
    }
  }

  /**
   * Pops the serialization buffer that begin() pushed onto the digester stack.
   */
  public void end()
    throws Exception
  {
    getDigester().pop();
  }

  /**
   * Returns a new handler for the body of a serializer block. For the "html"
   * method the serializer is wrapped in an XHtmlHandler.
   */
  protected ContentHandler newHandler()
    throws Exception
  {
    Serializer serializer = newSerializer();
    // Idiom fix: equalsIgnoreCase is locale-independent, unlike
    // toLowerCase().equals(...) which misbehaves in e.g. the Turkish locale.
    if( "html".equalsIgnoreCase( method ) ) {
      // wrap the html handler.
      XHtmlHandler xhtmlHandler = new XHtmlHandler();
      xhtmlHandler.setWrappedHandler(serializer);
      return xhtmlHandler;
    }
    else {
      return serializer.asContentHandler();
    }
  }

  /**
   * Creates and configures the serializer for the current output method.
   */
  protected Serializer newSerializer()
  {
    Properties outputProperties = OutputPropertiesFactory.getDefaultMethodProperties( method );

    if ( "html".equalsIgnoreCase( method ) ) {
      if (indent == null) indent = true; // default to indenting mode
      outputProperties.setProperty( "media-type", "text/html" );
      outputProperties.setProperty( "doctype-system", "http://www.w3.org/TR/html4/loose.dtd" );
      outputProperties.setProperty( "doctype-public", "-//W3C//DTD HTML 4.01 Transitional//EN" );
    }
    else if ( "xhtml".equalsIgnoreCase( method ) ) {
      if (indent == null) indent = true; // default to indenting mode
      outputProperties.setProperty( "media-type", "application/xhtml+xml" );
      outputProperties.setProperty( "omit-xml-declaration", "yes" ); // todo: should be browser sensitive
      outputProperties.setProperty( "doctype-system", "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd" );
      outputProperties.setProperty( "doctype-public", "-//W3C//DTD XHTML 1.0 Transitional//EN" );
    }
    else if ( "xml".equalsIgnoreCase( method ) ) {
      outputProperties.setProperty( "media-type", "text/xml" );
    }
    else if ( "text".equalsIgnoreCase( method ) ) {
      outputProperties.setProperty( "media-type", "text/plain" );
    }

    if ( Boolean.TRUE.equals( indent ) ) {
      outputProperties.setProperty( "indent", "yes" );
      outputProperties.setProperty( "{http://xml.apache.org/xalan}indent-amount", "2" );
    }
    else {
      outputProperties.setProperty( "indent", "no" );
    }

    try {
      Serializer serializer = SerializerFactory.getSerializer( outputProperties );
      serializer.setWriter(newStringWriter());
      return serializer;
    } catch (org.apache.xml.serializer.utils.WrappedRuntimeException e) {
      // Surface the wrapped cause before rethrowing.
      log.error("Serializer threw wrapped exception", e.getException());
      throw e;
    }
  }

  /**
   * Creates a fresh StringWriter and exposes its backing buffer through the
   * {@code buffer} field, so begin() can push it onto the digester stack.
   */
  protected StringWriter newStringWriter()
  {
    StringWriter stringWriter = new StringWriter();
    buffer = stringWriter.getBuffer();
    return stringWriter;
  }
}
| |
/*
* Copyright 2009-2014 DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
/**
*
*/
package org.mrgeo.resources.tms;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.LineString;
import org.apache.commons.lang.StringUtils;
import org.geotools.coverage.CoverageFactoryFinder;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.coverage.grid.GridCoverageFactory;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.factory.GeoTools;
import org.geotools.geometry.Envelope2D;
import org.geotools.process.ProcessException;
import org.geotools.process.raster.ContourProcess;
import org.geotools.util.NullProgressListener;
import org.mrgeo.image.MrsImage;
import org.mrgeo.image.MrsImageException;
import org.mrgeo.image.MrsImagePyramid;
import org.mrgeo.image.MrsImagePyramidMetadata;
import org.mrgeo.image.MrsImagePyramidMetadata.ImageMetadata;
import org.mrgeo.rasterops.ColorScale;
import org.mrgeo.rasterops.OpImageRegistrar;
import org.mrgeo.tile.TileNotFoundException;
import org.mrgeo.services.Configuration;
import org.mrgeo.services.SecurityUtils;
import org.mrgeo.services.mrspyramid.ColorScaleManager;
import org.mrgeo.services.mrspyramid.rendering.*;
import org.mrgeo.services.tms.TmsService;
import org.mrgeo.utils.Bounds;
import org.mrgeo.utils.HadoopUtils;
import org.mrgeo.utils.TMSUtils;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Attr;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.activation.MimetypesFileTypeMap;
import javax.imageio.ImageWriter;
import javax.imageio.stream.MemoryCacheImageOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.dom.DOMSource;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.*;
import java.util.List;
import java.util.concurrent.ExecutionException;
/**
*
*/
@Path("/tms")
public class TileMapServiceResource
{
private static final Logger log = LoggerFactory.getLogger(TileMapServiceResource.class);
private static final MimetypesFileTypeMap mimeTypeMap = new MimetypesFileTypeMap();
private static final String VERSION = "1.0.0";
private static final String SRS = "EPSG:4326";
private static final String GENERAL_ERROR = "An error occurred in Tile Map Service";
private static String imageBaseDir = HadoopUtils.getDefaultImageBaseDirectory();
public static String KML_VERSION = "http://www.opengis.net/kml/2.2";
public static String KML_EXTENSIONS = "http://www.google.com/kml/ext/2.2";
public static String KML_MIME_TYPE = "application/vnd.google-earth.kml+xml";
private static Double MIN_CONTOUR_LENGTH = 0.01;
@Context
TmsService service;
static Properties props;
static
{
  // Load configuration eagerly so props is available to all endpoints.
  init();
}

/**
 * Loads the MrGeo configuration properties once; safe to call repeatedly
 * (only loads when props is still null).
 */
public static void init()
{
  try
  {
    if (props == null)
    {
      props = Configuration.getInstance().getProperties();
    }
  }
  catch (final IllegalStateException e)
  {
    // Fix: pass the exception so the stack trace is logged, and use
    // parameterized logging instead of string concatenation.
    log.error("image.base must be specified in the MrGeo configuration file ({})", e.getMessage(), e);
  }
}
/**
 * Returns a fully transparent (or, for JPEG, opaque-capable) empty tile of
 * the requested size, written through the supplied response writer.
 *
 * @param writer response writer that determines the output mime type
 * @param width tile width in pixels
 * @param height tile height in pixels
 * @return the HTTP response containing the empty tile
 */
protected static Response createEmptyTile(final ImageResponseWriter writer, final int width,
  final int height)
{
  // Bug fix: compare mime types with equals(); '==' tests reference identity
  // and only matched when both strings happened to be interned.
  final int dataType;
  if ("image/jpeg".equals(writer.getResponseMimeType()))
  {
    // JPEG has no alpha channel.
    dataType = BufferedImage.TYPE_3BYTE_BGR;
  }
  else
  {
    dataType = BufferedImage.TYPE_4BYTE_ABGR;
  }
  final BufferedImage bufImg = new BufferedImage(width, height, dataType);
  final Graphics2D g = bufImg.createGraphics();
  g.setColor(new Color(0, 0, 0, 0)); // fully transparent
  g.fillRect(0, 0, width, height);
  g.dispose();
  return writer.write(bufImg.getData());
}
/**
 * Builds a TMS "TileMap" XML resource document for the given image pyramid.
 *
 * @param raster name of the raster/pyramid; used as the TileMap title
 * @param url the request URL; the tilemapservice URL is derived from it
 * @param mpm pyramid metadata supplying bounds, tile size and max zoom level
 * @return the TileMap DOM document
 * @throws ParserConfigurationException if a DocumentBuilder cannot be created
 */
protected static Document mrsPyramidMetadataToTileMapXml(final String raster, final String url,
  final MrsImagePyramidMetadata mpm) throws ParserConfigurationException
{
  final DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
  final DocumentBuilder docBuilder = docFactory.newDocumentBuilder();

  // root element
  final Document doc = docBuilder.newDocument();
  final Element rootElement = doc.createElement("TileMap");
  doc.appendChild(rootElement);
  setTileMapAttribute(doc, rootElement, "version", VERSION);
  setTileMapAttribute(doc, rootElement, "tilemapservice",
    normalizeUrl(normalizeUrl(url).replace(raster, "")));

  // child elements
  appendTextElement(doc, rootElement, "Title", raster);
  appendTextElement(doc, rootElement, "Abstract", "");
  appendTextElement(doc, rootElement, "SRS", SRS);

  final Element bbox = doc.createElement("BoundingBox");
  rootElement.appendChild(bbox);
  setTileMapAttribute(doc, bbox, "minx", String.valueOf(mpm.getBounds().getMinX()));
  setTileMapAttribute(doc, bbox, "miny", String.valueOf(mpm.getBounds().getMinY()));
  setTileMapAttribute(doc, bbox, "maxx", String.valueOf(mpm.getBounds().getMaxX()));
  setTileMapAttribute(doc, bbox, "maxy", String.valueOf(mpm.getBounds().getMaxY()));

  final Element origin = doc.createElement("Origin");
  rootElement.appendChild(origin);
  setTileMapAttribute(doc, origin, "x", String.valueOf(mpm.getBounds().getMinX()));
  setTileMapAttribute(doc, origin, "y", String.valueOf(mpm.getBounds().getMinY()));

  final Element tileformat = doc.createElement("TileFormat");
  rootElement.appendChild(tileformat);
  setTileMapAttribute(doc, tileformat, "width", String.valueOf(mpm.getTilesize()));
  setTileMapAttribute(doc, tileformat, "height", String.valueOf(mpm.getTilesize()));
  setTileMapAttribute(doc, tileformat, "mime-type", "image/tiff");
  setTileMapAttribute(doc, tileformat, "extension", "tif");

  final Element tilesets = doc.createElement("TileSets");
  rootElement.appendChild(tilesets);
  setTileMapAttribute(doc, tilesets, "profile", "global-geodetic");
  for (int i = 0; i <= mpm.getMaxZoomLevel(); i++)
  {
    final Element tileset = doc.createElement("TileSet");
    tilesets.appendChild(tileset);
    setTileMapAttribute(doc, tileset, "href", normalizeUrl(normalizeUrl(url)) + "/" + i);
    // NOTE(review): units-per-pixel assumes a 256-pixel tile, while TileFormat
    // advertises mpm.getTilesize(); confirm these agree for non-256 tile sizes.
    setTileMapAttribute(doc, tileset, "units-per-pixel", String.valueOf(180d / 256d / Math.pow(2, i)));
    setTileMapAttribute(doc, tileset, "order", String.valueOf(i));
  }
  return doc;
}

/** Creates an attribute with the given name/value and attaches it to the element. */
private static void setTileMapAttribute(final Document doc, final Element element,
  final String name, final String value)
{
  final Attr attr = doc.createAttribute(name);
  attr.setValue(value);
  element.setAttributeNode(attr);
}

/** Creates and appends a child element with the given tag name and text content. */
private static void appendTextElement(final Document doc, final Element parent,
  final String name, final String text)
{
  final Element child = doc.createElement(name);
  child.setTextContent(text);
  parent.appendChild(child);
}
/**
 * Generates contour lines for a single raster tile and returns them as a KML
 * document.
 *
 * The tile is wrapped in a GeoTools GridCoverage2D georeferenced by
 * {@code envelope}, contoured via ContourProcess at the requested interval,
 * and every contour longer than MIN_CONTOUR_LENGTH becomes a KML Placemark
 * holding a LineString plus sparse Point label anchors.
 *
 * @param tile raster tile to contour
 * @param envelope geographic bounds used to georeference the tile
 * @param interval contour interval, in the units of the raster values
 * @return KML document containing the contour placemarks
 * @throws ProcessException if the contour process fails
 * @throws ParserConfigurationException if a DOM builder cannot be created
 */
protected static Document mrsPyramidTileToContourKml(final Raster tile,
final Envelope2D envelope, final Double interval) throws ProcessException,
ParserConfigurationException
{
final DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
final DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
// root elements
final Document doc = docBuilder.newDocument();
final Element rootElement = doc.createElement("kml");
doc.appendChild(rootElement);
final Attr ns = doc.createAttribute("xmlns");
ns.setValue(KML_VERSION);
rootElement.setAttributeNode(ns);
/*
 * Attr gxns = doc.createAttribute("xmlns:gx"); gxns.setValue(KML_EXTENSIONS);
 * rootElement.setAttributeNode(gxns);
 */
// child elements
final Element d = doc.createElement("Document");
rootElement.appendChild(d);
final Element nm = doc.createElement("name");
nm.setTextContent("contours");
d.appendChild(nm);
final Element dsc = doc.createElement("description");
dsc.setTextContent("interval = " + interval);
d.appendChild(dsc);
// ListStyle with checkHideChildren keeps the (potentially many) placemarks
// from cluttering the client's layer tree.
final Element sty = doc.createElement("Style");
d.appendChild(sty);
final Element lsty = doc.createElement("ListStyle");
sty.appendChild(lsty);
final Element lit = doc.createElement("listItemType");
lit.setTextContent("checkHideChildren");
lsty.appendChild(lit);
// "normalState" style: icon and label scale are 0, so contours render
// unlabeled by default.
final Element normalStyle = doc.createElement("Style");
normalStyle.setAttribute("id", "normalState");
final Element iconStyle = doc.createElement("IconStyle");
final Element icon = doc.createElement("Icon");
final Element scale = doc.createElement("scale");
scale.setTextContent("0");
// scale/icon are cloned here because the same template nodes are reused in
// other style elements below; a DOM node can only have one parent.
iconStyle.appendChild(scale.cloneNode(true));
iconStyle.appendChild(icon.cloneNode(true));
final Element highlightScale = doc.createElement("scale");
highlightScale.setTextContent("1.0");
final Element highlightColor = doc.createElement("color");
highlightColor.setTextContent("ffffffff");
final Element labelStyle = doc.createElement("LabelStyle");
labelStyle.appendChild(scale.cloneNode(true));
// "highlightState" style: full-size white label and a wider line on hover.
final Element highlightStyle = doc.createElement("Style");
highlightStyle.setAttribute("id", "highlightState");
// StyleMap that toggles between the normal and highlight states.
final Element contourStyle = doc.createElement("StyleMap");
contourStyle.setAttribute("id", "styleMapContour");
final Element normalPair = doc.createElement("Pair");
final Element highlightPair = doc.createElement("Pair");
final Element normalKey = doc.createElement("key");
normalKey.setTextContent("normal");
final Element highlightKey = doc.createElement("key");
highlightKey.setTextContent("highlight");
final Element normalStyleUrl = doc.createElement("styleUrl");
normalStyleUrl.setTextContent("#normalState");
final Element highlightStyleUrl = doc.createElement("styleUrl");
highlightStyleUrl.setTextContent("#highlightState");
d.appendChild(normalStyle);
normalStyle.appendChild(iconStyle.cloneNode(true));
normalStyle.appendChild(labelStyle.cloneNode(true));
d.appendChild(highlightStyle);
// Note: the original iconStyle node itself (not a clone) ends up under
// highlightStyle here.
highlightStyle.appendChild(iconStyle);
final Element highlightLabelStyle = doc.createElement("LabelStyle");
highlightStyle.appendChild(highlightLabelStyle);
highlightLabelStyle.appendChild(highlightScale);
highlightLabelStyle.appendChild(highlightColor);
final Element highlightLineStyle = doc.createElement("LineStyle");
highlightStyle.appendChild(highlightLineStyle);
final Element highlightLineWidth = doc.createElement("width");
highlightLineWidth.setTextContent("2");
highlightLineStyle.appendChild(highlightLineWidth);
// Element highlightLabelVis = doc.createElement("gx:labelVisibility");
// highlightLabelVis.setTextContent("1");
// highlightLineStyle.appendChild(highlightLabelVis);
d.appendChild(contourStyle);
contourStyle.appendChild(normalPair);
normalPair.appendChild(normalKey);
normalPair.appendChild(normalStyleUrl);
contourStyle.appendChild(highlightPair);
highlightPair.appendChild(highlightKey);
highlightPair.appendChild(highlightStyleUrl);
// Copy the tile into a writable raster translated to origin (0,0); the
// negative offsets cancel the tile's own minX/minY.
final WritableRaster wRaster = tile.createCompatibleWritableRaster();
wRaster.setRect(-tile.getMinX(), -tile.getMinY(), tile);
final GridCoverageFactory factory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools
.getDefaultHints());
final GridCoverage2D coverage = factory.create("GridCoverage", wRaster, envelope);
final ContourProcess cp = new ContourProcess();
final SimpleFeatureCollection features = cp.execute(coverage, 0, null,
Double.valueOf(interval), true, true, null, new NullProgressListener());
final SimpleFeatureIterator iterator = features.features();
try
{
while (iterator.hasNext())
{
final SimpleFeature feature = iterator.next();
final LineString geom = (LineString) feature.getDefaultGeometry();
// Skip very short contour fragments.
if (geom.getLength() > MIN_CONTOUR_LENGTH)
{
final Double attrValue = (Double) feature.getAttribute("value");
final Element plm = doc.createElement("Placemark");
d.appendChild(plm);
final Element plmStyle = doc.createElement("styleUrl");
plmStyle.setTextContent("#styleMapContour");
plm.appendChild(plmStyle);
// The placemark name is the contour's elevation value.
final Element plmname = doc.createElement("name");
plmname.setTextContent(String.valueOf(attrValue));
plm.appendChild(plmname);
final Element mgeom = doc.createElement("MultiGeometry");
plm.appendChild(mgeom);
final Element line = doc.createElement("LineString");
mgeom.appendChild(line);
final Element coords = doc.createElement("coordinates");
final Coordinate[] linecoords = geom.getCoordinates();
final List<String> coordList = new ArrayList<String>();
for (int i = 0; i < linecoords.length; i++)
{
final Coordinate coo = linecoords[i];
// KML coordinates are "lon,lat,alt"; altitude is fixed at 0.
coordList.add(coo.x + "," + coo.y + "," + "0");
// Add a point label at the first and every 5000th vertex
if (i == 0 || i % 5000 == 0)
{
final Element pt = doc.createElement("Point");
mgeom.appendChild(pt);
final Element ptcoords = doc.createElement("coordinates");
ptcoords.setTextContent(coo.x + "," + coo.y + "," + "0");
pt.appendChild(ptcoords);
}
}
coords.setTextContent(StringUtils.join(coordList, "\n"));
line.appendChild(coords);
}
}
}
finally
{
// Feature iterators hold resources and must always be closed.
iterator.close();
}
return doc;
}
/**
 * Builds the TMS "TileMapService" XML document listing every available image
 * pyramid as a TileMap entry, sorted by name.
 *
 * Example shape:
 * <pre>
 * &lt;TileMapService version='1.0.0' services='.../tms/'&gt;
 *   &lt;Title&gt;Tile Map Service&lt;/Title&gt;
 *   &lt;Abstract&gt;...&lt;/Abstract&gt;
 *   &lt;TileMaps&gt;&lt;TileMap title='...' srs='...' profile='global-geodetic' href='...'/&gt;&lt;/TileMaps&gt;
 * &lt;/TileMapService&gt;
 * </pre>
 *
 * @param url request URL used to build the service and per-map hrefs
 * @param pyramidNames names of the available pyramids; not modified
 * @return the TileMapService document
 * @throws ParserConfigurationException if a DOM builder cannot be created
 * @throws DOMException on invalid DOM construction
 * @throws UnsupportedEncodingException if UTF-8 is unavailable (never in practice)
 */
protected static Document mrsPyramidToTileMapServiceXml(final String url,
final List<String> pyramidNames) throws ParserConfigurationException,
DOMException, UnsupportedEncodingException
{
final DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
final DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
// root elements
final Document doc = docBuilder.newDocument();
final Element rootElement = doc.createElement("TileMapService");
doc.appendChild(rootElement);
final Attr v = doc.createAttribute("version");
v.setValue(VERSION);
rootElement.setAttributeNode(v);
// The "services" URL is the request URL with the version segment removed.
final Attr service = doc.createAttribute("services");
service.setValue(normalizeUrl(normalizeUrl(url).replace(VERSION, "")));
rootElement.setAttributeNode(service);
// child elements
final Element title = doc.createElement("Title");
title.setTextContent("Tile Map Service");
rootElement.appendChild(title);
final Element abst = doc.createElement("Abstract");
abst.setTextContent("MrGeo MrsImagePyramid rasters available as TMS");
rootElement.appendChild(abst);
final Element tilesets = doc.createElement("TileMaps");
rootElement.appendChild(tilesets);
// Sort a defensive copy so the caller's list is not reordered as a side
// effect (the original sorted pyramidNames in place).
final List<String> sortedNames = new java.util.ArrayList<String>(pyramidNames);
Collections.sort(sortedNames);
for (final String p : sortedNames)
{
final Element tileset = doc.createElement("TileMap");
tilesets.appendChild(tileset);
// Pyramid names may contain characters that must be URL-encoded.
final Attr href = doc.createAttribute("href");
href.setValue(normalizeUrl(url) + "/" + URLEncoder.encode(p, "UTF-8"));
tileset.setAttributeNode(href);
final Attr maptitle = doc.createAttribute("title");
maptitle.setValue(p);
tileset.setAttributeNode(maptitle);
final Attr srs = doc.createAttribute("srs");
srs.setValue(SRS);
tileset.setAttributeNode(srs);
final Attr profile = doc.createAttribute("profile");
profile.setValue("global-geodetic");
tileset.setAttributeNode(profile);
}
return doc;
}
/**
 * Strips a single trailing slash from a URL, if one is present.
 *
 * Fixed: the previous check compared {@code lastIndexOf("/")} against
 * {@code length() - 1}; for the empty string both are -1, so it attempted
 * {@code substring(0, -1)} and threw StringIndexOutOfBoundsException.
 * {@code endsWith("/")} is false for "" and avoids that.
 *
 * @param url URL to normalize; may be empty
 * @return the URL without a trailing slash
 */
protected static String normalizeUrl(final String url)
{
return url.endsWith("/") ? url.substring(0, url.length() - 1) : url;
}
/**
 * Builds the TMS root "Services" document advertising the TileMapService
 * endpoint:
 *
 * <pre>
 * &lt;Services&gt;
 *   &lt;TileMapService title="MrGeo Tile Map Service" version="1.0.0" href=".../1.0.0"/&gt;
 * &lt;/Services&gt;
 * </pre>
 *
 * @param url request URL used as the base of the advertised href
 * @return the Services document
 * @throws ParserConfigurationException if a DOM builder cannot be created
 */
protected static Document rootResourceXml(final String url) throws ParserConfigurationException
{
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
final Document doc = builder.newDocument();
final Element services = doc.createElement("Services");
doc.appendChild(services);
final Element tms = doc.createElement("TileMapService");
services.appendChild(tms);
// Element.setAttribute is equivalent to createAttribute + setAttributeNode.
tms.setAttribute("title", "MrGeo Tile Map Service");
tms.setAttribute("version", VERSION);
tms.setAttribute("href", normalizeUrl(url) + "/" + VERSION);
return doc;
}
/**
 * Root TMS resource: returns the "Services" XML that advertises the
 * TileMapService endpoint derived from the request URL.
 *
 * @param hsr current HTTP request, used for its request URL
 * @return 200 with the Services XML, or 500 if the DOM builder fails
 */
@GET
@Produces("text/xml")
@Path("/")
public Response getRootResource(@Context final HttpServletRequest hsr)
{
try
{
final DOMSource body = new DOMSource(rootResourceXml(hsr.getRequestURL().toString()));
return Response.ok(body, "text/xml").header("Content-type", "text/xml").build();
}
catch (final ParserConfigurationException e)
{
return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
}
}
// @GET
// @Produces("image/*")
// @Path("old/{version}/{raster}/{z}/{x}/{y}.{format}")
// public Response oldGetTile(@PathParam("version") final String version,
// @PathParam("raster") String raster, @PathParam("z") final Integer z,
// @PathParam("x") final Integer x, @PathParam("y") final Integer y,
// @PathParam("format") final String format,
// @QueryParam("color-scale-name") final String colorScaleName,
// @QueryParam("color-scale") final String colorScale, @QueryParam("min") final Double min,
// @QueryParam("max") final Double max,
// @DefaultValue("1") @QueryParam("maskMax") final Double maskMax,
// @QueryParam("mask") final String mask)
// {
//
// try
// {
// // check the request format
// final int formatHash = format.hashCode();
// ImageWriter writer;
// switch (formatHash)
// {
// case 114833:
// writer = ImageUtils.createImageWriter("image/tiff");
// break;
// case 111145:
// writer = ImageUtils.createImageWriter("image/png");
// break;
// case 105441:
// writer = ImageUtils.createImageWriter("image/jpeg");
// break;
// default:
// return Response.status(Status.BAD_REQUEST).entity("Unsupported image format - " + format)
// .build();
// }
//
// if (Base64.isBase64(raster))
// {
// raster = new String(Base64.decode(raster));
// }
//
// // Check cache for metadata, if not found read from pyramid
// // and store in cache
// MrsImagePyramidMetadata mpm;
// try
// {
// mpm = service.getMetadata(raster);
// final ImageMetadata zlm = mpm.getImageMetadata()[z];
// if (zlm.tileBounds == null)
// {
// return Response.status(Status.NOT_FOUND).entity("Tile not found").build();
// }
// if (!zlm.tileBounds.contains(x, y))
// {
// return returnEmptyTile(writer, mpm.getTilesize(), mpm.getTilesize(), format);
// }
// }
// catch (final ExecutionException e)
// {
// return Response.status(Status.NOT_FOUND).entity("Tile map not found - " + raster).build();
// }
//
// OpImageRegistrar.registerMrGeoOps();
//
// // FIXME: Will we ever have to support multiband images here?
// double[] extrema = new double[2];
// final ImageStats stats = mpm.getStats(0);
//
// if (min != null && max != null)
// {
// extrema[0] = min;
// extrema[1] = max;
// }
// else if (stats != null)
// {
// extrema[0] = stats.min;
// extrema[1] = stats.max;
//
// // Check for min/max override values from the request
// if (min != null)
// {
// extrema[0] = min;
// }
// if (max != null)
// {
// extrema[1] = max;
// }
// }
// else
// {
// extrema = null;
// }
//
// final double transparentValue = mpm.getDefaultValue(0);
//
// ColorScale cs = null;
// try
// {
// cs = service.getColorScale(colorScale, colorScaleName, raster, new ColorScaleInfo.Builder()
// .transparent(transparentValue).extrema(extrema));
// }
// catch (final Exception te)
// {
// return Response.status(Status.NOT_FOUND).type("text/plain").entity(te.getMessage()).build();
// }
//
// // load the pyramid, and get the TMS tile if it exists
// // or else return blank image
// final MrsImagePyramid mp = service.getPyramid(raster);
// final MrsImage zImage = mp.getImage(z);
// // If the zImage is null that means the pyramid level does not exist
// if (zImage == null)
// {
// return Response.status(Status.NOT_FOUND).build();
// }
// final int width = zImage.getTileWidth();
// final int height = zImage.getTileHeight();
//
// BufferedImage bufImg;
// if (zImage.isTileEmpty(x, y))
// {
// return returnEmptyTile(writer, width, height, format);
// }
//
// try
// {
// final Raster xyTile = zImage.getTile(x, y);
//
// final WritableRaster wr = Raster.createWritableRaster(xyTile.getSampleModel(), xyTile
// .getDataBuffer(), null);
// final ColorModel cm = RasterUtils.createColorModel(xyTile);
// bufImg = new BufferedImage(cm, wr, false, null);
//
// final ByteArrayOutputStream baos = new ByteArrayOutputStream();
//
// // Apply mask if requested
// if (mask != null && !mask.isEmpty())
// {
// final int b = 0;
// final MrsImagePyramidMetadata maskMpm = service.getMetadata(mask);
//
// final MrsImagePyramid maskMp = service.getPyramid(mask);
// final MrsImage maskZImage = maskMp.getImage(z);
// if (maskZImage != null)
// {
// try
// {
// final Raster maskXyTile = maskZImage.getTile(x, y);
// for (int w = 0; w < maskXyTile.getWidth(); w++)
// {
// for (int h = 0; h < maskXyTile.getHeight(); h++)
// {
// final double maskPixel = maskXyTile.getSampleDouble(w, h, b);
// if (maskPixel > maskMax ||
// Double.compare(maskPixel, maskMpm.getDefaultValue(b)) == 0)
// {
// wr.setSample(w, h, b, mpm.getDefaultValue(b));
// }
// }
// }
// }
// catch (final TileNotFoundException ex)
// {
// // If no mask tile exists, a blank tile should be returned
// return returnEmptyTile(writer, width, height, format);
// }
// finally
// {
// maskZImage.close();
// }
// }
// }
//
// // return a non-colormapped geotiff for tiff format
// switch (formatHash)
// {
// case 114833:
// final Bounds bnds = new Bounds(TMSUtils.tileBoundsArray(x, y, z, width));
// GeoTiffExporter.export(bufImg, bnds, baos, false, null, mpm.getDefaultValue(0));
// break;
// case 111145:
// final MemoryCacheImageOutputStream imageStreamPng = new MemoryCacheImageOutputStream(baos);
// writer.setOutput(imageStreamPng);
// // Assume 3 band images are RGB and should NOT have a color scale applied
// if (mpm.getBands() == 3)
// {
// // Read nodata from metadata and assign to color
// final int[] defaultValues = mpm.getDefaultValuesInt();
// final Color nodataColor = new Color(defaultValues[0], defaultValues[1],
// defaultValues[2]);
//
// writer.write(ImageUtils.imageToRgbaBufferedImage(ImageUtils.makeColorTransparent(
// bufImg, nodataColor)));
// }
// else
// {
// // writer.write(ImageUtils.bufferRenderedImage(
// // ColorScaleApplier.applyToRgba(bufImg, cs)
// // ));
// new PngImageResponseWriter().writeToStream(org.mrgeo.rasterops.ColorScaleApplier
// .applyToRgba(bufImg, cs).getData(), baos);
// }
// imageStreamPng.close();
// break;
// case 105441:
// final MemoryCacheImageOutputStream imageStreamJpg = new MemoryCacheImageOutputStream(baos);
// writer.setOutput(imageStreamJpg);
// // Assume 3 band images are RGB and should NOT have a color scale applied
// if (mpm.getBands() == 3)
// {
// writer.write(bufImg);
// }
// else
// {
// writer.write(org.mrgeo.rasterops.ColorScaleApplier.applyToRgb(bufImg, cs));
// }
// imageStreamJpg.close();
// break;
// default:
// return Response.status(Status.BAD_REQUEST).entity("Unsupported image format - " + format)
// .build();
// }
// final byte[] imageData = baos.toByteArray();
// baos.close();
//
// final String type = mimeTypeMap.getContentType("output." + format);
// return Response.ok(imageData).header("Content-Type", type).build();
// }
// catch (final TileNotFoundException ex)
// {
// // requested tile is outside of image bounds,
// // return a blank tile
// return returnEmptyTile(writer, width, height, format);
// }
// finally
// {
// zImage.close();
// }
// }
// catch (final Exception e)
// {
// log.error("Exception occurred getting tile " + raster + "/" + z + "/" + x + "/" + y + "." +
// format, e);
// return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
// }
// }
/**
 * TMS tile endpoint: renders tile (x, y) at zoom z from the named pyramid in
 * the requested image format, optionally masking and color-scaling it.
 *
 * Error mapping: unknown format -> 400; missing pyramid/level -> 404; missing
 * tile -> an empty tile of the pyramid's tile size; color-scale problems ->
 * 404 with a specific message; anything else -> 500.
 */
// NOTE(review): @SuppressWarnings("static-method") looks stale — the body
// reads the instance members props, service and log; confirm before removing.
@SuppressWarnings("static-method")
@GET
@Produces("image/*")
@Path("{version}/{raster}/{z}/{x}/{y}.{format}")
public Response getTile(@PathParam("version") final String version,
@PathParam("raster") String pyramid, @PathParam("z") final Integer z,
@PathParam("x") final Integer x, @PathParam("y") final Integer y,
@PathParam("format") final String format,
@QueryParam("color-scale-name") final String colorScaleName,
@QueryParam("color-scale") final String colorScale, @QueryParam("min") final Double min,
@QueryParam("max") final Double max,
@DefaultValue("1") @QueryParam("maskMax") final Double maskMax,
@QueryParam("mask") final String mask)
{
final ImageRenderer renderer;
Raster raster;
try
{
// The renderer is chosen by output format; an unknown format raises
// IllegalArgumentException (mapped to 400 below).
renderer = (ImageRenderer) ImageHandlerFactory.getHandler(format, ImageRenderer.class);
// TODO: Need to construct provider properties from the WebRequest using
// a new security layer and pass those properties.
// Apply mask if requested
Properties providerProperties = SecurityUtils.getProviderProperties();
if (mask != null && !mask.isEmpty())
{
raster = renderer.renderImage(pyramid, x, y, z, mask, maskMax, providerProperties);
}
else
{
raster = renderer.renderImage(pyramid, x, y, z, providerProperties);
}
// Apply a color scale only for non-TIFF output on rasters that are not
// already 3- or 4-band (presumably RGB/RGBA — confirm with renderer docs).
if (!(renderer instanceof TiffImageRenderer) && raster.getNumBands() != 3 &&
raster.getNumBands() != 4)
{
ColorScale cs = null;
if (colorScaleName != null)
{
cs = ColorScaleManager.fromName(colorScaleName, props);
}
else if (colorScale != null)
{
cs = ColorScaleManager.fromJSON(colorScale);
}
// else
// {
// cs = ColorScaleManager.fromPyramid(pyramid, driver);
// }
final double[] extrema = renderer.getExtrema();
// Check for min/max override values from the request
if (min != null)
{
extrema[0] = min;
}
if (max != null)
{
extrema[1] = max;
}
raster = ((ColorScaleApplier) ImageHandlerFactory.getHandler(format,
ColorScaleApplier.class)).applyColorScale(raster, cs, extrema, renderer
.getDefaultValues());
}
// Apply mask if requested
// if (mask != null && !mask.isEmpty())
// {
// try
// {
// final MrsImagePyramidMetadata maskMetadata = service.getMetadata(mask);
//
// final Raster maskRaster = renderer.renderImage(mask, x, y, z, props, driver);
// final WritableRaster wr = RasterUtils.makeRasterWritable(raster);
//
// final int band = 0;
// final double nodata = maskMetadata.getDefaultValue(band);
//
// for (int w = 0; w < maskRaster.getWidth(); w++)
// {
// for (int h = 0; h < maskRaster.getHeight(); h++)
// {
// final double maskPixel = maskRaster.getSampleDouble(w, h, band);
// if (maskPixel > maskMax || Double.compare(maskPixel, nodata) == 0)
// {
// wr.setSample(w, h, band, nodata);
// }
// }
// }
// }
// catch (final TileNotFoundException ex)
// {
// raster = RasterUtils.createEmptyRaster(raster.getWidth(), raster.getHeight(), raster
// .getNumBands(), raster.getTransferType(), 0);
// }
// }
// Serialize the (possibly color-mapped) raster in the requested format.
return ((ImageResponseWriter) ImageHandlerFactory.getHandler(format,
ImageResponseWriter.class)).write(raster, renderer.getDefaultValues());
}
catch (final IllegalArgumentException e)
{
return Response.status(Status.BAD_REQUEST).entity("Unsupported image format - " + format)
.build();
}
catch (final IOException e)
{
return Response.status(Status.NOT_FOUND).entity("Tile map not found - " + pyramid).build();
}
catch (final MrsImageException e)
{
return Response.status(Status.NOT_FOUND).entity("Tile map not found - " + pyramid + ": " + z)
.build();
}
catch (final TileNotFoundException e)
{
// A missing tile is answered with a blank tile of the pyramid's tile
// size rather than a 404 (which some clients render as broken tiles).
// return Response.status(Status.NOT_FOUND).entity("Tile not found").build();
try
{
final MrsImagePyramidMetadata metadata = service.getMetadata(pyramid);
return createEmptyTile(((ImageResponseWriter) ImageHandlerFactory.getHandler(format,
ImageResponseWriter.class)), metadata.getTilesize(), metadata.getTilesize());
}
catch (final Exception e1)
{
// Fall through to the 500 response below if even the blank tile fails.
log.error("Exception occurred creating blank tile " + pyramid + "/" + z + "/" + x + "/" +
y + "." + format, e1);
}
}
catch (final ColorScale.BadJSONException e)
{
return Response.status(Status.NOT_FOUND).entity("Unable to parse color scale JSON").build();
}
catch (final ColorScale.BadSourceException e)
{
return Response.status(Status.NOT_FOUND).entity("Unable to open color scale file").build();
}
catch (final ColorScale.BadXMLException e)
{
return Response.status(Status.NOT_FOUND).entity("Unable to parse color scale XML").build();
}
catch (final ColorScale.ColorScaleException e)
{
return Response.status(Status.NOT_FOUND).entity("Unable to open color scale").build();
}
catch (final Exception e)
{
log.error("Exception occurred getting tile " + pyramid + "/" + z + "/" + x + "/" + y + "." +
format, e);
}
// Reached only on unexpected errors or a failed blank-tile fallback.
return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
}
/**
 * TMS contour endpoint: returns contour lines for tile (x, y) at zoom z as
 * KML, generated at the requested interval.
 *
 * Only the pyramid's maximum zoom level is contoured; requests for other
 * levels get an empty {@code <kml/>} body. Missing pyramids, levels or tiles
 * map to 404; unexpected failures to 500.
 */
@GET
@Produces("text/xml")
@Path("/{version}/{raster}/{z}/{x}/{y}/{interval}.kml")
public Response getTileContours(@PathParam("version") final String version,
@PathParam("raster") String raster, @PathParam("z") final Integer z,
@PathParam("x") final Integer x, @PathParam("y") final Integer y,
@PathParam("interval") final Double interval)
{
try
{
// Check cache for metadata, if not found read from pyramid
// and store in cache
MrsImagePyramidMetadata mpm;
try
{
mpm = service.getMetadata(raster);
// If not close enough to max zoom level, bail out
if (mpm.getMaxZoomLevel() != z)
{
return Response.ok("<kml/>").header("Content-type", "text/xml").build();
}
final ImageMetadata zlm = mpm.getImageMetadata()[z];
if (zlm.tileBounds == null)
{
return Response.status(Status.NOT_FOUND).entity("Tile not found").build();
}
if (!zlm.tileBounds.contains(x, y))
{
return Response.status(Status.NOT_FOUND).entity("Tile is empty").build();
}
}
catch (final ExecutionException e)
{
return Response.status(Status.NOT_FOUND).entity("Tile map not found - " + raster).build();
}
OpImageRegistrar.registerMrGeoOps();
// load the pyramid, and get the TMS tile if it exists
// or else return blank image
final MrsImagePyramid mp = service.getPyramid(raster);
final MrsImage zImage = mp.getImage(z);
// If the zImage is null that means the pyramid level does not exist
if (zImage == null)
{
return Response.status(Status.NOT_FOUND).build();
}
if (zImage.isTileEmpty(x, y))
{
return Response.status(Status.NOT_FOUND).entity("Tile is empty").build();
}
try
{
final Raster xyTile = zImage.getTile(x, y);
// Georeference the tile so the contour coordinates come out in
// geographic space.
final Bounds tileBounds = TMSUtils.tileBounds(x, y, z, mpm.getTilesize())
.convertNewToOldBounds();
final Envelope2D envelope = new Envelope2D();
envelope.setRect(tileBounds.toRectangle2D());
final Document kml = mrsPyramidTileToContourKml(xyTile, envelope, interval);
// return Response.ok(kml, new
// MimetypesFileTypeMap().getContentType("kml")).header("Content-type",
// KmlService.KML_MIME_TYPE).build();
return Response.ok(kml).header("Content-type", "text/xml").build();
}
catch (final IllegalArgumentException ex)
{
// requested tile is outside of image bounds,
// return a blank tile
return Response.status(Status.NOT_FOUND).entity("Tile is empty").build();
}
finally
{
// Always release the image handle, even when contouring fails.
zImage.close();
}
}
catch (final Exception e)
{
log.error("Exception occurred getting tile " + raster + "/" + z + "/" + x + "/" + y + ".kml",
e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
}
}
/**
 * TMS TileMap resource: returns the TileMap XML describing one raster
 * pyramid (bounds, origin, tile format, tile sets per zoom level).
 *
 * @param version TMS version path segment (unused beyond routing)
 * @param raster name of the pyramid to describe
 * @param hsr current HTTP request, used for its request URL
 * @return 200 with the TileMap XML, 404 if the pyramid is unknown, 500 on
 *         DOM-builder failure
 */
@GET
@Produces("text/xml")
@Path("/{version}/{raster}")
public Response getTileMap(@PathParam("version") final String version,
@PathParam("raster") String raster, @Context final HttpServletRequest hsr)
{
try
{
// Metadata lookups go through the service's cache.
final MrsImagePyramidMetadata metadata = service.getMetadata(raster);
final Document tileMap = mrsPyramidMetadataToTileMapXml(raster,
hsr.getRequestURL().toString(), metadata);
return Response.ok(new DOMSource(tileMap), "text/xml")
.header("Content-type", "text/xml").build();
}
catch (final ExecutionException e)
{
log.error("MrsImagePyramid " + raster + " not found", e);
return Response.status(Status.NOT_FOUND).entity("Tile map not found - " + raster).build();
}
catch (final ParserConfigurationException e)
{
return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
}
}
/**
 * TMS TileMapService resource: lists every available image pyramid as a
 * TileMap entry.
 *
 * @param version TMS version path segment (unused beyond routing)
 * @param hsr current HTTP request, used for its request URL
 * @return 200 with the TileMapService XML, or 500 on I/O or DOM-builder
 *         failure
 */
@GET
@Produces("text/xml")
@Path("/{version}")
public Response getTileMapService(@PathParam("version") final String version,
@Context final HttpServletRequest hsr)
{
try
{
final Document serviceDoc = mrsPyramidToTileMapServiceXml(
hsr.getRequestURL().toString(), service.listImages());
return Response.ok(new DOMSource(serviceDoc), "text/xml")
.header("Content-type", "text/xml").build();
}
catch (final IOException e)
{
// listImages() touches the file system; surface that as a 500.
log.error("File system exception for " + imageBaseDir, e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
}
catch (final ParserConfigurationException e)
{
return Response.status(Status.INTERNAL_SERVER_ERROR).entity(GENERAL_ERROR).build();
}
}
/**
 * Builds an HTTP 200 response carrying a fully transparent (or, for JPEG,
 * black) tile of the given size in the requested format.
 *
 * Fixed: the image output stream is now closed in a finally block so it is
 * released even when {@code writer.write} throws.
 *
 * @param writer image writer matching the requested format
 * @param width tile width in pixels
 * @param height tile height in pixels
 * @param format file extension of the requested format (e.g. "jpg", "png")
 * @return 200 response with the encoded empty tile
 * @throws IOException if encoding the tile fails
 */
protected Response returnEmptyTile(final ImageWriter writer, final int width, final int height,
final String format) throws IOException
{
// return an empty image
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
int dataType;
if (format.equalsIgnoreCase("jpg"))
{
// JPEG has no alpha channel.
dataType = BufferedImage.TYPE_3BYTE_BGR;
}
else
{
// dataType = BufferedImage.TYPE_INT_ARGB;
dataType = BufferedImage.TYPE_4BYTE_ABGR;
}
final BufferedImage bufImg = new BufferedImage(width, height, dataType);
final Graphics2D g = bufImg.createGraphics();
// Fully transparent fill (alpha 0); renders black where alpha is dropped.
g.setColor(new Color(0, 0, 0, 0));
g.fillRect(0, 0, width, height);
g.dispose();
final MemoryCacheImageOutputStream imageStream = new MemoryCacheImageOutputStream(baos);
try
{
writer.setOutput(imageStream);
writer.write(bufImg);
}
finally
{
// Always release the image stream, even if the write fails.
imageStream.close();
}
final byte[] imageData = baos.toByteArray();
baos.close();
final String type = mimeTypeMap.getContentType("output." + format);
return Response.ok(imageData).header("Content-Type", type).build();
// A 404 - Not Found response may be the most appropriate, but results in pink tiles,
// maybe change that behavior on the OpenLayers client?
// return Response.status( Response.Status.NOT_FOUND).build();
}
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.websocket;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration;
import org.springframework.boot.autoconfigure.test.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.web.DispatcherServletAutoConfiguration;
import org.springframework.boot.autoconfigure.web.EmbeddedServletContainerAutoConfiguration;
import org.springframework.boot.autoconfigure.web.ServerPropertiesAutoConfiguration;
import org.springframework.boot.context.embedded.AnnotationConfigEmbeddedWebApplicationContext;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.context.web.ServerPortInfoApplicationContextInitializer;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.messaging.converter.SimpleMessageConverter;
import org.springframework.messaging.simp.annotation.SubscribeMapping;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
import org.springframework.messaging.simp.stomp.StompCommand;
import org.springframework.messaging.simp.stomp.StompFrameHandler;
import org.springframework.messaging.simp.stomp.StompHeaders;
import org.springframework.messaging.simp.stomp.StompSession;
import org.springframework.messaging.simp.stomp.StompSessionHandler;
import org.springframework.messaging.simp.stomp.StompSessionHandlerAdapter;
import org.springframework.stereotype.Controller;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.socket.client.standard.StandardWebSocketClient;
import org.springframework.web.socket.config.annotation.AbstractWebSocketMessageBrokerConfigurer;
import org.springframework.web.socket.config.annotation.EnableWebSocket;
import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker;
import org.springframework.web.socket.config.annotation.StompEndpointRegistry;
import org.springframework.web.socket.messaging.WebSocketStompClient;
import org.springframework.web.socket.sockjs.client.RestTemplateXhrTransport;
import org.springframework.web.socket.sockjs.client.SockJsClient;
import org.springframework.web.socket.sockjs.client.Transport;
import org.springframework.web.socket.sockjs.client.WebSocketTransport;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
* Tests for {@link WebSocketMessagingAutoConfiguration}.
*
* @author Andy Wilkinson
*/
public class WebSocketMessagingAutoConfigurationTests {
private AnnotationConfigEmbeddedWebApplicationContext context = new AnnotationConfigEmbeddedWebApplicationContext();
private SockJsClient sockJsClient;
/**
 * Builds the SockJS client used by the tests: a native WebSocket transport
 * first, with an XHR transport as fallback.
 */
@Before
public void setup() {
Transport webSocketTransport = new WebSocketTransport(new StandardWebSocketClient());
Transport xhrTransport = new RestTemplateXhrTransport(new RestTemplate());
this.sockJsClient = new SockJsClient(
Arrays.asList(webSocketTransport, xhrTransport));
}
/**
 * Shuts down the application context and stops the SockJS client after each
 * test.
 */
@After
public void tearDown() {
this.context.close();
this.sockJsClient.stop();
}
/**
 * End-to-end STOMP-over-SockJS test: boots the app on a random port,
 * subscribes to /app/data, and asserts the payload is Jackson-serialized
 * JSON honoring the indent-output property.
 */
@Test
public void basicMessagingWithJson() throws Throwable {
// server.port:0 picks a free port; indent-output makes the expected JSON
// below deterministic.
EnvironmentTestUtils.addEnvironment(this.context, "server.port:0",
"spring.jackson.serialization.indent-output:true");
this.context.register(WebSocketMessagingConfiguration.class);
// Captures the chosen port into the "local.server.port" property.
new ServerPortInfoApplicationContextInitializer().initialize(this.context);
this.context.refresh();
WebSocketStompClient stompClient = new WebSocketStompClient(this.sockJsClient);
final AtomicReference<Throwable> failure = new AtomicReference<Throwable>();
final AtomicReference<Object> result = new AtomicReference<Object>();
final CountDownLatch latch = new CountDownLatch(1);
// The latch is released on the first frame, error, or transport failure.
StompSessionHandler handler = new StompSessionHandlerAdapter() {
@Override
public void afterConnected(StompSession session,
StompHeaders connectedHeaders) {
session.subscribe("/app/data", new StompFrameHandler() {
@Override
public void handleFrame(StompHeaders headers, Object payload) {
result.set(payload);
latch.countDown();
}
@Override
public Type getPayloadType(StompHeaders headers) {
return Object.class;
}
});
}
@Override
public void handleFrame(StompHeaders headers, Object payload) {
latch.countDown();
}
@Override
public void handleException(StompSession session, StompCommand command,
StompHeaders headers, byte[] payload, Throwable exception) {
failure.set(exception);
latch.countDown();
}
@Override
public void handleTransportError(StompSession session, Throwable exception) {
failure.set(exception);
latch.countDown();
}
};
// SimpleMessageConverter delivers the raw byte[] payload, so the exact
// serialized JSON (including indentation) can be asserted.
stompClient.setMessageConverter(new SimpleMessageConverter());
stompClient.connect("ws://localhost:{port}/messaging", handler,
this.context.getEnvironment().getProperty("local.server.port"));
if (!latch.await(30, TimeUnit.SECONDS)) {
if (failure.get() != null) {
throw failure.get();
}
fail("Response was not received within 30 seconds");
}
// %n keeps the expected string correct across platform line separators.
assertThat(new String((byte[]) result.get()))
.isEqualTo(String.format("{%n \"foo\" : 5,%n \"bar\" : \"baz\"%n}"));
}
@Configuration
@EnableWebSocket
@EnableConfigurationProperties
@EnableWebSocketMessageBroker
@ImportAutoConfiguration({ JacksonAutoConfiguration.class,
EmbeddedServletContainerAutoConfiguration.class,
ServerPropertiesAutoConfiguration.class,
WebSocketMessagingAutoConfiguration.class,
DispatcherServletAutoConfiguration.class })
static class WebSocketMessagingConfiguration
extends AbstractWebSocketMessageBrokerConfigurer {
@Override
public void registerStompEndpoints(StompEndpointRegistry registry) {
registry.addEndpoint("/messaging").withSockJS();
}
@Override
public void configureMessageBroker(MessageBrokerRegistry registry) {
registry.setApplicationDestinationPrefixes("/app");
}
@Bean
public MessagingController messagingController() {
return new MessagingController();
}
@Bean
public TomcatEmbeddedServletContainerFactory tomcat() {
return new TomcatEmbeddedServletContainerFactory(0);
}
@Bean
public TomcatWebSocketContainerCustomizer tomcatCustomizer() {
return new TomcatWebSocketContainerCustomizer();
}
}
@Controller
static class MessagingController {
@SubscribeMapping("/data")
Data getData() {
return new Data(5, "baz");
}
}
static class Data {
private int foo;
private String bar;
Data(int foo, String bar) {
this.foo = foo;
this.bar = bar;
}
public int getFoo() {
return this.foo;
}
public String getBar() {
return this.bar;
}
}
}
| |
/**
* LICENSE + COPYRIGHT
*/
package org.emergent.bzr4j.core.xmloutput;
import org.emergent.bzr4j.core.BazaarException;
import org.emergent.bzr4j.core.BazaarItemKind;
import org.emergent.bzr4j.core.xmloutput.XmlBazaarStatus;
import org.emergent.bzr4j.core.BazaarStatusType;
import org.emergent.bzr4j.core.IBazaarLogMessage;
import org.emergent.bzr4j.core.IBazaarStatus;
import org.emergent.bzr4j.core.utils.StringUtil;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * I'm a parser for the output generated by the command: status --xml (using the
 * bzr-xmloutput plugin), and with that output I create a
* Set<IBazaarStatus>
*
* @author Guillermo Gonzalez
*/
class XmlStatusParser extends XmlAbstractParser implements XmlStatusResult {

    /** Statuses collected while parsing, in document order. */
    private final List<IBazaarStatus> statuses = new ArrayList<IBazaarStatus>(0);

    /** Pending merges reported by the status output; null until parsed. */
    private List<IBazaarLogMessage> pendingMerges = null;

    /** Final ordered/cleaned view of {@link #statuses}, built at the end of {@link #parse}. */
    private Set<IBazaarStatus> statusSet = null;

    // Element and attribute names of the bzr-xmloutput status format.
    private final static String ADDED = "added";
    private final static String REMOVED = "removed";
    private final static String RENAMED = "renamed";
    private final static String MODIFIED = "modified";
    private final static String KIND_CHANGED = "kind_changed";
    private final static String UNKNOWN = "unknown";
    private final static String FILE = "file";
    private final static String DIR = "directory";
    private final static String OLDKIND = "oldkind";
    private final static String NEWKIND = "newkind";
    private final static String OLDPATH = "oldpath";
    private final static String FID = "fid";
    private final static String SUFFIX = "suffix";
    private final static String STATUS = "status";
    private final static String BRANCH_ROOT = "workingtree_root";
    private final static String CONFLICTS = "conflicts";
    private final static String CONFLICT = "conflict";
    private final static String TYPE = "type";
    private final static String PENDING_MERGES = "pending_merges";

    /**
     * Parses the XML produced by {@code bzr status --xml} and populates the set
     * returned by {@link #getStatusSet()} (and possibly the pending merges).
     *
     * @param xml the raw XML document
     * @throws BazaarException if the XML cannot be read or is malformed
     */
    public void parse(final String xml) throws BazaarException {
        try {
            XMLInputFactory factory = XMLInputFactory.newInstance();
            parser = factory.createXMLStreamReader(new StringReader(xml));
            int eventType = parser.getEventType();
            while (eventType != XMLStreamConstants.END_DOCUMENT) {
                if (eventType == XMLStreamConstants.START_ELEMENT && isGroup(parser.getLocalName())) {
                    parseGroup();
                } else if (eventType == XMLStreamConstants.START_ELEMENT && STATUS.equals(parser.getLocalName())) {
                    // The root <status> element carries the working tree root path.
                    workDir = new File(parser.getAttributeValue(null, BRANCH_ROOT));
                } else if (eventType == XMLStreamConstants.START_ELEMENT && PENDING_MERGES.equals(parser.getLocalName())) {
                    parsePendingMerges();
                }
                eventType = parser.next();
            }
        }
        catch (XMLStreamException e) {
            throw new BazaarException(e);
        }
        catch (IOException e) {
            throw new BazaarException(e);
        }
        statusSet = orderAndCleanup();
    }

    /**
     * @return the parsed statuses; null if {@link #parse(String)} has not completed
     */
    public Set<IBazaarStatus> getStatusSet() {
        return statusSet;
    }

    /**
     * @return the pending merges found while parsing; never null
     */
    public List<IBazaarLogMessage> getPendingMerges() {
        if (pendingMerges == null) {
            // Typed factory method instead of the raw Collections.EMPTY_LIST
            // constant, which causes an unchecked assignment.
            pendingMerges = Collections.emptyList();
        }
        return pendingMerges;
    }

    /**
     * Delegates the {@code <pending_merges>} subtree to {@link XmlLogParser}.
     * Failures are logged and swallowed on purpose: pending merges are treated
     * as best-effort, so a broken subtree yields an empty list.
     */
    private void parsePendingMerges() throws BazaarException {
        LOG.debug("Parsing pending merges");
        try {
            pendingMerges = new XmlLogParser().parse(parser);
        }
        catch (Exception e) {
            LOG.error(e, "unexpected error while parsing pending merges: ");
            pendingMerges = null;
        }
        if (pendingMerges == null) {
            pendingMerges = new ArrayList<IBazaarLogMessage>(0);
        }
    }

    /**
     * @return true if the element name is one of the status group containers
     */
    private boolean isGroup(String name) {
        return (name.equals(ADDED) || name.equals(REMOVED) || name.equals(RENAMED)
                || name.equals(MODIFIED) || name.equals(KIND_CHANGED)
                || name.equals(UNKNOWN)) || name.equals(CONFLICTS);
    }

    /**
     * Parses one status group (e.g. {@code <added>}, {@code <conflicts>}) and
     * adds an {@link IBazaarStatus} for every file/directory/conflict child.
     * Returns when the group's closing tag is reached.
     */
    private void parseGroup() throws XMLStreamException, IOException {
        String group = parser.getLocalName();
        LOG.debug("Parsing status group: " + group);
        int eventType = parser.next();
        while (eventType != XMLStreamConstants.END_DOCUMENT) {
            if (eventType == XMLStreamConstants.START_ELEMENT && CONFLICT.equals(parser.getLocalName())) {
                final String type = parser.getAttributeValue(null, TYPE);
                final String path = StringUtil.nullSafeTrim(parser.getElementText());
                // Skip the merge helper files bzr leaves next to a conflicted file.
                if ((path.endsWith(".BASE") || path.endsWith(".THIS") || path.endsWith(".OTHER"))) {
                    eventType = parser.next();
                    continue;
                }
                // TODO: add conflict type enum (the "type" attribute is read but unused)
                XmlBazaarStatus.Builder builder = new XmlBazaarStatus.Builder();
                builder.setBranchRoot(workDir)
                        .setPath(getAsFile(path))
                        .setKind(BazaarItemKind.conflict)
                        .addStatusTypes(getStatusType(group));
                final IBazaarStatus status = builder.createBazaarStatus();
                statuses.add(status);
            } else if (eventType == XMLStreamConstants.START_ELEMENT &&
                    (FILE.equals(parser.getLocalName()) || DIR.equals(parser.getLocalName()))) {
                String path, prevPath;
                path = prevPath = null;
                BazaarItemKind newKind;
                BazaarItemKind oldKind = null;
                // The element name itself encodes the (new) item kind.
                if (FILE.equals(parser.getLocalName())) {
                    newKind = BazaarItemKind.file;
                } else {
                    newKind = BazaarItemKind.directory;
                }
                for (int i = 0; i < parser.getAttributeCount(); i++) {
                    if (parser.getAttributeLocalName(i).equals(OLDKIND)) {
                        oldKind = BazaarItemKind.valueOf(StringUtil.nullSafeTrim(parser.getAttributeValue(i)));
                    } else if (parser.getAttributeLocalName(i).equals(OLDPATH)) {
                        prevPath = StringUtil.nullSafeTrim(parser.getAttributeValue(i));
                    } else if (parser.getAttributeLocalName(i).equals(FID)) {
                        // do nothing (for the moment)
                    } else if (parser.getAttributeLocalName(i).equals(SUFFIX)) {
                        // do nothing (for the moment)
                    }
                }
                path = StringUtil.nullSafeTrim(parser.getElementText());
                // Same merge-helper-file filter as for conflicts above.
                if (path.endsWith(".BASE") || path.endsWith(".THIS") || path.endsWith(".OTHER")) {
                    eventType = parser.next();
                    continue;
                }
                XmlBazaarStatus.Builder builder = new XmlBazaarStatus.Builder();
                builder.setBranchRoot(workDir)
                        .setPath(getAsFile(path))
                        .setOldPath(getAsFile(prevPath))
                        .setKind(newKind)
                        .setOldKind(oldKind)
                        .addStatusTypes(getStatusType(group));
                final IBazaarStatus status = builder.createBazaarStatus();
                statuses.add(status);
            } else if (eventType == XMLStreamConstants.END_ELEMENT && group.equals(parser.getLocalName())) {
                // End of this group.
                return;
            }
            eventType = parser.next();
        }
    }

    /**
     * Maps a group element name to the corresponding status type.
     *
     * @return the status type, or null for an unrecognized group name
     */
    private static BazaarStatusType getStatusType(final String group) {
        if (group.equals(ADDED)) {
            return BazaarStatusType.CREATED;
        } else if (group.equals(REMOVED)) {
            return BazaarStatusType.DELETED;
        } else if (group.equals(RENAMED)) {
            return BazaarStatusType.RENAMED;
        } else if (group.equals(MODIFIED)) {
            return BazaarStatusType.MODIFIED;
        } else if (group.equals(KIND_CHANGED)) {
            return BazaarStatusType.KIND_CHANGED;
        } else if (group.equals(UNKNOWN)) {
            return BazaarStatusType.UNKNOWN;
        } else if (group.equals(CONFLICTS)) {
            return BazaarStatusType.HAS_CONFLICTS;
        }
        return null;
    }

    /**
     * @return a File for the given relative path, or null for null/empty input
     */
    private static File getAsFile(final String relativePathTofile) {
        if (relativePathTofile != null && !"".equals(relativePathTofile)) {
            return new File(relativePathTofile);
        }
        return null;
    }

    private Set<IBazaarStatus> orderAndCleanup() {
        return XmlBazaarStatus.orderAndCleanup(statuses);
    }

    /**
     * Parses status groups embedded in log output, using the caller's stream
     * reader, until {@code endTag} is closed.
     *
     * NOTE(review): the loop condition stops at the first END_ELEMENT event
     * regardless of its name (endTag is compared against any event type, and
     * getLocalName() is only defined for element events) — confirm this matches
     * the shape of the log XML before tightening it.
     */
    public List<IBazaarStatus> parseForLog(final XMLStreamReader logParser, final String endTag)
            throws XMLStreamException, IOException {
        statuses.clear();
        parser = logParser;
        int eventType = parser.next();
        while ((!endTag.equals(parser.getLocalName()) && eventType != XMLStreamConstants.END_ELEMENT)) {
            if (eventType == XMLStreamConstants.START_ELEMENT && isGroup(parser.getLocalName())) {
                parseGroup();
            }
            eventType = parser.next();
        }
        final Set<IBazaarStatus> mergedSet = orderAndCleanup();
        return Arrays.asList(mergedSet.toArray(new IBazaarStatus[mergedSet.size()]));
    }
}
| |
package com.refinedmods.refinedstorage.api.network.grid;
import com.refinedmods.refinedstorage.api.network.grid.handler.IFluidGridHandler;
import com.refinedmods.refinedstorage.api.network.grid.handler.IItemGridHandler;
import com.refinedmods.refinedstorage.api.storage.cache.IStorageCache;
import com.refinedmods.refinedstorage.api.storage.cache.IStorageCacheListener;
import com.refinedmods.refinedstorage.api.util.IFilter;
import com.refinedmods.refinedstorage.api.util.IStackList;
import net.minecraft.network.chat.Component;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.inventory.CraftingContainer;
import net.minecraft.world.inventory.ResultContainer;
import net.minecraft.world.item.ItemStack;
import net.minecraftforge.items.IItemHandlerModifiable;
import javax.annotation.Nullable;
import java.util.List;
/**
* Represents a grid.
*/
public interface IGrid {
    int TABS_PER_PAGE = 5;

    int SORTING_DIRECTION_ASCENDING = 0;
    int SORTING_DIRECTION_DESCENDING = 1;

    int SORTING_TYPE_QUANTITY = 0;
    int SORTING_TYPE_NAME = 1;
    int SORTING_TYPE_ID = 2;
    int SORTING_TYPE_INVENTORYTWEAKS = 3;
    int SORTING_TYPE_LAST_MODIFIED = 4;

    int SEARCH_BOX_MODE_NORMAL = 0;
    int SEARCH_BOX_MODE_NORMAL_AUTOSELECTED = 1;
    int SEARCH_BOX_MODE_JEI_SYNCHRONIZED = 2;
    int SEARCH_BOX_MODE_JEI_SYNCHRONIZED_AUTOSELECTED = 3;
    int SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY = 4;
    int SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY_AUTOSELECTED = 5;

    int VIEW_TYPE_NORMAL = 0;
    int VIEW_TYPE_NON_CRAFTABLES = 1;
    int VIEW_TYPE_CRAFTABLES = 2;

    int SIZE_STRETCH = 0;
    int SIZE_SMALL = 1;
    int SIZE_MEDIUM = 2;
    int SIZE_LARGE = 3;

    /**
     * @param type the view type to check
     * @return true when the value is one of the known VIEW_TYPE_* constants
     */
    static boolean isValidViewType(int type) {
        switch (type) {
            case VIEW_TYPE_NORMAL:
            case VIEW_TYPE_NON_CRAFTABLES:
            case VIEW_TYPE_CRAFTABLES:
                return true;
            default:
                return false;
        }
    }

    /**
     * @param mode the search box mode to check
     * @return true when the value is one of the known SEARCH_BOX_MODE_* constants
     */
    static boolean isValidSearchBoxMode(int mode) {
        switch (mode) {
            case SEARCH_BOX_MODE_NORMAL:
            case SEARCH_BOX_MODE_NORMAL_AUTOSELECTED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_AUTOSELECTED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY_AUTOSELECTED:
                return true;
            default:
                return false;
        }
    }

    /**
     * @param mode the search box mode to check
     * @return true when the mode automatically selects the search box
     */
    static boolean isSearchBoxModeWithAutoselection(int mode) {
        switch (mode) {
            case SEARCH_BOX_MODE_NORMAL_AUTOSELECTED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_AUTOSELECTED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY_AUTOSELECTED:
                return true;
            default:
                return false;
        }
    }

    /**
     * @param mode the search box mode to check
     * @return true when the mode synchronizes with JEI
     */
    static boolean doesSearchBoxModeUseJEI(int mode) {
        switch (mode) {
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_AUTOSELECTED:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY:
            case SEARCH_BOX_MODE_JEI_SYNCHRONIZED_2WAY_AUTOSELECTED:
                return true;
            default:
                return false;
        }
    }

    /**
     * @param type the sorting type to check
     * @return true when the value is one of the known SORTING_TYPE_* constants
     */
    static boolean isValidSortingType(int type) {
        switch (type) {
            case SORTING_TYPE_QUANTITY:
            case SORTING_TYPE_NAME:
            case SORTING_TYPE_ID:
            case SORTING_TYPE_INVENTORYTWEAKS:
            case SORTING_TYPE_LAST_MODIFIED:
                return true;
            default:
                return false;
        }
    }

    /**
     * @param direction the sorting direction to check
     * @return true when the value is one of the known SORTING_DIRECTION_* constants
     */
    static boolean isValidSortingDirection(int direction) {
        switch (direction) {
            case SORTING_DIRECTION_ASCENDING:
            case SORTING_DIRECTION_DESCENDING:
                return true;
            default:
                return false;
        }
    }

    /**
     * @param size the size mode to check
     * @return true when the value is one of the known SIZE_* constants
     */
    static boolean isValidSize(int size) {
        switch (size) {
            case SIZE_STRETCH:
            case SIZE_SMALL:
            case SIZE_MEDIUM:
            case SIZE_LARGE:
                return true;
            default:
                return false;
        }
    }

    /**
     * @return the type of this grid
     */
    GridType getGridType();

    /**
     * @param player the player to create a listener for
     * @return a listener for this grid, will be attached to the storage cache in {@link #getStorageCache()}
     */
    IStorageCacheListener createListener(ServerPlayer player);

    /**
     * @return the storage cache backing this grid, or null when the grid is unavailable
     */
    @Nullable
    IStorageCache getStorageCache();

    /**
     * @return the item grid handler, or null when none is available
     */
    @Nullable
    IItemGridHandler getItemHandler();

    /**
     * @return the fluid grid handler, or null when none is available
     */
    @Nullable
    IFluidGridHandler getFluidHandler();

    /**
     * @param listener the crafting listener to register
     */
    default void addCraftingListener(ICraftingGridListener listener) {
    }

    /**
     * @param listener the crafting listener to unregister
     */
    default void removeCraftingListener(ICraftingGridListener listener) {
    }

    /**
     * @return the title of this grid
     */
    Component getTitle();

    /**
     * @return the active view type
     */
    int getViewType();

    /**
     * @return the active sorting type
     */
    int getSortingType();

    /**
     * @return the active sorting direction
     */
    int getSortingDirection();

    /**
     * @return the active search box mode
     */
    int getSearchBoxMode();

    /**
     * @return the currently selected tab
     */
    int getTabSelected();

    /**
     * @return the page the current tab is on
     */
    int getTabPage();

    /**
     * @return the total number of tab pages
     */
    int getTotalTabPages();

    /**
     * @return the active size mode
     */
    int getSize();

    /**
     * @param type the new view type
     */
    void onViewTypeChanged(int type);

    /**
     * @param type the new sorting type
     */
    void onSortingTypeChanged(int type);

    /**
     * @param direction the new sorting direction
     */
    void onSortingDirectionChanged(int direction);

    /**
     * @param searchBoxMode the new search box mode
     */
    void onSearchBoxModeChanged(int searchBoxMode);

    /**
     * @param size the new size mode
     */
    void onSizeChanged(int size);

    /**
     * @param tab the newly selected tab
     */
    void onTabSelectionChanged(int tab);

    /**
     * @param page the newly selected tab page
     */
    void onTabPageChanged(int page);

    /**
     * @return the filters of this grid
     */
    List<IFilter> getFilters();

    /**
     * @return the tabs of this grid
     */
    List<IGridTab> getTabs();

    /**
     * @return the inventory holding the filters
     */
    IItemHandlerModifiable getFilter();

    /**
     * @return the crafting matrix, or null when this is not a crafting grid
     */
    @Nullable
    CraftingContainer getCraftingMatrix();

    /**
     * @return the crafting result inventory, or null when this is not a crafting grid
     */
    @Nullable
    ResultContainer getCraftingResult();

    /**
     * Called when the crafting matrix changes.
     */
    void onCraftingMatrixChanged();

    /**
     * Called when an item is crafted in a crafting grid.
     *
     * @param player         the player that crafted the item
     * @param availableItems the items available for shift crafting
     * @param usedItems      the items used by shift crafting
     */
    void onCrafted(Player player, @Nullable IStackList<ItemStack> availableItems, @Nullable IStackList<ItemStack> usedItems);

    /**
     * Called when the clear button is pressed in the pattern grid or crafting grid.
     */
    void onClear(Player player);

    /**
     * Called when an item is crafted with shift click (up to 64 items) in a crafting grid.
     *
     * @param player the player that crafted the item
     */
    void onCraftedShift(Player player);

    /**
     * Called when a JEI recipe transfer occurs.
     *
     * @param player the player
     * @param recipe a 9*x array stack array, where x is the possible combinations for the given slot
     */
    void onRecipeTransfer(Player player, ItemStack[][] recipe);

    /**
     * Called when the grid is closed.
     *
     * @param player the player
     */
    void onClosed(Player player);

    /**
     * @return true when the grid is active, false otherwise
     */
    boolean isGridActive();

    /**
     * @return the slot id where this grid is located if applicable, otherwise -1
     */
    int getSlotId();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.repair;
import java.io.IOException;
import java.net.InetAddress;
import java.security.MessageDigest;
import java.util.UUID;
import org.apache.cassandra.io.util.SequentialWriter;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.config.KSMetaData;
import org.apache.cassandra.db.BufferDecoratedKey;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.RowIndexEntry;
import org.apache.cassandra.db.compaction.AbstractCompactedRow;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.sstable.ColumnStats;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.locator.SimpleStrategy;
import org.apache.cassandra.net.MessageIn;
import org.apache.cassandra.net.MessageOut;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.sink.IMessageSink;
import org.apache.cassandra.sink.SinkManager;
import org.apache.cassandra.repair.messages.RepairMessage;
import org.apache.cassandra.repair.messages.ValidationComplete;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.MerkleTree;
import org.apache.cassandra.utils.concurrent.SimpleCondition;
import static org.junit.Assert.*;
public class ValidatorTest
{
    private static final String keyspace = "ValidatorTest";
    private static final String columnFamily = "Standard1";

    // Partitioner used to build the token ranges fed to the repair job descriptors.
    private final IPartitioner partitioner = StorageService.getPartitioner();

    @BeforeClass
    public static void defineSchema() throws Exception
    {
        // Minimal schema: a single keyspace (RF=1) with one standard column family.
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(keyspace,
                                    SimpleStrategy.class,
                                    KSMetaData.optsWithRF(1),
                                    SchemaLoader.standardCFMD(keyspace, columnFamily));
    }

    @After
    public void tearDown()
    {
        // Remove the message sink installed by the test so later tests are unaffected.
        SinkManager.clear();
    }

    // Completing a validator should produce a successful VALIDATION_COMPLETE
    // repair message carrying a (non-null) validated Merkle tree.
    @Test
    public void testValidatorComplete() throws Throwable
    {
        Range<Token> range = new Range<>(partitioner.getMinimumToken(), partitioner.getRandomToken());
        final RepairJobDesc desc = new RepairJobDesc(UUID.randomUUID(), UUID.randomUUID(), keyspace, columnFamily, range);

        // Signaled by the sink once the outbound repair message has been asserted on.
        final SimpleCondition lock = new SimpleCondition();

        // Intercept outgoing messages; presumably returning null suppresses the
        // actual network send (see SinkManager) -- TODO confirm.
        SinkManager.add(new IMessageSink()
        {
            @SuppressWarnings("unchecked")
            public MessageOut handleMessage(MessageOut message, int id, InetAddress to)
            {
                try
                {
                    if (message.verb == MessagingService.Verb.REPAIR_MESSAGE)
                    {
                        RepairMessage m = (RepairMessage) message.payload;
                        assertEquals(RepairMessage.Type.VALIDATION_COMPLETE, m.messageType);
                        assertEquals(desc, m.desc);
                        // Successful validation must include the Merkle tree.
                        assertTrue(((ValidationComplete)m).success);
                        assertNotNull(((ValidationComplete)m).tree);
                    }
                }
                finally
                {
                    // Always release the waiting test thread, even on assertion failure.
                    lock.signalAll();
                }
                return null;
            }

            public MessageIn handleMessage(MessageIn message, int id, InetAddress to)
            {
                return null;
            }
        });

        InetAddress remote = InetAddress.getByName("127.0.0.2");
        ColumnFamilyStore cfs = Keyspace.open(keyspace).getColumnFamilyStore(columnFamily);

        Validator validator = new Validator(desc, remote, 0);
        MerkleTree tree = new MerkleTree(cfs.partitioner, validator.desc.range, MerkleTree.RECOMMENDED_DEPTH, (int) Math.pow(2, 15));
        validator.prepare(cfs, tree);

        // and confirm that the tree was split
        assertTrue(tree.size() > 1);

        // add a row
        Token mid = partitioner.midpoint(range.left, range.right);
        validator.add(new CompactedRowStub(new BufferDecoratedKey(mid, ByteBufferUtil.bytes("inconceivable!"))));
        validator.complete();

        // confirm that the tree was validated
        Token min = tree.partitioner().getMinimumToken();
        assertNotNull(tree.hash(new Range<>(min, min)));

        // Wait until the sink has seen (and asserted on) the completion message.
        if (!lock.isSignaled())
            lock.await();
    }

    // Minimal AbstractCompactedRow for feeding Validator.add(): every operation
    // except update()/close() throws, which is sufficient for this test.
    private static class CompactedRowStub extends AbstractCompactedRow
    {
        private CompactedRowStub(DecoratedKey key)
        {
            super(key);
        }

        public RowIndexEntry write(long currentPosition, SequentialWriter out) throws IOException
        {
            throw new UnsupportedOperationException();
        }

        // Intentionally a no-op: the digest content is irrelevant here.
        public void update(MessageDigest digest) { }

        public ColumnStats columnStats()
        {
            throw new UnsupportedOperationException();
        }

        public void close() throws IOException { }
    }

    // Failing a validator should produce an unsuccessful VALIDATION_COMPLETE
    // repair message with no Merkle tree attached.
    @Test
    public void testValidatorFailed() throws Throwable
    {
        Range<Token> range = new Range<>(partitioner.getMinimumToken(), partitioner.getRandomToken());
        final RepairJobDesc desc = new RepairJobDesc(UUID.randomUUID(), UUID.randomUUID(), keyspace, columnFamily, range);

        // Signaled by the sink once the outbound repair message has been asserted on.
        final SimpleCondition lock = new SimpleCondition();

        SinkManager.add(new IMessageSink()
        {
            @SuppressWarnings("unchecked")
            public MessageOut handleMessage(MessageOut message, int id, InetAddress to)
            {
                try
                {
                    if (message.verb == MessagingService.Verb.REPAIR_MESSAGE)
                    {
                        RepairMessage m = (RepairMessage) message.payload;
                        assertEquals(RepairMessage.Type.VALIDATION_COMPLETE, m.messageType);
                        assertEquals(desc, m.desc);
                        // A failed validation carries no tree.
                        assertFalse(((ValidationComplete) m).success);
                        assertNull(((ValidationComplete)m).tree);
                    }
                }
                finally
                {
                    // Always release the waiting test thread, even on assertion failure.
                    lock.signalAll();
                }
                return null;
            }

            public MessageIn handleMessage(MessageIn message, int id, InetAddress to)
            {
                return null;
            }
        });

        InetAddress remote = InetAddress.getByName("127.0.0.2");
        Validator validator = new Validator(desc, remote, 0);
        validator.fail();

        // Wait until the sink has seen (and asserted on) the failure message.
        if (!lock.isSignaled())
            lock.await();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.RTimer;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* Refer SOLR-281
*
*/
public class SearchHandler extends RequestHandlerBase implements SolrCoreAware , PluginInfoInitialized
{
static final String INIT_COMPONENTS = "components";
static final String INIT_FIRST_COMPONENTS = "first-components";
static final String INIT_LAST_COMPONENTS = "last-components";
protected static Logger log = LoggerFactory.getLogger(SearchHandler.class);
protected List<SearchComponent> components = null;
private ShardHandlerFactory shardHandlerFactory ;
private PluginInfo shfInfo;
protected List<String> getDefaultComponents()
{
ArrayList<String> names = new ArrayList<String>(6);
names.add( QueryComponent.COMPONENT_NAME );
names.add( FacetComponent.COMPONENT_NAME );
names.add( MoreLikeThisComponent.COMPONENT_NAME );
names.add( HighlightComponent.COMPONENT_NAME );
names.add( StatsComponent.COMPONENT_NAME );
names.add( DebugComponent.COMPONENT_NAME );
return names;
}
@Override
public void init(PluginInfo info) {
init(info.initArgs);
for (PluginInfo child : info.children) {
if("shardHandlerFactory".equals(child.type)){
this.shfInfo = child;
break;
}
}
}
/**
* Initialize the components based on name. Note, if using <code>INIT_FIRST_COMPONENTS</code> or <code>INIT_LAST_COMPONENTS</code>,
* then the {@link DebugComponent} will always occur last. If this is not desired, then one must explicitly declare all components using
* the <code>INIT_COMPONENTS</code> syntax.
*/
@Override
@SuppressWarnings("unchecked")
public void inform(SolrCore core)
{
Object declaredComponents = initArgs.get(INIT_COMPONENTS);
List<String> first = (List<String>) initArgs.get(INIT_FIRST_COMPONENTS);
List<String> last = (List<String>) initArgs.get(INIT_LAST_COMPONENTS);
List<String> list = null;
boolean makeDebugLast = true;
if( declaredComponents == null ) {
// Use the default component list
list = getDefaultComponents();
if( first != null ) {
List<String> clist = first;
clist.addAll( list );
list = clist;
}
if( last != null ) {
list.addAll( last );
}
}
else {
list = (List<String>)declaredComponents;
if( first != null || last != null ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"First/Last components only valid if you do not declare 'components'");
}
makeDebugLast = false;
}
// Build the component list
components = new ArrayList<SearchComponent>( list.size() );
DebugComponent dbgCmp = null;
for(String c : list){
SearchComponent comp = core.getSearchComponent( c );
if (comp instanceof DebugComponent && makeDebugLast == true){
dbgCmp = (DebugComponent) comp;
} else {
components.add(comp);
log.debug("Adding component:"+comp);
}
}
if (makeDebugLast == true && dbgCmp != null){
components.add(dbgCmp);
log.debug("Adding debug component:" + dbgCmp);
}
if(shfInfo ==null) {
shardHandlerFactory = core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
} else {
shardHandlerFactory = core.createInitInstance(shfInfo, ShardHandlerFactory.class, null, null);
core.addCloseHook(new CloseHook() {
@Override
public void preClose(SolrCore core) {
shardHandlerFactory.close();
}
@Override
public void postClose(SolrCore core) {
}
});
}
}
public List<SearchComponent> getComponents() {
return components;
}
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
{
// int sleep = req.getParams().getInt("sleep",0);
// if (sleep > 0) {log.error("SLEEPING for " + sleep); Thread.sleep(sleep);}
ResponseBuilder rb = new ResponseBuilder(req, rsp, components);
if (rb.requestInfo != null) {
rb.requestInfo.setResponseBuilder(rb);
}
boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);
rb.setDebug(dbg);
if (dbg == false){//if it's true, we are doing everything anyway.
SolrPluginUtils.getDebugInterests(req.getParams().getParams(CommonParams.DEBUG), rb);
}
final RTimer timer = rb.isDebug() ? new RTimer() : null;
ShardHandler shardHandler1 = shardHandlerFactory.getShardHandler();
shardHandler1.checkDistributed(rb);
if (timer == null) {
// non-debugging prepare phase
for( SearchComponent c : components ) {
c.prepare(rb);
}
} else {
// debugging prepare phase
RTimer subt = timer.sub( "prepare" );
for( SearchComponent c : components ) {
rb.setTimer( subt.sub( c.getName() ) );
c.prepare(rb);
rb.getTimer().stop();
}
subt.stop();
}
if (!rb.isDistrib) {
// a normal non-distributed request
// The semantics of debugging vs not debugging are different enough that
// it makes sense to have two control loops
if(!rb.isDebug()) {
// Process
for( SearchComponent c : components ) {
c.process(rb);
}
}
else {
// Process
RTimer subt = timer.sub( "process" );
for( SearchComponent c : components ) {
rb.setTimer( subt.sub( c.getName() ) );
c.process(rb);
rb.getTimer().stop();
}
subt.stop();
timer.stop();
// add the timing info
if (rb.isDebugTimings()) {
rb.addDebugInfo("timing", timer.asNamedList() );
}
}
} else {
// a distributed request
if (rb.outgoing == null) {
rb.outgoing = new LinkedList<ShardRequest>();
}
rb.finished = new ArrayList<ShardRequest>();
int nextStage = 0;
do {
rb.stage = nextStage;
nextStage = ResponseBuilder.STAGE_DONE;
// call all components
for( SearchComponent c : components ) {
// the next stage is the minimum of what all components report
nextStage = Math.min(nextStage, c.distributedProcess(rb));
}
// check the outgoing queue and send requests
while (rb.outgoing.size() > 0) {
// submit all current request tasks at once
while (rb.outgoing.size() > 0) {
ShardRequest sreq = rb.outgoing.remove(0);
sreq.actualShards = sreq.shards;
if (sreq.actualShards==ShardRequest.ALL_SHARDS) {
sreq.actualShards = rb.shards;
}
sreq.responses = new ArrayList<ShardResponse>();
// TODO: map from shard to address[]
for (String shard : sreq.actualShards) {
ModifiableSolrParams params = new ModifiableSolrParams(sreq.params);
params.remove(ShardParams.SHARDS); // not a top-level request
params.set("distrib", "false"); // not a top-level request
params.remove("indent");
params.remove(CommonParams.HEADER_ECHO_PARAMS);
params.set(ShardParams.IS_SHARD, true); // a sub (shard) request
params.set(ShardParams.SHARD_URL, shard); // so the shard knows what was asked
if (rb.requestInfo != null) {
// we could try and detect when this is needed, but it could be tricky
params.set("NOW", Long.toString(rb.requestInfo.getNOW().getTime()));
}
String shardQt = params.get(ShardParams.SHARDS_QT);
if (shardQt == null) {
params.remove(CommonParams.QT);
} else {
params.set(CommonParams.QT, shardQt);
}
shardHandler1.submit(sreq, shard, params);
}
}
// now wait for replies, but if anyone puts more requests on
// the outgoing queue, send them out immediately (by exiting
// this loop)
boolean tolerant = rb.req.getParams().getBool(ShardParams.SHARDS_TOLERANT, false);
while (rb.outgoing.size() == 0) {
ShardResponse srsp = tolerant ?
shardHandler1.takeCompletedIncludingErrors():
shardHandler1.takeCompletedOrError();
if (srsp == null) break; // no more requests to wait for
// Was there an exception?
if (srsp.getException() != null) {
// If things are not tolerant, abort everything and rethrow
if(!tolerant) {
shardHandler1.cancelAll();
if (srsp.getException() instanceof SolrException) {
throw (SolrException)srsp.getException();
} else {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, srsp.getException());
}
}
}
rb.finished.add(srsp.getShardRequest());
// let the components see the responses to the request
for(SearchComponent c : components) {
c.handleResponses(rb, srsp.getShardRequest());
}
}
}
for(SearchComponent c : components) {
c.finishStage(rb);
}
// we are done when the next stage is MAX_VALUE
} while (nextStage != Integer.MAX_VALUE);
}
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
  // Describe this handler by listing the names of its configured search components.
  // NOTE: each name is followed by a comma, including the last one (existing format kept).
  final StringBuilder description = new StringBuilder("Search using components: ");
  if (components != null) {
    for (final SearchComponent component : components) {
      description.append(component.getName()).append(',');
    }
  }
  return description.toString();
}
@Override
public String getSource() {
  // SolrInfoMBean metadata: fixed SVN URL identifying the origin of this source file.
  return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_4_2/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java $";
}
// Returns the shard-handler factory held by this handler (e.g. so callers can
// create their own shard handlers for distributed sub-requests).
public ShardHandlerFactory getShardHandlerFactory() {
  return shardHandlerFactory;
}
}
// TODO: generalize how a comm component can fit into search component framework
// TODO: statics should be per-core singletons
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.alchemy.utils;
import org.apache.commons.lang.StringUtils;
import org.apache.uima.alchemy.digester.domain.AnnotatedResults;
import org.apache.uima.alchemy.digester.domain.CategorizationResults;
import org.apache.uima.alchemy.digester.domain.EntitiesResults;
import org.apache.uima.alchemy.digester.domain.Entity;
import org.apache.uima.alchemy.digester.domain.Keyword;
import org.apache.uima.alchemy.digester.domain.KeywordResults;
import org.apache.uima.alchemy.digester.domain.LanguageDetectionResults;
import org.apache.uima.alchemy.digester.domain.Microformat;
import org.apache.uima.alchemy.digester.domain.MicroformatsResults;
import org.apache.uima.alchemy.digester.domain.Results;
import org.apache.uima.alchemy.ts.categorization.Category;
import org.apache.uima.alchemy.ts.entity.AlchemyAnnotation;
import org.apache.uima.alchemy.ts.keywords.KeywordFS;
import org.apache.uima.alchemy.ts.language.LanguageFS;
import org.apache.uima.alchemy.ts.microformats.MicroformatFS;
import org.apache.uima.alchemy.utils.exception.MappingException;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.StringArray;
/**
 * Maps AlchemyAPI digester result objects onto feature structures in a UIMA
 * {@link JCas} — one static mapping method per Alchemy service result type.
 * All methods also set the CAS document language from the shared result metadata.
 */
public class Alchemy2TypeSystemMapper {
  /**
   * Maps ranked-entity results into the CAS. Each entity is instantiated by
   * reflection from the generated type-system class
   * {@code org.apache.uima.alchemy.ts.entity.<entityType>}.
   *
   * @param results entity results returned by the Alchemy digester
   * @param aJCas target CAS; its document language is also set
   * @throws MappingException if reflective instantiation or feature assignment fails
   */
  public static void mapRankedEntities(EntitiesResults results, JCas aJCas) throws MappingException {
    setLanaguage(results, aJCas);
    for (Entity entity : results.getEntities().getEntities()) {
      try {
        // use reflection to instantiate classes of the proper type in the type system
        Object fsObject;
        try {// usually jcas gen creates the constructor with jcas argument as the second one
          // NOTE(review): getConstructors() ordering is not guaranteed by the JVM spec;
          // this index-based selection relies on JCasGen's generated ordering — verify.
          fsObject = Class.forName("org.apache.uima.alchemy.ts.entity." + entity.getType())
                  .getConstructors()[1].newInstance(aJCas);
        } catch (Exception e) { // for exceptional cases in which jcas parameter constructor is the
          // first
          fsObject = Class.forName("org.apache.uima.alchemy.ts.entity." + entity.getType())
                  .getConstructors()[0].newInstance(aJCas);
        }
        FeatureStructure fs = (FeatureStructure) fsObject;
        Type type = fs.getType();
        fs.setFeatureValueFromString(type.getFeatureByBaseName("count"), entity.getCount()); // count
        fs.setFeatureValueFromString(type.getFeatureByBaseName("text"), entity.getText()); // text
        fs.setFeatureValueFromString(type.getFeatureByBaseName("relevance"), entity.getRelevance()); // relevance
        if (entity.getDisambiguated() != null) {
          // Copy every disambiguation feature verbatim from the result object.
          fs.setFeatureValueFromString(type.getFeatureByBaseName("disambiguation"), entity
                  .getDisambiguated().getName()); // disambiguation name
          fs.setFeatureValueFromString(type.getFeatureByBaseName("dbpedia"), entity
                  .getDisambiguated().getDbpedia()); // dbpedia
          fs.setFeatureValueFromString(type.getFeatureByBaseName("website"), entity
                  .getDisambiguated().getWebsite()); // website
          fs.setFeatureValueFromString(type.getFeatureByBaseName("subType"), entity
                  .getDisambiguated().getSubType()); // subtype
          fs.setFeatureValueFromString(type.getFeatureByBaseName("geo"), entity.getDisambiguated()
                  .getGeo()); // geo
          fs.setFeatureValueFromString(type.getFeatureByBaseName("opencyc"), entity
                  .getDisambiguated().getOpencyc()); // opencyc
          fs.setFeatureValueFromString(type.getFeatureByBaseName("yago"), entity.getDisambiguated()
                  .getYago()); // yago
          fs.setFeatureValueFromString(type.getFeatureByBaseName("umbel"), entity
                  .getDisambiguated().getUmbel()); // umbel
          fs.setFeatureValueFromString(type.getFeatureByBaseName("freebase"), entity
                  .getDisambiguated().getFreebase()); // freebase
          fs.setFeatureValueFromString(type.getFeatureByBaseName("ciaFactbook"), entity
                  .getDisambiguated().getCiaFactbook()); // ciaFactbook
          fs.setFeatureValueFromString(type.getFeatureByBaseName("census"), entity
                  .getDisambiguated().getCensus()); // census
          fs.setFeatureValueFromString(type.getFeatureByBaseName("geonames"), entity
                  .getDisambiguated().getGeonames()); // geonames
          fs.setFeatureValueFromString(type.getFeatureByBaseName("musicBrainz"), entity
                  .getDisambiguated().getMusicBrainz()); // musicBrainz
        }
        if (entity.getQuotations() != null && entity.getQuotations().getQuotations() != null
                && entity.getQuotations().getQuotations().size() > 0) {
          // Collect quotations into a StringArray feature structure.
          StringArray quotationsFeatureStructure = new StringArray(aJCas, entity.getQuotations()
                  .getQuotations().size());
          int i = 0;
          for (String quotation : entity.getQuotations().getQuotations()) {
            quotationsFeatureStructure.set(i, quotation);
            i++;
          }
          // NOTE(review): "quotatiotans" looks like a typo for "quotations" — if the type
          // system descriptor spells the feature correctly, this lookup returns null.
          // Verify against the descriptor before changing either side.
          fs.setFeatureValue(type.getFeatureByBaseName("quotatiotans"), quotationsFeatureStructure);
        }
        aJCas.addFsToIndexes(fs);
      } catch (Exception e) {
        throw new MappingException(e);
      }
    }
  }
  // Sets the CAS document language from the shared result metadata.
  // (The method name "setLanaguage" is an existing typo, kept for source compatibility.)
  private static void setLanaguage(Results results, JCas aJCas) {
    aJCas.setDocumentLanguage(results.getLanguage());
  }
  /**
   * Maps annotated-text results: scans the annotated text for spans of the form
   * {@code TYPE[[TEXT]]} and creates one {@link AlchemyAnnotation} per span.
   * The markup is progressively stripped so later span offsets stay aligned.
   */
  public static void mapAnnotatedEntities(AnnotatedResults results, JCas aJCas) {
    setLanaguage(results, aJCas);
    String annotatedText = results.getAnnotatedText();
    // find strings of pattern 'TYPE[TEXT'
    String[] ants = StringUtils.substringsBetween(annotatedText, "[", "]");
    // map the ants to UIMA CAS
    for (String ant : ants) {
      if (ant.indexOf("[") > 0) {
        AlchemyAnnotation alchemyAnnotation = new AlchemyAnnotation(aJCas);
        int indexOfAnt = annotatedText.indexOf(ant);
        alchemyAnnotation.setBegin(indexOfAnt - 1);
        String antText = ant.substring(ant.indexOf("[") + 1);
        alchemyAnnotation.setEnd(indexOfAnt + antText.length() - 1);
        String antType = ant.substring(0, ant.indexOf("["));
        alchemyAnnotation.setAlchemyType(antType);
        alchemyAnnotation.addToIndexes();
        // Remove this span's markup so offsets computed for subsequent spans
        // refer to the progressively un-annotated text.
        annotatedText = annotatedText.replaceFirst("\\[" + ant.replace("[", "\\[") + "\\]\\]",
                antText);
      }
    }
  }
  /**
   * Maps a categorization result into a single {@link Category} feature structure.
   *
   * @throws MappingException if feature assignment fails
   */
  public static void mapCategorizationEntity(CategorizationResults results, JCas aJCas)
          throws MappingException {
    setLanaguage(results, aJCas);
    try {
      FeatureStructure fs = new Category(aJCas);
      Type type = fs.getType();
      fs.setFeatureValueFromString(type.getFeatureByBaseName("score"), results.getScore());
      fs.setFeatureValueFromString(type.getFeatureByBaseName("text"), results.getCategory());
      aJCas.addFsToIndexes(fs);
    } catch (Exception e) {
      e.printStackTrace();
      throw new MappingException(e);
    }
  }
  /**
   * Maps keyword results: one {@link KeywordFS} per extracted keyword.
   *
   * @throws MappingException if feature assignment fails for a keyword
   */
  public static void mapKeywordEntity(KeywordResults results, JCas aJCas) throws MappingException {
    setLanaguage(results, aJCas);
    for (Keyword k : results.getKeywords()) {
      try {
        KeywordFS fs = new KeywordFS(aJCas);
        Type type = fs.getType();
        fs.setFeatureValueFromString(type.getFeatureByBaseName("text"), k.getText()); // text
        fs.addToIndexes();
      } catch (Exception e) {
        throw new MappingException(e);
      }
    }
  }
  // Maps microformat results: one MicroformatFS (fieldName/fieldData) per microformat.
  public static void mapMicroformats(MicroformatsResults results, JCas aJCas) {
    setLanaguage(results, aJCas);
    for (Microformat microformat : results.getMicroformats()) {
      MicroformatFS microformatFS = new MicroformatFS(aJCas);
      Type type = microformatFS.getType();
      microformatFS.setFeatureValueFromString(type.getFeatureByBaseName("fieldName"), microformat
              .getFieldName());
      microformatFS.setFeatureValueFromString(type.getFeatureByBaseName("fieldData"), microformat
              .getFieldData());
      microformatFS.addToIndexes();
    }
  }
  // Maps language-detection results into a single LanguageFS with ISO codes and metadata.
  public static void mapLanguageDetection(LanguageDetectionResults results, JCas aJCas) {
    setLanaguage(results, aJCas);
    LanguageFS languageFS = new LanguageFS(aJCas);
    Type type = languageFS.getType();
    languageFS.setFeatureValueFromString(type.getFeatureByBaseName("language"), results
            .getLanguage());
    languageFS
            .setFeatureValueFromString(type.getFeatureByBaseName("iso6391"), results.getIso6391());
    languageFS
            .setFeatureValueFromString(type.getFeatureByBaseName("iso6392"), results.getIso6392());
    languageFS
            .setFeatureValueFromString(type.getFeatureByBaseName("iso6393"), results.getIso6393());
    languageFS.setFeatureValueFromString(type.getFeatureByBaseName("ethnologue"), results
            .getEthnologue());
    languageFS.setFeatureValueFromString(type.getFeatureByBaseName("nativeSpeakers"), results
            .getNativeSpeakers());
    languageFS.setFeatureValueFromString(type.getFeatureByBaseName("wikipedia"), results
            .getWikipedia());
    languageFS.addToIndexes();
  }
}
| |
// Copyright 2019 takahashikzn
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package jp.root42.indolently.regex;
import java.util.Objects;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import jp.root42.indolently.$list;
import jp.root42.indolently.Indolently;
/**
* {@link Regex} implementation.
*
* @author takahashikzn
*/
/**
 * {@link Regex} implementation backed by {@code java.util.regex}.
 *
 * @author takahashikzn
 */
public final class RegexJDK
    implements RegexBase<Pattern, ReMatcherJDK> {

    /** The wrapped JDK pattern. */
    private final Pattern pattern;

    public RegexJDK(final Pattern pattern) { this.pattern = pattern; }

    @Override
    public Pattern ptrn() { return this.pattern; }

    @Override
    public String pattern() { return this.pattern.pattern(); }

    @Override
    public ReMatcherJDK matcher(final CharSequence cs) {
        return new ReMatcherJDK(this.pattern.matcher(cs), cs);
    }

    @Override
    public $list<String> split(final CharSequence cs, final int limit) {
        return Indolently.list(this.pattern.split(cs, limit));
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) { return true; }
        if (!(o instanceof Regex)) { return false; }
        return this.pattern.equals(((Regex) o).ptrn());
    }

    @Override
    public int hashCode() { return Objects.hash(this.getClass(), this.pattern); }

    @Override
    public String toString() { return this.pattern.toString(); }
}
/**
 * {@link ReMatcher} implementation that delegates every operation to a wrapped
 * JDK {@link Matcher}, additionally retaining the matched text as a string.
 */
final class ReMatcherJDK
    implements ReMatcher<Pattern, Matcher> {

    /** The wrapped JDK matcher. */
    private final Matcher delegate;

    /** The matched input, captured eagerly as a string. */
    private final String input;

    public ReMatcherJDK(final Matcher matcher, final CharSequence text) {
        this.delegate = matcher;
        this.input = text.toString();
    }

    @Override
    public int hashCode() { return this.delegate.hashCode(); }

    @Override
    public boolean equals(final Object o) {
        if (o == this) { return true; }
        if (!(o instanceof ReMatcherJDK)) { return false; }
        return ((ReMatcherJDK) o).delegate.equals(this.delegate);
    }

    @Override
    public String toString() { return this.delegate.toString(); }

    @Override
    public String text() { return this.input; }

    // --- plain delegation below: one line per Matcher operation ---

    @Override
    public Pattern pattern() { return this.delegate.pattern(); }

    @Override
    public MatchResult toMatchResult() { return this.delegate.toMatchResult(); }

    @Override
    public Matcher usePattern(final Pattern newPattern) { return this.delegate.usePattern(newPattern); }

    @Override
    public Matcher reset() { return this.delegate.reset(); }

    @Override
    public Matcher reset(final CharSequence input) { return this.delegate.reset(input); }

    @Override
    public int start() { return this.delegate.start(); }

    @Override
    public int start(final int group) { return this.delegate.start(group); }

    @Override
    public int start(final String name) { return this.delegate.start(name); }

    @Override
    public int end() { return this.delegate.end(); }

    @Override
    public int end(final int group) { return this.delegate.end(group); }

    @Override
    public int end(final String name) { return this.delegate.end(name); }

    @Override
    public String group() { return this.delegate.group(); }

    @Override
    public String group(final int group) { return this.delegate.group(group); }

    @Override
    public String group(final String name) { return this.delegate.group(name); }

    @Override
    public int groupCount() { return this.delegate.groupCount(); }

    @Override
    public boolean matches() { return this.delegate.matches(); }

    @Override
    public boolean find() { return this.delegate.find(); }

    @Override
    public boolean find(final int start) { return this.delegate.find(start); }

    @Override
    public boolean lookingAt() { return this.delegate.lookingAt(); }

    @Override
    public Matcher appendReplacement(final StringBuilder sb, final String replacement) {
        return this.delegate.appendReplacement(sb, replacement);
    }

    @Override
    public Matcher appendReplacement(final StringBuffer sb, final String replacement) {
        return this.delegate.appendReplacement(sb, replacement);
    }

    @Override
    public StringBuilder appendTail(final StringBuilder sb) { return this.delegate.appendTail(sb); }

    @Override
    public StringBuffer appendTail(final StringBuffer sb) { return this.delegate.appendTail(sb); }

    @Override
    public String replaceAll(final String replacement) { return this.delegate.replaceAll(replacement); }

    @Override
    public String replaceFirst(final String replacement) { return this.delegate.replaceFirst(replacement); }

    @Override
    public Matcher region(final int start, final int end) { return this.delegate.region(start, end); }

    @Override
    public int regionStart() { return this.delegate.regionStart(); }

    @Override
    public int regionEnd() { return this.delegate.regionEnd(); }

    @Override
    public boolean hasTransparentBounds() { return this.delegate.hasTransparentBounds(); }

    @Override
    public Matcher useTransparentBounds(final boolean b) { return this.delegate.useTransparentBounds(b); }

    @Override
    public boolean hasAnchoringBounds() { return this.delegate.hasAnchoringBounds(); }

    @Override
    public Matcher useAnchoringBounds(final boolean b) { return this.delegate.useAnchoringBounds(b); }

    @Override
    public boolean hitEnd() { return this.delegate.hitEnd(); }

    @Override
    public boolean requireEnd() { return this.delegate.requireEnd(); }
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.collision.bih;
import com.jme3.bounding.BoundingBox;
import com.jme3.bounding.BoundingSphere;
import com.jme3.bounding.BoundingVolume;
import com.jme3.collision.Collidable;
import com.jme3.collision.CollisionResults;
import com.jme3.collision.UnsupportedCollisionException;
import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.math.FastMath;
import com.jme3.math.Matrix4f;
import com.jme3.math.Ray;
import com.jme3.math.Vector3f;
import com.jme3.scene.CollisionData;
import com.jme3.scene.Mesh;
import com.jme3.scene.Mesh.Mode;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.scene.mesh.IndexBuffer;
import com.jme3.scene.mesh.VirtualIndexBuffer;
import com.jme3.scene.mesh.WrappedIndexBuffer;
import com.jme3.util.TempVars;
import java.io.IOException;
import static java.lang.Math.max;
import java.nio.FloatBuffer;
public class BIHTree implements CollisionData {
public static final int MAX_TREE_DEPTH = 100;
public static final int MAX_TRIS_PER_NODE = 21;
private Mesh mesh;
private BIHNode root;
private int maxTrisPerNode;
private int numTris;
private float[] pointData;
private int[] triIndices;
// private transient CollisionResults boundResults = new CollisionResults();
private transient float[] bihSwapTmp;
private static final TriangleAxisComparator[] comparators = new TriangleAxisComparator[]
{
new TriangleAxisComparator(0),
new TriangleAxisComparator(1),
new TriangleAxisComparator(2)
};
private void initTriList(FloatBuffer vb, IndexBuffer ib) {
pointData = new float[numTris * 3 * 3];
int p = 0;
for (int i = 0; i < numTris * 3; i += 3) {
int vert = ib.get(i) * 3;
pointData[p++] = vb.get(vert++);
pointData[p++] = vb.get(vert++);
pointData[p++] = vb.get(vert);
vert = ib.get(i + 1) * 3;
pointData[p++] = vb.get(vert++);
pointData[p++] = vb.get(vert++);
pointData[p++] = vb.get(vert);
vert = ib.get(i + 2) * 3;
pointData[p++] = vb.get(vert++);
pointData[p++] = vb.get(vert++);
pointData[p++] = vb.get(vert);
}
triIndices = new int[numTris];
for (int i = 0; i < numTris; i++) {
triIndices[i] = i;
}
}
public BIHTree(Mesh mesh, int maxTrisPerNode) {
this.mesh = mesh;
this.maxTrisPerNode = maxTrisPerNode;
if (maxTrisPerNode < 1 || mesh == null) {
throw new IllegalArgumentException();
}
bihSwapTmp = new float[9];
VertexBuffer vBuffer = mesh.getBuffer(Type.Position);
if(vBuffer == null){
throw new IllegalArgumentException("A mesh should at least contain a Position buffer");
}
IndexBuffer ib = mesh.getIndexBuffer();
FloatBuffer vb = (FloatBuffer) vBuffer.getData();
if (ib == null) {
ib = new VirtualIndexBuffer(mesh.getVertexCount(), mesh.getMode());
} else if (mesh.getMode() != Mode.Triangles) {
ib = new WrappedIndexBuffer(mesh);
}
numTris = ib.size() / 3;
initTriList(vb, ib);
}
public BIHTree(Mesh mesh) {
this(mesh, MAX_TRIS_PER_NODE);
}
public BIHTree() {
}
public void construct() {
BoundingBox sceneBbox = createBox(0, numTris - 1);
root = createNode(0, numTris - 1, sceneBbox, 0);
}
private BoundingBox createBox(int l, int r) {
TempVars vars = TempVars.get();
Vector3f min = vars.vect1.set(new Vector3f(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY));
Vector3f max = vars.vect2.set(new Vector3f(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY));
Vector3f v1 = vars.vect3,
v2 = vars.vect4,
v3 = vars.vect5;
for (int i = l; i <= r; i++) {
getTriangle(i, v1, v2, v3);
BoundingBox.checkMinMax(min, max, v1);
BoundingBox.checkMinMax(min, max, v2);
BoundingBox.checkMinMax(min, max, v3);
}
BoundingBox bbox = new BoundingBox(min, max);
vars.release();
return bbox;
}
int getTriangleIndex(int triIndex) {
return triIndices[triIndex];
}
private int sortTriangles(int l, int r, float split, int axis) {
int pivot = l;
int j = r;
TempVars vars = TempVars.get();
Vector3f v1 = vars.vect1,
v2 = vars.vect2,
v3 = vars.vect3;
while (pivot <= j) {
getTriangle(pivot, v1, v2, v3);
v1.addLocal(v2).addLocal(v3).multLocal(FastMath.ONE_THIRD);
if (v1.get(axis) > split) {
swapTriangles(pivot, j);
--j;
} else {
++pivot;
}
}
vars.release();
pivot = (pivot == l && j < pivot) ? j : pivot;
return pivot;
}
private void setMinMax(BoundingBox bbox, boolean doMin, int axis, float value) {
Vector3f min = bbox.getMin(null);
Vector3f max = bbox.getMax(null);
if (doMin) {
min.set(axis, value);
} else {
max.set(axis, value);
}
bbox.setMinMax(min, max);
}
private float getMinMax(BoundingBox bbox, boolean doMin, int axis) {
if (doMin) {
return bbox.getMin(null).get(axis);
} else {
return bbox.getMax(null).get(axis);
}
}
// private BIHNode createNode2(int l, int r, BoundingBox nodeBbox, int depth){
// if ((r - l) < maxTrisPerNode || depth > 100)
// return createLeaf(l, r);
//
// BoundingBox currentBox = createBox(l, r);
// int axis = depth % 3;
// float split = currentBox.getCenter().get(axis);
//
// TriangleAxisComparator comparator = comparators[axis];
// Arrays.sort(tris, l, r, comparator);
// int splitIndex = -1;
//
// float leftPlane, rightPlane = Float.POSITIVE_INFINITY;
// leftPlane = tris[l].getExtreme(axis, false);
// for (int i = l; i <= r; i++){
// BIHTriangle tri = tris[i];
// if (splitIndex == -1){
// float v = tri.getCenter().get(axis);
// if (v > split){
// if (i == 0){
// // no left plane
// splitIndex = -2;
// }else{
// splitIndex = i;
// // first triangle assigned to right
// rightPlane = tri.getExtreme(axis, true);
// }
// }else{
// // triangle assigned to left
// float ex = tri.getExtreme(axis, false);
// if (ex > leftPlane)
// leftPlane = ex;
// }
// }else{
// float ex = tri.getExtreme(axis, true);
// if (ex < rightPlane)
// rightPlane = ex;
// }
// }
//
// if (splitIndex < 0){
// splitIndex = (r - l) / 2;
//
// leftPlane = Float.NEGATIVE_INFINITY;
// rightPlane = Float.POSITIVE_INFINITY;
//
// for (int i = l; i < splitIndex; i++){
// float ex = tris[i].getExtreme(axis, false);
// if (ex > leftPlane){
// leftPlane = ex;
// }
// }
// for (int i = splitIndex; i <= r; i++){
// float ex = tris[i].getExtreme(axis, true);
// if (ex < rightPlane){
// rightPlane = ex;
// }
// }
// }
//
// BIHNode node = new BIHNode(axis);
// node.leftPlane = leftPlane;
// node.rightPlane = rightPlane;
//
// node.leftIndex = l;
// node.rightIndex = r;
//
// BoundingBox leftBbox = new BoundingBox(currentBox);
// setMinMax(leftBbox, false, axis, split);
// node.left = createNode2(l, splitIndex-1, leftBbox, depth+1);
//
// BoundingBox rightBbox = new BoundingBox(currentBox);
// setMinMax(rightBbox, true, axis, split);
// node.right = createNode2(splitIndex, r, rightBbox, depth+1);
//
// return node;
// }
private BIHNode createNode(int l, int r, BoundingBox nodeBbox, int depth) {
if ((r - l) < maxTrisPerNode || depth > MAX_TREE_DEPTH) {
return new BIHNode(l, r);
}
BoundingBox currentBox = createBox(l, r);
Vector3f exteriorExt = nodeBbox.getExtent(null);
Vector3f interiorExt = currentBox.getExtent(null);
exteriorExt.subtractLocal(interiorExt);
int axis = 0;
if (exteriorExt.x > exteriorExt.y) {
if (exteriorExt.x > exteriorExt.z) {
axis = 0;
} else {
axis = 2;
}
} else {
if (exteriorExt.y > exteriorExt.z) {
axis = 1;
} else {
axis = 2;
}
}
if (exteriorExt.equals(Vector3f.ZERO)) {
axis = 0;
}
// Arrays.sort(tris, l, r, comparators[axis]);
float split = currentBox.getCenter().get(axis);
int pivot = sortTriangles(l, r, split, axis);
if (pivot == l || pivot == r) {
pivot = (r + l) / 2;
}
//If one of the partitions is empty, continue with recursion: same level but different bbox
if (pivot < l) {
//Only right
BoundingBox rbbox = new BoundingBox(currentBox);
setMinMax(rbbox, true, axis, split);
return createNode(l, r, rbbox, depth + 1);
} else if (pivot > r) {
//Only left
BoundingBox lbbox = new BoundingBox(currentBox);
setMinMax(lbbox, false, axis, split);
return createNode(l, r, lbbox, depth + 1);
} else {
//Build the node
BIHNode node = new BIHNode(axis);
//Left child
BoundingBox lbbox = new BoundingBox(currentBox);
setMinMax(lbbox, false, axis, split);
//The left node right border is the plane most right
node.setLeftPlane(getMinMax(createBox(l, max(l, pivot - 1)), false, axis));
node.setLeftChild(createNode(l, max(l, pivot - 1), lbbox, depth + 1)); //Recursive call
//Right Child
BoundingBox rbbox = new BoundingBox(currentBox);
setMinMax(rbbox, true, axis, split);
//The right node left border is the plane most left
node.setRightPlane(getMinMax(createBox(pivot, r), true, axis));
node.setRightChild(createNode(pivot, r, rbbox, depth + 1)); //Recursive call
return node;
}
}
public void getTriangle(int index, Vector3f v1, Vector3f v2, Vector3f v3) {
int pointIndex = index * 9;
v1.x = pointData[pointIndex++];
v1.y = pointData[pointIndex++];
v1.z = pointData[pointIndex++];
v2.x = pointData[pointIndex++];
v2.y = pointData[pointIndex++];
v2.z = pointData[pointIndex++];
v3.x = pointData[pointIndex++];
v3.y = pointData[pointIndex++];
v3.z = pointData[pointIndex++];
}
public void swapTriangles(int index1, int index2) {
int p1 = index1 * 9;
int p2 = index2 * 9;
// store p1 in tmp
System.arraycopy(pointData, p1, bihSwapTmp, 0, 9);
// copy p2 to p1
System.arraycopy(pointData, p2, pointData, p1, 9);
// copy tmp to p2
System.arraycopy(bihSwapTmp, 0, pointData, p2, 9);
// swap indices
int tmp2 = triIndices[index1];
triIndices[index1] = triIndices[index2];
triIndices[index2] = tmp2;
}
private int collideWithRay(Ray r,
Matrix4f worldMatrix,
BoundingVolume worldBound,
CollisionResults results) {
TempVars vars = TempVars.get();
try {
CollisionResults boundResults = vars.collisionResults;
boundResults.clear();
worldBound.collideWith(r, boundResults);
if (boundResults.size() > 0) {
float tMin = boundResults.getClosestCollision().getDistance();
float tMax = boundResults.getFarthestCollision().getDistance();
if (tMax <= 0) {
tMax = Float.POSITIVE_INFINITY;
} else if (tMin == tMax) {
tMin = 0;
}
if (tMin <= 0) {
tMin = 0;
}
if (r.getLimit() < Float.POSITIVE_INFINITY) {
tMax = Math.min(tMax, r.getLimit());
if (tMin > tMax){
return 0;
}
}
// return root.intersectBrute(r, worldMatrix, this, tMin, tMax, results);
return root.intersectWhere(r, worldMatrix, this, tMin, tMax, results);
}
return 0;
} finally {
vars.release();
}
}
private int collideWithBoundingVolume(BoundingVolume bv,
Matrix4f worldMatrix,
CollisionResults results) {
BoundingBox bbox;
if (bv instanceof BoundingSphere) {
BoundingSphere sphere = (BoundingSphere) bv;
bbox = new BoundingBox(bv.getCenter().clone(), sphere.getRadius(),
sphere.getRadius(),
sphere.getRadius());
} else if (bv instanceof BoundingBox) {
bbox = new BoundingBox((BoundingBox) bv);
} else {
throw new UnsupportedCollisionException();
}
bbox.transform(worldMatrix.invert(), bbox);
return root.intersectWhere(bv, bbox, worldMatrix, this, results);
}
public int collideWith(Collidable other,
Matrix4f worldMatrix,
BoundingVolume worldBound,
CollisionResults results) {
if (other instanceof Ray) {
Ray ray = (Ray) other;
return collideWithRay(ray, worldMatrix, worldBound, results);
} else if (other instanceof BoundingVolume) {
BoundingVolume bv = (BoundingVolume) other;
return collideWithBoundingVolume(bv, worldMatrix, results);
} else {
throw new UnsupportedCollisionException();
}
}
public void write(JmeExporter ex) throws IOException {
OutputCapsule oc = ex.getCapsule(this);
oc.write(mesh, "mesh", null);
oc.write(root, "root", null);
oc.write(maxTrisPerNode, "tris_per_node", 0);
oc.write(pointData, "points", null);
oc.write(triIndices, "indices", null);
}
public void read(JmeImporter im) throws IOException {
InputCapsule ic = im.getCapsule(this);
mesh = (Mesh) ic.readSavable("mesh", null);
root = (BIHNode) ic.readSavable("root", null);
maxTrisPerNode = ic.readInt("tris_per_node", 0);
pointData = ic.readFloatArray("points", null);
triIndices = ic.readIntArray("indices", null);
}
}
| |
/*
Derby - Class org.apache.derby.iapi.services.io.RegisteredFormatIds
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.services.io;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.services.info.JVMInfo;
/**
Registration of TypedFormat classes.
<P>
A TypedFormat is registered by placing a class name at the
correct place in the correct array, driven by the base format number:
<UL>
<LI>2 byte - MIN_TWO_BYTE_FORMAT_ID - TwoByte
</UL>
The offset from the base format number (0 based) gives the offset in the array.
<P>
The class name is either:
<UL>
<LI> The actual class name of the TypeFormat.
<LI> The name of a class that extends org.apache.derby.iapi.services.io.FormatableInstanceGetter.
In this case the monitor will register an instance of the class after calling its
setFormatId() method with format id it is registered as.
</UL>
*/
public interface RegisteredFormatIds {
/* one byte format identifiers never used
String[] OneByte = {
};
*/
/*
 * Class names for format ids in the two-byte range, indexed by
 * (format id - MIN_TWO_BYTE_FORMAT_ID). A null entry means the slot is
 * unused, is a pure marker, or is registered dynamically elsewhere
 * (e.g. DECIMAL at 200 -- see the entry's own comment). Do not reorder:
 * the array position IS the format id, so entries may only ever be
 * appended or nulled out, never shifted.
 */
String[] TwoByte = {
/* 0 */ null, // null marker
/* 1 */ null, // String marker
/* 2 */ null, // Serializable marker
/* 3 */ null,
/* 4 */ null,
/* 5 */ null,
/* 6 */ null,
/* 7 */ null,
/* 8 */ null,
/* 9 */ null,
/* 10 */ null,
/* 11 */ null,
/* 12 */ null,
/* 13 */ null,
/* 14 */ "org.apache.derby.catalog.types.TypeDescriptorImpl",
/* 15 */ "org.apache.derby.impl.store.access.PC_XenaVersion",
/* 16 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 17 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 18 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 19 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 20 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 21 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 22 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 23 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 24 */ null,
/* 25 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 26 */ null,
/* 27 */ null,
/* 28 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 29 */ null,
/* 30 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 31 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 32 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 33 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 34 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 35 */ null,
/* 36 */ null,
/* 37 */ "org.apache.derby.impl.sql.execute.DeleteConstantAction",
/* 38 */ "org.apache.derby.impl.sql.execute.InsertConstantAction",
/* 39 */ "org.apache.derby.impl.sql.execute.UpdateConstantAction",
/* 40 */ null,
/* 41 */ null,
/* 42 */ null,
/* 43 */ null,
/* 44 */ null,
/* 45 */ null,
/* 46 */ null,
/* 47 */ null,
/* 48 */ null,
/* 49 */ null,
/* 50 */ null,
/* 51 */ null,
/* 52 */ null,
/* 53 */ null,
/* 54 */ null,
/* 55 */ null,
/* 56 */ null,
/* 57 */ null,
/* 58 */ null,
/* 59 */ null,
/* 60 */ null,
/* 61 */ null,
/* 62 */ null,
/* 63 */ null,
/* 64 */ null,
/* 65 */ null,
/* 66 */ null,
/* 67 */ null,
/* 68 */ null,
/* 69 */ null,
/* 70 */ null,
/* 71 */ null,
/* 72 */ null,
/* 73 */ null,
/* 74 */ null,
/* 75 */ null,
/* 76 */ null,
/* 77 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 78 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 79 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 80 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 81 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 82 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 83 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 84 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 85 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 86 */ null,
/* 87 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 88 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 89 */ "org.apache.derby.iapi.types.SQLLongint",
/* 90 */ "org.apache.derby.impl.store.access.heap.HeapClassInfo",
/* 91 */ "org.apache.derby.impl.store.access.heap.Heap_v10_2",
/* 92 */ null,
/* 93 */ "org.apache.derby.impl.store.access.StorableFormatId",
/* 94 */ null,
/* 95 */ "org.apache.derby.impl.store.access.btree.index.B2IUndo",
/* 96 */ null,
/* 97 */ "org.apache.derby.impl.store.raw.data.ChainAllocPageOperation",
/* 98 */ null,
/* 99 */ null,
/* 100 */ null,
/* 101 */ "org.apache.derby.impl.store.raw.data.DeleteOperation",
/* 102 */ "org.apache.derby.impl.store.raw.xact.EndXact",
/* 103 */ "org.apache.derby.impl.store.raw.data.InsertOperation",
/* 104 */ "org.apache.derby.impl.store.raw.data.LogicalUndoOperation",
/* 105 */ "org.apache.derby.impl.store.raw.data.PhysicalUndoOperation",
/* 106 */ "org.apache.derby.impl.store.raw.data.PurgeOperation",
/* 107 */ "org.apache.derby.impl.store.raw.data.ContainerUndoOperation",
/* 108 */ "org.apache.derby.impl.store.raw.data.UpdateOperation",
/* 109 */ "org.apache.derby.impl.store.raw.data.UpdateFieldOperation",
/* 110 */ null,
/* 111 */ "org.apache.derby.impl.store.raw.data.AllocPageOperation",
/* 112 */ null,
/* 113 */ "org.apache.derby.impl.store.raw.data.InvalidatePageOperation",
/* 114 */ "org.apache.derby.impl.store.raw.log.SaveLWMOperation",
/* 115 */ null,
/* 116 */ null,
/* 117 */ "org.apache.derby.impl.store.raw.data.StoredPage",
/* 118 */ "org.apache.derby.impl.store.raw.data.AllocPage",
/* 119 */ null,
/* 120 */ null,
/* 121 */ null,
/* 122 */ null,
/* 123 */ null,
/* 124 */ null,
/* 125 */ null,
/* 126 */ null,
/* 127 */ null,
/* 128 */ null,
/* 129 */ "org.apache.derby.impl.store.raw.log.LogRecord",
/* 130 */ "org.apache.derby.impl.store.raw.log.LogCounter",
/* 131 */ "org.apache.derby.impl.services.uuid.BasicUUIDGetter", // InstanceGetter
/* 132 */ null,
/* 133 */ "org.apache.derby.impl.store.access.btree.LeafControlRow",
/* 134 */ "org.apache.derby.impl.store.access.btree.BranchControlRow",
/* 135 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 136 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 137 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 138 */ null,
/* 139 */ null,
/* 140 */ null,
/* 141 */ null,
/* 142 */ null,
/* 143 */ null,
/* 144 */ null,
/* 145 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 146 */ null,
/* 147 */ "org.apache.derby.impl.store.raw.xact.XactId",
/* 148 */ null,
/* 149 */ "org.apache.derby.impl.sql.execute.AvgAggregator",
/* 150 */ null,
/* 151 */ "org.apache.derby.impl.sql.execute.CountAggregator",
/* 152 */ "org.apache.derby.impl.sql.execute.MaxMinAggregator",
/* 153 */ null,
/* 154 */ "org.apache.derby.impl.sql.execute.SumAggregator",
/* 155 */ null,
/* 156 */ null,
/* 157 */ null,
/* 158 */ null,
/* 159 */ null,
/* 160 */ null,
/* 161 */ null,
/* 162 */ null,
/* 163 */ null,
/* 164 */ null,
/* 165 */ null,
/* 166 */ null,
/* 167 */ null,
/* 168 */ null,
/* 169 */ "org.apache.derby.impl.store.raw.xact.BeginXact",
/* 170 */ null,
/* 171 */ null,
/* 172 */ null,
/* 173 */ "org.apache.derby.impl.sql.execute.rts.RunTimeStatisticsImpl",
/* 174 */ null,
/* 175 */ null,
/* 176 */ null,
/* 177 */ "org.apache.derby.impl.sql.execute.rts.RealProjectRestrictStatistics",
/* 178 */ "org.apache.derby.impl.sql.execute.rts.RealSortStatistics",
/* 179 */ "org.apache.derby.impl.sql.execute.rts.RealTableScanStatistics",
/* 180 */ "org.apache.derby.impl.sql.execute.rts.RealNestedLoopJoinStatistics",
/* 181 */ "org.apache.derby.impl.sql.execute.rts.RealIndexRowToBaseRowStatistics",
/* 182 */ "org.apache.derby.impl.sql.execute.rts.RealAnyResultSetStatistics",
/* 183 */ "org.apache.derby.impl.sql.execute.rts.RealOnceResultSetStatistics",
/* 184 */ "org.apache.derby.impl.sql.execute.rts.RealCurrentOfStatistics",
/* 185 */ "org.apache.derby.impl.sql.execute.rts.RealRowResultSetStatistics",
/* 186 */ "org.apache.derby.impl.sql.execute.rts.RealUnionResultSetStatistics",
/* 187 */ "org.apache.derby.impl.sql.execute.rts.RealNestedLoopLeftOuterJoinStatistics",
/* 188 */ "org.apache.derby.impl.sql.execute.rts.RealNormalizeResultSetStatistics",
/* 189 */ "org.apache.derby.impl.sql.execute.rts.RealInsertResultSetStatistics",
/* 190 */ "org.apache.derby.impl.sql.execute.rts.RealUpdateResultSetStatistics",
/* 191 */ "org.apache.derby.impl.sql.execute.rts.RealDeleteResultSetStatistics",
/* 192 */ null,
/* 193 */ null,
/* 194 */ null,
/* 195 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 196 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 197 */ null,
/* 198 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 199 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter,
/* 200 */ null, // DECIMAL - register dynamically by DataValueFactory implementation
/* 201 */ null,
/* 202 */ "org.apache.derby.iapi.types.UserType",
/* 203 */ "org.apache.derby.impl.sql.execute.rts.RealHashScanStatistics",
/* 204 */ null,
/* 205 */ "org.apache.derby.catalog.types.ReferencedColumnsDescriptorImpl",
/* 206 */ null,
/* 207 */ null,
/* 208 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 209 */ null,
/* 210 */ "org.apache.derby.impl.store.raw.data.CopyRowsOperation",
/* 211 */ null,
/* 212 */ null,
/* 213 */ "org.apache.derby.impl.sql.execute.ReplaceJarConstantAction",
/* 214 */ "org.apache.derby.impl.sql.execute.rts.RealVTIStatistics",
/* 215 */ null,
/* 216 */ null,
/* 217 */ null,
/* 218 */ "org.apache.derby.impl.sql.execute.IndexColumnOrder",
/* 219 */ "org.apache.derby.iapi.util.ByteArray",
/* 220 */ null,
/* 221 */ null,
/* 222 */ null,
/* 223 */ "org.apache.derby.impl.sql.execute.AggregatorInfo",
/* 224 */ "org.apache.derby.impl.sql.execute.AggregatorInfoList",
/* 225 */ "org.apache.derby.impl.sql.GenericStorablePreparedStatement",
/* 226 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 227 */ null,
/* 228 */ "org.apache.derby.impl.sql.GenericResultDescription",
/* 229 */ null,
/* 230 */ null,
/* 231 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 232 */ null,
/* 233 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 234 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter,
/* 235 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter,
/* 236 */ null,
/* 237 */ null,
/* 238 */ null,
/* 239 */ null,
/* 240 */ "org.apache.derby.iapi.types.DataTypeDescriptor",
/* 241 */ "org.apache.derby.impl.store.raw.data.InitPageOperation",
/* 242 */ "org.apache.derby.impl.store.raw.data.ContainerOperation",
/* 243 */ null,
/* 244 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 245 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 246 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 247 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 248 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 249 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 250 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 251 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 252 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 253 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 254 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 255 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 256 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 257 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 258 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 259 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter", // old catalog type format
/* 260 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 261 */ "org.apache.derby.impl.store.raw.xact.TransactionTableEntry",
/* 262 */ "org.apache.derby.impl.store.raw.xact.TransactionTable",
/* 263 */ "org.apache.derby.impl.store.raw.log.CheckpointOperation",
/* 264 */ "org.apache.derby.catalog.types.UserDefinedTypeIdImpl",
/* 265 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 266 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 267 */ null,
/* 268 */ "org.apache.derby.iapi.sql.dictionary.IndexRowGenerator",
/* 269 */ "org.apache.derby.iapi.services.io.FormatableBitSet",
/* 270 */ "org.apache.derby.iapi.services.io.FormatableArrayHolder",
/* 271 */ "org.apache.derby.iapi.services.io.FormatableProperties",
/* 272 */ null,
/* 273 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 274 */ null,
/* 275 */ null,
/* 276 */ null,
/* 277 */ null,
/* 278 */ "org.apache.derby.impl.sql.execute.ConstraintInfo",
/* 279 */ null,
/* 280 */ null,
/* 281 */ null,
/* 282 */ "org.apache.derby.impl.sql.execute.FKInfo",
/* 283 */ "org.apache.derby.impl.sql.execute.rts.RealScalarAggregateStatistics",
/* 284 */ "org.apache.derby.impl.sql.execute.rts.RealDistinctScalarAggregateStatistics",
/* 285 */ "org.apache.derby.impl.sql.execute.rts.RealGroupedAggregateStatistics",
/* 286 */ null,
/* 287 */ "org.apache.derby.impl.store.raw.data.SetReservedSpaceOperation",
/* 288 */ null,
/* 289 */ null,
/* 290 */ null,
/* 291 */ "org.apache.derby.impl.store.raw.data.RemoveFileOperation",
/* 292 */ null,
/* 293 */ null,
/* 294 */ null,
/* 295 */ null,
/* 296 */ "org.apache.derby.impl.sql.CursorTableReference",
/* 297 */ "org.apache.derby.impl.sql.CursorInfo",
/* 298 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 299 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 300 */ null,
/* 301 */ null,
/* 302 */ null,
/* 303 */ "org.apache.derby.iapi.services.io.FormatableIntHolder",
/* 304 */ "org.apache.derby.impl.sql.execute.rts.RealHashJoinStatistics",
/* 305 */ "org.apache.derby.impl.sql.execute.rts.RealHashLeftOuterJoinStatistics",
/* 306 */ "org.apache.derby.impl.sql.execute.rts.RealHashTableStatistics",
/* 307 */ "org.apache.derby.iapi.types.JSQLType",
/* 308 */ "org.apache.derby.impl.sql.execute.rts.RealMaterializedResultSetStatistics",
/* 309 */ null,
/* 310 */ null,
/* 311 */ null,
/* 312 */ "org.apache.derby.catalog.types.MethodAliasInfo",
/* 313 */ "org.apache.derby.iapi.services.io.FormatableHashtable",
/* 314 */ null,
/* 315 */ null,
/* 316 */ "org.apache.derby.iapi.sql.dictionary.TriggerDescriptor",
/* 317 */ "org.apache.derby.impl.sql.execute.TriggerInfo",
/* 318 */ null,
/* 319 */ null,
/* 320 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 321 */ null,
/* 322 */ null,
/* 323 */ null,
/* 324 */ null,
/* 325 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 326 */ "org.apache.derby.catalog.types.DefaultInfoImpl",
/* 327 */ "org.apache.derby.impl.sql.execute.rts.RealLastIndexKeyScanStatistics",
/* 328 */ "org.apache.derby.impl.store.raw.xact.GlobalXactId",
/* 329 */ "org.apache.derby.iapi.services.io.FormatableLongHolder",
/* 330 */ "org.apache.derby.impl.sql.execute.rts.RealScrollInsensitiveResultSetStatistics",
/* 331 */ null,
/* 332 */ null,
/* 333 */ null,
/* 334 */ "org.apache.derby.impl.sql.execute.rts.RealDistinctScanStatistics",
/* 335 */ null,
/* 336 */ null,
/* 337 */ null,
/* 338 */ null,
/* 339 */ null,
/* 340 */ null,
/* 341 */ null,
/* 342 */ null,
/* 343 */ null,
/* 344 */ null,
/* 345 */ null,
/* 346 */ null,
/* 347 */ null,
/* 348 */ null,
/* 349 */ null,
/* 350 */ null,
/* 351 */ null,
/* 352 */ null,
/* 353 */ null,
/* 354 */ null,
/* 355 */ null,
/* 356 */ null,
/* 357 */ null,
/* 358 */ "org.apache.derby.impl.sql.execute.ColumnInfo",
/* 359 */ "org.apache.derby.impl.sql.depend.DepClassInfo",
/* 360 */ "org.apache.derby.impl.store.access.btree.index.B2IStaticCompiledInfo",
/* 361 */ null, // SQLData marker
/* 362 */ null,
/* 363 */ null,
/* 364 */ null,
/* 365 */ null,
/* 366 */ null,
/* 367 */ null,
/* 368 */ null,
/* 369 */ null,
/* 370 */ null,
/* 371 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 372 */ null,
/* 373 */ null,
/* 374 */ null,
/* 375 */ "org.apache.derby.impl.sql.execute.UpdatableVTIConstantAction",
/* 376 */ null,
/* 377 */ null,
/* 378 */ null,
/* 379 */ "org.apache.derby.impl.sql.execute.rts.RealInsertVTIResultSetStatistics",
/* 380 */ "org.apache.derby.impl.sql.execute.rts.RealDeleteVTIResultSetStatistics",
/* 381 */ null, // Unused,
/* 382 */ null, // Unused
/* 383 */ "org.apache.derby.impl.sql.GenericColumnDescriptor",
/* 384 */ null, // Unused,
/* 385 */ null,
/* 386 */ null,
/* 387 */ "org.apache.derby.catalog.types.IndexDescriptorImpl",
/* 388 */ "org.apache.derby.impl.store.access.btree.index.B2I_v10_2",
/* 389 */ null,
/* 390 */ null,
/* 391 */ null,
/* 392 */ null,
/* 393 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo", // InstanceGetter
/* 394 */ null,
/* 395 */ null,
/* 396 */ null, // Unused
/* 397 */ "org.apache.derby.catalog.types.StatisticsImpl",
/* 398 */ null,
/* 399 */ null,
/* 400 */ null,
/* 401 */ "org.apache.derby.impl.sql.catalog.DD_Version",
/* 402 */ "org.apache.derby.impl.sql.catalog.DD_Version",
// From here the slot comments count relative to 403 ("403 + n", i.e.
// absolute ids 403..437); absolute numbering resumes at 438 below.
/* 403 + 0 */ null,
/* 1 */ null,
/* 2 */ null,
/* 3 */ null,
/* 4 */ null,
/* 5 */ null,
/* 6 */ null,
/* 7 */ null,
/* 8 */ null,
/* 9 */ null,
/* 10 */ null,
/* 11 */ null,
/* 12 */ null,
/* 13 */ null,
/* 14 */ null,
/* 15 */ null,
/* 16 */ null,
/* 17 */ null,
/* 18 */ null,
/* 19 */ null,
/* 20 */ null,
/* 21 */ null,
/* 22 */ null,
/* 23 */ null,
/* 24 */ null,
/* 25 */ null,
/* 26 */ null,
/* 27 */ null,
/* 28 */ null,
/* 29 */ null,
/* 30 */ null,
/* 31 */ null,
/* 32 */ null,
/* 33 */ null,
/* 403 + 34 */ null,
/* 438 */ null,
/* 439 */ "org.apache.derby.impl.sql.execute.rts.RealDeleteCascadeResultSetStatistics",
/// --- BLOB is copying LONGVARBIT in implementation
/* 440 */ null,
/* 441 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter, BLOB_COMPILATION_TYPE_ID
/* 442 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter", // BLOB_TYPE_ID_IMPL
/* 443 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter, SQL_BLOB_ID
/// --- CLOB is copying LONGVARCHAR in implementation
/* 444 */ null,
/* 445 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter
/* 446 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter",
/* 447 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter,
/// --- NLOB is copying NATIONAL LONGVARCHAR in implementation
/* 448 */ null,
/* 449 */ null,
/* 450 */ null,
/* 451 */ "org.apache.derby.catalog.types.RoutineAliasInfo",
/* 452 */ null,
/* 453 */ "org.apache.derby.impl.store.raw.log.ChecksumOperation",
/* 454 */ "org.apache.derby.impl.store.raw.data.CompressSpacePageOperation10_2",
/* 455 */ "org.apache.derby.catalog.types.SynonymAliasInfo",
/* 456 */ null,
/* 457 */ "org.apache.derby.catalog.types.TypesImplInstanceGetter", // XML_TYPE_ID_IMPL
/* 458 */ "org.apache.derby.iapi.types.DTSClassInfo", //InstanceGetter, XML_ID
/* 459 */ "org.apache.derby.impl.store.raw.data.EncryptContainerOperation",
/* 460 */ "org.apache.derby.impl.store.raw.data.EncryptContainerUndoOperation",
/* 461 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
/* 462 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
/* 463 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
/* 464 */ null,
/* 465 */ "org.apache.derby.impl.store.raw.data.CompressSpacePageOperation",
/* 466 */ "org.apache.derby.impl.store.access.btree.index.B2I_10_3",
/* 467 */ "org.apache.derby.impl.store.access.heap.Heap",
/* 468 */ "org.apache.derby.iapi.types.DTSClassInfo",
/* 469 */ "org.apache.derby.catalog.types.RowMultiSetImpl",
/* 470 */ "org.apache.derby.impl.store.access.btree.index.B2I",
/* 471 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
/* 472 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
/* 473 */ "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
/* 474 */ "org.apache.derby.catalog.types.UDTAliasInfo",
};
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest.handler.taskmanager;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.BlobServerOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.blob.BlobServer;
import org.apache.flink.runtime.blob.TransientBlobKey;
import org.apache.flink.runtime.blob.TransientBlobService;
import org.apache.flink.runtime.blob.VoidBlobStore;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.resourcemanager.ResourceManagerGateway;
import org.apache.flink.runtime.resourcemanager.utils.TestingResourceManagerGateway;
import org.apache.flink.runtime.rest.HttpMethodWrapper;
import org.apache.flink.runtime.rest.handler.HandlerRequest;
import org.apache.flink.runtime.rest.handler.HandlerRequestException;
import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
import org.apache.flink.runtime.rest.messages.UntypedResponseMessageHeaders;
import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagerIdPathParameter;
import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagerMessageParameters;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.webmonitor.RestfulGateway;
import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever;
import org.apache.flink.util.FileUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.netty4.io.netty.buffer.ByteBufAllocator;
import org.apache.flink.shaded.netty4.io.netty.channel.Channel;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFuture;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandler;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandlerContext;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelPipeline;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelProgressivePromise;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelPromise;
import org.apache.flink.shaded.netty4.io.netty.channel.DefaultChannelPromise;
import org.apache.flink.shaded.netty4.io.netty.channel.DefaultFileRegion;
import org.apache.flink.shaded.netty4.io.netty.channel.embedded.EmbeddedChannel;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.DefaultFullHttpRequest;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpMethod;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpVersion;
import org.apache.flink.shaded.netty4.io.netty.util.Attribute;
import org.apache.flink.shaded.netty4.io.netty.util.AttributeKey;
import org.apache.flink.shaded.netty4.io.netty.util.concurrent.EventExecutor;
import org.apache.flink.shaded.netty4.io.netty.util.concurrent.ImmediateEventExecutor;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import javax.annotation.Nonnull;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.Map;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for the {@link AbstractTaskManagerFileHandler}.
*/
public class AbstractTaskManagerFileHandlerTest extends TestLogger {
// Task manager id every request in this test targets.
private static final ResourceID EXPECTED_TASK_MANAGER_ID = ResourceID.generate();
// Shared GET request pointing at the test handler's URL; reused by all tests.
private static final DefaultFullHttpRequest HTTP_REQUEST = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, TestUntypedMessageHeaders.URL);
@ClassRule
public static TemporaryFolder temporaryFolder = new TemporaryFolder();
// Started once per class in setup(), closed in teardown().
private static BlobServer blobServer;
// Prepared once in setup(); carries the expected task manager id as a path parameter.
private static HandlerRequest<EmptyRequestBody, TaskManagerMessageParameters> handlerRequest;
// Two distinct file payloads (fresh random content per test) and their BLOB keys,
// used to distinguish a cached response (content 1) from a re-fetched one (content 2).
private String fileContent1;
private TransientBlobKey transientBlobKey1;
private String fileContent2;
private TransientBlobKey transientBlobKey2;
/**
 * Starts a BLOB server backed by a fresh temp directory and prepares the shared
 * handler request that targets {@link #EXPECTED_TASK_MANAGER_ID}.
 */
@BeforeClass
public static void setup() throws IOException, HandlerRequestException {
    final Configuration config = new Configuration();
    config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());
    blobServer = new BlobServer(config, new VoidBlobStore());

    final Map<String, String> pathParameters =
        Collections.singletonMap(TaskManagerIdPathParameter.KEY, EXPECTED_TASK_MANAGER_ID.getResourceIdString());
    handlerRequest = new HandlerRequest<>(
        EmptyRequestBody.getInstance(),
        new TaskManagerMessageParameters(),
        pathParameters,
        Collections.emptyMap());
}
/** Uploads two files with fresh random content to the BLOB server before each test. */
@Before
public void setupTest() throws IOException {
    fileContent1 = UUID.randomUUID().toString();
    transientBlobKey1 = storeFileInBlobServer(createFileWithContent(fileContent1));

    fileContent2 = UUID.randomUUID().toString();
    transientBlobKey2 = storeFileInBlobServer(createFileWithContent(fileContent2));
}
/** Shuts down the BLOB server once all tests have run. */
@AfterClass
public static void teardown() throws IOException {
    if (blobServer == null) {
        return;
    }
    blobServer.close();
    blobServer = null;
}
/**
 * Verifies that {@link AbstractTaskManagerFileHandler} streams the requested file
 * back to the client.
 */
@Test
public void testFileServing() throws Exception {
    final Queue<CompletableFuture<TransientBlobKey>> uploads = new ArrayDeque<>(1);
    uploads.add(CompletableFuture.completedFuture(transientBlobKey1));

    final TestTaskManagerFileHandler handler =
        createTestTaskManagerFileHandler(Time.milliseconds(1000L), uploads, EXPECTED_TASK_MANAGER_ID);

    final File target = temporaryFolder.newFile();
    handler.respondToRequest(
        new TestingChannelHandlerContext(target),
        HTTP_REQUEST,
        handlerRequest,
        null);

    assertThat(target.length(), is(greaterThan(0L)));
    assertThat(FileUtils.readFileUtf8(target), is(equalTo(fileContent1)));
}
/**
 * Verifies that a second, immediate request is answered from the file cache: with a
 * long cache life and no delay, the response must still contain the first file.
 */
@Test
public void testFileCaching() throws Exception {
    final File output = runFileCachingTest(Time.milliseconds(5000L), Time.milliseconds(0L));

    assertThat(output.length(), is(greaterThan(0L)));
    assertThat(FileUtils.readFileUtf8(output), is(equalTo(fileContent1)));
}
/**
 * Verifies that cache entries expire: waiting at least the cache-entry lifetime between
 * the two requests forces a re-fetch, so the response must contain the second file.
 */
@Test
public void testFileCacheExpiration() throws Exception {
    final Time cacheEntryDuration = Time.milliseconds(5L);
    final File output = runFileCachingTest(cacheEntryDuration, cacheEntryDuration);

    assertThat(output.length(), is(greaterThan(0L)));
    assertThat(FileUtils.readFileUtf8(output), is(equalTo(fileContent2)));
}
/**
 * Issues two identical file requests separated by {@code delayBetweenRequests} against a
 * handler whose cache entries live for {@code cacheEntryDuration}, and returns the file
 * holding the response of the second request. The handler is primed with two distinct
 * blob keys, so the second response reveals whether the cache was hit (file 1) or a new
 * upload was triggered (file 2).
 */
private File runFileCachingTest(
        Time cacheEntryDuration,
        Time delayBetweenRequests) throws Exception {
    final Queue<CompletableFuture<TransientBlobKey>> uploads = new ArrayDeque<>(2);
    uploads.add(CompletableFuture.completedFuture(transientBlobKey1));
    uploads.add(CompletableFuture.completedFuture(transientBlobKey2));

    final TestTaskManagerFileHandler handler =
        createTestTaskManagerFileHandler(cacheEntryDuration, uploads, EXPECTED_TASK_MANAGER_ID);

    final File outputFile = temporaryFolder.newFile();
    final TestingChannelHandlerContext context = new TestingChannelHandlerContext(outputFile);

    handler.respondToRequest(context, HTTP_REQUEST, handlerRequest, null);

    Thread.sleep(delayBetweenRequests.toMilliseconds());

    // While the cache entry is alive this second request must not trigger a new upload.
    handler.respondToRequest(context, HTTP_REQUEST, handlerRequest, null);

    return outputFile;
}
/**
 * Builds the handler under test, wiring it to a stub resource manager gateway and the
 * shared BLOB server.
 */
private AbstractTaskManagerFileHandlerTest.TestTaskManagerFileHandler createTestTaskManagerFileHandler(
        Time cacheEntryDuration,
        Queue<CompletableFuture<TransientBlobKey>> requestFileUploads,
        ResourceID expectedTaskManagerId) {
    final ResourceManagerGateway stubResourceManager = new TestingResourceManagerGateway();
    return new TestTaskManagerFileHandler(
        () -> CompletableFuture.completedFuture(null),
        TestingUtils.infiniteTime(),
        Collections.emptyMap(),
        new TestUntypedMessageHeaders(),
        () -> CompletableFuture.completedFuture(stubResourceManager),
        blobServer,
        cacheEntryDuration,
        requestFileUploads,
        expectedTaskManagerId);
}
/**
 * Creates a new temporary file containing {@code fileContent} encoded as UTF-8.
 *
 * @param fileContent content to write into the file
 * @return the newly created file
 * @throws IOException if the file cannot be created or written
 */
private static File createFileWithContent(String fileContent) throws IOException {
    final File file = temporaryFolder.newFile();

    // Use the charset constant instead of the charset-name string "UTF-8": no runtime
    // charset lookup and no UnsupportedEncodingException code path.
    try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
        fileOutputStream.write(fileContent.getBytes(StandardCharsets.UTF_8));
    }

    return file;
}
/**
 * Uploads the given file to the BLOB server's transient store and returns the key
 * under which it can be retrieved.
 */
private static TransientBlobKey storeFileInBlobServer(File fileToStore) throws IOException {
    try (FileInputStream contents = new FileInputStream(fileToStore)) {
        return blobServer.getTransientBlobService().putTransient(contents);
    }
}
/**
* Class under test.
*/
private static final class TestTaskManagerFileHandler extends AbstractTaskManagerFileHandler<TaskManagerMessageParameters> {
private final Queue<CompletableFuture<TransientBlobKey>> requestFileUploads;
private final ResourceID expectedTaskManagerId;
protected TestTaskManagerFileHandler(@Nonnull GatewayRetriever<? extends RestfulGateway> leaderRetriever, @Nonnull Time timeout, @Nonnull Map<String, String> responseHeaders, @Nonnull UntypedResponseMessageHeaders<EmptyRequestBody, TaskManagerMessageParameters> untypedResponseMessageHeaders, @Nonnull GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever, @Nonnull TransientBlobService transientBlobService, @Nonnull Time cacheEntryDuration, Queue<CompletableFuture<TransientBlobKey>> requestFileUploads, ResourceID expectedTaskManagerId) {
super(leaderRetriever, timeout, responseHeaders, untypedResponseMessageHeaders, resourceManagerGatewayRetriever, transientBlobService, cacheEntryDuration);
this.requestFileUploads = Preconditions.checkNotNull(requestFileUploads);
this.expectedTaskManagerId = Preconditions.checkNotNull(expectedTaskManagerId);
}
@Override
protected CompletableFuture<TransientBlobKey> requestFileUpload(ResourceManagerGateway resourceManagerGateway, ResourceID taskManagerResourceId) {
assertThat(taskManagerResourceId, is(equalTo(expectedTaskManagerId)));
final CompletableFuture<TransientBlobKey> transientBlobKeyFuture = requestFileUploads.poll();
if (transientBlobKeyFuture != null) {
return transientBlobKeyFuture;
} else {
return FutureUtils.completedExceptionally(new FlinkException("Could not upload file."));
}
}
}
/**
* Testing implementation of {@link ChannelHandlerContext}.
*/
private static final class TestingChannelHandlerContext implements ChannelHandlerContext {
// File into which served DefaultFileRegion payloads are copied by write(...).
final File outputFile;
private TestingChannelHandlerContext(File outputFile) {
this.outputFile = Preconditions.checkNotNull(outputFile);
}
/**
 * Captures file-region payloads by copying their content into {@link #outputFile};
 * all other message types are silently ignored. Always reports success via a fresh
 * promise on an embedded channel.
 */
@Override
public ChannelFuture write(Object msg, ChannelPromise promise) {
    if (msg instanceof DefaultFileRegion) {
        final DefaultFileRegion defaultFileRegion = (DefaultFileRegion) msg;
        try (final FileOutputStream fileOutputStream = new FileOutputStream(outputFile)) {
            // The original code called fileOutputStream.getChannel() once and discarded
            // the result before this call -- that dead statement has been removed.
            defaultFileRegion.transferTo(fileOutputStream.getChannel(), 0L);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }
    return new DefaultChannelPromise(new EmbeddedChannel());
}
// Executes listeners immediately on the calling thread, keeping the test single-threaded.
@Override
public EventExecutor executor() {
return ImmediateEventExecutor.INSTANCE;
}
// Delegates to the promise-taking overload; the promise is not used there.
@Override
public ChannelFuture write(Object msg) {
return write(msg, null);
}
// Writes via write(...) and flushes immediately, mirroring Netty's contract.
@Override
public ChannelFuture writeAndFlush(Object msg, ChannelPromise promise) {
final ChannelFuture channelFuture = write(msg, promise);
flush();
return channelFuture;
}
@Override
public ChannelFuture writeAndFlush(Object msg) {
return writeAndFlush(msg, null);
}
// Mockito stub: the handler under test only needs a non-null pipeline.
@Override
public ChannelPipeline pipeline() {
return mock(ChannelPipeline.class);
}
// -----------------------------------------------------
// Automatically generated implementation
// -----------------------------------------------------
@Override
public Channel channel() {
return null;
}
@Override
public String name() {
return null;
}
@Override
public ChannelHandler handler() {
return null;
}
@Override
public boolean isRemoved() {
return false;
}
@Override
public ChannelHandlerContext fireChannelRegistered() {
return null;
}
@Override
public ChannelHandlerContext fireChannelUnregistered() {
return null;
}
@Override
public ChannelHandlerContext fireChannelActive() {
return null;
}
@Override
public ChannelHandlerContext fireChannelInactive() {
return null;
}
@Override
public ChannelHandlerContext fireExceptionCaught(Throwable cause) {
return null;
}
@Override
public ChannelHandlerContext fireUserEventTriggered(Object event) {
return null;
}
@Override
public ChannelHandlerContext fireChannelRead(Object msg) {
return null;
}
@Override
public ChannelHandlerContext fireChannelReadComplete() {
return null;
}
@Override
public ChannelHandlerContext fireChannelWritabilityChanged() {
return null;
}
@Override
public ChannelFuture bind(SocketAddress localAddress) {
return null;
}
@Override
public ChannelFuture connect(SocketAddress remoteAddress) {
return null;
}
@Override
public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress) {
return null;
}
@Override
public ChannelFuture disconnect() {
return null;
}
@Override
public ChannelFuture close() {
return null;
}
@Override
public ChannelFuture deregister() {
return null;
}
@Override
public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) {
return null;
}
@Override
public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) {
return null;
}
@Override
public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) {
return null;
}
@Override
public ChannelFuture disconnect(ChannelPromise promise) {
return null;
}
@Override
public ChannelFuture close(ChannelPromise promise) {
return null;
}
@Override
public ChannelFuture deregister(ChannelPromise promise) {
return null;
}
@Override
public ChannelHandlerContext read() {
return null;
}
@Override
public ChannelHandlerContext flush() {
return null;
}
@Override
public ByteBufAllocator alloc() {
return null;
}
@Override
public ChannelPromise newPromise() {
return null;
}
@Override
public ChannelProgressivePromise newProgressivePromise() {
return null;
}
@Override
public ChannelFuture newSucceededFuture() {
return null;
}
@Override
public ChannelFuture newFailedFuture(Throwable cause) {
return null;
}
@Override
public ChannelPromise voidPromise() {
return null;
}
@Override
public <T> Attribute<T> attr(AttributeKey<T> key) {
return null;
}
@Override
public <T> boolean hasAttr(AttributeKey<T> attributeKey) {
return false;
}
}
/**
* Testing {@link UntypedResponseMessageHeaders}.
*/
/**
 * Testing {@link UntypedResponseMessageHeaders} describing a fixed, body-less
 * GET endpoint.
 */
private static final class TestUntypedMessageHeaders implements UntypedResponseMessageHeaders<EmptyRequestBody, TaskManagerMessageParameters> {

    /** Fixed endpoint path used by the tests. */
    private static final String URL = "/foobar";

    @Override
    public HttpMethodWrapper getHttpMethod() {
        return HttpMethodWrapper.GET;
    }

    @Override
    public String getTargetRestEndpointURL() {
        return URL;
    }

    @Override
    public Class<EmptyRequestBody> getRequestClass() {
        // Requests to this endpoint carry no body.
        return EmptyRequestBody.class;
    }

    @Override
    public TaskManagerMessageParameters getUnresolvedMessageParameters() {
        // A fresh, unresolved parameter set on every call.
        return new TaskManagerMessageParameters();
    }
}
}
| |
/*
* Copyright 2011 Corpuslinguistic working group Humboldt University Berlin.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package annis.gui.controlpanel;
import annis.libgui.Helper;
import annis.gui.HistoryPanel;
import annis.gui.QueryController;
import annis.gui.beans.HistoryEntry;
import annis.gui.components.VirtualKeyboard;
import annis.gui.model.Query;
import annis.libgui.InstanceConfig;
import com.sun.jersey.api.client.AsyncWebResource;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.vaadin.data.Property.ValueChangeEvent;
import com.vaadin.data.Property.ValueChangeListener;
import com.vaadin.event.FieldEvents.TextChangeEvent;
import com.vaadin.event.FieldEvents.TextChangeListener;
import com.vaadin.event.ShortcutAction.KeyCode;
import com.vaadin.event.ShortcutAction.ModifierKey;
import com.vaadin.server.ClassResource;
import com.vaadin.shared.ui.label.ContentMode;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.*;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.themes.ChameleonTheme;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.slf4j.LoggerFactory;
import org.vaadin.hene.popupbutton.PopupButton;
/**
 * Panel containing the AQL query input, a status display, the "Show Result"
 * action and the query history popup.
 *
 * @author thomas
 */
public class QueryPanel extends GridLayout implements TextChangeListener,
  ValueChangeListener
{

  private static final org.slf4j.Logger log = LoggerFactory.getLogger(QueryPanel.class);

  /** Maximum number of entries shown in the short history popup list. */
  public static final int MAX_HISTORY_MENU_ITEMS = 5;

  // the view name
  public static final String NAME = "query";

  private TextArea txtQuery;
  private Label lblStatus;
  private Button btShowResult;
  private PopupButton btHistory;
  private ListSelect lstHistory;
  private QueryController controller;
  private ProgressIndicator piCount;
  // Last status set via setStatus(); restored after a successful validation.
  private String lastPublicStatus;
  private List<HistoryEntry> history;
  private Window historyWindow;

  public QueryPanel(final QueryController controller, InstanceConfig instanceConfig)
  {
    super(2,3);

    this.controller = controller;
    this.lastPublicStatus = "Ok";
    this.history = new LinkedList<HistoryEntry>();

    setSpacing(true);
    setMargin(true);

    addComponent(new Label("AnnisQL:"), 0, 0);
    addComponent(new Label("Status:"), 0, 2);

    setRowExpandRatio(0, 1.0f);
    setColumnExpandRatio(0, 0.2f);
    setColumnExpandRatio(1, 0.8f);

    txtQuery = new TextArea();
    txtQuery.addStyleName("query");
    txtQuery.addStyleName("corpus-font-force");
    txtQuery.addStyleName("keyboardInput");
    txtQuery.setWidth("100%");
    txtQuery.setHeight(10f, Unit.EM);
    // Validate the query at most once a second while the user is typing.
    txtQuery.setTextChangeTimeout(1000);
    txtQuery.addTextChangeListener(this);
    addComponent(txtQuery, 1, 0);

    // Only instantiate the virtual keyboard if the instance defines a layout.
    final VirtualKeyboard virtualKeyboard;
    if(instanceConfig.getKeyboardLayout() == null)
    {
      virtualKeyboard = null;
    }
    else
    {
      virtualKeyboard = new VirtualKeyboard();
      virtualKeyboard.setKeyboardLayout(instanceConfig.getKeyboardLayout());
      virtualKeyboard.extend(txtQuery);
    }

    VerticalLayout panelStatusLayout = new VerticalLayout();
    panelStatusLayout.setHeight("-1px");
    panelStatusLayout.setWidth(100f, Unit.PERCENTAGE);

    lblStatus = new Label();
    lblStatus.setContentMode(ContentMode.HTML);
    lblStatus.setValue(this.lastPublicStatus);
    lblStatus.setWidth("100%");
    lblStatus.setHeight(3.5f, Unit.EM);
    lblStatus.addStyleName("border-layout");
    panelStatusLayout.addComponent(lblStatus);

    addComponent(panelStatusLayout, 1, 2);

    HorizontalLayout buttonLayout = new HorizontalLayout();
    buttonLayout.setWidth("100%");
    addComponent(buttonLayout, 1, 1);

    // Indeterminate spinner shown while a result count is running.
    piCount = new ProgressIndicator();
    piCount.setIndeterminate(true);
    piCount.setEnabled(false);
    piCount.setVisible(false);
    piCount.setPollingInterval(60000);
    panelStatusLayout.addComponent(piCount);

    btShowResult = new Button("Show Result");
    btShowResult.setWidth("100%");
    btShowResult.addClickListener(new ShowResultClickListener());
    btShowResult.setDescription("<strong>Show Result</strong><br />Ctrl + Enter");
    btShowResult.setClickShortcut(KeyCode.ENTER, ModifierKey.CTRL);
    btShowResult.setDisableOnClick(true);
    buttonLayout.addComponent(btShowResult);

    VerticalLayout historyListLayout = new VerticalLayout();
    historyListLayout.setSizeUndefined();

    lstHistory = new ListSelect();
    lstHistory.setWidth("200px");
    lstHistory.setNullSelectionAllowed(false);
    lstHistory.setValue(null);
    lstHistory.addValueChangeListener(this);
    lstHistory.setImmediate(true);

    Button btShowMoreHistory = new Button("Show more details", new Button.ClickListener()
    {
      @Override
      public void buttonClick(ClickEvent event)
      {
        // Lazily create the extended history window and re-use it afterwards.
        if(historyWindow == null)
        {
          historyWindow = new Window("History");
          historyWindow.setModal(false);
          historyWindow.setWidth("400px");
          historyWindow.setHeight("250px");
        }
        historyWindow.setContent(new HistoryPanel(history, controller));

        if(UI.getCurrent().getWindows().contains(historyWindow))
        {
          historyWindow.bringToFront();
        }
        else
        {
          UI.getCurrent().addWindow(historyWindow);
        }
      }
    });
    btShowMoreHistory.setWidth("100%");

    historyListLayout.addComponent(lstHistory);
    historyListLayout.addComponent(btShowMoreHistory);
    historyListLayout.setExpandRatio(lstHistory, 1.0f);
    historyListLayout.setExpandRatio(btShowMoreHistory, 0.0f);

    btHistory = new PopupButton("History");
    btHistory.setContent(historyListLayout);
    btHistory.setDescription("<strong>Show History</strong><br />"
      + "Either use the short overview (arrow down) or click on the button "
      + "for the extended view.");
    buttonLayout.addComponent(btHistory);

    if(virtualKeyboard != null)
    {
      Button btShowKeyboard = new Button();
      btShowKeyboard.setDescription("Click to show a virtual keyboard");
      btShowKeyboard.addStyleName(ChameleonTheme.BUTTON_ICON_ONLY);
      btShowKeyboard.setIcon(new ClassResource(VirtualKeyboard.class, "keyboard.png"));
      btShowKeyboard.addClickListener(new ShowKeyboardClickListener(virtualKeyboard));
      buttonLayout.addComponent(btShowKeyboard);
    }

    buttonLayout.setExpandRatio(btShowResult, 1.0f);
  }

  /**
   * Replaces the short history popup content with (at most
   * {@link #MAX_HISTORY_MENU_ITEMS}) entries from the given history.
   *
   * @param history the complete history; also kept for the extended view
   */
  public void updateShortHistory(List<HistoryEntry> history)
  {
    this.history = history;

    lstHistory.removeAllItems();

    int counter = 0;
    for(HistoryEntry e : history)
    {
      if(counter >= MAX_HISTORY_MENU_ITEMS)
      {
        break;
      }
      else
      {
        lstHistory.addItem(e);
      }
      counter++;
    }
  }

  /**
   * Sets the query text and triggers a server-side validation of it.
   */
  public void setQuery(String query)
  {
    if(txtQuery != null)
    {
      txtQuery.setValue(query);
    }
    validateQuery(query);
  }

  /**
   * @return the current query text, or the empty string if the text area does not exist
   */
  public String getQuery()
  {
    if(txtQuery != null)
    {
      return (String) txtQuery.getValue();
    }
    return "";
  }

  @Override
  public void textChange(TextChangeEvent event)
  {
    validateQuery(event.getText());
  }

  /**
   * Asks the ANNIS service to syntax-check the query and updates the status
   * label with the outcome. Waits at most one second for the answer.
   */
  private void validateQuery(String query)
  {
    // validate query
    try
    {
      AsyncWebResource annisResource = Helper.getAnnisAsyncWebResource();
      Future<String> future = annisResource.path("query").path("check").queryParam("q", query)
        .get(String.class);

      // wait for maximal one second
      try
      {
        String result = future.get(1, TimeUnit.SECONDS);

        if ("ok".equalsIgnoreCase(result))
        {
          lblStatus.setValue(lastPublicStatus);
        }
        else
        {
          lblStatus.setValue(result);
        }
      }
      catch (InterruptedException ex)
      {
        log.warn(null, ex);
        // restore the interrupt flag so callers can notice the interruption
        Thread.currentThread().interrupt();
      }
      catch (ExecutionException ex)
      {
        // ok, there was some serious error
        log.error(null, ex);
      }
      catch (TimeoutException ex)
      {
        lblStatus.setValue("Validation of query took too long.");
      }
    }
    catch(UniformInterfaceException ex)
    {
      // HTTP 400 carries the parse error message as its entity.
      if(ex.getResponse().getStatus() == 400)
      {
        lblStatus.setValue(ex.getResponse().getEntity(String.class));
      }
      else
      {
        log.error(
          "Exception when communicating with service", ex);
        Notification.show("Exception when communicating with service: " + ex.getMessage(),
          Notification.Type.TRAY_NOTIFICATION);
      }
    }
    catch(ClientHandlerException ex)
    {
      log.error(
        "Could not connect to web service", ex);
      Notification.show("Could not connect to web service: " + ex.getMessage(),
        Notification.Type.TRAY_NOTIFICATION);
    }
  }

  @Override
  public void valueChange(ValueChangeEvent event)
  {
    // Selecting a short-history entry loads it into the controller.
    btHistory.setPopupVisible(false);
    HistoryEntry e = (HistoryEntry) event.getProperty().getValue();
    if(controller != null && e != null)
    {
      controller.setQuery(new Query(e.getQuery(), e.getCorpora()));
    }
  }

  /** Forwards the current query text to the controller and executes it. */
  public class ShowResultClickListener implements Button.ClickListener
  {
    @Override
    public void buttonClick(ClickEvent event)
    {
      if(controller != null)
      {
        controller.setQuery(txtQuery.getValue());
        controller.executeQuery();
      }
    }
  }

  /**
   * Toggles between the status label and the progress spinner while a count
   * is running, and disables the result button meanwhile.
   */
  public void setCountIndicatorEnabled(boolean enabled)
  {
    if(piCount != null && btShowResult != null && lblStatus != null)
    {
      lblStatus.setVisible(!enabled);
      piCount.setVisible(enabled);
      piCount.setEnabled(enabled);

      btShowResult.setEnabled(!enabled);
    }
  }

  /**
   * Sets the publicly visible status text; it will be restored after query
   * validations succeed.
   */
  public void setStatus(String status)
  {
    if(lblStatus != null)
    {
      lblStatus.setValue(status);
      lastPublicStatus = status;
    }
  }

  /** Shows the virtual keyboard when its toolbar button is clicked. */
  private static class ShowKeyboardClickListener implements ClickListener
  {
    private final VirtualKeyboard virtualKeyboard;

    public ShowKeyboardClickListener(VirtualKeyboard virtualKeyboard)
    {
      this.virtualKeyboard = virtualKeyboard;
    }

    @Override
    public void buttonClick(ClickEvent event)
    {
      virtualKeyboard.show();
    }
  }

  public QueryController getQueryController()
  {
    return this.controller;
  }
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* AudienceSegmentPage.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202202;
/**
 * Represents a page of {@link AudienceSegment} objects.
 *
 * <p>Originally generated by Apache Axis WSDL2Java; hand-edited only to fix the
 * check ordering in {@link #equals(Object)}.
 */
public class AudienceSegmentPage implements java.io.Serializable , Iterable<com.google.api.ads.admanager.axis.v202202.AudienceSegment>{
    /* The size of the total result set to which this page belongs. */
    private java.lang.Integer totalResultSetSize;

    /* The absolute index in the total result set on which this page
     * begins. */
    private java.lang.Integer startIndex;

    /* The collection of audience segments contained within this page. */
    private com.google.api.ads.admanager.axis.v202202.AudienceSegment[] results;

    public AudienceSegmentPage() {
    }

    public AudienceSegmentPage(
           java.lang.Integer totalResultSetSize,
           java.lang.Integer startIndex,
           com.google.api.ads.admanager.axis.v202202.AudienceSegment[] results) {
           this.totalResultSetSize = totalResultSetSize;
           this.startIndex = startIndex;
           this.results = results;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            // Only include length of results to avoid overly verbose output
            .add("results.length", getResults() == null ? 0 : getResults().length)
            .add("startIndex", getStartIndex())
            .add("totalResultSetSize", getTotalResultSetSize())
            .toString();
    }

    /**
     * Gets the totalResultSetSize value for this AudienceSegmentPage.
     *
     * @return totalResultSetSize * The size of the total result set to which this page belongs.
     */
    public java.lang.Integer getTotalResultSetSize() {
        return totalResultSetSize;
    }

    /**
     * Sets the totalResultSetSize value for this AudienceSegmentPage.
     *
     * @param totalResultSetSize * The size of the total result set to which this page belongs.
     */
    public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
        this.totalResultSetSize = totalResultSetSize;
    }

    /**
     * Gets the startIndex value for this AudienceSegmentPage.
     *
     * @return startIndex * The absolute index in the total result set on which this page
     * begins.
     */
    public java.lang.Integer getStartIndex() {
        return startIndex;
    }

    /**
     * Sets the startIndex value for this AudienceSegmentPage.
     *
     * @param startIndex * The absolute index in the total result set on which this page
     * begins.
     */
    public void setStartIndex(java.lang.Integer startIndex) {
        this.startIndex = startIndex;
    }

    /**
     * Gets the results value for this AudienceSegmentPage.
     *
     * @return results * The collection of audience segments contained within this page.
     */
    public com.google.api.ads.admanager.axis.v202202.AudienceSegment[] getResults() {
        return results;
    }

    /**
     * Sets the results value for this AudienceSegmentPage.
     *
     * @param results * The collection of audience segments contained within this page.
     */
    public void setResults(com.google.api.ads.admanager.axis.v202202.AudienceSegment[] results) {
        this.results = results;
    }

    public com.google.api.ads.admanager.axis.v202202.AudienceSegment getResults(int i) {
        return this.results[i];
    }

    public void setResults(int i, com.google.api.ads.admanager.axis.v202202.AudienceSegment _value) {
        this.results[i] = _value;
    }

    /**
     * Returns an iterator over this page's {@code results} that:
     * <ul>
     * <li>Will not be {@code null}.</li>
     * <li>Will not support {@link java.util.Iterator#remove()}.</li>
     * </ul>
     *
     * @return a non-null iterator.
     */
    @Override
    public java.util.Iterator<com.google.api.ads.admanager.axis.v202202.AudienceSegment> iterator() {
        if (results == null) {
            return java.util.Collections.<com.google.api.ads.admanager.axis.v202202.AudienceSegment>emptyIterator();
        }
        return java.util.Arrays.<com.google.api.ads.admanager.axis.v202202.AudienceSegment>asList(results).iterator();
    }

    // Guard against infinite recursion when object graphs contain cycles.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        // Null and identity checks must precede the instanceof test and cast;
        // the generated order made the null check dead code.
        if (obj == null) return false;
        if (this == obj) return true;
        if (!(obj instanceof AudienceSegmentPage)) return false;
        AudienceSegmentPage other = (AudienceSegmentPage) obj;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
             (this.totalResultSetSize!=null &&
              this.totalResultSetSize.equals(other.getTotalResultSetSize()))) &&
            ((this.startIndex==null && other.getStartIndex()==null) ||
             (this.startIndex!=null &&
              this.startIndex.equals(other.getStartIndex()))) &&
            ((this.results==null && other.getResults()==null) ||
             (this.results!=null &&
              java.util.Arrays.equals(this.results, other.getResults())));
        __equalsCalc = null;
        return _equals;
    }

    // Guard against infinite recursion when object graphs contain cycles.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getTotalResultSetSize() != null) {
            _hashCode += getTotalResultSetSize().hashCode();
        }
        if (getStartIndex() != null) {
            _hashCode += getStartIndex().hashCode();
        }
        if (getResults() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getResults());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(AudienceSegmentPage.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "AudienceSegmentPage"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("totalResultSetSize");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "totalResultSetSize"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("startIndex");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "startIndex"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("results");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "results"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "AudienceSegment"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
| |
/**
* Copyright (c) 2012, Ben Fortuna
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* o Neither the name of Ben Fortuna nor the names of any other contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.fortuna.ical4j.model;
import java.io.IOException;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Date;
import java.util.StringTokenizer;
import net.fortuna.ical4j.util.Dates;
import org.apache.commons.lang3.builder.HashCodeBuilder;
/**
* $Id$
*
* Created on 20/06/2005
*
* Represents a duration of time in iCalendar. Note that according to RFC2445 durations represented in weeks are
* mutually exclusive of other duration fields.
*
* <pre>
* 4.3.6 Duration
*
* Value Name: DURATION
*
* Purpose: This value type is used to identify properties that contain
* a duration of time.
*
* Formal Definition: The value type is defined by the following
* notation:
*
* dur-value = (["+"] / "-") "P" (dur-date / dur-time / dur-week)
*
* dur-date = dur-day [dur-time]
* dur-time = "T" (dur-hour / dur-minute / dur-second)
* dur-week = 1*DIGIT "W"
* dur-hour = 1*DIGIT "H" [dur-minute]
* dur-minute = 1*DIGIT "M" [dur-second]
* dur-second = 1*DIGIT "S"
* dur-day = 1*DIGIT "D"
* </pre>
*
* @author Ben Fortuna
*/
public class Dur implements Comparable<Dur>, Serializable {
/** Serialization version identifier (the class is {@link Serializable}). */
private static final long serialVersionUID = 5013232281547134583L;

// Unit-conversion constants used when normalising computed durations.
private static final int DAYS_PER_WEEK = 7;
private static final int SECONDS_PER_MINUTE = 60;
private static final int MINUTES_PER_HOUR = 60;
private static final int HOURS_PER_DAY = 24;
// Coarse step used when counting days across year boundaries; the loop in
// Dur(Date, Date) compensates for leap years.
private static final int DAYS_PER_YEAR = 365;

// Sign flag; the component fields below are stored as non-negative values
// (constructors apply Math.abs).
private boolean negative;
// Per RFC 2445, a week-based duration excludes all other components:
// either weeks > 0 and the rest are zero, or weeks == 0.
private int weeks;
private int days;
private int hours;
private int minutes;
private int seconds;
/**
 * Constructs a new duration instance from a string representation.
 *
 * <p>Parses an iCalendar (RFC 2445) DUR-VALUE such as {@code "P2W"},
 * {@code "P15DT5H0M20S"} or {@code "-PT30M"}. Components missing from the
 * string remain zero; unrecognised tokens are ignored.
 *
 * @param value a string representation of a duration
 * @throws NumberFormatException if a unit designator is not immediately
 *         preceded by a valid integer
 */
public Dur(final String value) {
    negative = false;
    weeks = 0;
    days = 0;
    hours = 0;
    minutes = 0;
    seconds = 0;

    String token = null;
    String prevToken = null;

    // Tokenise on the designator characters themselves (returnDelims=true):
    // the stream alternates between numbers and designators, and each
    // designator consumes the number seen just before it (prevToken).
    final StringTokenizer t = new StringTokenizer(value, "+-PWDTHMS", true);
    while (t.hasMoreTokens()) {
        prevToken = token;
        token = t.nextToken();

        if ("+".equals(token)) {
            negative = false;
        }
        else if ("-".equals(token)) {
            negative = true;
        }
        else if ("P".equals(token)) {
            // does nothing..  (marks the start of the duration)
        }
        else if ("W".equals(token)) {
            weeks = Integer.parseInt(prevToken);
        }
        else if ("D".equals(token)) {
            days = Integer.parseInt(prevToken);
        }
        else if ("T".equals(token)) {
            // does nothing..  (separates the date part from the time part)
        }
        else if ("H".equals(token)) {
            hours = Integer.parseInt(prevToken);
        }
        else if ("M".equals(token)) {
            minutes = Integer.parseInt(prevToken);
        }
        else if ("S".equals(token)) {
            seconds = Integer.parseInt(prevToken);
        }
    }
}
/**
 * Constructs a new week-based duration.
 *
 * @param weeks a duration in weeks; a negative value produces a negative duration
 */
public Dur(final int weeks) {
    this.negative = weeks < 0;
    this.weeks = Math.abs(weeks);
    this.days = 0;
    this.hours = 0;
    this.minutes = 0;
    this.seconds = 0;
}
/**
* Constructs a new duration from the specified arguments.
* @param days duration in days
* @param hours duration in hours
* @param minutes duration in minutes
* @param seconds duration in seconds
*/
public Dur(final int days, final int hours, final int minutes,
final int seconds) {
if (!(days >= 0 && hours >= 0 && minutes >= 0 && seconds >= 0)
&& !(days <= 0 && hours <= 0 && minutes <= 0 && seconds <= 0)) {
throw new IllegalArgumentException("Invalid duration representation");
}
this.weeks = 0;
this.days = Math.abs(days);
this.hours = Math.abs(hours);
this.minutes = Math.abs(minutes);
this.seconds = Math.abs(seconds);
this.negative = days < 0 || hours < 0 || minutes < 0 || seconds < 0;
}
/**
 * Constructs a new duration representing the time between the two specified dates. The end date may precede the
 * start date in order to represent a negative duration.
 * @param date1 the first date of the duration
 * @param date2 the second date of the duration
 */
public Dur(final Date date1, final Date date2) {
    Date start = null;
    Date end = null;

    // Negative range? (start occurs after end)
    negative = date1.compareTo(date2) > 0;
    if (negative) {
        // Swap the dates (which eliminates the need to bother with
        // negative after this!)
        start = date2;
        end = date1;
    }
    else {
        start = date1;
        end = date2;
    }

    // NOTE(review): Dates.getCalendarInstance presumably yields a calendar
    // matching the iCal4j date's own time zone — confirm against Dates.
    final Calendar startCal;
    if (start instanceof net.fortuna.ical4j.model.Date) {
        startCal = Dates.getCalendarInstance((net.fortuna.ical4j.model.Date)start);
    } else {
        startCal = Calendar.getInstance();
    }
    startCal.setTime(start);

    // End calendar uses the same time zone so field differences are consistent.
    final Calendar endCal = Calendar.getInstance(startCal.getTimeZone());
    endCal.setTime(end);

    // Init our duration interval (which is in units that evolve as we
    // compute, below)
    int dur = 0;

    // Count days to get to the right year (loop in the very rare chance
    // that a leap year causes us to come up short)
    int nYears = endCal.get(Calendar.YEAR) - startCal.get(Calendar.YEAR);
    while (nYears > 0) {
        startCal.add(Calendar.DATE, DAYS_PER_YEAR * nYears);
        dur += DAYS_PER_YEAR * nYears;
        nYears = endCal.get(Calendar.YEAR) - startCal.get(Calendar.YEAR);
    }

    // Count days to get to the right day
    dur += endCal.get(Calendar.DAY_OF_YEAR)
            - startCal.get(Calendar.DAY_OF_YEAR);

    // Count hours to get to right hour
    dur *= HOURS_PER_DAY; // days -> hours
    dur += endCal.get(Calendar.HOUR_OF_DAY)
            - startCal.get(Calendar.HOUR_OF_DAY);

    // ... to the right minute
    dur *= MINUTES_PER_HOUR; // hours -> minutes
    dur += endCal.get(Calendar.MINUTE) - startCal.get(Calendar.MINUTE);

    // ... and second
    dur *= SECONDS_PER_MINUTE; // minutes -> seconds
    dur += endCal.get(Calendar.SECOND) - startCal.get(Calendar.SECOND);

    // Now unwind our units
    seconds = dur % SECONDS_PER_MINUTE;
    dur = dur / SECONDS_PER_MINUTE; // seconds -> minutes (drop remainder seconds)
    minutes = dur % MINUTES_PER_HOUR;
    dur /= MINUTES_PER_HOUR; // minutes -> hours (drop remainder minutes)
    hours = dur % HOURS_PER_DAY;
    dur /= HOURS_PER_DAY; // hours -> days (drop remainder hours)
    days = dur;
    weeks = 0;

    // Special case for week-only representation
    if (seconds == 0 && minutes == 0 && hours == 0
            && (days % DAYS_PER_WEEK) == 0) {
        weeks = days / DAYS_PER_WEEK;
        days = 0;
    }
}
/**
 * Returns a date representing the end of this duration from the specified start date.
 *
 * @param start the date to start the duration
 * @return the end of the duration as a date
 */
public final Date getTime(final Date start) {
    final Calendar cal;
    if (start instanceof net.fortuna.ical4j.model.Date) {
        cal = Dates.getCalendarInstance((net.fortuna.ical4j.model.Date) start);
    } else {
        cal = Calendar.getInstance();
    }
    cal.setTime(start);

    // Apply every component with a single sign factor instead of branching.
    final int sign = isNegative() ? -1 : 1;
    cal.add(Calendar.WEEK_OF_YEAR, sign * weeks);
    cal.add(Calendar.DAY_OF_WEEK, sign * days);
    cal.add(Calendar.HOUR_OF_DAY, sign * hours);
    cal.add(Calendar.MINUTE, sign * minutes);
    cal.add(Calendar.SECOND, sign * seconds);
    return cal.getTime();
}
/**
 * Provides a negation of this instance.
 *
 * @return a Dur instance that represents a negation of this instance
 */
public final Dur negate() {
    // Copy the day/time components, then restore weeks and flip the sign.
    final Dur result = new Dur(days, hours, minutes, seconds);
    result.weeks = weeks;
    result.negative = !negative;
    return result;
}
/**
 * Add two durations. Durations may only be added if they are both positive
 * or both negative durations.
 * @param duration the duration to add to this duration
 * @return a new instance representing the sum of the two durations.
 * @throws IllegalArgumentException if the two durations differ in sign
 */
public final Dur add(final Dur duration) {
    if ((!isNegative() && duration.isNegative())
            || (isNegative() && !duration.isNegative())) {
        throw new IllegalArgumentException(
                "Cannot add a negative and a positive duration");
    }

    Dur sum = null;
    if (weeks > 0 && duration.weeks > 0) {
        // Both are week-only durations: the sum stays week-only.
        sum = new Dur(weeks + duration.weeks);
    }
    else {
        // Otherwise work in day/time form, converting any week component to
        // days, and propagate carries upward (seconds -> minutes -> hours -> days).
        int daySum = (weeks > 0) ? weeks * DAYS_PER_WEEK + days : days;
        int hourSum = hours;
        int minuteSum = minutes;
        int secondSum = seconds;

        if ((secondSum + duration.seconds) / SECONDS_PER_MINUTE > 0) {
            // carry whole minutes out of the seconds total
            minuteSum += (secondSum + duration.seconds) / SECONDS_PER_MINUTE;
            secondSum = (secondSum + duration.seconds) % SECONDS_PER_MINUTE;
        }
        else {
            secondSum += duration.seconds;
        }

        if ((minuteSum + duration.minutes) / MINUTES_PER_HOUR > 0) {
            // carry whole hours out of the minutes total
            hourSum += (minuteSum + duration.minutes) / MINUTES_PER_HOUR;
            minuteSum = (minuteSum + duration.minutes) % MINUTES_PER_HOUR;
        }
        else {
            minuteSum += duration.minutes;
        }

        if ((hourSum + duration.hours) / HOURS_PER_DAY > 0) {
            // carry whole days out of the hours total
            daySum += (hourSum + duration.hours) / HOURS_PER_DAY;
            hourSum = (hourSum + duration.hours) % HOURS_PER_DAY;
        }
        else {
            hourSum += duration.hours;
        }

        daySum += (duration.weeks > 0) ? duration.weeks * DAYS_PER_WEEK
                + duration.days : duration.days;

        sum = new Dur(daySum, hourSum, minuteSum, secondSum);
    }
    // Both operands share this sign (checked above).
    sum.negative = negative;
    return sum;
}
/**
 * {@inheritDoc}
 */
public final String toString() {
    final StringBuilder buf = new StringBuilder();
    if (negative) {
        buf.append('-');
    }
    buf.append('P');
    if (weeks > 0) {
        // Week form is exclusive of all other components (RFC 2445).
        buf.append(weeks).append('W');
        return buf.toString();
    }
    if (days > 0) {
        buf.append(days).append('D');
    }
    if (hours > 0 || minutes > 0 || seconds > 0) {
        buf.append('T');
        if (hours > 0) {
            buf.append(hours).append('H');
        }
        if (minutes > 0) {
            buf.append(minutes).append('M');
        }
        if (seconds > 0) {
            buf.append(seconds).append('S');
        }
    }
    // A zero-length duration must still render as a valid DUR-VALUE ("PT0S").
    if ((hours + minutes + seconds + days + weeks) == 0) {
        buf.append("T0S");
    }
    return buf.toString();
}
/**
* Compares this duration with another, acording to their length.
* @param arg0 another duration instance
* @return a postive value if this duration is longer, zero if the duration
* lengths are equal, otherwise a negative value
*/
public final int compareTo(final Dur arg0) {
int result;
if (isNegative() != arg0.isNegative()) {
// return Boolean.valueOf(isNegative()).compareTo(Boolean.valueOf(arg0.isNegative()));
// for pre-java 1.5 compatibility..
if (isNegative()) {
return Integer.MIN_VALUE;
}
else {
return Integer.MAX_VALUE;
}
}
else if (getWeeks() != arg0.getWeeks()) {
result = getWeeks() - arg0.getWeeks();
}
else if (getDays() != arg0.getDays()) {
result = getDays() - arg0.getDays();
}
else if (getHours() != arg0.getHours()) {
result = getHours() - arg0.getHours();
}
else if (getMinutes() != arg0.getMinutes()) {
result = getMinutes() - arg0.getMinutes();
}
else {
result = getSeconds() - arg0.getSeconds();
}
// invert sense of all tests if both durations are negative
if (isNegative()) {
return -result;
}
else {
return result;
}
}
/**
* {@inheritDoc}
*/
public boolean equals(final Object obj) {
if (obj instanceof Dur) {
return ((Dur) obj).compareTo(this) == 0;
}
return super.equals(obj);
}
/**
* {@inheritDoc}
*/
public int hashCode() {
return new HashCodeBuilder().append(weeks).append(days).append(
hours).append(minutes).append(seconds).append(negative).toHashCode();
}
    /**
     * Returns the days component of this duration. The value is a
     * magnitude; the overall sign is reported by {@link #isNegative()}.
     *
     * @return Returns the days.
     */
    public final int getDays() {
        return days;
    }
    /**
     * Returns the hours component of this duration. The value is a
     * magnitude; the overall sign is reported by {@link #isNegative()}.
     *
     * @return Returns the hours.
     */
    public final int getHours() {
        return hours;
    }
    /**
     * Returns the minutes component of this duration. The value is a
     * magnitude; the overall sign is reported by {@link #isNegative()}.
     *
     * @return Returns the minutes.
     */
    public final int getMinutes() {
        return minutes;
    }
    /**
     * Indicates whether this duration is negative. The component fields
     * (weeks, days, hours, minutes, seconds) hold magnitudes only; this
     * single flag carries the sign for the whole duration.
     *
     * @return Returns the negative.
     */
    public final boolean isNegative() {
        return negative;
    }
    /**
     * Returns the seconds component of this duration. The value is a
     * magnitude; the overall sign is reported by {@link #isNegative()}.
     *
     * @return Returns the seconds.
     */
    public final int getSeconds() {
        return seconds;
    }
    /**
     * Returns the weeks component of this duration. A week-based duration
     * carries no other components (see {@link #toString()}); the value is
     * a magnitude and the sign is reported by {@link #isNegative()}.
     *
     * @return Returns the weeks.
     */
    public final int getWeeks() {
        return weeks;
    }
    /**
     * Deserializes this instance using the default serialization mechanism.
     *
     * <p>NOTE(review): this method only invokes {@code defaultReadObject()};
     * it presumably exists to pin the serialized form or as a hook for
     * future validation of the restored fields — confirm before removing.
     *
     * @param stream the object input stream to restore this instance from
     * @throws IOException if an I/O error occurs while reading
     * @throws ClassNotFoundException if a class of a serialized field cannot be resolved
     */
    private void readObject(final java.io.ObjectInputStream stream) throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.state.changelog;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.core.fs.CloseableRegistry;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.query.TaskKvStateRegistry;
import org.apache.flink.runtime.state.AbstractKeyedStateBackend;
import org.apache.flink.runtime.state.CheckpointableKeyedStateBackend;
import org.apache.flink.runtime.state.ConfigurableStateBackend;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.KeyedStateHandle;
import org.apache.flink.runtime.state.OperatorStateBackend;
import org.apache.flink.runtime.state.OperatorStateHandle;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.changelog.ChangelogStateBackendHandle;
import org.apache.flink.runtime.state.changelog.ChangelogStateBackendHandle.ChangelogStateBackendHandleImpl;
import org.apache.flink.runtime.state.changelog.StateChangelogStorage;
import org.apache.flink.runtime.state.delegate.DelegatingStateBackend;
import org.apache.flink.runtime.state.ttl.TtlTimeProvider;
import org.apache.flink.runtime.taskmanager.AsynchronousException;
import org.apache.flink.state.changelog.restore.ChangelogBackendRestoreOperation;
import org.apache.flink.state.changelog.restore.ChangelogBackendRestoreOperation.BaseBackendBuilder;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.Objects;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * This state backend holds the working state in the underlying delegatedStateBackend, and forwards
 * state changes to State Changelog.
 *
 * <p>The delegated backend does the actual state storage; this wrapper restores it, wraps the
 * keyed backend in a {@link ChangelogKeyedStateBackend}, and starts periodic materialization.
 */
@Internal
public class ChangelogStateBackend implements DelegatingStateBackend, ConfigurableStateBackend {
    private static final long serialVersionUID = 1000L;
    private static final Logger LOG = LoggerFactory.getLogger(ChangelogStateBackend.class);
    /** The backend that actually holds the working state; calls are forwarded to it. */
    private final StateBackend delegatedStateBackend;
    /**
     * Delegate a state backend by a ChangelogStateBackend.
     *
     * <p>As FLINK-22678 mentioned, we currently hide this constructor from user.
     *
     * @param stateBackend the delegated state backend.
     */
    ChangelogStateBackend(StateBackend stateBackend) {
        this.delegatedStateBackend = Preconditions.checkNotNull(stateBackend);
        // Nesting one delegating backend inside another is rejected up front.
        Preconditions.checkArgument(
                !(stateBackend instanceof DelegatingStateBackend),
                "Recursive Delegation is not supported.");
        LOG.info(
                "ChangelogStateBackend is used, delegating {}.",
                delegatedStateBackend.getClass().getSimpleName());
    }
    /**
     * Creates the keyed state backend by restoring the delegated backend from the given handles
     * and wrapping it in a {@link ChangelogKeyedStateBackend}.
     */
    @Override
    public <K> ChangelogKeyedStateBackend<K> createKeyedStateBackend(
            Environment env,
            JobID jobID,
            String operatorIdentifier,
            TypeSerializer<K> keySerializer,
            int numberOfKeyGroups,
            KeyGroupRange keyGroupRange,
            TaskKvStateRegistry kvStateRegistry,
            TtlTimeProvider ttlTimeProvider,
            MetricGroup metricGroup,
            @Nonnull Collection<KeyedStateHandle> stateHandles,
            CloseableRegistry cancelStreamRegistry)
            throws Exception {
        return restore(
                env,
                operatorIdentifier,
                keyGroupRange,
                ttlTimeProvider,
                stateHandles,
                // Builds the delegated (base) backend from the base part of the handles.
                baseHandles ->
                        (AbstractKeyedStateBackend<K>)
                                delegatedStateBackend.createKeyedStateBackend(
                                        env,
                                        jobID,
                                        operatorIdentifier,
                                        keySerializer,
                                        numberOfKeyGroups,
                                        keyGroupRange,
                                        kvStateRegistry,
                                        ttlTimeProvider,
                                        metricGroup,
                                        baseHandles,
                                        cancelStreamRegistry));
    }
    /**
     * Variant of {@link #createKeyedStateBackend} that additionally forwards the managed memory
     * fraction to the delegated backend.
     */
    @Override
    public <K> CheckpointableKeyedStateBackend<K> createKeyedStateBackend(
            Environment env,
            JobID jobID,
            String operatorIdentifier,
            TypeSerializer<K> keySerializer,
            int numberOfKeyGroups,
            KeyGroupRange keyGroupRange,
            TaskKvStateRegistry kvStateRegistry,
            TtlTimeProvider ttlTimeProvider,
            MetricGroup metricGroup,
            @Nonnull Collection<KeyedStateHandle> stateHandles,
            CloseableRegistry cancelStreamRegistry,
            double managedMemoryFraction)
            throws Exception {
        return restore(
                env,
                operatorIdentifier,
                keyGroupRange,
                ttlTimeProvider,
                stateHandles,
                // Builds the delegated (base) backend from the base part of the handles.
                baseHandles ->
                        (AbstractKeyedStateBackend<K>)
                                delegatedStateBackend.createKeyedStateBackend(
                                        env,
                                        jobID,
                                        operatorIdentifier,
                                        keySerializer,
                                        numberOfKeyGroups,
                                        keyGroupRange,
                                        kvStateRegistry,
                                        ttlTimeProvider,
                                        metricGroup,
                                        baseHandles,
                                        cancelStreamRegistry,
                                        managedMemoryFraction));
    }
    /** Operator (non-keyed) state creation is delegated unchanged to the wrapped backend. */
    @Override
    public OperatorStateBackend createOperatorStateBackend(
            Environment env,
            String operatorIdentifier,
            @Nonnull Collection<OperatorStateHandle> stateHandles,
            CloseableRegistry cancelStreamRegistry)
            throws Exception {
        return delegatedStateBackend.createOperatorStateBackend(
                env, operatorIdentifier, stateHandles, cancelStreamRegistry);
    }
    @Override
    public boolean useManagedMemory() {
        // Managed-memory usage is a property of the delegated backend.
        return delegatedStateBackend.useManagedMemory();
    }
    @Override
    public StateBackend getDelegatedStateBackend() {
        return delegatedStateBackend;
    }
    /**
     * Configures the delegated backend (when it is configurable) and re-wraps the result in a
     * fresh {@code ChangelogStateBackend}; otherwise returns this instance unchanged.
     */
    @Override
    public StateBackend configure(ReadableConfig config, ClassLoader classLoader)
            throws IllegalConfigurationException {
        if (delegatedStateBackend instanceof ConfigurableStateBackend) {
            return new ChangelogStateBackend(
                    ((ConfigurableStateBackend) delegatedStateBackend)
                            .configure(config, classLoader));
        }
        return this;
    }
    /**
     * Restores the keyed state: builds the base backend via {@code baseBackendBuilder}, replays
     * the changelog through {@link ChangelogBackendRestoreOperation}, wraps the result in a
     * {@link ChangelogKeyedStateBackend}, and starts a {@link PeriodicMaterializationManager}
     * bound to the backend's lifecycle.
     *
     * @throws Exception if the changelog storage is unavailable or restore fails
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    private <K> ChangelogKeyedStateBackend<K> restore(
            Environment env,
            String operatorIdentifier,
            KeyGroupRange keyGroupRange,
            TtlTimeProvider ttlTimeProvider,
            Collection<KeyedStateHandle> stateHandles,
            BaseBackendBuilder<K> baseBackendBuilder)
            throws Exception {
        // The changelog storage must have been provisioned by the task state manager.
        StateChangelogStorage<?> changelogStorage =
                Preconditions.checkNotNull(
                        env.getTaskStateManager().getStateChangelogStorage(),
                        "Changelog storage is null when creating and restoring"
                                + " the ChangelogKeyedStateBackend.");
        String subtaskName = env.getTaskInfo().getTaskNameWithSubtasks();
        ExecutionConfig executionConfig = env.getExecutionConfig();
        ChangelogKeyedStateBackend<K> keyedStateBackend =
                ChangelogBackendRestoreOperation.restore(
                        changelogStorage.createReader(),
                        env.getUserCodeClassLoader().asClassLoader(),
                        castHandles(stateHandles),
                        baseBackendBuilder,
                        (baseBackend, baseState) ->
                                new ChangelogKeyedStateBackend(
                                        baseBackend,
                                        subtaskName,
                                        executionConfig,
                                        ttlTimeProvider,
                                        changelogStorage.createWriter(
                                                operatorIdentifier, keyGroupRange),
                                        baseState,
                                        env.getCheckpointStorageAccess()));
        PeriodicMaterializationManager periodicMaterializationManager =
                new PeriodicMaterializationManager(
                        checkNotNull(env.getMainMailboxExecutor()),
                        checkNotNull(env.getAsyncOperationsThreadPool()),
                        subtaskName,
                        // Materialization errors are surfaced by failing the task externally.
                        (message, exception) ->
                                env.failExternally(new AsynchronousException(message, exception)),
                        keyedStateBackend,
                        executionConfig.getPeriodicMaterializeIntervalMillis(),
                        executionConfig.getMaterializationMaxAllowedFailures());
        // keyedStateBackend is responsible to close periodicMaterializationManager
        // This indicates periodicMaterializationManager binds to the keyedStateBackend
        // However PeriodicMaterializationManager can not be part of keyedStateBackend
        // because of cyclic reference
        keyedStateBackend.registerCloseable(periodicMaterializationManager);
        periodicMaterializationManager.start();
        return keyedStateBackend;
    }
    /**
     * Normalises the handle collection: null handles are dropped, existing changelog handles are
     * passed through, and plain keyed handles (e.g. from a savepoint) are wrapped into
     * {@link ChangelogStateBackendHandleImpl} with an empty changelog part.
     */
    private Collection<ChangelogStateBackendHandle> castHandles(
            Collection<KeyedStateHandle> stateHandles) {
        if (stateHandles.stream().anyMatch(h -> !(h instanceof ChangelogStateBackendHandle))) {
            LOG.warn(
                    "Some state handles do not contain changelog: {} (ok if recovery from a savepoint)",
                    stateHandles);
        }
        return stateHandles.stream()
                .filter(Objects::nonNull)
                .map(
                        keyedStateHandle ->
                                keyedStateHandle instanceof ChangelogStateBackendHandle
                                        ? (ChangelogStateBackendHandle) keyedStateHandle
                                        : new ChangelogStateBackendHandleImpl(
                                                singletonList(keyedStateHandle),
                                                emptyList(),
                                                keyedStateHandle.getKeyGroupRange()))
                .collect(Collectors.toList());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.