gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.containers;
import com.intellij.concurrency.ConcurrentCollectionFactory;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.testFramework.LeakHunter;
import com.intellij.testFramework.RunFirst;
import com.intellij.testFramework.TestLoggerFactory;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.util.ref.GCUtil;
import com.intellij.util.ref.GCWatcher;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
// tests various ContainerUtil.create* and ContainerUtil.new* collections for being really weak/soft/concurrent
@RunFirst
public class ContainerUtilCollectionsTest extends Assert {
// JUnit rule that dumps the captured test log when a test fails.
@Rule
public TestRule watcher = TestLoggerFactory.createTestWatcher();
// Per-test timeout: the GC-dependent tests below may loop for a while before references are tossed.
private static final long TIMEOUT = 5 * 60 * 1000; // 5 minutes
// Case-insensitive strategy with a deliberately poor hash (lower-cased last char only)
// to exercise collision handling in strategy-aware maps.
private static final TObjectHashingStrategy<String> IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY = new TObjectHashingStrategy<String>() {
@Override
public int computeHashCode(String object) {
return Character.toLowerCase(object.charAt(object.length() - 1));
}
@Override
public boolean equals(String o1, String o2) {
return StringUtil.equalsIgnoreCase(o1, o2);
}
};
// Weak-keyed concurrent map must evict entries even when keys are compared by identity.
@Test(timeout = TIMEOUT)
public void testConcurrentWeakMapTossedEvenWithIdentityStrategy() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakMap(ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
}
// Soft-keyed concurrent map must evict entries even when keys are compared by identity.
@Test(timeout = TIMEOUT)
public void testConcurrentSoftMapTossedEvenWithIdentityStrategy() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentSoftMap(10, 0.5f, 8, ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
}
// Asserts that entries whose keys become unreachable disappear after GC pressure,
// both for unreachable values and for a strongly-held value (this).
private void checkKeyTossedEventually(Map<Object, Object> map) {
checkClearsEventuallyAfterGCPressure(map, ()->map.put(new Object(), new Object()));
checkClearsEventuallyAfterGCPressure(map, ()->map.put(new Object(), this));
}
// Same key-tossing check for the object-key/primitive-int-value map flavor.
private void checkKeyTossedEventually(ObjectIntMap<Object> map) {
checkClearsEventuallyAfterGCPressure(map, ()->map.put(new Object(), 0));
}
// Asserts that entries whose values become unreachable disappear after GC pressure.
private void checkValueTossedEventually(IntObjectMap<Object> map) {
checkClearsEventuallyAfterGCPressure(map, ()->map.put(0, new Object()));
}
private void checkValueTossedEventually(Map<Object, Object> map) {
checkClearsEventuallyAfterGCPressure(map, ()->map.put(new Object(), new Object()));
checkClearsEventuallyAfterGCPressure(map, ()->map.put(this, new Object()));
}
// Weak-key/weak-value map: both key and value tossing must work under identity strategy.
@Test(timeout = TIMEOUT)
public void testConcurrentWeakKeyWeakValueTossedEvenWithIdentityStrategy() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakKeyWeakValueMap(ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
checkValueTossedEventually(map);
}
// Soft-key/soft-value map: both key and value tossing must work under identity strategy.
@Test(timeout = TIMEOUT)
public void testConcurrentSoftKeySoftValueTossedEvenWithIdentityStrategy() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentSoftKeySoftValueMap(10, 0.5f, 8, ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
checkValueTossedEventually(map);
}
// Mixed weak-key/soft-value flavor under identity strategy.
@Test(timeout = TIMEOUT)
public void testConcurrentWeakKeySoftValueTossedEvenWithIdentityStrategy() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakKeySoftValueMap(10, 0.5f, 8, ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
checkValueTossedEventually(map);
}
// Non-concurrent weak map with identity strategy must still toss unreachable keys.
@Test(timeout = TIMEOUT)
public void testWeakMapTossedEvenWithIdentityStrategy() {
Map<Object, Object> map = ContainerUtil.createWeakMap(10,0.5f,ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
}
// Non-concurrent soft map with identity strategy must still toss unreachable keys.
@Test(timeout = TIMEOUT)
public void testSoftMapTossedEvenWithIdentityStrategy() {
Map<Object, Object> map = ContainerUtil.createSoftMap(ContainerUtil.identityStrategy());
checkKeyTossedEventually(map);
}
// entrySet().remove(entry) must actually remove the mapping from a soft map.
@Test(timeout = TIMEOUT)
public void testRemoveFromSoftEntrySet() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentSoftMap();
map.put(this, this);
Set<Map.Entry<Object, Object>> entries = map.entrySet();
assertEquals(1, entries.size());
Map.Entry<Object, Object> entry = entries.iterator().next();
entries.remove(entry);
assertTrue(map.isEmpty());
}
// entrySet().remove(entry) must actually remove the mapping from a weak map.
@Test(timeout = TIMEOUT)
public void testRemoveFromWeakEntrySet() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakMap();
map.put(this, this);
Set<Map.Entry<Object, Object>> entries = map.entrySet();
assertEquals(1, entries.size());
Map.Entry<Object, Object> entry = entries.iterator().next();
entries.remove(entry);
assertTrue(map.isEmpty());
}
@Test(timeout = TIMEOUT)
public void testConcurrentWeakMapTossed() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakMap();
checkKeyTossedEventually(map);
}
// put() with an existing key must not keep the previous value strongly reachable.
@Test(timeout = TIMEOUT)
public void testConcurrentWeakMapDoesntRetainOldValueKeyAfterPutWithTheSameKeyButDifferentValue() {
checkMapDoesntLeakOldValueAfterPutWithTheSameKeyButDifferentValue(ContainerUtil.createConcurrentWeakMap());
}
@Test(timeout = TIMEOUT)
public void testConcurrentSoftMapDoesntRetainOldValueKeyAfterPutWithTheSameKeyButDifferentValue() {
checkMapDoesntLeakOldValueAfterPutWithTheSameKeyButDifferentValue(ContainerUtil.createConcurrentSoftMap());
}
@Test(timeout = TIMEOUT)
public void testConcurrentWKWVMapDoesntRetainOldValueKeyAfterPutWithTheSameKeyButDifferentValue() {
checkMapDoesntLeakOldValueAfterPutWithTheSameKeyButDifferentValue(ContainerUtil.createConcurrentWeakKeyWeakValueMap());
}
@Test(timeout = TIMEOUT)
public void testConcurrentWKSVMapDoesntRetainOldValueKeyAfterPutWithTheSameKeyButDifferentValue() {
checkMapDoesntLeakOldValueAfterPutWithTheSameKeyButDifferentValue(ContainerUtil.createConcurrentWeakKeySoftValueMap());
}
@Test(timeout = TIMEOUT)
public void testConcurrentSKSVMapDoesntRetainOldValueKeyAfterPutWithTheSameKeyButDifferentValue() {
checkMapDoesntLeakOldValueAfterPutWithTheSameKeyButDifferentValue(ContainerUtil.createConcurrentSoftKeySoftValueMap(1,1,1,ContainerUtil.canonicalStrategy()));
}
// Replaces the value for a key, then verifies via LeakHunter that the first value
// (a unique local class, so it cannot be reachable from anywhere else) is no longer
// reachable from the map. The 'strong' field keeps it alive only until the second put.
private void checkMapDoesntLeakOldValueAfterPutWithTheSameKeyButDifferentValue(Map<Object, Object> map) {
Object key = new Object();
class MyValue {}
map.put(key, strong = new MyValue());
map.put(key, this);
strong = null;
LeakHunter.checkLeak(map, MyValue.class);
}
@Test(timeout = TIMEOUT)
public void testConcurrentSoftMapTossed() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentSoftMap();
checkKeyTossedEventually(map);
}
@Test(timeout = TIMEOUT)
public void testConcurrentWeakValueMapTossed() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakValueMap();
checkValueTossedEventually(map);
}
@Test(timeout = TIMEOUT)
public void testConcurrentSoftValueMapTossed() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentSoftValueMap();
checkValueTossedEventually(map);
}
// Repeatedly provokes GC until the entry inserted by 'putKey' is evicted.
// The put/remove of 'strong' on each pass forces the map to drain its reference queues;
// afterwards put/clear verify the map is still fully functional.
private void checkClearsEventuallyAfterGCPressure(Map<Object, Object> map, @NotNull Runnable putKey) {
assertTrue(map.isEmpty());
assertEquals(0, map.size());
putKey.run();
Object strong = new Object(); // local: stays strongly reachable for the whole loop
//noinspection SizeReplaceableByIsEmpty
do {
map.put(strong, strong); // to run processQueues();
assertFalse(map.isEmpty());
map.remove(strong);
assertNull(map.get(strong));
GCUtil.tryGcSoftlyReachableObjects();
}
while (map.size() != 0);
assertTrue(map.isEmpty());
assertEquals(0, map.size());
map.put(this, this);
assertEquals(1, map.size());
map.clear();
assertEquals(0, map.size());
assertNull(map.get(strong));
}
// Arbitrary sentinel int value for the primitive-valued map flavors below.
private static final int RANDOM_INT = 987654321;
// Near-duplicate of the Map variant above for ObjectIntMap (no shared interface).
// Note: this one stores the keep-alive object in the 'strong' FIELD, not a local.
private void checkClearsEventuallyAfterGCPressure(ObjectIntMap<Object> map, @NotNull Runnable put) {
assertTrue(map.isEmpty());
assertEquals(0, map.size());
put.run();
strong = new Object();
//noinspection SizeReplaceableByIsEmpty
do {
map.put(strong, RANDOM_INT); // to run processQueues();
assertFalse(map.isEmpty());
map.remove(strong);
assertEquals(0, map.get(strong));
GCUtil.tryGcSoftlyReachableObjects();
}
while (map.size() != 0);
assertTrue(map.isEmpty());
assertEquals(0, map.size());
map.put(this, RANDOM_INT);
assertEquals(1, map.size());
map.clear();
assertEquals(0, map.size());
assertEquals(0, map.get(strong));
}
// Near-duplicate of the Map variant above for IntObjectMap.
private void checkClearsEventuallyAfterGCPressure(IntObjectMap<Object> map, @NotNull Runnable put) {
assertTrue(map.isEmpty());
assertEquals(0, map.size());
put.run();
strong = new Object();
//noinspection SizeReplaceableByIsEmpty
do {
map.put(RANDOM_INT, strong); // to run processQueues();
assertFalse(map.isEmpty());
map.remove(RANDOM_INT);
assertNull(map.get(RANDOM_INT));
GCUtil.tryGcSoftlyReachableObjects();
}
while (map.size() != 0);
assertTrue(map.isEmpty());
assertEquals(0, map.size());
map.put(RANDOM_INT, this);
assertEquals(1, map.size());
map.clear();
assertEquals(0, map.size());
assertNull(map.get(RANDOM_INT));
}
// Custom equality/hash strategy must be honored by the soft map.
@Test(timeout = TIMEOUT)
public void testSoftMapCustomStrategy() {
Map<String, String> map = ContainerUtil.createSoftMap(IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY);
map.put("ab", "ab");
assertEquals("ab", map.get("AB"));
String removed = map.remove("aB");
assertEquals("ab", removed);
assertTrue(map.isEmpty());
}
// Custom equality/hash strategy must be honored by the weak map.
@Test(timeout = TIMEOUT)
public void testWeakMapCustomStrategy() {
Map<String, String> map = ContainerUtil.createWeakMap(10, 0.5f, IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY);
String keyL = "ab";
String keyU = StringUtil.toUpperCase(keyL);
String value = "asdfab";
map.put(keyL, value);
assertSame(value, map.get(keyU));
assertSame(value, map.get(keyL));
String removed = map.remove("aB");
assertSame(value, removed);
assertTrue(map.isEmpty());
}
@Test(timeout = TIMEOUT)
public void testWeakNativeHashCodeDoesNotGetCalledWhenCustomStrategyIsSpecified() {
Map<Object, Object> map = ContainerUtil.createWeakMap(10,0.5f,ContainerUtil.identityStrategy());
checkHashCodeDoesntCalledFor(map);
}
@Test(timeout = TIMEOUT)
public void testSoftNativeHashCodeDoesNotGetCalledWhenCustomStrategyIsSpecified() {
Map<Object, Object> map = ContainerUtil.createSoftMap(ContainerUtil.identityStrategy());
checkHashCodeDoesntCalledFor(map);
}
// Uses a key whose own hashCode() fails the test, proving the strategy's hash is used instead.
private void checkHashCodeDoesntCalledFor(Map<Object, Object> map) {
Object key = new Object(){
@Override
public int hashCode() {
fail("must not be called");
return super.hashCode();
}
};
map.put(key, "ab");
assertSame("ab", map.get(key));
map.remove(key);
assertTrue(map.isEmpty());
}
@Test(timeout = TIMEOUT)
public void testConcurrentSoftCustomStrategy() {
ConcurrentMap<String, String> map = ContainerUtil.createConcurrentSoftMap(10, 0.7f, 16, IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY);
map.put("ab", "ab");
assertEquals(1, map.size());
assertSame("ab",map.get("AB"));
String removed = map.remove("aB");
assertEquals("ab", removed);
assertTrue(map.isEmpty());
}
// Plain concurrent soft/weak maps accept null keys...
@Test
public void testConcurrentSoftNullKey() {
Map<String, String> map = ContainerUtil.createConcurrentSoftMap();
tryToInsertNullKeys(map);
}
@Test
public void testConcurrentWeakNullKey() {
Map<String, String> map = ContainerUtil.createConcurrentWeakMap();
tryToInsertNullKeys(map);
}
// ...but the weak-key/soft-value and weak-key/weak-value flavors must reject them.
@Test(expected = IllegalArgumentException.class)
public void testConcurrentWeakSoftNullKey() {
Map<String, String> map = ContainerUtil.createConcurrentWeakKeySoftValueMap(1, 1, 1, IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY);
tryToInsertNullKeys(map);
}
@Test(expected = IllegalArgumentException.class)
public void testConcurrentWeakWeakNullKey() {
Map<String, String> map = ContainerUtil.createConcurrentWeakKeyWeakValueMap(IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY);
tryToInsertNullKeys(map);
}
// put/get/remove round-trip with a null key; throws if the map rejects nulls.
private static void tryToInsertNullKeys(Map<String, String> map) {
map.put(null, "ab");
assertEquals(1, map.size());
assertEquals("ab", map.get(null));
String removed = map.remove(null);
assertEquals("ab", removed);
assertTrue(map.isEmpty());
}
@Test(timeout = TIMEOUT)
public void testConcurrentWeakSoftCustomStrategy() {
ConcurrentMap<String, String> map = ContainerUtil.createConcurrentWeakKeySoftValueMap(1, 1, 1, IGNORE_CASE_WITH_CRAZY_HASH_STRATEGY);
map.put("ab", "ab");
assertEquals(1, map.size());
assertSame("ab", map.get("AB"));
String removed = map.remove("aB");
assertEquals("ab", removed);
assertTrue(map.isEmpty());
}
// Smoke test of put/get/remove on the long-keyed concurrent map:
// after every iteration exactly one entry (the latest key) survives.
@Test(timeout = TIMEOUT)
public void testConcurrentLongObjectHashMap() {
  ConcurrentLongObjectMap<Object> map = ContainerUtil.createConcurrentLongObjectMap();
  for (int k = 0; k < 1000; k++) {
    // fresh key: no previous mapping expected
    assertNull(map.put(k, k));
    Object stored = map.get(k);
    assertTrue(stored instanceof Integer);
    assertEquals(k, stored);
    if (k > 0) {
      // drop the previous iteration's entry and check it round-tripped intact
      Object evicted = map.remove(k - 1);
      assertTrue(evicted instanceof Integer);
      assertEquals(k - 1, evicted);
    }
    assertEquals(1, map.size());
  }
  map.clear();
  assertEquals(0, map.size());
  assertTrue(map.isEmpty());
}
// Same put/get/remove smoke test as testConcurrentLongObjectHashMap, for the int-keyed map.
@Test(timeout = TIMEOUT)
public void testConcurrentIntObjectHashMap() {
IntObjectMap<Object> map = ContainerUtil.createConcurrentIntObjectMap();
for (int i = 0; i < 1000; i++) {
Object prev = map.put(i, i);
assertNull(prev);
Object ret = map.get(i);
assertTrue(ret instanceof Integer);
assertEquals(i, ret);
if (i != 0) {
Object remove = map.remove(i - 1);
assertTrue(remove instanceof Integer);
assertEquals(i - 1, remove);
}
assertEquals(1, map.size());
}
map.clear();
assertEquals(0, map.size());
}
@Test(timeout = TIMEOUT)
public void testConcurrentWeakKeyWeakValueMapTossed() {
ConcurrentMap<Object, Object> map = ContainerUtil.createConcurrentWeakKeyWeakValueMap();
checkKeyTossedEventually(map);
checkValueTossedEventually(map);
}
@Test(timeout = TIMEOUT)
public void testSoftKeySoftValueMapTossed() {
Map<Object, Object> map = ContainerUtil.createSoftKeySoftValueMap();
checkKeyTossedEventually(map);
checkValueTossedEventually(map);
}
@Test(timeout = TIMEOUT)
public void testWeakKeySoftValueMapTossed() {
Map<Object, Object> map = ContainerUtil.createWeakKeySoftValueMap();
checkKeyTossedEventually(map);
checkValueTossedEventually(map);
}
// Keeps an object alive across GC runs; written from several helpers, hence volatile.
private volatile Object strong;
// size() must shrink as values get garbage-collected.
@Test
public void testConcurrentWeakValueSize() {
Map<String, Object> map = ContainerUtil.createConcurrentWeakValueMap();
Ref<Object> ref1 = Ref.create(new Object());
Ref<Object> ref2 = Ref.create(new Object());
map.put("a", ref1.get());
map.put("b", ref2.get());
GCWatcher.fromClearedRef(ref2).tryGc();
assertEquals(1, map.size());
GCWatcher.fromClearedRef(ref1).tryGc();
assertTrue(map.toString(), map.isEmpty());
}
@Test
public void testConcurrentWeakValuePutIfAbsentMustActuallyPutNewValueIfTheOldWasGced() {
Map<String, Object> map = ContainerUtil.createConcurrentWeakValueMap();
checkPutIfAbsent(map);
}
@Test
public void testConcurrentSoftValuePutIfAbsentMustActuallyPutNewValueIfTheOldWasGced() {
Map<String, Object> map = ContainerUtil.createConcurrentSoftValueMap();
checkPutIfAbsent(map);
}
@Test
public void testConcurrentIntKeyWeakValuePutIfAbsentMustActuallyPutNewValueIfTheOldWasGced() {
ConcurrentIntObjectMap<Object> map = ContainerUtil.createConcurrentIntObjectWeakValueMap();
checkPutIfAbsent(map);
}
@Test
public void testConcurrentIntKeySoftValuePutIfAbsentMustActuallyPutNewValueIfTheOldWasGced() {
ConcurrentIntObjectMap<Object> map = ContainerUtil.createConcurrentIntObjectSoftValueMap();
checkPutIfAbsent(map);
}
// Retries putIfAbsent until the original (otherwise-unreferenced) value happens to be
// GCed and the new value wins; if that never happens within N iterations, forces GC
// explicitly and requires the replacement to succeed.
private static void checkPutIfAbsent(Map<String, Object> map) {
String key = "a";
map.put(key, new Object());
String newVal = "xxx";
int i;
int N = 1_000_000;
for (i = 0; i < N; i++) {
Object prev = map.putIfAbsent(key, newVal);
if (prev == null) {
assertSame(newVal, map.get(key));
break;
}
assertEquals(Object.class, prev.getClass());
Object actual = map.get(key);
assertNotNull(actual);
if (actual == newVal) {
break; // gced, replaced
}
assertEquals(Object.class, actual.getClass()); // still not gced, put failed. repeat
}
if (i == N) {
GCUtil.tryGcSoftlyReachableObjects();
Object prev = map.putIfAbsent(key, newVal);
assertNull(prev);
assertSame(newVal, map.get(key));
}
}
// Near-duplicate of the String-keyed variant above for the int-keyed map interface.
private static void checkPutIfAbsent(ConcurrentIntObjectMap<Object> map) {
int key = 4;
map.put(key, new Object());
String newVal = "xxx";
int i;
int N = 1_000_000;
for (i = 0; i < N; i++) {
Object prev = map.putIfAbsent(key, newVal);
if (prev == null) {
assertSame(newVal, map.get(key));
break;
}
assertEquals(Object.class, prev.getClass());
Object actual = map.get(key);
assertNotNull(actual);
if (actual == newVal) {
break; // gced, replaced
}
assertEquals(Object.class, actual.getClass()); // still not gced, put failed. repeat
}
if (i == N) {
GCUtil.tryGcSoftlyReachableObjects();
Object prev = map.putIfAbsent(key, newVal);
assertNull(prev);
assertSame(newVal, map.get(key));
}
}
// All keys hash to the same bucket (strategy returns 0), but compare unequal by identity:
// the map must still store all N distinct keys, i.e. survive degenerate collision chains.
@Test
public void testConcurrentHashMapTreeBinifiesItself() {
class AwfulHashCode {
@Override
public int hashCode() {
return 0;
}
}
ConcurrentMap<Object, Object> map = ConcurrentCollectionFactory.createMap(new TObjectHashingStrategy<Object>() {
@Override
public int computeHashCode(Object object) {
return 0;
}
@Override
public boolean equals(Object o1, Object o2) {
return o1==o2;
}
});
int N = 1000;
for (int i = 0; i < N; i++) {
map.put(new AwfulHashCode(), 0);
}
assertEquals(N, map.size());
}
@Test
public void weakSetTossed() {
Set<Object> set = ContainerUtil.createWeakSet();
checkClearsEventuallyAfterGCPressure(set);
}
// Set counterpart of the map GC-pressure checks: a GCed element must vanish,
// and the set must remain usable afterwards.
private void checkClearsEventuallyAfterGCPressure(Set<Object> set) {
assertTrue(set.isEmpty());
Ref<Object> ref = Ref.create(new Object());
set.add(ref.get());
GCWatcher.fromClearedRef(ref).tryGc();
set.add(this); // to run processQueues();
assertFalse(set.isEmpty());
set.remove(this);
assertTrue(set.isEmpty());
assertEquals(0, set.size());
set.add(this);
assertEquals(1, set.size());
}
@Test(timeout = TIMEOUT)
public void testWeakKeyIntValueMapTossed() {
ObjectIntMap<Object> map = ContainerUtil.createWeakKeyIntValueMap();
checkKeyTossedEventually(map);
}
@Test(timeout = TIMEOUT)
public void testIntKeyWeakValueMapTossed() {
IntObjectMap<Object> map = ContainerUtil.createIntKeyWeakValueMap();
checkValueTossedEventually(map);
}
// entrySet() iteration must work for every int-keyed map flavor.
@Test
public void testEntrySet() {
checkEntrySetIterator(ContainerUtil.createConcurrentIntObjectMap());
checkEntrySetIterator(ContainerUtil.createConcurrentIntObjectSoftValueMap());
checkEntrySetIterator(ContainerUtil.createConcurrentIntObjectWeakValueMap());
checkEntrySetIterator(ContainerUtil.createIntKeyWeakValueMap());
}
@Test
public void testEntrySetTossesValue() {
checkEntrySetIteratorTossesValue(ContainerUtil.createConcurrentIntObjectSoftValueMap());
checkEntrySetIteratorTossesValue(ContainerUtil.createConcurrentIntObjectWeakValueMap());
checkEntrySetIteratorTossesValue(ContainerUtil.createIntKeyWeakValueMap());
}
// Key 3 holds the only collectible value; after GC the iterator must skip it -
// unless the iterator already positioned on entry 3 before the value was collected.
private void checkEntrySetIteratorTossesValue(IntObjectMap<Object> map) {
map.put(1, this);
map.put(2, this);
map.put(3, strong = new Object());
map.put(4, this);
Iterator<IntObjectMap.Entry<Object>> iterator = map.entrySet().iterator();
assertTrue(iterator.hasNext());
strong = null;
for (int i=0; i<10; i++) {
if (map.get(3)==null) break;
GCUtil.tryGcSoftlyReachableObjects();
}
if (map.get(3) == null) {
List<Integer> keys = ContainerUtil.map(ContainerUtil.collect(iterator), e -> e.getKey());
assertFalse(keys.contains(3));
}
else {
// bad luck - iterator has started with 3
assertEquals(3, iterator.next().getKey());
}
}
// Exercises iteration order-independence, Iterator.remove(), exhaustion
// (NoSuchElementException) and two concurrent iterators over a two-entry map.
private void checkEntrySetIterator(IntObjectMap<Object> map) {
map.clear();
int K1 = 1;
map.put(K1, this);
int K2 = 2;
map.put(K2, map);
assertEquals(2, ContainerUtil.collect(map.entrySet().iterator()).size());
assertEquals(2, ContainerUtil.collect(map.entrySet().iterator()).size());
Iterator<IntObjectMap.Entry<Object>> iterator = map.entrySet().iterator();
assertTrue(iterator.hasNext());
IntObjectMap.Entry<Object> next = iterator.next();
int key = next.getKey();
assertTrue(key==K1 || key==K2);
iterator.remove();
assertEquals(1, ContainerUtil.collect(map.entrySet().iterator()).size());
Iterator<IntObjectMap.Entry<Object>> it2 = map.entrySet().iterator();
int otherKey = K1 + K2 - key; // whichever of the two keys was not visited first
assertEquals(otherKey, it2.next().getKey());
assertFalse(it2.hasNext());
try {
it2.next();
fail("must throw");
}
catch (NoSuchElementException ignored) {
}
assertTrue(iterator.hasNext());
assertEquals(otherKey, iterator.next().getKey());
iterator.remove();
UsefulTestCase.assertEmpty(ContainerUtil.collect(map.entrySet().iterator()));
assertTrue(map.isEmpty());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.users;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Iterator;
import org.apache.catalina.Globals;
import org.apache.catalina.Group;
import org.apache.catalina.Role;
import org.apache.catalina.User;
import org.apache.catalina.UserDatabase;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.digester.AbstractObjectCreationFactory;
import org.apache.tomcat.util.digester.Digester;
import org.apache.tomcat.util.file.ConfigFileLoader;
import org.apache.tomcat.util.res.StringManager;
import org.xml.sax.Attributes;
/**
* <p>Concrete implementation of {@link UserDatabase} that loads all
* defined users, groups, and roles into an in-memory data structure,
* and uses a specified XML file for its persistent storage.</p>
*
* @author Craig R. McClanahan
* @since 4.1
*/
public class MemoryUserDatabase implements UserDatabase {
private static final Log log = LogFactory.getLog(MemoryUserDatabase.class);
// ----------------------------------------------------------- Constructors
/**
 * Create a new instance with default values (no id).
 */
public MemoryUserDatabase() {
this(null);
}
/**
 * Create a new instance with the specified values.
 *
 * @param id Unique global identifier of this user database
 */
public MemoryUserDatabase(String id) {
this.id = id;
}
// ----------------------------------------------------- Instance Variables
/**
 * The set of {@link Group}s defined in this database, keyed by
 * group name. Access is guarded by synchronizing on this map.
 */
protected final HashMap<String,Group> groups = new HashMap<String,Group>();
/**
 * The unique global identifier of this user database.
 */
protected final String id;
/**
 * The relative (to <code>catalina.base</code>) or absolute pathname to
 * the XML file in which we will save our persistent information.
 */
protected String pathname = "conf/tomcat-users.xml";
/**
 * The relative or absolute pathname to the file in which our old
 * information is stored while renaming is in progress.
 */
protected String pathnameOld = pathname + ".old";
/**
 * The relative or absolute pathname of the file in which we write
 * our new information prior to renaming.
 */
protected String pathnameNew = pathname + ".new";
/**
 * A flag, indicating if the user database is read only.
 * Defaults to <code>true</code>, which makes save() a no-op.
 */
protected boolean readonly = true;
/**
 * The set of {@link Role}s defined in this database, keyed by
 * role name. Access is guarded by synchronizing on this map.
 */
protected final HashMap<String,Role> roles = new HashMap<String,Role>();
/**
 * The string manager for this package.
 */
private static final StringManager sm =
StringManager.getManager(Constants.Package);
/**
 * The set of {@link User}s defined in this database, keyed by
 * user name. Access is guarded by synchronizing on this map.
 */
protected final HashMap<String,User> users = new HashMap<String,User>();
// ------------------------------------------------------------- Properties
/**
 * Return the set of {@link Group}s defined in this user database.
 * Note: only iterator creation happens under the lock; the returned
 * iterator is backed by the live map.
 */
@Override
public Iterator<Group> getGroups() {
synchronized (groups) {
return (groups.values().iterator());
}
}
/**
 * Return the unique global identifier of this user database.
 */
@Override
public String getId() {
return (this.id);
}
/**
 * Return the relative or absolute pathname to the persistent storage file.
 */
public String getPathname() {
return (this.pathname);
}
/**
 * Set the relative or absolute pathname to the persistent storage file,
 * and rebuild the derived ".old" and ".new" working pathnames.
 *
 * @param pathname The new pathname
 */
public void setPathname(String pathname) {
this.pathname = pathname;
this.pathnameOld = pathname + ".old";
this.pathnameNew = pathname + ".new";
}
/**
 * Return the readonly status of the user database.
 */
public boolean getReadonly() {
return (this.readonly);
}
/**
 * Set the readonly status of the user database.
 *
 * @param readonly the new status
 */
public void setReadonly(boolean readonly) {
this.readonly = readonly;
}
/**
 * Return the set of {@link Role}s defined in this user database.
 * See {@link #getGroups()} for iterator/locking caveats.
 */
@Override
public Iterator<Role> getRoles() {
synchronized (roles) {
return (roles.values().iterator());
}
}
/**
 * Return the set of {@link User}s defined in this user database.
 * See {@link #getGroups()} for iterator/locking caveats.
 */
@Override
public Iterator<User> getUsers() {
synchronized (users) {
return (users.values().iterator());
}
}
// --------------------------------------------------------- Public Methods
/**
* Finalize access to this user database.
*
* @exception Exception if any exception is thrown during closing
*/
@Override
public void close() throws Exception {
save();
synchronized (groups) {
synchronized (users) {
users.clear();
groups.clear();
}
}
}
/**
 * Create and return a new {@link Group} defined in this user database.
 *
 * @param groupname The group name of the new group (must be unique)
 * @param description The description of this group
 * @throws IllegalArgumentException if the group name is null or empty
 */
@Override
public Group createGroup(String groupname, String description) {
if (groupname == null || groupname.length() == 0) {
String msg = sm.getString("memoryUserDatabase.nullGroup");
log.warn(msg);
throw new IllegalArgumentException(msg);
}
MemoryGroup group = new MemoryGroup(this, groupname, description);
synchronized (groups) {
groups.put(group.getGroupname(), group);
}
return (group);
}
/**
 * Create and return a new {@link Role} defined in this user database.
 *
 * @param rolename The role name of the new group (must be unique)
 * @param description The description of this group
 * @throws IllegalArgumentException if the role name is null or empty
 */
@Override
public Role createRole(String rolename, String description) {
if (rolename == null || rolename.length() == 0) {
String msg = sm.getString("memoryUserDatabase.nullRole");
log.warn(msg);
throw new IllegalArgumentException(msg);
}
MemoryRole role = new MemoryRole(this, rolename, description);
synchronized (roles) {
roles.put(role.getRolename(), role);
}
return (role);
}
/**
 * Create and return a new {@link User} defined in this user database.
 *
 * @param username The logon username of the new user (must be unique)
 * @param password The logon password of the new user
 * @param fullName The full name of the new user
 * @throws IllegalArgumentException if the username is null or empty
 */
@Override
public User createUser(String username, String password,
String fullName) {
if (username == null || username.length() == 0) {
String msg = sm.getString("memoryUserDatabase.nullUser");
log.warn(msg);
throw new IllegalArgumentException(msg);
}
MemoryUser user = new MemoryUser(this, username, password, fullName);
synchronized (users) {
users.put(user.getUsername(), user);
}
return (user);
}
/**
 * Return the {@link Group} with the specified group name, if any;
 * otherwise return <code>null</code>.
 *
 * @param groupname Name of the group to return
 * @return the matching group, or <code>null</code> if not found
 */
@Override
public Group findGroup(String groupname) {
synchronized (groups) {
return groups.get(groupname);
}
}
/**
 * Return the {@link Role} with the specified role name, if any;
 * otherwise return <code>null</code>.
 *
 * @param rolename Name of the role to return
 * @return the matching role, or <code>null</code> if not found
 */
@Override
public Role findRole(String rolename) {
synchronized (roles) {
return roles.get(rolename);
}
}
/**
 * Return the {@link User} with the specified user name, if any;
 * otherwise return <code>null</code>.
 *
 * @param username Name of the user to return
 * @return the matching user, or <code>null</code> if not found
 */
@Override
public User findUser(String username) {
synchronized (users) {
return users.get(username);
}
}
/**
 * Initialize access to this user database: erase the in-memory state and
 * re-read the backing XML file through a Digester.
 *
 * @exception Exception if any exception is thrown during opening
 */
@Override
public void open() throws Exception {
synchronized (groups) {
synchronized (users) {
// Erase any previous groups and users
users.clear();
groups.clear();
roles.clear();
String pathName = getPathname();
InputStream is = null;
try {
is = ConfigFileLoader.getInputStream(pathName);
// Construct a digester to read the XML input file
Digester digester = new Digester();
try {
digester.setFeature(
"http://apache.org/xml/features/allow-java-encodings",
true);
} catch (Exception e) {
log.warn(sm.getString("memoryUserDatabase.xmlFeatureEncoding"), e);
}
digester.addFactoryCreate
("tomcat-users/group",
new MemoryGroupCreationFactory(this), true);
digester.addFactoryCreate
("tomcat-users/role",
new MemoryRoleCreationFactory(this), true);
digester.addFactoryCreate
("tomcat-users/user",
new MemoryUserCreationFactory(this), true);
// Parse the XML input to load this database
digester.parse(is);
} catch (IOException ioe) {
// NOTE(review): any IOException (not just a missing file) is logged as
// "fileNotFound" and swallowed, leaving the database empty - confirm intended.
log.error(sm.getString("memoryUserDatabase.fileNotFound", pathName));
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
// Ignore close failures: the database was already parsed (or the error logged)
}
}
}
}
}
}
/**
 * Remove the specified {@link Group} from this user database,
 * first detaching it from every user that belongs to it.
 *
 * @param group The group to be removed
 */
@Override
public void removeGroup(Group group) {
synchronized (groups) {
Iterator<User> users = getUsers();
while (users.hasNext()) {
User user = users.next();
user.removeGroup(group);
}
groups.remove(group.getGroupname());
}
}
/**
 * Remove the specified {@link Role} from this user database,
 * first detaching it from every group and user that holds it.
 *
 * @param role The role to be removed
 */
@Override
public void removeRole(Role role) {
synchronized (roles) {
Iterator<Group> groups = getGroups();
while (groups.hasNext()) {
Group group = groups.next();
group.removeRole(role);
}
Iterator<User> users = getUsers();
while (users.hasNext()) {
User user = users.next();
user.removeRole(role);
}
roles.remove(role.getRolename());
}
}
/**
 * Remove the specified {@link User} from this user database.
 *
 * @param user The user to be removed
 */
@Override
public void removeUser(User user) {
synchronized (users) {
users.remove(user.getUsername());
}
}
/**
 * Check for permission to save this user database to its persistent
 * storage location: the parent directory of the (catalina.base-resolved)
 * pathname must exist, be a directory, and be writable.
 *
 * @return <code>true</code> if the storage location is writable
 */
public boolean isWriteable() {
    File file = new File(pathname);
    if (!file.isAbsolute()) {
        file = new File(System.getProperty(Globals.CATALINA_BASE_PROP),
                        pathname);
    }
    File dir = file.getParentFile();
    // getParentFile() may return null (e.g. the pathname is a filesystem root);
    // treat that as not writable instead of throwing NullPointerException.
    return dir != null && dir.exists() && dir.isDirectory() && dir.canWrite();
}
/**
 * Save any updated information to the persistent storage location for
 * this user database. Writes to "&lt;pathname&gt;.new" first, then rotates
 * files (current -> ".old", ".new" -> current) so a failed write cannot
 * destroy the existing database. Silently returns when the database is
 * read-only or the location is not writable.
 *
 * @exception Exception if any exception is thrown during saving
 */
@Override
public void save() throws Exception {
if (getReadonly()) {
log.error(sm.getString("memoryUserDatabase.readOnly"));
return;
}
if (!isWriteable()) {
log.warn(sm.getString("memoryUserDatabase.notPersistable"));
return;
}
// Write out contents to a temporary file
File fileNew = new File(pathnameNew);
if (!fileNew.isAbsolute()) {
fileNew =
new File(System.getProperty(Globals.CATALINA_BASE_PROP), pathnameNew);
}
PrintWriter writer = null;
try {
// Configure our PrintWriter
FileOutputStream fos = new FileOutputStream(fileNew);
OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF8");
writer = new PrintWriter(osw);
// Print the file prolog
writer.println("<?xml version='1.0' encoding='utf-8'?>");
writer.println("<tomcat-users>");
// Print entries for each defined role, group, and user
Iterator<?> values = null;
values = getRoles();
while (values.hasNext()) {
writer.print(" ");
writer.println(values.next());
}
values = getGroups();
while (values.hasNext()) {
writer.print(" ");
writer.println(values.next());
}
values = getUsers();
while (values.hasNext()) {
writer.print(" ");
writer.println(((MemoryUser) values.next()).toXml());
}
// Print the file epilog
writer.println("</tomcat-users>");
// Check for errors that occurred while printing
// (PrintWriter swallows IOExceptions, so checkError() is the only failure signal)
if (writer.checkError()) {
writer.close();
fileNew.delete();
throw new IOException
(sm.getString("memoryUserDatabase.writeException",
fileNew.getAbsolutePath()));
}
writer.close();
} catch (IOException e) {
if (writer != null) {
writer.close();
}
fileNew.delete();
throw e;
}
// Perform the required renames to permanently save this file
File fileOld = new File(pathnameOld);
if (!fileOld.isAbsolute()) {
fileOld =
new File(System.getProperty(Globals.CATALINA_BASE_PROP), pathnameOld);
}
fileOld.delete();
File fileOrig = new File(pathname);
if (!fileOrig.isAbsolute()) {
fileOrig =
new File(System.getProperty(Globals.CATALINA_BASE_PROP), pathname);
}
if (fileOrig.exists()) {
fileOld.delete();
if (!fileOrig.renameTo(fileOld)) {
throw new IOException
(sm.getString("memoryUserDatabase.renameOld",
fileOld.getAbsolutePath()));
}
}
if (!fileNew.renameTo(fileOrig)) {
// Roll back: try to restore the previous database file before reporting failure
if (fileOld.exists()) {
fileOld.renameTo(fileOrig);
}
throw new IOException
(sm.getString("memoryUserDatabase.renameNew",
fileOrig.getAbsolutePath()));
}
fileOld.delete();
}
/**
 * Return a String representation of this UserDatabase, including the id,
 * pathname, and the current group/role/user counts.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("MemoryUserDatabase[id=")
            .append(this.id)
            .append(",pathname=")
            .append(pathname)
            .append(",groupCount=")
            .append(this.groups.size())
            .append(",roleCount=")
            .append(this.roles.size())
            .append(",userCount=")
            .append(this.users.size())
            .append(']');
    return sb.toString();
}
// -------------------------------------------------------- Package Methods
/**
 * Return the <code>StringManager</code> for use in looking up messages.
 */
StringManager getStringManager() {
    return sm;
}
}
/**
 * Digester object creation factory for group instances.
 *
 * <p>Creates a {@link Group} in the owning database from a
 * <code>&lt;group&gt;</code> element, creating any referenced roles
 * on demand.
 */
class MemoryGroupCreationFactory extends AbstractObjectCreationFactory {

    /** The user database that new groups and roles are created in. */
    private final MemoryUserDatabase database;

    public MemoryGroupCreationFactory(MemoryUserDatabase database) {
        this.database = database;
    }

    /**
     * Create a Group from the element's attributes. The group name is read
     * from "groupname", falling back to "name"; "roles" is a comma-separated
     * list of role names, each created if not already present.
     */
    @Override
    public Object createObject(Attributes attributes) {
        String groupname = attributes.getValue("groupname");
        if (groupname == null) {
            groupname = attributes.getValue("name");
        }
        String description = attributes.getValue("description");
        String roles = attributes.getValue("roles");
        Group group = database.createGroup(groupname, description);
        if (roles != null) {
            // split() plus the length check reproduces the original manual
            // comma-walk: empty segments (",,", trailing commas) are skipped.
            for (String rolename : roles.split(",")) {
                rolename = rolename.trim();
                if (rolename.length() > 0) {
                    Role role = database.findRole(rolename);
                    if (role == null) {
                        role = database.createRole(rolename, null);
                    }
                    group.addRole(role);
                }
            }
        }
        return group;
    }
}
/**
 * Digester object creation factory for role instances.
 *
 * <p>Creates a {@link Role} in the owning database from a
 * <code>&lt;role&gt;</code> element.
 */
class MemoryRoleCreationFactory extends AbstractObjectCreationFactory {

    /** The user database that new roles are created in. */
    private final MemoryUserDatabase database;

    public MemoryRoleCreationFactory(MemoryUserDatabase database) {
        this.database = database;
    }

    /**
     * Create a Role from the element's attributes. The role name is read
     * from "rolename", falling back to "name".
     */
    @Override
    public Object createObject(Attributes attributes) {
        String rolename = attributes.getValue("rolename");
        if (rolename == null) {
            rolename = attributes.getValue("name");
        }
        String description = attributes.getValue("description");
        return database.createRole(rolename, description);
    }
}
/**
 * Digester object creation factory for user instances.
 *
 * <p>Creates a {@link User} in the owning database from a
 * <code>&lt;user&gt;</code> element, creating any referenced groups and
 * roles on demand.
 */
class MemoryUserCreationFactory extends AbstractObjectCreationFactory {

    /** The user database that new users, groups, and roles are created in. */
    private final MemoryUserDatabase database;

    public MemoryUserCreationFactory(MemoryUserDatabase database) {
        this.database = database;
    }

    /**
     * Create a User from the element's attributes. The user name is read
     * from "username" (falling back to "name") and the full name from
     * "fullName" (falling back to "fullname"); "groups" and "roles" are
     * comma-separated membership lists.
     */
    @Override
    public Object createObject(Attributes attributes) {
        String username = attributes.getValue("username");
        if (username == null) {
            username = attributes.getValue("name");
        }
        String password = attributes.getValue("password");
        String fullName = attributes.getValue("fullName");
        if (fullName == null) {
            fullName = attributes.getValue("fullname");
        }
        String groups = attributes.getValue("groups");
        String roles = attributes.getValue("roles");
        User user = database.createUser(username, password, fullName);
        if (groups != null) {
            addGroups(user, groups);
        }
        if (roles != null) {
            addRoles(user, roles);
        }
        return user;
    }

    /** Add the user to each named group, creating missing groups on demand. */
    private void addGroups(User user, String groups) {
        // split() plus the length check reproduces the original manual
        // comma-walk: empty segments (",,", trailing commas) are skipped.
        for (String groupname : groups.split(",")) {
            groupname = groupname.trim();
            if (groupname.length() > 0) {
                Group group = database.findGroup(groupname);
                if (group == null) {
                    group = database.createGroup(groupname, null);
                }
                user.addGroup(group);
            }
        }
    }

    /** Grant the user each named role, creating missing roles on demand. */
    private void addRoles(User user, String roles) {
        for (String rolename : roles.split(",")) {
            rolename = rolename.trim();
            if (rolename.length() > 0) {
                Role role = database.findRole(rolename);
                if (role == null) {
                    role = database.createRole(rolename, null);
                }
                user.addRole(role);
            }
        }
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.android.webview.chromium;
import android.app.ActivityManager;
import android.content.ComponentCallbacks2;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.net.Uri;
import android.os.Build;
import android.os.Looper;
import android.os.StrictMode;
import android.util.Log;
import android.webkit.CookieManager;
import android.webkit.GeolocationPermissions;
import android.webkit.WebStorage;
import android.webkit.WebView;
import android.webkit.WebViewDatabase;
import android.webkit.WebViewFactory;
import android.webkit.WebViewFactoryProvider;
import android.webkit.WebViewProvider;
import com.android.webview.chromium.WebViewDelegateFactory.WebViewDelegate;
import org.chromium.android_webview.AwBrowserContext;
import org.chromium.android_webview.AwBrowserProcess;
import org.chromium.android_webview.AwContents;
import org.chromium.android_webview.AwContentsClient;
import org.chromium.android_webview.AwContentsStatics;
import org.chromium.android_webview.AwCookieManager;
import org.chromium.android_webview.AwDataReductionProxyManager;
import org.chromium.android_webview.AwDevToolsServer;
import org.chromium.android_webview.AwQuotaManagerBridge;
import org.chromium.android_webview.AwResource;
import org.chromium.android_webview.AwSettings;
import org.chromium.android_webview.R;
import org.chromium.base.CommandLine;
import org.chromium.base.ContextUtils;
import org.chromium.base.MemoryPressureListener;
import org.chromium.base.PathService;
import org.chromium.base.PathUtils;
import org.chromium.base.ThreadUtils;
import org.chromium.base.TraceEvent;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.base.library_loader.LibraryLoader;
import org.chromium.base.library_loader.LibraryProcessType;
import org.chromium.base.library_loader.ProcessInitException;
import org.chromium.content.browser.ContentViewStatics;
import org.chromium.ui.base.ResourceBundle;
import java.io.File;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
/**
 * Entry point to the WebView. The system framework talks to this class to get instances of the
 * implementation classes.
 *
 * <p>The Chromium browser process is started lazily on first use of any entry point that needs
 * it; all lazy state is guarded by {@link #mLock} (see ensureChromiumStartedLocked).
 */
@SuppressWarnings("deprecation")
public class WebViewChromiumFactoryProvider implements WebViewFactoryProvider {
    private static final String TAG = "WebViewChromiumFactoryProvider";

    // SharedPreferences file/key used to detect a WebView package downgrade.
    private static final String CHROMIUM_PREFS_NAME = "WebViewChromiumPrefs";
    private static final String VERSION_CODE_PREF = "lastVersionCodeUsed";

    // Debug-only command-line flags file, read on debuggable builds.
    private static final String COMMAND_LINE_FILE = "/data/local/tmp/webview-command-line";

    // Guards access to the other members, and is notifyAll() signalled on the UI thread
    // when the chromium process has been started.
    private final Object mLock = new Object();

    // Initialization guarded by mLock.
    private AwBrowserContext mBrowserContext;
    private Statics mStaticMethods;
    private GeolocationPermissionsAdapter mGeolocationPermissions;
    private CookieManagerAdapter mCookieManager;
    private WebIconDatabaseAdapter mWebIconDatabase;
    private WebStorageAdapter mWebStorage;
    private WebViewDatabaseAdapter mWebViewDatabase;
    private AwDevToolsServer mDevToolsServer;
    private Context mWrappedAppContext;

    // WebViews created before Chromium has started; drained and nulled out
    // in startChromiumLocked().
    private ArrayList<WeakReference<WebViewChromium>> mWebViewsToStart =
            new ArrayList<WeakReference<WebViewChromium>>();

    // Read/write protected by mLock.
    private boolean mStarted;

    private AwDataReductionProxyManager mProxyManager;
    private SharedPreferences mWebViewPrefs;
    private WebViewDelegate mWebViewDelegate;

    /**
     * Constructor called by the API 21 version of {@link WebViewFactory} and earlier.
     */
    public WebViewChromiumFactoryProvider() {
        initialize(WebViewDelegateFactory.createApi21CompatibilityDelegate());
    }

    /**
     * Constructor called by the API 22 version of {@link WebViewFactory} and later.
     */
    public WebViewChromiumFactoryProvider(android.webkit.WebViewDelegate delegate) {
        initialize(WebViewDelegateFactory.createProxyDelegate(delegate));
    }

    /**
     * One-time initialization shared by both constructors: sets up the command line,
     * loads the native libraries, and wipes WebView data on package downgrade.
     */
    @SuppressFBWarnings("DMI_HARDCODED_ABSOLUTE_FILENAME")
    private void initialize(WebViewDelegate webViewDelegate) {
        mWebViewDelegate = webViewDelegate;
        if (isBuildDebuggable()) {
            // Suppress the StrictMode violation as this codepath is only hit on debuggable builds.
            StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
            CommandLine.initFromFile(COMMAND_LINE_FILE);
            StrictMode.setThreadPolicy(oldPolicy);
        } else {
            CommandLine.init(null);
        }
        CommandLine cl = CommandLine.getInstance();
        // TODO: currently in a release build the DCHECKs only log. We either need to install
        // a report handler with SetLogReportHandler to make them assert, or else compile
        // them out of the build altogether (b/8284203). Either way, so long as they're
        // compiled in, we may as well unconditionally enable them here.
        cl.appendSwitch("enable-dcheck");

        ThreadUtils.setWillOverrideUiThread();

        // Load chromium library.
        AwBrowserProcess.loadLibrary(getWrappedCurrentApplicationContext());
        final PackageInfo packageInfo = WebViewFactory.getLoadedPackageInfo();

        // Load glue-layer support library.
        System.loadLibrary("webviewchromium_plat_support");

        // Use shared preference to check for package downgrade.
        mWebViewPrefs = mWebViewDelegate.getApplication().getSharedPreferences(
                CHROMIUM_PREFS_NAME, Context.MODE_PRIVATE);
        int lastVersion = mWebViewPrefs.getInt(VERSION_CODE_PREF, 0);
        int currentVersion = packageInfo.versionCode;
        if (lastVersion > currentVersion) {
            // The WebView package has been downgraded since we last ran in this application.
            // Delete the WebView data directory's contents.
            String dataDir = PathUtils.getDataDirectory(mWebViewDelegate.getApplication());
            Log.i(TAG, "WebView package downgraded from " + lastVersion + " to " + currentVersion
                    + "; deleting contents of " + dataDir);
            deleteContents(new File(dataDir));
        }
        if (lastVersion != currentVersion) {
            mWebViewPrefs.edit().putInt(VERSION_CODE_PREF, currentVersion).apply();
        }
        // Now safe to use WebView data directory.
    }

    // A build is considered debuggable unless its type is "user".
    private static boolean isBuildDebuggable() {
        return !Build.TYPE.equals("user");
    }

    /**
     * Recursively delete the contents of {@code dir} (the directory itself is kept).
     * Failures are logged and skipped rather than aborting the walk.
     */
    private static void deleteContents(File dir) {
        File[] files = dir.listFiles();
        if (files != null) {
            for (File file : files) {
                if (file.isDirectory()) {
                    deleteContents(file);
                }
                if (!file.delete()) {
                    Log.w(TAG, "Failed to delete " + file);
                }
            }
        }
    }

    // Wire the glue-layer draw function tables to the Chromium implementations.
    private void initPlatSupportLibrary() {
        DrawGLFunctor.setChromiumAwDrawGLFunction(AwContents.getAwDrawGLFunction());
        AwContents.setAwDrawSWFunctionTable(GraphicsUtils.getDrawSWFunctionTable());
        AwContents.setAwDrawGLFunctionTable(GraphicsUtils.getDrawGLFunctionTable());
    }

    /**
     * Ensure the Chromium browser process has been started, blocking until it has.
     * Must be called with mLock held. If the current thread is not the UI thread,
     * startup is posted to the UI thread and this thread waits on mLock.
     *
     * @param onMainThread true to bind Chromium to the main looper, false to bind
     *                     it to the caller's looper
     */
    private void ensureChromiumStartedLocked(boolean onMainThread) {
        assert Thread.holdsLock(mLock);

        if (mStarted) { // Early-out for the common case.
            return;
        }

        Looper looper = !onMainThread ? Looper.myLooper() : Looper.getMainLooper();
        Log.v(TAG, "Binding Chromium to "
                + (Looper.getMainLooper().equals(looper) ? "main" : "background")
                + " looper " + looper);
        ThreadUtils.setUiThread(looper);

        if (ThreadUtils.runningOnUiThread()) {
            startChromiumLocked();
            return;
        }

        // We must post to the UI thread to cover the case that the user has invoked Chromium
        // startup by using the (thread-safe) CookieManager rather than creating a WebView.
        ThreadUtils.postOnUiThread(new Runnable() {
            @Override
            public void run() {
                synchronized (mLock) {
                    startChromiumLocked();
                }
            }
        });
        while (!mStarted) {
            try {
                // Important: wait() releases |mLock| so the UI thread can take it :-)
                mLock.wait();
            } catch (InterruptedException e) {
                // Keep trying... eventually the UI thread will process the task we sent it.
            }
        }
    }

    // TODO: DIR_RESOURCE_PAKS_ANDROID needs to live somewhere sensible,
    // inlined here for simplicity setting up the HTMLViewer demo. Unfortunately
    // it can't go into base.PathService, as the native constant it refers to
    // lives in the ui/ layer. See ui/base/ui_base_paths.h
    private static final int DIR_RESOURCE_PAKS_ANDROID = 3003;

    /**
     * Actually start the Chromium browser process. Must be called on the UI thread
     * with mLock held. Safe to call when already started (returns early).
     */
    private void startChromiumLocked() {
        assert Thread.holdsLock(mLock) && ThreadUtils.runningOnUiThread();

        // The post-condition of this method is everything is ready, so notify now to cover all
        // return paths. (Other threads will not wake-up until we release |mLock|, whatever).
        mLock.notifyAll();

        if (mStarted) {
            return;
        }

        Context context = getWrappedCurrentApplicationContext();
        try {
            LibraryLoader.get(LibraryProcessType.PROCESS_WEBVIEW).ensureInitialized(context);
        } catch (ProcessInitException e) {
            throw new RuntimeException("Error initializing WebView library", e);
        }

        PathService.override(PathService.DIR_MODULE, "/system/lib/");
        PathService.override(DIR_RESOURCE_PAKS_ANDROID, "/system/framework/webview/paks");

        // Make sure that ResourceProvider is initialized before starting the browser process.
        setUpResources(context);
        ResourceBundle.initializeLocalePaks(context, R.array.locale_paks);
        initPlatSupportLibrary();
        AwBrowserProcess.start(context);

        if (isBuildDebuggable()) {
            setWebContentsDebuggingEnabled(true);
        }

        TraceEvent.setATraceEnabled(mWebViewDelegate.isTraceTagEnabled());
        mWebViewDelegate.setOnTraceEnabledChangeListener(
                new WebViewDelegate.OnTraceEnabledChangeListener() {
                    @Override
                    public void onTraceEnabledChange(boolean enabled) {
                        TraceEvent.setATraceEnabled(enabled);
                    }
                });
        mStarted = true;

        // Kick off any WebViews that were created before startup completed.
        for (WeakReference<WebViewChromium> wvc : mWebViewsToStart) {
            WebViewChromium w = wvc.get();
            if (w != null) {
                w.startYourEngine();
            }
        }
        mWebViewsToStart.clear();
        mWebViewsToStart = null;

        // Start listening for data reduction proxy setting changes.
        mProxyManager = new AwDataReductionProxyManager();
        mProxyManager.start(mWebViewDelegate.getApplication());
    }

    // NOTE(review): read without holding mLock; presumably callers tolerate a
    // stale value — confirm before relying on this for synchronization.
    boolean hasStarted() {
        return mStarted;
    }

    // Public-ish entry point used by WebViewChromium to trigger startup.
    void startYourEngines(boolean onMainThread) {
        synchronized (mLock) {
            ensureChromiumStartedLocked(onMainThread);
        }
    }

    // Lazily wrap the application context so WebView resources resolve correctly.
    private Context getWrappedCurrentApplicationContext() {
        if (mWrappedAppContext == null) {
            mWrappedAppContext = ResourcesContextWrapperFactory.get(
                    mWebViewDelegate.getApplication());
        }
        return mWrappedAppContext;
    }

    AwBrowserContext getBrowserContext() {
        synchronized (mLock) {
            return getBrowserContextLocked();
        }
    }

    // Lazily create the browser context; requires Chromium to have started.
    private AwBrowserContext getBrowserContextLocked() {
        assert Thread.holdsLock(mLock);
        assert mStarted;
        if (mBrowserContext == null) {
            mBrowserContext =
                    new AwBrowserContext(mWebViewPrefs, getWrappedCurrentApplicationContext());
        }
        return mBrowserContext;
    }

    /**
     * Toggle remote web-contents debugging. Must be called on the UI thread.
     * The DevTools server is created lazily on the first enable.
     */
    private void setWebContentsDebuggingEnabled(boolean enable) {
        if (Looper.myLooper() != ThreadUtils.getUiThreadLooper()) {
            throw new RuntimeException(
                    "Toggling of Web Contents Debugging must be done on the UI thread");
        }
        if (mDevToolsServer == null) {
            if (!enable) return;
            mDevToolsServer = new AwDevToolsServer();
        }
        mDevToolsServer.setRemoteDebuggingEnabled(enable);
    }

    // Rewrite resource IDs for the loaded WebView package and hand the
    // resources to the Chromium layer.
    private void setUpResources(Context context) {
        final String packageName = WebViewFactory.getLoadedPackageInfo().packageName;
        ResourceRewriter.rewriteRValues(
                mWebViewDelegate.getPackageId(context.getResources(), packageName));

        AwResource.setResources(context.getResources());
        AwResource.setConfigKeySystemUuidMapping(android.R.array.config_keySystemUuidMapping);
    }

    /**
     * Return the lazily-created Statics implementation. Starts Chromium on
     * first call.
     */
    @Override
    public Statics getStatics() {
        synchronized (mLock) {
            if (mStaticMethods == null) {
                // TODO: Optimization potential: most these methods only need the native library
                // loaded and initialized, not the entire browser process started.
                // See also http://b/7009882
                ensureChromiumStartedLocked(true);
                mStaticMethods = new WebViewFactoryProvider.Statics() {
                    @Override
                    public String findAddress(String addr) {
                        return ContentViewStatics.findAddress(addr);
                    }

                    @Override
                    public String getDefaultUserAgent(Context context) {
                        return AwSettings.getDefaultUserAgent();
                    }

                    @Override
                    public void setWebContentsDebuggingEnabled(boolean enable) {
                        // Web Contents debugging is always enabled on debug builds.
                        if (!isBuildDebuggable()) {
                            WebViewChromiumFactoryProvider.this.setWebContentsDebuggingEnabled(
                                    enable);
                        }
                    }

                    // TODO enable after L release to AOSP
                    //@Override
                    public void clearClientCertPreferences(Runnable onCleared) {
                        AwContentsStatics.clearClientCertPreferences(onCleared);
                    }

                    @Override
                    public void freeMemoryForTests() {
                        if (ActivityManager.isRunningInTestHarness()) {
                            MemoryPressureListener.maybeNotifyMemoryPresure(
                                    ComponentCallbacks2.TRIM_MEMORY_COMPLETE);
                        }
                    }

                    // TODO: Add @Override.
                    public void enableSlowWholeDocumentDraw() {
                        WebViewChromium.enableSlowWholeDocumentDraw();
                    }

                    @Override
                    public Uri[] parseFileChooserResult(int resultCode, Intent intent) {
                        return AwContentsClient.parseFileChooserResult(resultCode, intent);
                    }
                };
            }
        }
        return mStaticMethods;
    }

    /**
     * Create a new WebView provider. WebViews created before Chromium has
     * started are queued (weakly) and started later by startChromiumLocked().
     */
    @Override
    public WebViewProvider createWebView(WebView webView, WebView.PrivateAccess privateAccess) {
        WebViewChromium wvc = new WebViewChromium(this, webView, privateAccess);
        synchronized (mLock) {
            if (mWebViewsToStart != null) {
                mWebViewsToStart.add(new WeakReference<WebViewChromium>(wvc));
            }
        }
        return wvc;
    }

    // Lazily-created adapter; starts Chromium on first call.
    @Override
    public GeolocationPermissions getGeolocationPermissions() {
        synchronized (mLock) {
            if (mGeolocationPermissions == null) {
                ensureChromiumStartedLocked(true);
                mGeolocationPermissions = new GeolocationPermissionsAdapter(
                        getBrowserContextLocked().getGeolocationPermissions());
            }
        }
        return mGeolocationPermissions;
    }

    /**
     * Return the CookieManager adapter. Unlike the other adapters this does
     * NOT force Chromium startup — the native layer supports a temporary
     * cookie manager before the browser process runs.
     */
    @Override
    public CookieManager getCookieManager() {
        synchronized (mLock) {
            if (mCookieManager == null) {
                if (!mStarted) {
                    // We can use CookieManager without starting Chromium; the native code
                    // will bring up just the parts it needs to make this work on a temporary
                    // basis until Chromium is started for real. The temporary cookie manager
                    // needs the application context to have been set.
                    ContextUtils.initApplicationContext(getWrappedCurrentApplicationContext());
                }
                mCookieManager = new CookieManagerAdapter(new AwCookieManager());
            }
        }
        return mCookieManager;
    }

    // Lazily-created adapter; starts Chromium on first call.
    @Override
    public android.webkit.WebIconDatabase getWebIconDatabase() {
        synchronized (mLock) {
            if (mWebIconDatabase == null) {
                ensureChromiumStartedLocked(true);
                mWebIconDatabase = new WebIconDatabaseAdapter();
            }
        }
        return mWebIconDatabase;
    }

    // Lazily-created adapter; starts Chromium on first call.
    @Override
    public WebStorage getWebStorage() {
        synchronized (mLock) {
            if (mWebStorage == null) {
                ensureChromiumStartedLocked(true);
                mWebStorage = new WebStorageAdapter(AwQuotaManagerBridge.getInstance());
            }
        }
        return mWebStorage;
    }

    // Lazily-created adapter; starts Chromium on first call.
    @Override
    public WebViewDatabase getWebViewDatabase(Context context) {
        synchronized (mLock) {
            if (mWebViewDatabase == null) {
                ensureChromiumStartedLocked(true);
                AwBrowserContext browserContext = getBrowserContextLocked();
                mWebViewDatabase = new WebViewDatabaseAdapter(
                        browserContext.getHttpAuthDatabase(context));
            }
        }
        return mWebViewDatabase;
    }

    WebViewDelegate getWebViewDelegate() {
        return mWebViewDelegate;
    }
}
| |
// ========================================================================
// Copyright 2000-2005 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.mortbay.jetty;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import org.mortbay.io.Buffer;
import org.mortbay.io.BufferCache;
import org.mortbay.io.BufferCache.CachedBuffer;
import org.mortbay.log.Log;
import org.mortbay.util.StringUtil;
/* ------------------------------------------------------------ */
/**
 * MIME type registry and Content-Type helpers.
 *
 * <p>Well-known types are interned in a shared {@link BufferCache} with fixed
 * ordinals; extension-to-type defaults are loaded from resource bundles in the
 * static initializer. Instances may additionally carry a per-instance mime map.
 *
 * @author Greg Wilkins
 */
public class MimeTypes
{
    public final static String
        FORM_ENCODED="application/x-www-form-urlencoded",
        MESSAGE_HTTP="message/http",
        MULTIPART_BYTERANGES="multipart/byteranges",
        TEXT_HTML="text/html",
        TEXT_PLAIN="text/plain",
        TEXT_XML="text/xml",
        TEXT_HTML_8859_1="text/html; charset=iso-8859-1",
        TEXT_PLAIN_8859_1="text/plain; charset=iso-8859-1",
        TEXT_XML_8859_1="text/xml; charset=iso-8859-1",
        TEXT_HTML_UTF_8="text/html; charset=utf-8",
        TEXT_PLAIN_UTF_8="text/plain; charset=utf-8",
        TEXT_XML_UTF_8="text/xml; charset=utf-8",
        // minimal changes for 6.1.12
        TEXT_JSON="text/json",
        TEXT_JSON_UTF_8="text/json;charset=UTF-8";

    // Fixed ordinals for the well-known types above; getCharsetFromContentType
    // switches on these for its fast path.
    private final static int
        FORM_ENCODED_ORDINAL=1,
        MESSAGE_HTTP_ORDINAL=2,
        MULTIPART_BYTERANGES_ORDINAL=3,
        TEXT_HTML_ORDINAL=4,
        TEXT_PLAIN_ORDINAL=5,
        TEXT_XML_ORDINAL=6,
        TEXT_HTML_8859_1_ORDINAL=7,
        TEXT_PLAIN_8859_1_ORDINAL=8,
        TEXT_XML_8859_1_ORDINAL=9,
        TEXT_HTML_UTF_8_ORDINAL=10,
        TEXT_PLAIN_UTF_8_ORDINAL=11,
        TEXT_XML_UTF_8_ORDINAL=12,
        TEXT_JSON_ORDINAL=13,
        TEXT_JSON_UTF_8_ORDINAL=14;

    // Next ordinal handed out for types interned lazily by normalizeMimeType().
    private static int __index=15;

    public final static BufferCache CACHE = new BufferCache();

    // Note: the charset-qualified variants are created with "new CachedBuffer"
    // (not CACHE.add), so they are associates of the base types rather than
    // independent cache entries.
    public final static CachedBuffer
        FORM_ENCODED_BUFFER=CACHE.add(FORM_ENCODED,FORM_ENCODED_ORDINAL),
        MESSAGE_HTTP_BUFFER=CACHE.add(MESSAGE_HTTP, MESSAGE_HTTP_ORDINAL),
        MULTIPART_BYTERANGES_BUFFER=CACHE.add(MULTIPART_BYTERANGES,MULTIPART_BYTERANGES_ORDINAL),
        TEXT_HTML_BUFFER=CACHE.add(TEXT_HTML,TEXT_HTML_ORDINAL),
        TEXT_PLAIN_BUFFER=CACHE.add(TEXT_PLAIN,TEXT_PLAIN_ORDINAL),
        TEXT_XML_BUFFER=CACHE.add(TEXT_XML,TEXT_XML_ORDINAL),
        TEXT_HTML_8859_1_BUFFER=new CachedBuffer(TEXT_HTML_8859_1,TEXT_HTML_8859_1_ORDINAL),
        TEXT_PLAIN_8859_1_BUFFER=new CachedBuffer(TEXT_PLAIN_8859_1,TEXT_PLAIN_8859_1_ORDINAL),
        TEXT_XML_8859_1_BUFFER=new CachedBuffer(TEXT_XML_8859_1,TEXT_XML_8859_1_ORDINAL),
        TEXT_HTML_UTF_8_BUFFER=new CachedBuffer(TEXT_HTML_UTF_8,TEXT_HTML_UTF_8_ORDINAL),
        TEXT_PLAIN_UTF_8_BUFFER=new CachedBuffer(TEXT_PLAIN_UTF_8,TEXT_PLAIN_UTF_8_ORDINAL),
        TEXT_XML_UTF_8_BUFFER=new CachedBuffer(TEXT_XML_UTF_8,TEXT_XML_UTF_8_ORDINAL),
        TEXT_JSON_BUFFER=CACHE.add(TEXT_JSON,TEXT_JSON_ORDINAL),
        TEXT_JSON_UTF_8_BUFFER=CACHE.add(TEXT_JSON_UTF_8,TEXT_JSON_UTF_8_ORDINAL);

    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    // Default extension->type map and type->encoding map, loaded from the
    // org/mortbay/jetty/mime and .../encoding resource bundles below.
    private final static Map __dftMimeMap = new HashMap();
    private final static Map __encodings = new HashMap();
    static
    {
        try
        {
            ResourceBundle mime = ResourceBundle.getBundle("org/mortbay/jetty/mime");
            Enumeration i = mime.getKeys();
            while(i.hasMoreElements())
            {
                String ext = (String)i.nextElement();
                String m = mime.getString(ext);
                // Extensions are stored lower-cased; types are interned in CACHE.
                __dftMimeMap.put(StringUtil.asciiToLowerCase(ext),normalizeMimeType(m));
            }
        }
        catch(MissingResourceException e)
        {
            // A missing bundle just means no defaults; log and continue.
            Log.warn(e.toString());
            Log.debug(e);
        }

        try
        {
            ResourceBundle encoding = ResourceBundle.getBundle("org/mortbay/jetty/encoding");
            Enumeration i = encoding.getKeys();
            while(i.hasMoreElements())
            {
                Buffer type = normalizeMimeType((String)i.nextElement());
                __encodings.put(type,encoding.getString(type.toString()));
            }
        }
        catch(MissingResourceException e)
        {
            Log.warn(e.toString());
            Log.debug(e);
        }

        // Associate each base type with its charset-qualified variant for the
        // charset spellings seen in practice.
        TEXT_HTML_BUFFER.setAssociate("ISO-8859-1",TEXT_HTML_8859_1_BUFFER);
        TEXT_HTML_BUFFER.setAssociate("ISO_8859_1",TEXT_HTML_8859_1_BUFFER);
        TEXT_HTML_BUFFER.setAssociate("iso-8859-1",TEXT_HTML_8859_1_BUFFER);
        TEXT_PLAIN_BUFFER.setAssociate("ISO-8859-1",TEXT_PLAIN_8859_1_BUFFER);
        TEXT_PLAIN_BUFFER.setAssociate("ISO_8859_1",TEXT_PLAIN_8859_1_BUFFER);
        TEXT_PLAIN_BUFFER.setAssociate("iso-8859-1",TEXT_PLAIN_8859_1_BUFFER);
        TEXT_XML_BUFFER.setAssociate("ISO-8859-1",TEXT_XML_8859_1_BUFFER);
        TEXT_XML_BUFFER.setAssociate("ISO_8859_1",TEXT_XML_8859_1_BUFFER);
        TEXT_XML_BUFFER.setAssociate("iso-8859-1",TEXT_XML_8859_1_BUFFER);
        TEXT_HTML_BUFFER.setAssociate("UTF-8",TEXT_HTML_UTF_8_BUFFER);
        TEXT_HTML_BUFFER.setAssociate("UTF8",TEXT_HTML_UTF_8_BUFFER);
        TEXT_HTML_BUFFER.setAssociate("utf8",TEXT_HTML_UTF_8_BUFFER);
        TEXT_HTML_BUFFER.setAssociate("utf-8",TEXT_HTML_UTF_8_BUFFER);
        TEXT_PLAIN_BUFFER.setAssociate("UTF-8",TEXT_PLAIN_UTF_8_BUFFER);
        TEXT_PLAIN_BUFFER.setAssociate("UTF8",TEXT_PLAIN_UTF_8_BUFFER);
        TEXT_PLAIN_BUFFER.setAssociate("utf-8",TEXT_PLAIN_UTF_8_BUFFER);
        TEXT_XML_BUFFER.setAssociate("UTF-8",TEXT_XML_UTF_8_BUFFER);
        TEXT_XML_BUFFER.setAssociate("utf8",TEXT_XML_UTF_8_BUFFER);
        TEXT_XML_BUFFER.setAssociate("UTF8",TEXT_XML_UTF_8_BUFFER);
        TEXT_XML_BUFFER.setAssociate("utf-8",TEXT_XML_UTF_8_BUFFER);
        TEXT_JSON_BUFFER.setAssociate("UTF-8",TEXT_JSON_UTF_8_BUFFER);
        TEXT_JSON_BUFFER.setAssociate("utf8",TEXT_JSON_UTF_8_BUFFER);
        TEXT_JSON_BUFFER.setAssociate("UTF8",TEXT_JSON_UTF_8_BUFFER);
        TEXT_JSON_BUFFER.setAssociate("utf-8",TEXT_JSON_UTF_8_BUFFER);
    }

    /* ------------------------------------------------------------ */
    // Optional per-instance extension->type map; overrides the defaults.
    private Map _mimeMap;

    /* ------------------------------------------------------------ */
    /** Constructor.
     */
    public MimeTypes()
    {
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the per-instance mime map, or null if none has been set
     */
    public synchronized Map getMimeMap()
    {
        return _mimeMap;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param mimeMap A Map of file extension to mime-type.
     */
    public void setMimeMap(Map mimeMap)
    {
        if (mimeMap==null)
        {
            _mimeMap=null;
            return;
        }

        // Copy the map, interning each type via normalizeMimeType().
        Map m=new HashMap();
        Iterator i=mimeMap.entrySet().iterator();
        while (i.hasNext())
        {
            Map.Entry entry = (Map.Entry)i.next();
            m.put(entry.getKey(),normalizeMimeType(entry.getValue().toString()));
        }
        _mimeMap=m;
    }

    /* ------------------------------------------------------------ */
    /** Get the MIME type by filename extension.
     * Tries each dot-suffix of the name from longest to shortest (e.g.
     * "a.tar.gz" tries "tar.gz" then "gz"), checking the per-instance map
     * before the defaults, and finally falls back to the "*" wildcard entry.
     * @param filename A file name
     * @return MIME type matching the longest dot extension of the
     * file name.
     */
    public Buffer getMimeByExtension(String filename)
    {
        Buffer type=null;

        if (filename!=null)
        {
            int i=-1;
            while(type==null)
            {
                i=filename.indexOf(".",i+1);

                if (i<0 || i>=filename.length())
                    break;

                String ext=StringUtil.asciiToLowerCase(filename.substring(i+1));
                if (_mimeMap!=null)
                    type = (Buffer)_mimeMap.get(ext);
                if (type==null)
                    type=(Buffer)__dftMimeMap.get(ext);
            }
        }

        if (type==null)
        {
            if (_mimeMap!=null)
                type=(Buffer)_mimeMap.get("*");
            if (type==null)
                type=(Buffer)__dftMimeMap.get("*");
        }

        return type;
    }

    /* ------------------------------------------------------------ */
    /** Set a mime mapping
     * @param extension the filename extension (stored lower-cased)
     * @param type the mime type to map it to
     */
    public void addMimeMapping(String extension,String type)
    {
        if (_mimeMap==null)
            _mimeMap=new HashMap();
        _mimeMap.put(StringUtil.asciiToLowerCase(extension),normalizeMimeType(type));
    }

    /* ------------------------------------------------------------ */
    // Intern a type string in the shared CACHE, assigning it the next free
    // ordinal if it is not already cached. Synchronized to protect __index.
    private static synchronized Buffer normalizeMimeType(String type)
    {
        Buffer b =CACHE.get(type);
        if (b==null)
            b=CACHE.add(type,__index++);

        return b;
    }

    /* ------------------------------------------------------------ */
    /**
     * Extract the charset parameter from a Content-Type header value.
     * Fast path: known cached buffers map directly to their charset.
     * Slow path: a state machine scans for ";charset=" (lowercase only,
     * as written) and returns the — possibly quoted — value.
     *
     * @param value the Content-Type header value
     * @return the charset name, or null if none is present
     */
    public static String getCharsetFromContentType(Buffer value)
    {
        if (value instanceof CachedBuffer)
        {
            switch(((CachedBuffer)value).getOrdinal())
            {
                case TEXT_HTML_8859_1_ORDINAL:
                case TEXT_PLAIN_8859_1_ORDINAL:
                case TEXT_XML_8859_1_ORDINAL:
                    return StringUtil.__ISO_8859_1;

                case TEXT_HTML_UTF_8_ORDINAL:
                case TEXT_PLAIN_UTF_8_ORDINAL:
                case TEXT_XML_UTF_8_ORDINAL:
                case TEXT_JSON_ORDINAL:
                case TEXT_JSON_UTF_8_ORDINAL:
                    return StringUtil.__UTF8;
            }
        }

        // States: 0 = scanning for ';', 1..8 = matching the literal
        // "charset", 9 = skipping spaces / opening quote after '=',
        // 10 = accumulating the value.
        int i=value.getIndex();
        int end=value.putIndex();
        int state=0;
        int start=0;
        boolean quote=false;
        for (;i<end;i++)
        {
            byte b = value.peek(i);

            // Inside a quoted section that is not the charset value itself:
            // skip everything up to the closing quote.
            if (quote && state!=10)
            {
                if ('"'==b)
                    quote=false;
                continue;
            }

            switch(state)
            {
                case 0:
                    if ('"'==b)
                    {
                        quote=true;
                        break;
                    }
                    if (';'==b)
                        state=1;
                    break;

                case 1: if ('c'==b) state=2; else if (' '!=b) state=0; break;
                case 2: if ('h'==b) state=3; else state=0;break;
                case 3: if ('a'==b) state=4; else state=0;break;
                case 4: if ('r'==b) state=5; else state=0;break;
                case 5: if ('s'==b) state=6; else state=0;break;
                case 6: if ('e'==b) state=7; else state=0;break;
                case 7: if ('t'==b) state=8; else state=0;break;
                case 8: if ('='==b) state=9; else if (' '!=b) state=0; break;

                case 9:
                    // Skip spaces before the value; a quote starts a quoted value.
                    if (' '==b)
                        break;
                    if ('"'==b)
                    {
                        quote=true;
                        start=i+1;
                        state=10;
                        break;
                    }
                    start=i;
                    state=10;
                    break;

                case 10:
                    // Unquoted values end at ';' or space; quoted values end
                    // at the closing quote.
                    if (!quote && (';'==b || ' '==b )||
                        (quote && '"'==b ))
                        return CACHE.lookup(value.peek(start,i-start)).toString();
            }
        }

        // Value ran to the end of the buffer.
        if (state==10)
            return CACHE.lookup(value.peek(start,i-start)).toString();

        return null;
    }
}
| |
package twilightforest.item;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.block.Block;
import net.minecraft.block.Block.SoundType;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.EnumAction;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import net.minecraft.world.World;
import twilightforest.block.TFBlocks;
import twilightforest.entity.EntityTFMoonwormShot;
/**
 * The Moonworm Queen: a bow-style chargeable item that places moonworm
 * blocks on right-click and fires moonworm shots when held.
 *
 * <p>Method/field names are SRG-obfuscated (Minecraft 1.7.10); the mapping
 * comments below are best-effort — verify against MCP mappings.
 */
public class ItemTFMoonwormQueen
        extends ItemTF
{
    /** Ticks the item must be held before it starts firing moonworm shots. */
    private static final int FIRING_TIME = 12;

    private IIcon[] icons;
    private String[] iconNames = { "moonwormQueen", "moonwormQueenAlt" };

    protected ItemTFMoonwormQueen()
    {
        func_77637_a(TFItems.creativeTab); // SRG: setCreativeTab
        field_77777_bU = 1;                // SRG: maxStackSize
        func_77656_e(256);                 // SRG: setMaxDamage
    }

    /**
     * SRG: onItemRightClick. Start the bow-style "use" charge unless the
     * item is out of durability, in which case stop using it.
     */
    public ItemStack func_77659_a(ItemStack par1ItemStack, World world, EntityPlayer player)
    {
        if (par1ItemStack.func_77960_j() < func_77612_l()) // damage < max damage
        {
            player.func_71008_a(par1ItemStack, func_77626_a(par1ItemStack)); // setItemInUse
        }
        else
        {
            player.func_71034_by(); // clearItemInUse
        }
        return par1ItemStack;
    }

    /**
     * SRG: onItemUse. Place a moonworm block against the clicked face,
     * damaging the item by 1 on success.
     *
     * @return true if placement was attempted (position was valid)
     */
    public boolean func_77648_a(ItemStack par1ItemStack, EntityPlayer player, World world, int x, int y, int z, int side, float hitX, float hitY, float hitZ)
    {
        Block currentBlockID = world.func_147439_a(x, y, z); // getBlock
        if (currentBlockID == TFBlocks.moonworm)
        {
            // Never stack a moonworm onto an existing one.
            return false;
        }
        if ((par1ItemStack != null) && (par1ItemStack.func_77960_j() == func_77612_l()))
        {
            // Out of durability.
            return false;
        }
        if (currentBlockID == Blocks.field_150433_aE) // snow layer: place into it from above
        {
            side = 1;
        }
        else if ((currentBlockID != Blocks.field_150395_bd) && (currentBlockID != Blocks.field_150329_H) && (currentBlockID != Blocks.field_150330_I) && ((currentBlockID == Blocks.field_150350_a) ||
                (!currentBlockID.isReplaceable(world, x, y, z))))
        {
            // Target block is not replaceable (vine/tall grass/dead bush):
            // offset to the neighbouring coordinate on the clicked side.
            if (side == 0)
            {
                y--;
            }
            if (side == 1)
            {
                y++;
            }
            if (side == 2)
            {
                z--;
            }
            if (side == 3)
            {
                z++;
            }
            if (side == 4)
            {
                x--;
            }
            if (side == 5)
            {
                x++;
            }
        }
        if (world.func_147472_a(TFBlocks.moonworm, x, y, z, false, side, player, par1ItemStack)) // canPlaceEntityOnSide
        {
            int placementMeta = TFBlocks.moonworm.func_149660_a(world, x, y, z, side, hitX, hitY, hitZ, 0); // onBlockPlaced
            if (world.func_147465_d(x, y, z, TFBlocks.moonworm, placementMeta, 3)) // setBlock
            {
                if (world.func_147439_a(x, y, z) == TFBlocks.moonworm)
                {
                    TFBlocks.moonworm.func_149689_a(world, x, y, z, player, par1ItemStack); // onBlockPlacedBy
                }
                // FIX: the original referenced the undeclared symbol
                // "moonwormfield_149762_H" (would not compile); use the
                // moonworm block's step sound (Block.field_149762_H) for
                // the placement volume/pitch, matching vanilla ItemBlock.
                SoundType stepSound = TFBlocks.moonworm.field_149762_H;
                world.func_72908_a(x + 0.5F, y + 0.5F, z + 0.5F, getSound(),
                        stepSound.func_150497_c() / 2.0F, stepSound.func_150494_d() * 0.8F);
                if (par1ItemStack != null)
                {
                    par1ItemStack.func_77972_a(1, player); // damageItem
                    player.func_71034_by();
                }
            }
            return true;
        }
        return false;
    }

    /** Sound name used for both placement and firing. */
    public String getSound()
    {
        return "mob.slime.big";
    }

    /**
     * SRG: onPlayerStoppedUsing. If held past FIRING_TIME ticks, fire a
     * moonworm shot (server side only) and damage the item by 2.
     */
    public void func_77615_a(ItemStack par1ItemStack, World world, EntityPlayer player, int useRemaining)
    {
        int useTime = func_77626_a(par1ItemStack) - useRemaining;
        // FIX: the original tested the undeclared symbol "field_72995_K"
        // (would not compile); the client/server flag lives on the World
        // (world.field_72995_K == isRemote), and shots spawn server-side only.
        if ((!world.field_72995_K) && (useTime > FIRING_TIME) && (par1ItemStack.func_77960_j() + 1 < func_77612_l()))
        {
            boolean fired = world.func_72838_d(new EntityTFMoonwormShot(world, player)); // spawnEntityInWorld
            if (fired)
            {
                par1ItemStack.func_77972_a(2, player);
                world.func_72956_a(player, getSound(), 1.0F, 1.0F); // playSoundAtEntity
            }
        }
    }

    /** Alternate between the two queen icons while actively firing. */
    public IIcon getIcon(ItemStack stack, int renderPass, EntityPlayer player, ItemStack usingItem, int useRemaining)
    {
        if ((usingItem != null) && (usingItem.func_77973_b() == this))
        {
            int useTime = usingItem.func_77988_m() - useRemaining;
            if (useTime >= FIRING_TIME)
            {
                // Swap icons every two ticks for a wiggle animation.
                return (useTime >> 1) % 2 == 0 ? icons[0] : icons[1];
            }
        }
        return icons[0];
    }

    /** SRG: registerIcons. */
    @SideOnly(Side.CLIENT)
    public void func_94581_a(IIconRegister par1IconRegister)
    {
        super.func_94581_a(par1IconRegister);
        icons = new IIcon[iconNames.length];
        for (int i = 0; i < iconNames.length; i++)
        {
            icons[i] = par1IconRegister.func_94245_a("TwilightForest:" + iconNames[i]);
        }
    }

    /** SRG: getItemUseAction — use the bow-draw pose while charging. */
    public EnumAction func_77661_b(ItemStack par1ItemStack)
    {
        return EnumAction.bow;
    }

    /** SRG: getMaxItemUseDuration. */
    public int func_77626_a(ItemStack par1ItemStack)
    {
        return 72000;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.statemgr;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.protobuf.Message;
import org.apache.heron.api.generated.TopologyAPI;
import org.apache.heron.proto.ckptmgr.CheckpointManager;
import org.apache.heron.proto.scheduler.Scheduler;
import org.apache.heron.proto.system.ExecutionEnvironment;
import org.apache.heron.proto.system.PackingPlans;
import org.apache.heron.proto.system.PhysicalPlans;
import org.apache.heron.proto.tmaster.TopologyMaster;
import org.apache.heron.spi.common.Config;
import org.apache.heron.spi.common.Context;
import org.apache.heron.spi.common.Key;
import org.apache.heron.spi.statemgr.IStateManager;
import org.apache.heron.spi.statemgr.Lock;
import org.apache.heron.spi.statemgr.WatchCallback;
public abstract class FileSystemStateManager implements IStateManager {
private static final Logger LOG = Logger.getLogger(FileSystemStateManager.class.getName());
protected static <V> void safeSetFuture(SettableFuture<V> future, V result) {
if (!future.set(result)) {
LOG.warning("Unexpected - a local settable future is set twice!");
}
}
protected static <V> void safeSetException(SettableFuture<V> future, Throwable cause) {
if (!future.setException(cause)) {
LOG.warning("Unexpected - a local settable future is set twice!");
}
}
// Store the root address of the hierarchical file system
protected String rootAddress;
protected enum StateLocation {
TMASTER_LOCATION("tmasters", "TMaster location"),
METRICSCACHE_LOCATION("metricscaches", "MetricsCache location"),
TOPOLOGY("topologies", "Topologies"),
PACKING_PLAN("packingplans", "Packing plan"),
PHYSICAL_PLAN("pplans", "Physical plan"),
EXECUTION_STATE("executionstate", "Execution state"),
SCHEDULER_LOCATION("schedulers", "Scheduler location"),
STATEFUL_CHECKPOINT("statefulcheckpoints", "Stateful checkpoints"),
LOCKS("locks", "Distributed locks");
private final String dir;
private final String name;
StateLocation(String dir, String name) {
this.dir = dir;
this.name = name;
}
public String getName() {
return name;
}
public String getDirectory(String root) {
return concatPath(root, dir);
}
public String getNodePath(String root, String topology) {
return concatPath(getDirectory(root), topology);
}
public String getNodePath(String root, String topology, String extraToken) {
return getNodePath(root, String.format("%s__%s", topology, extraToken));
}
private static String concatPath(String basePath, String appendPath) {
return String.format("%s/%s", basePath, appendPath);
}
}
protected abstract ListenableFuture<Boolean> nodeExists(String path);
protected abstract ListenableFuture<Boolean> deleteNode(String path,
boolean deleteChildrenIfNecessary);
protected abstract <M extends Message> ListenableFuture<M> getNodeData(WatchCallback watcher,
String path,
Message.Builder builder);
protected abstract Lock getLock(String path);
protected String getStateDirectory(StateLocation location) {
return location.getDirectory(rootAddress);
}
protected String getStatePath(StateLocation location, String topologyName) {
return location.getNodePath(rootAddress, topologyName);
}
@Override
public void initialize(Config config) {
this.rootAddress = Context.stateManagerRootPath(config);
LOG.log(Level.FINE, "File system state manager root address: {0}", rootAddress);
}
@Override
public ListenableFuture<Boolean> isTopologyRunning(String topologyName) {
return nodeExists(getStatePath(StateLocation.TOPOLOGY, topologyName));
}
@Override
public Lock getLock(String topologyName, LockName lockName) {
return getLock(
StateLocation.LOCKS.getNodePath(this.rootAddress, topologyName, lockName.getName()));
}
@Override
public ListenableFuture<Scheduler.SchedulerLocation> getSchedulerLocation(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.SCHEDULER_LOCATION, topologyName,
Scheduler.SchedulerLocation.newBuilder());
}
@Override
public ListenableFuture<TopologyAPI.Topology> getTopology(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.TOPOLOGY, topologyName,
TopologyAPI.Topology.newBuilder());
}
@Override
public ListenableFuture<ExecutionEnvironment.ExecutionState> getExecutionState(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.EXECUTION_STATE, topologyName,
ExecutionEnvironment.ExecutionState.newBuilder());
}
@Override
public ListenableFuture<PackingPlans.PackingPlan> getPackingPlan(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.PACKING_PLAN, topologyName,
PackingPlans.PackingPlan.newBuilder());
}
@Override
public ListenableFuture<PhysicalPlans.PhysicalPlan> getPhysicalPlan(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.PHYSICAL_PLAN, topologyName,
PhysicalPlans.PhysicalPlan.newBuilder());
}
@Override
public ListenableFuture<TopologyMaster.TMasterLocation> getTMasterLocation(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.TMASTER_LOCATION, topologyName,
TopologyMaster.TMasterLocation.newBuilder());
}
@Override
public ListenableFuture<TopologyMaster.MetricsCacheLocation> getMetricsCacheLocation(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.METRICSCACHE_LOCATION, topologyName,
TopologyMaster.MetricsCacheLocation.newBuilder());
}
@Override
public ListenableFuture<CheckpointManager.StatefulConsistentCheckpoints> getStatefulCheckpoints(
WatchCallback watcher, String topologyName) {
return getNodeData(watcher, StateLocation.STATEFUL_CHECKPOINT, topologyName,
CheckpointManager.StatefulConsistentCheckpoints.newBuilder());
}
@Override
public ListenableFuture<Boolean> deleteTMasterLocation(String topologyName) {
return deleteNode(StateLocation.TMASTER_LOCATION, topologyName);
}
@Override
public ListenableFuture<Boolean> deleteMetricsCacheLocation(String topologyName) {
return deleteNode(StateLocation.METRICSCACHE_LOCATION, topologyName);
}
@Override
public ListenableFuture<Boolean> deleteSchedulerLocation(String topologyName) {
return deleteNode(StateLocation.SCHEDULER_LOCATION, topologyName);
}
@Override
public ListenableFuture<Boolean> deleteExecutionState(String topologyName) {
return deleteNode(StateLocation.EXECUTION_STATE, topologyName);
}
@Override
public ListenableFuture<Boolean> deleteTopology(String topologyName) {
return deleteNode(StateLocation.TOPOLOGY, topologyName);
}
@Override
public ListenableFuture<Boolean> deletePackingPlan(String topologyName) {
return deleteNode(StateLocation.PACKING_PLAN, topologyName);
}
@Override
public ListenableFuture<Boolean> deletePhysicalPlan(String topologyName) {
return deleteNode(StateLocation.PHYSICAL_PLAN, topologyName);
}
@Override
public ListenableFuture<Boolean> deleteStatefulCheckpoints(String topologyName) {
return deleteNode(StateLocation.STATEFUL_CHECKPOINT, topologyName);
}
@Override
public ListenableFuture<Boolean> deleteLocks(String topologyName) {
boolean result = true;
for (LockName lockName : LockName.values()) {
String path =
StateLocation.LOCKS.getNodePath(this.rootAddress, topologyName, lockName.getName());
ListenableFuture<Boolean> thisResult = deleteNode(path, true);
try {
if (!thisResult.get()) {
result = false;
}
} catch (InterruptedException | ExecutionException e) {
LOG.log(Level.WARNING, "Error while waiting on result of delete lock at " + thisResult, e);
}
}
final SettableFuture<Boolean> future = SettableFuture.create();
safeSetFuture(future, result);
return future;
}
private ListenableFuture<Boolean> deleteNode(StateLocation location, String topologyName) {
return deleteNode(getStatePath(location, topologyName), false);
}
private <M extends Message> ListenableFuture<M> getNodeData(WatchCallback watcher,
StateLocation location,
String topologyName,
Message.Builder builder) {
return getNodeData(watcher, getStatePath(location, topologyName), builder);
}
/**
* Returns all information stored in the StateManager. This is a utility method used for debugging
* while developing. To invoke, run:
*
* bazel run heron/statemgrs/src/java:localfs-statemgr-unshaded -- \
* <topology-name> [new_instance_distribution]
*
* If a new_instance_distribution is provided, the instance distribution will be updated to
* trigger a scaling event. For example:
*
* bazel run heron/statemgrs/src/java:localfs-statemgr-unshaded -- \
* ExclamationTopology 1:word:3:0:exclaim1:2:0:exclaim1:1:0
*
*/
protected void doMain(String[] args, Config config)
throws ExecutionException, InterruptedException, InstantiationException,
IllegalAccessException, ClassNotFoundException {
if (args.length < 1) {
throw new RuntimeException(String.format(
"Usage: java %s <topology_name> - view state manager details for a topology",
this.getClass().getCanonicalName()));
}
String topologyName = args[0];
print("==> State Manager root path: %s",
config.getStringValue(Key.STATEMGR_ROOT_PATH));
initialize(config);
if (isTopologyRunning(topologyName).get()) {
print("==> Topology %s found", topologyName);
try {
print("==> Topology:\n%s", getTopology(null, topologyName).get());
} catch (ExecutionException e) {
print("Topology node not found %s", e.getMessage());
}
try {
print("==> ExecutionState:\n%s", getExecutionState(null, topologyName).get());
} catch (ExecutionException e) {
print("ExecutionState node not found %s", e.getMessage());
}
try {
print("==> SchedulerLocation:\n%s",
getSchedulerLocation(null, topologyName).get());
} catch (ExecutionException e) {
print("SchedulerLocation node not found %s", e.getMessage());
}
try {
print("==> TMasterLocation:\n%s", getTMasterLocation(null, topologyName).get());
} catch (ExecutionException e) {
print("TMasterLocation node not found %s", e.getMessage());
}
try {
print("==> MetricsCacheLocation:\n%s", getMetricsCacheLocation(null, topologyName).get());
} catch (ExecutionException e) {
print("MetricsCacheLocation node not found %s", e.getMessage());
}
try {
print("==> PackingPlan:\n%s", getPackingPlan(null, topologyName).get());
} catch (ExecutionException e) {
print("PackingPlan node not found %s", e.getMessage());
}
try {
print("==> PhysicalPlan:\n%s", getPhysicalPlan(null, topologyName).get());
} catch (ExecutionException e) {
print("PhysicalPlan node not found %s", e.getMessage());
}
} else {
print("==> Topology %s not found under %s",
topologyName, config.getStringValue(Key.STATEMGR_ROOT_PATH));
}
}
protected void print(String format, Object... values) {
System.out.println(String.format(format, values));
}
}
| |
package com.ftfl.icare.fragment;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Spinner;
import android.widget.Toast;
import com.fourmob.datetimepicker.date.DatePickerDialog;
import com.fourmob.datetimepicker.date.DatePickerDialog.OnDateSetListener;
import com.ftfl.icare.HomeActivity;
import com.ftfl.icare.R;
import com.ftfl.icare.database.DoctorProfileDataSource;
import com.ftfl.icare.database.MedicalHistoryDataSource;
import com.ftfl.icare.util.ICareConstants;
import com.ftfl.icare.util.MedicalHistory;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.display.RoundedBitmapDisplayer;
/**
 * Fragment for creating a new medical-history entry or editing an existing one.
 * Collects a date (via DatePickerDialog), a doctor (spinner fed from
 * DoctorProfileDataSource), purpose/suggestion text, and an optional
 * prescription photo captured with the camera and cropped.
 * When an id is passed in the arguments, the existing record is loaded and
 * saving performs an update instead of an insert.
 */
public class FragmentNewMedicalHistory extends Fragment implements
        OnItemSelectedListener, OnDateSetListener {

    ImageView mIvPrescription = null;
    EditText mEtDate = null;
    Spinner mSpinnerDoctorName = null;
    EditText mEtPurpose = null;
    EditText mEtSuggestion = null;
    Button mBtnCapturePrescription = null;
    Button mBtnSave = null;
    // Request codes for the camera capture and crop activities.
    static final int CAMERA_REQUEST = 11;
    static final int CROP_REQUEST = 12;
    // Temp file backing the captured photo; created lazily in createImageFile().
    File image = null;
    String mPrescriptionImagePath = "";
    // Medical-history record id from the arguments; null means "new record".
    String id = null;
    String mDoctorName = "";
    List<String> mDoctorNameList = new ArrayList<String>();

    public FragmentNewMedicalHistory() {
    }

    /**
     * Loads the existing record identified by {@link #id} into the form fields,
     * selects the matching doctor in the spinner, and shows the stored
     * prescription image if one exists.
     */
    public void viewPreviousData() {
        int lId = Integer.parseInt(id);
        MedicalHistoryDataSource medicalHistoryDS = new MedicalHistoryDataSource(
                getActivity());
        MedicalHistory viewMedicalHistory = medicalHistoryDS
                .singleMedicalHistoryData(lId);
        mEtDate.setText(viewMedicalHistory.getDate());
        mEtPurpose.setText(viewMedicalHistory.getPurpose());
        mEtSuggestion.setText(viewMedicalHistory.getSuggestion());
        String doctorName = viewMedicalHistory.getDoctorName();
        // Find the stored doctor's position in the spinner list (0 if absent).
        int position = 0;
        for (int i = 0; i < mDoctorNameList.size(); i++) {
            if (mDoctorNameList.get(i).equals(doctorName)) {
                position = i;
                break;
            }
        }
        mSpinnerDoctorName.setSelection(position);
        mPrescriptionImagePath = viewMedicalHistory.getPrescription();
        if (!mPrescriptionImagePath.equals("")) {
            DisplayImageOptions options = new DisplayImageOptions.Builder()
                    .displayer(new RoundedBitmapDisplayer(0))
                    .cacheInMemory(true).cacheOnDisk(true).build();
            ImageLoader.getInstance().displayImage(
                    "file:///" + mPrescriptionImagePath, mIvPrescription,
                    options);
        }
    }

    /**
     * open camera method
     */
    public void callCamera() {
        Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        // Ensure that there's a camera activity to handle the intent
        if (takePictureIntent
                .resolveActivity(getActivity().getPackageManager()) != null) {
            // Create the File where the photo should go
            File photoFile = null;
            try {
                photoFile = createImageFile();
            } catch (IOException ex) {
                Toast.makeText(getActivity(), ex.getMessage(),
                        Toast.LENGTH_SHORT).show();
            }
            // Continue only if the File was successfully created
            if (photoFile != null) {
                takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT,
                        Uri.fromFile(photoFile));
                startActivityForResult(takePictureIntent, CAMERA_REQUEST);
            }
        }
    }

    /**
     * Creates (once) the temp JPEG file the camera writes into, under the app's
     * public pictures directory, and records its path in
     * {@link #mPrescriptionImagePath}. Returns null if the directory cannot
     * be created.
     */
    private File createImageFile() throws IOException {
        if (image == null) {
            // External SD card location
            File mediaStorageDir = new File(
                    Environment
                            .getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                    ICareConstants.IMAGE_DIRECTORY_NAME);
            // Create the storage directory if it does not exist
            if (!mediaStorageDir.exists()) {
                if (!mediaStorageDir.mkdirs()) {
                    return null;
                }
            }
            // Create an image file name
            String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
                    Locale.getDefault()).format(new Date());
            String imageFileName = "JPEG_" + timeStamp + "_";
            image = File.createTempFile(imageFileName, /* prefix */
                    ".jpg", /* suffix */
                    mediaStorageDir /* directory */
            );
        }
        mPrescriptionImagePath = image.getAbsolutePath();
        return image;
    }

    /*
     * Perform Crop action of the image.
     */
    private void performCrop(String eFilePath) {
        try {
            File imageFile = new File(eFilePath);
            Uri picUri = Uri.fromFile(imageFile);
            Intent cropIntent = new Intent("com.android.camera.action.CROP");
            // indicate image type and Uri
            cropIntent.setDataAndType(picUri, "image/*");
            // set crop properties
            cropIntent.putExtra("crop", "true");
            // indicate aspect of desired crop
            cropIntent.putExtra("aspectX", 1);
            cropIntent.putExtra("aspectY", 1);
            // indicate output X and Y
            cropIntent.putExtra("outputX", 200);
            cropIntent.putExtra("outputY", 150);
            // retrieve data on return
            cropIntent.putExtra("return-data", true);
            // The crop result is written back over the original file.
            cropIntent.putExtra(MediaStore.EXTRA_OUTPUT,
                    Uri.fromFile(imageFile));
            startActivityForResult(cropIntent, CROP_REQUEST);
        }
        // respond to users whose devices do not support the crop action
        catch (ActivityNotFoundException anfe) {
            // display an error message
            String errorMessage = getString(R.string.not_croped);
            Toast toast = Toast.makeText(getActivity(), errorMessage,
                    Toast.LENGTH_SHORT);
            toast.show();
        }
    }

    /**
     * On activity result: after camera capture, launch the crop activity; after
     * cropping, rotate the image per its EXIF orientation and show it.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == CAMERA_REQUEST) {
            performCrop(mPrescriptionImagePath);
        } else if (requestCode == CROP_REQUEST) {
            if (mPrescriptionImagePath != null) {
                mBtnCapturePrescription.setText("Change Image");
                Bitmap correctBmp = null;
                // FIX: the FileInputStream was previously never closed (resource
                // leak); it is now closed in a finally block.
                FileInputStream stream = null;
                try {
                    File f = new File(mPrescriptionImagePath);
                    ExifInterface exif = new ExifInterface(f.getPath());
                    int orientation = exif.getAttributeInt(
                            ExifInterface.TAG_ORIENTATION,
                            ExifInterface.ORIENTATION_NORMAL);
                    // Map the EXIF orientation tag to a rotation angle.
                    int angle = 0;
                    if (orientation == ExifInterface.ORIENTATION_ROTATE_90) {
                        angle = 90;
                    } else if (orientation == ExifInterface.ORIENTATION_ROTATE_180) {
                        angle = 180;
                    } else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) {
                        angle = 270;
                    }
                    Matrix mat = new Matrix();
                    mat.postRotate(angle);
                    BitmapFactory.Options option = new BitmapFactory.Options();
                    option.inSampleSize = 1;
                    stream = new FileInputStream(f);
                    Bitmap bmp1 = BitmapFactory.decodeStream(stream, null, option);
                    // FIX: decodeStream returns null on failure; previously this
                    // would NPE in createBitmap (only IOException/OOM were caught).
                    if (bmp1 != null) {
                        correctBmp = Bitmap.createBitmap(bmp1, 0, 0,
                                bmp1.getWidth(), bmp1.getHeight(), mat, true);
                    }
                } catch (IOException e) {
                    Log.w("TAG", "-- Error in setting image");
                } catch (OutOfMemoryError oom) {
                    Log.w("TAG", "-- OOM Error in setting image");
                } finally {
                    if (stream != null) {
                        try {
                            stream.close();
                        } catch (IOException ignored) {
                            // best-effort close; nothing useful to do here
                        }
                    }
                }
                mIvPrescription.setImageBitmap(correctBmp);
            }
        }
    }

    /**
     * Inflates the form, wires the date picker, doctor spinner, camera button,
     * and save button; pre-populates the form when editing an existing record.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View view = inflater.inflate(
                R.layout.fragment_layout_new_medical_history, container, false);
        ((HomeActivity) getActivity()).disableMenu();
        mIvPrescription = (ImageView) view
                .findViewById(R.id.imageviewNewPrescription);
        mEtDate = (EditText) view
                .findViewById(R.id.et_new_medical_history_date);
        mSpinnerDoctorName = (Spinner) view
                .findViewById(R.id.spinner_new_medical_history_doctor_name);
        mEtPurpose = (EditText) view
                .findViewById(R.id.et_new_medical_history_purpose);
        mEtSuggestion = (EditText) view
                .findViewById(R.id.et_new_medical_history_suggestion);
        mBtnCapturePrescription = (Button) view
                .findViewById(R.id.button_capture_prescription);
        mBtnSave = (Button) view.findViewById(R.id.button_new_medical_history);
        final Calendar calendar = Calendar.getInstance();
        final DatePickerDialog datePickerDialog = DatePickerDialog.newInstance(
                this, calendar.get(Calendar.YEAR),
                calendar.get(Calendar.MONTH),
                calendar.get(Calendar.DAY_OF_MONTH), false);
        // Show the date picker on both focus gain and tap of the date field.
        mEtDate.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                if (hasFocus) {
                    datePickerDialog.setYearRange(1985, 2028);
                    datePickerDialog.setCloseOnSingleTapDay(false);
                    datePickerDialog.show(getActivity()
                            .getSupportFragmentManager(),
                            ICareConstants.DATEPICKER_TAG);
                }
            }
        });
        mEtDate.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                datePickerDialog.setYearRange(1985, 2028);
                datePickerDialog.setCloseOnSingleTapDay(false);
                datePickerDialog.show(
                        getActivity().getSupportFragmentManager(),
                        ICareConstants.DATEPICKER_TAG);
            }
        });
        DoctorProfileDataSource doctorDS = new DoctorProfileDataSource(
                getActivity());
        mDoctorNameList = doctorDS.doctorNameList();
        mSpinnerDoctorName.setOnItemSelectedListener(this);
        ArrayAdapter<String> dataAdapter = new ArrayAdapter<String>(
                getActivity(), R.layout.spinner_item, mDoctorNameList);
        // Specify the layout to use when the list of choices appears
        dataAdapter
                .setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        // Apply the adapter to the spinner
        mSpinnerDoctorName.setAdapter(dataAdapter);
        mBtnCapturePrescription.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                callCamera();
            }
        });
        id = getArguments().getString(ICareConstants.MEDICAL_HISTORY_ID);
        if (id != null) {
            viewPreviousData();
        }
        mBtnSave.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                String date = mEtDate.getText().toString();
                String purpose = mEtPurpose.getText().toString();
                String suggestion = mEtSuggestion.getText().toString();
                // All three text fields are required; the image is optional.
                if (!(date.equals("") || purpose.equals("") || suggestion
                        .equals(""))) {
                    MedicalHistory insertMedicalHistory = new MedicalHistory(
                            date, mDoctorName, purpose, suggestion,
                            mPrescriptionImagePath,
                            ICareConstants.SELECTED_PROFILE_ID);
                    MedicalHistoryDataSource medicalHistoryDS = new MedicalHistoryDataSource(
                            getActivity());
                    if (id != null) {
                        // Editing: update the existing row.
                        int lId = Integer.parseInt(id);
                        if (medicalHistoryDS.updateData(lId,
                                insertMedicalHistory)) {
                            Toast.makeText(getActivity(), "Successfully Saved",
                                    Toast.LENGTH_SHORT).show();
                            FragmentMedicalHistory fragmentMedicalHistory = new FragmentMedicalHistory();
                            FragmentManager fragmentManager = getFragmentManager();
                            FragmentTransaction fragmentTransaction = fragmentManager
                                    .beginTransaction();
                            fragmentTransaction.replace(R.id.content_frame,
                                    fragmentMedicalHistory);
                            fragmentTransaction.commit();
                        } else {
                            Toast.makeText(getActivity(), "not Saved",
                                    Toast.LENGTH_SHORT).show();
                        }
                    } else {
                        // Creating: insert a new row.
                        if (medicalHistoryDS.insert(insertMedicalHistory)) {
                            Toast.makeText(getActivity(), "Successfully Saved",
                                    Toast.LENGTH_SHORT).show();
                            FragmentMedicalHistory fragmentMedicalHistory = new FragmentMedicalHistory();
                            FragmentManager fragmentManager = getFragmentManager();
                            FragmentTransaction fragmentTransaction = fragmentManager
                                    .beginTransaction();
                            fragmentTransaction.replace(R.id.content_frame,
                                    fragmentMedicalHistory);
                            fragmentTransaction.commit();
                        } else {
                            Toast.makeText(getActivity(), "not Saved",
                                    Toast.LENGTH_SHORT).show();
                        }
                    }
                }
            }
        });
        return view;
    }

    @Override
    public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2,
            long arg3) {
        // Remember the currently selected doctor name for saving.
        mDoctorName = arg0.getItemAtPosition(arg2).toString();
    }

    @Override
    public void onNothingSelected(AdapterView<?> arg0) {
        // NOTE(review): stores the AdapterView's toString() as the doctor name
        // when nothing is selected — looks unintentional; confirm desired behavior.
        mDoctorName = arg0.toString();
    }

    @Override
    public void onDateSet(DatePickerDialog datePickerDialog, int year,
            int month, int day) {
        mEtDate.setText(new StringBuilder()
                // Month is 0 based so add 1
                .append(day).append("/").append(month + 1).append("/")
                .append(year));
    }
}
| |
/*
* The MIT License
*
* Pierre Lindenbaum - Institut du Thorax - 2016
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package picard.sam;
import htsjdk.samtools.*;
import htsjdk.samtools.util.IOUtil;
import org.testng.Assert;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import picard.cmdline.CommandLineProgramTest;
import picard.vcf.VcfTestUtils;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;
import java.util.stream.StreamSupport;
public class FilterSamReadsTest extends CommandLineProgramTest {
@Override
public String getCommandLineProgramName() {
    // Derive the CLP name from the class itself so it stays in sync with renames.
    final String programName = FilterSamReads.class.getSimpleName();
    return programName;
}
// Read length applied to every record added to the builder in setUp().
private static final int READ_LENGTH = 151;
// Shared record factory; populated once in setUp() and serialized per test.
private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
// Location of the static test inputs (SAM files, javascript filters, interval lists).
private static final String TEST_DIR = "testdata/picard/sam/";
@BeforeTest
public void setUp() throws IOException {
    // Build a small fixed set of paired reads once; individual tests write this
    // builder out to a temp SAM file and filter it.
    builder.setReadLength(READ_LENGTH);
    builder.addPair("mapped_pair_chr1", 0, 1, 151); //should be kept in first test, filtered out in third
    builder.addPair("mapped_pair_chr2", 1, 1, 151); //should be filtered out for first test, and kept in third
    builder.addPair("prove_one_of_pair", 0, 1000, 1000); //neither of these will be kept in any test
    builder.addPair("one_of_pair", 0, 1, 1000); //first read should pass, second should not, but both will be kept in first test
}
@DataProvider(name = "dataTestReadFilter")
public Object[][] dataTestReadFilter() {
    // Read names written to the read-list file; records come from setUp().
    // Counts are pairs, hence the "* 2".
    final List<String> reads = Arrays.asList(
            "mapped_pair_chr1",
            "prove_one_of_pair",
            "one_of_pair");
    final Object[] includeCase = {FilterSamReads.Filter.includeReadList, reads, 3 * 2};
    final Object[] excludeCase = {FilterSamReads.Filter.excludeReadList, reads, 1 * 2};
    return new Object[][]{includeCase, excludeCase};
}
/**
 * filters a SAM using a reads file
 */
@Test(dataProvider = "dataTestReadFilter")
public void testReadFilters(final FilterSamReads.Filter filterType, final List<String> readList, final int expectNumber) throws Exception {
    // Serialize the builder's records to an indexed SAM in the test-data directory.
    final File inputSam = File.createTempFile("testSam", ".sam", new File(TEST_DIR));
    inputSam.deleteOnExit();
    final File sortedSamIdx = new File(TEST_DIR, inputSam.getName() + ".idx");
    sortedSamIdx.deleteOnExit();
    try (final SAMFileWriter writer = new SAMFileWriterFactory()
            .setCreateIndex(true).makeSAMWriter(builder.getHeader(), false, inputSam)) {
        for (final SAMRecord record : builder) {
            writer.addAlignment(record);
        }
    }
    // Write one read name per line for the READ_LIST_FILE argument.
    // NOTE(review): here TEST_DIR is used as the temp-file *prefix* (file lands in
    // the system temp dir), unlike inputSam above where it is the directory —
    // confirm this asymmetry is intended.
    final File reads = File.createTempFile(TEST_DIR, "reads");
    reads.deleteOnExit();
    try (final FileWriter writer = new FileWriter(reads)) {
        writer.write(String.join("\n", readList));
    } catch (IOException e) {
        e.printStackTrace();
    }
    FilterSamReads filterTest = setupProgram(reads, inputSam, filterType);
    // doWork() returns 0 on success.
    Assert.assertEquals(filterTest.doWork(), 0);
    long count = getReadCount(filterTest);
    Assert.assertEquals(count, expectNumber);
}
@DataProvider(name = "badArgumentCombinationsdata")
public Object[][] badArgumentCombinationsdata() {
    // Each row is a (filter, argument) pair that is mutually exclusive and must
    // fail the program's argument validation.
    // FIX: removed exact-duplicate excludeAligned/includeAligned rows from the
    // TAG and TAG_VALUE groups that re-ran identical combinations.
    return new Object[][]{
            {FilterSamReads.Filter.includeJavascript, "READ_LIST_FILE"},
            {FilterSamReads.Filter.excludeAligned, "READ_LIST_FILE"},
            {FilterSamReads.Filter.includeAligned, "READ_LIST_FILE"},
            {FilterSamReads.Filter.includePairedIntervals, "READ_LIST_FILE"},
            {FilterSamReads.Filter.includeTagValues, "READ_LIST_FILE"},
            {FilterSamReads.Filter.excludeTagValues, "READ_LIST_FILE"},
            {FilterSamReads.Filter.includeJavascript, "INTERVAL_LIST"},
            {FilterSamReads.Filter.excludeReadList, "INTERVAL_LIST"},
            {FilterSamReads.Filter.includeReadList, "INTERVAL_LIST"},
            {FilterSamReads.Filter.excludeAligned, "INTERVAL_LIST"},
            {FilterSamReads.Filter.includeAligned, "INTERVAL_LIST"},
            {FilterSamReads.Filter.includeTagValues, "INTERVAL_LIST"},
            {FilterSamReads.Filter.excludeTagValues, "INTERVAL_LIST"},
            {FilterSamReads.Filter.excludeReadList, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.includeReadList, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.excludeAligned, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.includeAligned, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.includePairedIntervals, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.includeTagValues, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.excludeTagValues, "JAVASCRIPT_FILE"},
            {FilterSamReads.Filter.excludeReadList, "TAG"},
            {FilterSamReads.Filter.includeReadList, "TAG"},
            {FilterSamReads.Filter.excludeAligned, "TAG"},
            {FilterSamReads.Filter.includeAligned, "TAG"},
            {FilterSamReads.Filter.includePairedIntervals, "TAG"},
            {FilterSamReads.Filter.excludeReadList, "TAG_VALUE"},
            {FilterSamReads.Filter.includeReadList, "TAG_VALUE"},
            {FilterSamReads.Filter.excludeAligned, "TAG_VALUE"},
            {FilterSamReads.Filter.includeAligned, "TAG_VALUE"},
            {FilterSamReads.Filter.includePairedIntervals, "TAG_VALUE"},
    };
}
/**
 * Verifies that mutually exclusive filter/argument combinations fail input
 * validation (exit code 1) before any work is done.
 */
@Test(dataProvider = "badArgumentCombinationsdata")
public void testBadArgumentCombinations(final FilterSamReads.Filter filter, final String fileArgument) throws IOException {
    // A placeholder file so the offending argument points at something that exists.
    // NOTE(review): TEST_DIR is used as the temp-file prefix here (file lands in the
    // system temp dir) — confirm this is intended.
    final File dummyFile = File.createTempFile(TEST_DIR, "dummy");
    dummyFile.deleteOnExit();
    try (final FileWriter writer = new FileWriter(dummyFile)) {
        writer.write("\n");
    } catch (IOException e) {
        e.printStackTrace();
    }
    final File temp = File.createTempFile("FilterSamReads.output.", ".sam");
    temp.deleteOnExit();
    final String[] args = new String[]{
            "INPUT=testdata/picard/sam/aligned.sam",
            String.format("OUTPUT=%s", temp.getAbsolutePath()),
            String.format("FILTER=%s", filter.toString()),
            String.format("%s=%s", fileArgument, dummyFile.getAbsoluteFile()),
    };
    // make sure program invocation failed - inputs validation error
    Assert.assertEquals(runPicardCommandLine(args), 1);
}
/** Rows: input SAM, javascript filter file, expected surviving read count. */
@DataProvider(name = "dataTestJsFilter")
public Object[][] dataTestJsFilter() {
    final Object[][] rows = {
            {TEST_DIR + "aligned.sam", TEST_DIR + "FilterSamReads/filterOddStarts.js", 3},
            {TEST_DIR + "aligned.sam", TEST_DIR + "FilterSamReads/filterReadsWithout5primeSoftClip.js", 0}
    };
    return rows;
}
/** Rows: interval-list file, expected surviving read count. */
@DataProvider(name = "dataTestPairedIntervalFilter")
public Object[][] dataTestPairedIntervalFilter() {
    final Object[][] rows = {
            {TEST_DIR + "FilterSamReads/filter1.interval_list", 4},
            {TEST_DIR + "FilterSamReads/filter2.interval_list", 0}
    };
    return rows;
}
/** Rows: input SAM, tag, tag value, include-vs-exclude flag, expected count. */
@DataProvider(name = "dataTestTagFilter")
public Object[][] dataTestTagFilter() {
    final Object[][] rows = {
            {"testdata/picard/sam/aligned.sam", "RG", "0", true, 8},
            {"testdata/picard/sam/aligned.sam", "RG", "0", false, 0},
            {"testdata/picard/sam/aligned.sam", "CB", "ACG", false, 3},
            {"testdata/picard/sam/aligned.sam", "CB", "ACG", true, 5}
    };
    return rows;
}
/**
 * Runs FilterSamReads with a javascript include filter and checks how many
 * reads survive filtering.
 */
@Test(dataProvider = "dataTestJsFilter")
public void testJavaScriptFilters(final String samFilename, final String javascriptFilename, final int expectNumber) throws Exception {
    final FilterSamReads program =
            setupProgram(new File(javascriptFilename), new File(samFilename), FilterSamReads.Filter.includeJavascript);
    Assert.assertEquals(program.doWork(), 0);
    Assert.assertEquals(getReadCount(program), expectNumber);
}
/**
 * Runs FilterSamReads with a paired-interval include filter against a freshly
 * written indexed BAM and checks how many reads survive.
 */
@Test(dataProvider = "dataTestPairedIntervalFilter")
public void testPairedIntervalFilter(final String intervalFilename, final int expectNumber) throws Exception {
    // Materialize the shared record builder into an indexed BAM on disk.
    final File inputSam = VcfTestUtils.createTemporaryIndexedFile("testSam", ".bam");
    final SAMFileWriter writer = new SAMFileWriterFactory()
            .setCreateIndex(true).makeSAMWriter(builder.getHeader(), false, inputSam);
    for (final SAMRecord record : builder) {
        writer.addAlignment(record);
    }
    writer.close();
    final FilterSamReads program =
            setupProgram(new File(intervalFilename), inputSam, FilterSamReads.Filter.includePairedIntervals);
    Assert.assertEquals(program.doWork(), 0);
    Assert.assertEquals(getReadCount(program), expectNumber);
}
/** Rows: WRITE_READS_FILES value (null = program default), expect-debug-files flag. */
@DataProvider(name = "dataTestDebugOption")
public Object[][] dataTestDebugOption() {
    final Object[][] rows = {
            {null, false},
            {true, true},
            {false, false}
    };
    return rows;
}
/**
 * makes sure debug files are created properly
 *
 * @param writeDebugReads value assigned to WRITE_READS_FILES; null leaves the
 *                        program default in place
 * @param isDebugFileExpected whether the *.reads debug files should exist
 *                            after the run
 */
@Test(dataProvider = "dataTestDebugOption")
public void testDebugOption(Boolean writeDebugReads, boolean isDebugFileExpected) throws Exception {
    // input as SAM file
    final File inputSam = new File("testdata/picard/sam/aligned.sam");
    final File javascriptFile = new File("testdata/picard/sam/FilterSamReads/filterOddStarts.js");
    FilterSamReads filterTest = setupProgram(javascriptFile, inputSam, FilterSamReads.Filter.includeJavascript, writeDebugReads);
    Assert.assertEquals(filterTest.doWork(), 0);
    // Debug file names are derived from the INPUT/OUTPUT base names, placed
    // next to the OUTPUT file.
    final File inputReadsFile = new File(filterTest.OUTPUT.getParentFile(), IOUtil.basename(filterTest.INPUT) + ".reads");
    Assert.assertEquals(inputReadsFile.exists(), isDebugFileExpected);
    final File outputReadsFile = new File(filterTest.OUTPUT.getParentFile(), IOUtil.basename(filterTest.OUTPUT) + ".reads");
    outputReadsFile.deleteOnExit();
    Assert.assertEquals(outputReadsFile.exists(), isDebugFileExpected);
    // We have to clean up the debug files after each test is run to make sure a clean state is preserved in between tests
    // This mostly affects the input *.reads file because it will always be called "aligned.reads" and will cause future
    // tests to fail if it sticks around and we dont expect it to be written
    Files.deleteIfExists(inputReadsFile.toPath());
}
/**
 * Builds a FilterSamReads instance wired up for the requested filter mode,
 * writing its output to a self-deleting temp file.
 *
 * @param inputFile       the filter-specific file (interval list, javascript,
 *                        or read list, depending on {@code filter})
 * @param inputSam        the SAM/BAM to filter
 * @param filter          the filter mode to configure
 * @param writeDebugReads WRITE_READS_FILES value, or null to keep the default
 * @return a fully configured, not-yet-run FilterSamReads
 */
private FilterSamReads setupProgram(final File inputFile, final File inputSam, final FilterSamReads.Filter filter, final Boolean writeDebugReads) throws Exception {
    final FilterSamReads program = new FilterSamReads();
    program.INPUT = inputSam;
    program.OUTPUT = File.createTempFile("FilterSamReads.output.", ".sam");
    program.OUTPUT.deleteOnExit();
    program.FILTER = filter;
    if (filter == FilterSamReads.Filter.includePairedIntervals) {
        program.INTERVAL_LIST = inputFile;
    } else if (filter == FilterSamReads.Filter.includeJavascript) {
        program.JAVASCRIPT_FILE = inputFile;
    } else if (filter == FilterSamReads.Filter.includeReadList
            || filter == FilterSamReads.Filter.excludeReadList) {
        program.READ_LIST_FILE = inputFile;
    } else {
        throw new IllegalArgumentException("Not configured for filter=" + filter);
    }
    if (writeDebugReads != null) {
        program.WRITE_READS_FILES = writeDebugReads;
    }
    return program;
}
/** Convenience overload that leaves WRITE_READS_FILES at its default. */
private FilterSamReads setupProgram(final File inputFile, final File inputSam, final FilterSamReads.Filter filter) throws Exception {
    final Boolean defaultDebugReads = null;
    return setupProgram(inputFile, inputSam, filter, defaultDebugReads);
}
/**
 * Filters a SAM by tag value, either keeping or dropping matching reads, and
 * verifies the resulting read count.
 */
@Test(dataProvider = "dataTestTagFilter")
public void testTagFilter(final String samFilename, final String tag, final String tagValue, final boolean includeReads, final int expectNumber) throws Exception {
    final FilterSamReads program = new FilterSamReads();
    program.INPUT = new File(samFilename);
    program.OUTPUT = File.createTempFile("FilterSamReads.output.", ".sam");
    program.OUTPUT.deleteOnExit();
    program.FILTER = includeReads
            ? FilterSamReads.Filter.includeTagValues
            : FilterSamReads.Filter.excludeTagValues;
    program.TAG = tag;
    program.TAG_VALUE = Arrays.asList(tagValue);
    Assert.assertEquals(program.doWork(), 0);
    Assert.assertEquals(getReadCount(program), expectNumber);
}
/**
 * Counts the reads in the program's OUTPUT file.
 *
 * Uses try-with-resources so the SamReader is closed even when the stream
 * throws; the original only closed it on the success path.
 *
 * @param filterTest a FilterSamReads whose doWork() has already produced OUTPUT
 * @return the number of records in the output
 */
private long getReadCount(FilterSamReads filterTest) throws Exception {
    try (final SamReader samReader = SamReaderFactory.makeDefault().open(filterTest.OUTPUT)) {
        return StreamSupport.stream(samReader.spliterator(), false).count();
    }
}
}
| |
package tamaized.voidcraft.registry;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.potion.PotionUtils;
import net.minecraftforge.oredict.OreDictionary;
import tamaized.voidcraft.VoidCraft;
import tamaized.voidcraft.common.capabilities.vadeMecum.IVadeMecumCapability;
import tamaized.voidcraft.common.machina.addons.TERecipeInfuser;
import tamaized.voidcraft.common.machina.addons.TERecipesAlchemy;
import tamaized.voidcraft.common.machina.addons.TERecipesBlastFurnace;
import tamaized.voidcraft.common.machina.addons.TERecipesMacerator;
import java.util.ArrayList;
import java.util.List;
public class VoidCraftTERecipes {
public static TERecipesMacerator macerator;
public static TERecipeInfuser infuser;
public static TERecipesAlchemy alchemy;
public static TERecipesBlastFurnace blastFurnace;
static {
macerator = new TERecipesMacerator();
infuser = new TERecipeInfuser();
alchemy = new TERecipesAlchemy();
blastFurnace = new TERecipesBlastFurnace();
}
public static void init() {
for (ItemStack input : getOreDict(new String[]{"oreCoal"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.coalDust, 8), 200));
for (ItemStack input : getOreDict(new String[]{"oreQuartz"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.quartzDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreIron"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.ironDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreGold"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.goldDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreCopper"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.copperDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreTin"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.tinDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreLead"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.leadDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreLapis"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.lapisDust, 8), 200));
for (ItemStack input : getOreDict(new String[]{"oreEmerald"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.emeraldDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"oreDiamond"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.diamondDust, 4), 200));
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{new ItemStack(Items.COAL, 1)}, new ItemStack(VoidCraft.items.coalDust, 4), 200));
for (ItemStack input : getOreDict(new String[]{"gemQuartz"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.quartzDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"ingotIron"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.ironDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"ingotGold"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.goldDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"ingotCopper"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.copperDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"ingotTin"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.tinDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"ingotLead"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.leadDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"gemLapis"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.lapisDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"gemEmerald"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.emeraldDust, 1), 200));
for (ItemStack input : getOreDict(new String[]{"gemDiamond"}))
macerator.registerRecipe(macerator.new MaceratorRecipe(new ItemStack[]{input}, new ItemStack(VoidCraft.items.diamondDust, 1), 200));
blastFurnace.registerRecipe(blastFurnace.new BlastFurnaceRecipe(new ItemStack[]{new ItemStack(VoidCraft.items.ironDust), new ItemStack(VoidCraft.items.coalDust)}, new ItemStack(VoidCraft.items.voidicSteel, 1), 500));
infuser.registerRecipe(infuser.new InfuserRecipe(new ItemStack[]{new ItemStack(Blocks.FURNACE)}, new ItemStack(VoidCraft.blocks.voidMacerator), 1000));
infuser.registerRecipe(infuser.new InfuserRecipe(new ItemStack[]{new ItemStack(VoidCraft.tools.archSword)}, new ItemStack(VoidCraft.tools.demonSword), 1000));
infuser.registerRecipe(infuser.new InfuserRecipe(new ItemStack[]{new ItemStack(Blocks.BEACON)}, new ItemStack(VoidCraft.blocks.Heimdall), 1000));
{
ItemStack[] voidicImmunityInputStack = new ItemStack[]{new ItemStack(VoidCraft.items.etherealFruit), new ItemStack(VoidCraft.items.etherealFruit_redstone), new ItemStack(VoidCraft.items.etherealFruit_lapis), new ItemStack(VoidCraft.items.etherealFruit_gold), new ItemStack(VoidCraft.items.etherealFruit_emerald), new ItemStack(VoidCraft.items.etherealFruit_diamond)};
ItemStack result = PotionUtils.addPotionToItemStack(new ItemStack(Items.POTIONITEM), VoidCraft.potions.type_voidImmunity);
alchemy.registerRecipe(alchemy.new AlchemyRecipe(null, voidicImmunityInputStack, result, 350));
alchemy.registerRecipe(alchemy.new AlchemyRecipe(
IVadeMecumCapability.Category.Flame,
new ItemStack[]{
new ItemStack(Items.NETHER_WART),
new ItemStack(Items.BLAZE_POWDER),
new ItemStack(Items.MAGMA_CREAM),
new ItemStack(Items.FIRE_CHARGE),
new ItemStack(VoidCraft.items.etherealFruit),
new ItemStack(VoidCraft.items.emptyObsidianFlask)
},
new ItemStack(VoidCraft.items.obsidianFlaskFire, 1),
350)
);
alchemy.registerRecipe(alchemy.new AlchemyRecipe(
IVadeMecumCapability.Category.Freeze,
new ItemStack[]{
new ItemStack(Blocks.ICE),
new ItemStack(Blocks.SNOW),
new ItemStack(VoidCraft.items.ectoplasm),
new ItemStack(VoidCraft.items.voidcrystal),
new ItemStack(VoidCraft.items.etherealFruit),
new ItemStack(VoidCraft.items.emptyObsidianFlask)
},
new ItemStack(VoidCraft.items.obsidianFlaskFreeze, 1),
350)
);
alchemy.registerRecipe(alchemy.new AlchemyRecipe(
IVadeMecumCapability.Category.Shock,
new ItemStack[]{
new ItemStack(Blocks.GLOWSTONE),
new ItemStack(Blocks.END_STONE),
new ItemStack(Blocks.END_ROD),
new ItemStack(Items.FEATHER),
new ItemStack(VoidCraft.items.etherealFruit),
new ItemStack(VoidCraft.items.emptyObsidianFlask)
},
new ItemStack(VoidCraft.items.obsidianFlaskShock, 1),
350)
);
alchemy.registerRecipe(alchemy.new AlchemyRecipe(
IVadeMecumCapability.Category.AcidSpray,
new ItemStack[]{
new ItemStack(Items.FISH, 1, 3),
new ItemStack(Blocks.SLIME_BLOCK),
new ItemStack(Items.FERMENTED_SPIDER_EYE),
new ItemStack(Items.POISONOUS_POTATO),
new ItemStack(VoidCraft.items.etherealFruit),
new ItemStack(VoidCraft.items.emptyObsidianFlask)
},
new ItemStack(VoidCraft.items.obsidianFlaskAcid, 1),
350)
);
alchemy.registerRecipe(alchemy.new AlchemyRecipe(
IVadeMecumCapability.Category.Implosion,
new ItemStack[]{
new ItemStack(VoidCraft.blocks.blockVoidcrystal),
new ItemStack(VoidCraft.blocks.realityHole),
new ItemStack(VoidCraft.items.voidCloth),
new ItemStack(VoidCraft.items.astralEssence),
new ItemStack(VoidCraft.items.etherealFruit),
new ItemStack(VoidCraft.items.emptyObsidianFlask)
},
new ItemStack(VoidCraft.items.obsidianFlaskVoid, 1),
350)
);
}
}
public static List<ItemStack> getOreDict(String[] input) {
List<ItemStack> stacks = new ArrayList<ItemStack>();
for (String ore : input) {
stacks.addAll(OreDictionary.getOres(ore));
}
return stacks;
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dx.rop.cst;
import com.android.dx.rop.type.Type;
import java.util.HashMap;
/**
* Constants that represent an arbitrary type (reference or primitive).
*/
public final class CstType extends TypedConstant {
/** {@code non-null;} map of interned types */
private static final HashMap<Type, CstType> interns =
new HashMap<Type, CstType>(100);
/** {@code non-null;} instance corresponding to the class {@code Object} */
public static final CstType OBJECT = intern(Type.OBJECT);
/** {@code non-null;} instance corresponding to the class {@code Boolean} */
public static final CstType BOOLEAN = intern(Type.BOOLEAN_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Byte} */
public static final CstType BYTE = intern(Type.BYTE_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Character} */
public static final CstType CHARACTER = intern(Type.CHARACTER_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Double} */
public static final CstType DOUBLE = intern(Type.DOUBLE_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Float} */
public static final CstType FLOAT = intern(Type.FLOAT_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Long} */
public static final CstType LONG = intern(Type.LONG_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Integer} */
public static final CstType INTEGER = intern(Type.INTEGER_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Short} */
public static final CstType SHORT = intern(Type.SHORT_CLASS);
/** {@code non-null;} instance corresponding to the class {@code Void} */
public static final CstType VOID = intern(Type.VOID_CLASS);
/** {@code non-null;} instance corresponding to the type {@code boolean[]} */
public static final CstType BOOLEAN_ARRAY = intern(Type.BOOLEAN_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code byte[]} */
public static final CstType BYTE_ARRAY = intern(Type.BYTE_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code char[]} */
public static final CstType CHAR_ARRAY = intern(Type.CHAR_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code double[]} */
public static final CstType DOUBLE_ARRAY = intern(Type.DOUBLE_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code float[]} */
public static final CstType FLOAT_ARRAY = intern(Type.FLOAT_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code long[]} */
public static final CstType LONG_ARRAY = intern(Type.LONG_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code int[]} */
public static final CstType INT_ARRAY = intern(Type.INT_ARRAY);
/** {@code non-null;} instance corresponding to the type {@code short[]} */
public static final CstType SHORT_ARRAY = intern(Type.SHORT_ARRAY);
/** {@code non-null;} the underlying type */
private final Type type;
/**
* {@code null-ok;} the type descriptor corresponding to this instance, if
* calculated
*/
private CstString descriptor;
/**
* Returns an instance of this class that represents the wrapper
* class corresponding to a given primitive type. For example, if
* given {@link Type#INT}, this method returns the class reference
* {@code java.lang.Integer}.
*
* @param primitiveType {@code non-null;} the primitive type
* @return {@code non-null;} the corresponding wrapper class
*/
public static CstType forBoxedPrimitiveType(Type primitiveType) {
switch (primitiveType.getBasicType()) {
case Type.BT_BOOLEAN: return BOOLEAN;
case Type.BT_BYTE: return BYTE;
case Type.BT_CHAR: return CHARACTER;
case Type.BT_DOUBLE: return DOUBLE;
case Type.BT_FLOAT: return FLOAT;
case Type.BT_INT: return INTEGER;
case Type.BT_LONG: return LONG;
case Type.BT_SHORT: return SHORT;
case Type.BT_VOID: return VOID;
}
throw new IllegalArgumentException("not primitive: " + primitiveType);
}
/**
* Returns an interned instance of this class for the given type.
*
* @param type {@code non-null;} the underlying type
* @return {@code non-null;} an appropriately-constructed instance
*/
public static CstType intern(Type type) {
synchronized (interns) {
CstType cst = interns.get(type);
if (cst == null) {
cst = new CstType(type);
interns.put(type, cst);
}
return cst;
}
}
/**
* Constructs an instance.
*
* @param type {@code non-null;} the underlying type
*/
public CstType(Type type) {
if (type == null) {
throw new NullPointerException("type == null");
}
if (type == Type.KNOWN_NULL) {
throw new UnsupportedOperationException(
"KNOWN_NULL is not representable");
}
this.type = type;
this.descriptor = null;
}
/** {@inheritDoc} */
@Override
public boolean equals(Object other) {
if (!(other instanceof CstType)) {
return false;
}
return type == ((CstType) other).type;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return type.hashCode();
}
/** {@inheritDoc} */
@Override
protected int compareTo0(Constant other) {
String thisDescriptor = type.getDescriptor();
String otherDescriptor = ((CstType) other).type.getDescriptor();
return thisDescriptor.compareTo(otherDescriptor);
}
/** {@inheritDoc} */
@Override
public String toString() {
return "type{" + toHuman() + '}';
}
/** {@inheritDoc} */
public Type getType() {
return Type.CLASS;
}
/** {@inheritDoc} */
@Override
public String typeName() {
return "type";
}
/** {@inheritDoc} */
@Override
public boolean isCategory2() {
return false;
}
/** {@inheritDoc} */
public String toHuman() {
return type.toHuman();
}
/**
* Gets the underlying type (as opposed to the type corresponding
* to this instance as a constant, which is always
* {@code Class}).
*
* @return {@code non-null;} the type corresponding to the name
*/
public Type getClassType() {
return type;
}
/**
* Gets the type descriptor for this instance.
*
* @return {@code non-null;} the descriptor
*/
public CstString getDescriptor() {
if (descriptor == null) {
descriptor = new CstString(type.getDescriptor());
}
return descriptor;
}
/**
* Returns a human readable package name for this type, like "java.util".
* If this is an array type, this returns the package name of the array's
* component type. If this is a primitive type, this returns "default".
*/
public String getPackageName() {
// descriptor is a string like "[[Ljava/util/String;"
String descriptor = getDescriptor().getString();
int lastSlash = descriptor.lastIndexOf('/');
int lastLeftSquare = descriptor.lastIndexOf('['); // -1 unless this is an array
if (lastSlash == -1) {
return "default";
} else {
// +2 to skip the '[' and the 'L' prefix
return descriptor.substring(lastLeftSquare + 2, lastSlash).replace('/', '.');
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ConnectionConfiguration;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.hfile.CorruptHFileException;
import org.apache.hadoop.hbase.io.hfile.TestHFile;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobTestUtil;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
@Category(MediumTests.class)
public class TestMobStoreScanner {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMobStoreScanner.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static byte [] row1 = Bytes.toBytes("row1");
private final static byte [] row2 = Bytes.toBytes("row2");
private final static byte [] family = Bytes.toBytes("family");
private final static byte [] qf1 = Bytes.toBytes("qualifier1");
private final static byte [] qf2 = Bytes.toBytes("qualifier2");
protected final byte[] qf3 = Bytes.toBytes("qualifier3");
private static Table table;
private static Admin admin;
private static HColumnDescriptor hcd;
private static HTableDescriptor desc;
private static Random random = new Random();
private static long defaultThreshold = 10;
private FileSystem fs;
private Configuration conf;
@Rule
public TestName name = new TestName();
// Raise both the client-side and region-server-side cell size limits so the
// massive-value test below is not rejected on size, then start the cluster.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    final int maxCellSize = 100 * 1024 * 1024;
    TEST_UTIL.getConfiguration().setInt(ConnectionConfiguration.MAX_KEYVALUE_SIZE_KEY, maxCellSize);
    TEST_UTIL.getConfiguration().setInt(HRegion.HBASE_MAX_CELL_SIZE_KEY, maxCellSize);
    TEST_UTIL.startMiniCluster(1);
}
// Stops the mini cluster started in setUpBeforeClass.
@AfterClass
public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
}
/**
 * Creates a fresh MOB-enabled table for a single test and points the shared
 * static fields (table, admin, hcd, desc, conf, fs) at it.
 *
 * @param threshold MOB threshold in bytes for the column family
 * @param tn        name of the table to create
 */
public void setUp(long threshold, TableName tn) throws Exception {
    conf = TEST_UTIL.getConfiguration();
    fs = FileSystem.get(conf);
    desc = new HTableDescriptor(tn);
    hcd = new HColumnDescriptor(family);
    hcd.setMobEnabled(true);
    hcd.setMobThreshold(threshold);
    // keep several versions so multi-timestamp tests can read them all
    hcd.setMaxVersions(4);
    desc.addFamily(hcd);
    admin = TEST_UTIL.getAdmin();
    admin.createTable(desc);
    // NOTE(review): the Connection created here is never closed; presumably
    // tolerable because the mini cluster is torn down after the class -- confirm.
    table = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())
            .getTable(tn);
}
/**
 * Produces a random byte payload of the requested size, used as a MOB value.
 *
 * @param size the size of the value in bytes
 * @return the randomly filled value
 */
private static byte[] generateMobValue(int size) {
    final byte[] payload = new byte[size];
    random.nextBytes(payload);
    return payload;
}
/**
 * Applies common scan settings for the MOB tests.
 *
 * @param reversed   if true, scan in backward order
 * @param mobScanRaw if true, ask for the raw MOB reference cells via the
 *                   MOB_SCAN_RAW attribute
 */
public void setScan(Scan scan, boolean reversed, boolean mobScanRaw) {
    scan.setReversed(reversed);
    scan.setMaxVersions(4);
    if (!mobScanRaw) {
        return;
    }
    scan.setAttribute(MobConstants.MOB_SCAN_RAW, Bytes.toBytes(Boolean.TRUE));
}
// Runs the full scan suite in forward order; each helper creates its own table.
@Test
public void testMobStoreScanner() throws Exception {
    final boolean reversed = false;
    testGetFromFiles(reversed);
    testGetFromMemStore(reversed);
    testGetReferences(reversed);
    testMobThreshold(reversed);
    testGetFromArchive(reversed);
}
// Runs the full scan suite in reversed order; each helper creates its own table.
@Test
public void testReversedMobStoreScanner() throws Exception {
    final boolean reversed = true;
    testGetFromFiles(reversed);
    testGetFromMemStore(reversed);
    testGetReferences(reversed);
    testMobThreshold(reversed);
    testGetFromArchive(reversed);
}
/**
 * Writes three very large cells and verifies a Get on them succeeds.
 * Historically 5/10/15/20MB values were fine while 25/30/40MB tripped the
 * default 64MB protobuf limit; setUpBeforeClass raises the limits so this
 * size works.
 */
@Test
public void testGetMassive() throws Exception {
    setUp(defaultThreshold, TableName.valueOf(name.getMethodName()));
    final byte[] bigValue = new byte[25 * 1024 * 1024];
    final Put put = new Put(row1);
    put.addColumn(family, qf1, bigValue);
    put.addColumn(family, qf2, bigValue);
    put.addColumn(family, qf3, bigValue);
    table.put(put);
    // should not blow up
    table.get(new Get(row1));
}
/**
 * Verifies scanner read-point isolation: a scanner opened before later puts
 * keeps returning the values that were current when it was created, even
 * across a flush.
 */
@Test
public void testReadPt() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    // threshold 0: every value goes through the MOB path
    setUp(0L, tableName);
    long ts = System.currentTimeMillis();
    byte[] value1 = Bytes.toBytes("value1");
    Put put1 = new Put(row1);
    put1.addColumn(family, qf1, ts, value1);
    table.put(put1);
    Put put2 = new Put(row2);
    byte[] value2 = Bytes.toBytes("value2");
    put2.addColumn(family, qf1, ts, value2);
    table.put(put2);
    Scan scan = new Scan();
    // caching=1 so the second row is only fetched after the overwrites below
    scan.setCaching(1);
    ResultScanner rs = table.getScanner(scan);
    Result result = rs.next();
    // overwrite both rows at the SAME timestamp, after the scanner started
    Put put3 = new Put(row1);
    byte[] value3 = Bytes.toBytes("value3");
    put3.addColumn(family, qf1, ts, value3);
    table.put(put3);
    Put put4 = new Put(row2);
    byte[] value4 = Bytes.toBytes("value4");
    put4.addColumn(family, qf1, ts, value4);
    table.put(put4);
    // the scanner must still see the original value for row1 ...
    Cell cell = result.getColumnLatestCell(family, qf1);
    Assert.assertArrayEquals(value1, CellUtil.cloneValue(cell));
    admin.flush(tableName);
    // ... and, even after the flush, the original value for row2
    result = rs.next();
    cell = result.getColumnLatestCell(family, qf1);
    Assert.assertArrayEquals(value2, CellUtil.cloneValue(cell));
}
/**
 * When EMPTY_VALUE_ON_MOBCELL_MISS is set, reading a record whose MOB file is
 * corrupt yields an empty value instead of an exception.
 */
@Test
public void testReadFromCorruptMobFilesWithReadEmptyValueOnMobCellMiss() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    setUp(0, tableName);
    createRecordAndCorruptMobFile(tableName, row1, family, qf1, Bytes.toBytes("value1"));
    final Get get = new Get(row1);
    get.setAttribute(MobConstants.EMPTY_VALUE_ON_MOBCELL_MISS, Bytes.toBytes(true));
    final Cell cell = table.get(get).getColumnLatestCell(family, qf1);
    Assert.assertEquals(0, cell.getValueLength());
}
/**
 * Without the empty-value-on-miss attribute, reading a record whose MOB file
 * is corrupt must surface a CorruptHFileException.
 */
@Test
public void testReadFromCorruptMobFiles() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    setUp(0, tableName);
    createRecordAndCorruptMobFile(tableName, row1, family, qf1, Bytes.toBytes("value1"));
    final Get get = new Get(row1);
    try {
        table.get(get);
        // Fail inside the try with a descriptive message instead of the
        // original capture-then-assertNotNull dance.
        Assert.fail("expected a CorruptHFileException reading a corrupt MOB file");
    } catch (IOException e) {
        // Compare class names (as the original did) to require the exact type.
        Assert.assertEquals(CorruptHFileException.class.getName(), e.getClass().getName());
    }
}
/**
 * Writes a single cell, flushes it to a MOB file, then replaces that MOB file
 * on disk with a truncated (corrupt) copy of itself.
 */
private void createRecordAndCorruptMobFile(TableName tn, byte[] row, byte[] family, byte[] qf,
    byte[] value) throws IOException {
    Put put1 = new Put(row);
    put1.addColumn(family, qf, value);
    table.put(put1);
    // flush so the value lands in a MOB file on disk
    admin.flush(tn);
    Path mobFile = getFlushedMobFile(conf, fs, tn, Bytes.toString(family));
    Assert.assertNotNull(mobFile);
    // create new corrupt mob file.
    Path corruptFile = new Path(mobFile.getParent(), "dummy");
    TestHFile.truncateFile(fs, mobFile, corruptFile);
    // swap the truncated copy in under the original file name
    fs.delete(mobFile, true);
    fs.rename(corruptFile, mobFile);
}
/**
 * Returns the first non-directory entry under the family's MOB directory,
 * or null if none is found.
 */
private Path getFlushedMobFile(Configuration conf, FileSystem fs, TableName table, String family)
    throws IOException {
    final Path famDir = MobUtils.getMobFamilyPath(conf, table, family);
    for (final FileStatus status : fs.listStatus(famDir)) {
        if (!status.isDirectory()) {
            return status.getPath();
        }
    }
    return null;
}
// Exercises reads that must hit flushed MOB files (flush before scanning).
private void testGetFromFiles(boolean reversed) throws Exception {
    testGet(TableName.valueOf("testGetFromFiles" + reversed), reversed, true);
}
// Exercises reads served from the memstore (no flush before scanning).
private void testGetFromMemStore(boolean reversed) throws Exception {
    testGet(TableName.valueOf("testGetFromMemStore" + reversed), reversed, false);
}
/**
 * Writes three qualifiers of a MOB-sized value at consecutive timestamps and
 * verifies a scan sees all three, optionally flushing first so the read is
 * served from MOB files instead of the memstore.
 */
private void testGet(TableName tableName, boolean reversed, boolean doFlush)
    throws Exception {
    setUp(defaultThreshold, tableName);
    final long baseTs = System.currentTimeMillis();
    final byte[] value = generateMobValue((int) defaultThreshold + 1);
    final Put put = new Put(row1);
    put.addColumn(family, qf1, baseTs + 2, value);
    put.addColumn(family, qf2, baseTs + 1, value);
    put.addColumn(family, qf3, baseTs, value);
    table.put(put);
    if (doFlush) {
        admin.flush(tableName);
    }
    final Scan scan = new Scan();
    setScan(scan, reversed, false);
    MobTestUtil.assertCellsValue(table, scan, value, 3);
}
/**
 * After a flush, a raw MOB scan must return reference cells (not values) for
 * every qualifier written.
 */
private void testGetReferences(boolean reversed) throws Exception {
    TableName tn = TableName.valueOf("testGetReferences" + reversed);
    setUp(defaultThreshold, tn);
    long ts1 = System.currentTimeMillis();
    long ts2 = ts1 + 1;
    long ts3 = ts1 + 2;
    // (stray double semicolon removed)
    byte[] value = generateMobValue((int) defaultThreshold + 1);
    Put put1 = new Put(row1);
    put1.addColumn(family, qf1, ts3, value);
    put1.addColumn(family, qf2, ts2, value);
    put1.addColumn(family, qf3, ts1, value);
    table.put(put1);
    admin.flush(tn);
    Scan scan = new Scan();
    setScan(scan, reversed, true);
    ResultScanner results = table.getScanner(scan);
    int count = 0;
    for (Result res : results) {
        for (Cell cell : res.listCells()) {
            // every returned cell must be a MOB reference, not the raw value
            assertIsMobReference(cell, row1, family, value, tn);
            count++;
        }
    }
    results.close();
    Assert.assertEquals(3, count);
}
/**
 * Verifies the mob threshold boundary: values below or equal to the threshold
 * are stored inline, while a value strictly greater becomes a mob reference.
 */
private void testMobThreshold(boolean reversed) throws Exception {
  TableName tn = TableName.valueOf("testMobThreshold" + reversed);
  setUp(defaultThreshold, tn);
  byte[] valueLess = generateMobValue((int) defaultThreshold - 1);
  byte[] valueEqual = generateMobValue((int) defaultThreshold);
  byte[] valueGreater = generateMobValue((int) defaultThreshold + 1);
  long ts1 = System.currentTimeMillis();
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  Put put1 = new Put(row1);
  put1.addColumn(family, qf1, ts3, valueLess);
  put1.addColumn(family, qf2, ts2, valueEqual);
  put1.addColumn(family, qf3, ts1, valueGreater);
  table.put(put1);
  admin.flush(tn);
  Scan scan = new Scan();
  setScan(scan, reversed, true);
  Cell cellLess = null;
  Cell cellEqual = null;
  Cell cellGreater = null;
  int count = 0;
  // try-with-resources: previously the scanner leaked if an assertion failed
  // before the trailing results.close() was reached.
  try (ResultScanner results = table.getScanner(scan)) {
    for (Result res : results) {
      for (Cell cell : res.listCells()) {
        // Bucket each cell by its qualifier so we can check them individually.
        String qf = Bytes.toString(CellUtil.cloneQualifier(cell));
        if (qf.equals(Bytes.toString(qf1))) {
          cellLess = cell;
        }
        if (qf.equals(Bytes.toString(qf2))) {
          cellEqual = cell;
        }
        if (qf.equals(Bytes.toString(qf3))) {
          cellGreater = cell;
        }
        count++;
      }
    }
    Assert.assertEquals(3, count);
    // Below and at the threshold: inline. Above the threshold: mob reference.
    assertNotMobReference(cellLess, row1, family, valueLess);
    assertNotMobReference(cellEqual, row1, family, valueEqual);
    assertIsMobReference(cellGreater, row1, family, valueGreater, tn);
  }
}
/**
 * Moves flushed mob files into the archive directory and verifies a scan still
 * resolves the mob cells from the archived files.
 */
private void testGetFromArchive(boolean reversed) throws Exception {
  TableName tn = TableName.valueOf("testGetFromArchive" + reversed);
  setUp(defaultThreshold, tn);
  long ts1 = System.currentTimeMillis();
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  byte[] value = generateMobValue((int) defaultThreshold + 1);
  // Put some data
  Put put1 = new Put(row1);
  put1.addColumn(family, qf1, ts3, value);
  put1.addColumn(family, qf2, ts2, value);
  put1.addColumn(family, qf3, ts1, value);
  table.put(put1);
  admin.flush(tn);
  // Get the files in the mob path
  Path mobFamilyPath = MobUtils.getMobFamilyPath(
      TEST_UTIL.getConfiguration(), tn, hcd.getNameAsString());
  FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
  FileStatus[] files = fs.listStatus(mobFamilyPath);
  // Get the archive path for the mob region.
  Path rootDir = CommonFSUtils.getRootDir(TEST_UTIL.getConfiguration());
  Path tableDir = CommonFSUtils.getTableDir(rootDir, tn);
  RegionInfo regionInfo = MobUtils.getMobRegionInfo(tn);
  Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(TEST_UTIL.getConfiguration(),
      regionInfo, tableDir, family);
  // Move the files from the mob path to the archive path.
  fs.mkdirs(storeArchiveDir);
  int fileCount = 0;
  for (FileStatus file : files) {
    fileCount++;
    Path filePath = file.getPath();
    Path src = new Path(mobFamilyPath, filePath.getName());
    Path dst = new Path(storeArchiveDir, filePath.getName());
    // fs.rename signals failure via its return value instead of throwing;
    // the old code ignored it, which would surface as a confusing count
    // mismatch below. Fail fast here instead.
    Assert.assertTrue(fs.rename(src, dst));
  }
  // Verify the move succeeded.
  Assert.assertEquals(0, fs.listStatus(mobFamilyPath).length);
  Assert.assertEquals(fileCount, fs.listStatus(storeArchiveDir).length);
  // Scan: the values must still resolve from the archived mob files.
  Scan scan = new Scan();
  setScan(scan, reversed, false);
  MobTestUtil.assertCellsValue(table, scan, value, 3);
}
/**
 * Asserts the cell carries its value inline, i.e. it was not stored in mob.
 */
private static void assertNotMobReference(Cell cell, byte[] row, byte[] family,
    byte[] value) throws IOException {
  byte[] actualRow = CellUtil.cloneRow(cell);
  byte[] actualFamily = CellUtil.cloneFamily(cell);
  byte[] actualValue = CellUtil.cloneValue(cell);
  Assert.assertArrayEquals(row, actualRow);
  Assert.assertArrayEquals(family, actualFamily);
  Assert.assertArrayEquals(value, actualValue);
}
/**
 * Asserts the cell is a mob reference: its stored bytes differ from the real
 * value, encode the real value's length, and name an existing mob file.
 */
private static void assertIsMobReference(Cell cell, byte[] row, byte[] family,
    byte[] value, TableName tn) throws IOException {
  Assert.assertArrayEquals(row, CellUtil.cloneRow(cell));
  Assert.assertArrayEquals(family, CellUtil.cloneFamily(cell));
  byte[] referenceValue = CellUtil.cloneValue(cell);
  Assert.assertFalse(Bytes.equals(value, referenceValue));
  // The first 4 bytes of the reference hold the real value's length.
  int valLen = Bytes.toInt(referenceValue, 0, Bytes.SIZEOF_INT);
  Assert.assertEquals(value.length, valLen);
  // The referenced file must exist under the family's mob directory.
  String fileName = MobUtils.getMobFileName(cell);
  Path mobFamilyPath = MobUtils.getMobFamilyPath(
      TEST_UTIL.getConfiguration(), tn, hcd.getNameAsString());
  FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
  Assert.assertTrue(fs.exists(new Path(mobFamilyPath, fileName)));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.functions.runtime;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.converters.StringConverter;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.google.protobuf.Empty;
import com.google.protobuf.util.JsonFormat;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import io.grpc.stub.StreamObserver;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.exporter.HTTPServer;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.pulsar.functions.instance.AuthenticationConfig;
import org.apache.pulsar.functions.instance.InstanceCache;
import org.apache.pulsar.functions.instance.InstanceConfig;
import org.apache.pulsar.functions.proto.Function.FunctionDetails;
import org.apache.pulsar.functions.proto.InstanceCommunication;
import org.apache.pulsar.functions.proto.InstanceControlGrpc;
import org.apache.pulsar.functions.secretsprovider.ClearTextSecretsProvider;
import org.apache.pulsar.functions.secretsprovider.SecretsProvider;
import org.apache.pulsar.functions.utils.Reflections;
import java.lang.reflect.Type;
import java.net.InetSocketAddress;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
/**
 * A function container implemented using java thread.
 */
@Slf4j
public class JavaInstanceMain implements AutoCloseable {
    @Parameter(names = "--function_details", description = "Function details json\n", required = true)
    protected String functionDetailsJsonString;
    @Parameter(
            names = "--jar",
            description = "Path to Jar\n",
            listConverter = StringConverter.class)
    protected String jarFile;
    @Parameter(names = "--instance_id", description = "Instance Id\n", required = true)
    protected int instanceId;
    @Parameter(names = "--function_id", description = "Function Id\n", required = true)
    protected String functionId;
    @Parameter(names = "--function_version", description = "Function Version\n", required = true)
    protected String functionVersion;
    @Parameter(names = "--pulsar_serviceurl", description = "Pulsar Service Url\n", required = true)
    protected String pulsarServiceUrl;
    @Parameter(names = "--client_auth_plugin", description = "Client auth plugin name\n")
    protected String clientAuthenticationPlugin;
    @Parameter(names = "--client_auth_params", description = "Client auth param\n")
    protected String clientAuthenticationParameters;
    @Parameter(names = "--use_tls", description = "Use tls connection\n")
    protected String useTls = Boolean.FALSE.toString();
    @Parameter(names = "--tls_allow_insecure", description = "Allow insecure tls connection\n")
    protected String tlsAllowInsecureConnection = Boolean.TRUE.toString();
    @Parameter(names = "--hostname_verification_enabled", description = "Enable hostname verification")
    protected String tlsHostNameVerificationEnabled = Boolean.FALSE.toString();
    @Parameter(names = "--tls_trust_cert_path", description = "tls trust cert file path")
    protected String tlsTrustCertFilePath;
    @Parameter(names = "--state_storage_serviceurl", description = "State Storage Service Url\n", required = false)
    protected String stateStorageServiceUrl;
    @Parameter(names = "--port", description = "Port to listen on\n", required = true)
    protected int port;
    @Parameter(names = "--metrics_port", description = "Port metrics will be exposed on\n", required = true)
    protected int metrics_port;
    @Parameter(names = "--max_buffered_tuples", description = "Maximum number of tuples to buffer\n", required = true)
    protected int maxBufferedTuples;
    @Parameter(names = "--expected_healthcheck_interval", description = "Expected interval in seconds between healtchecks", required = true)
    protected int expectedHealthCheckInterval;
    @Parameter(names = "--secrets_provider", description = "The classname of the secrets provider", required = false)
    protected String secretsProviderClassName;
    @Parameter(names = "--secrets_provider_config", description = "The config that needs to be passed to secrets provider", required = false)
    protected String secretsProviderConfig;
    @Parameter(names = "--cluster_name", description = "The name of the cluster this instance is running on", required = true)
    protected String clusterName;

    // gRPC server exposing the InstanceControl service.
    private Server server;
    private RuntimeSpawner runtimeSpawner;
    private ThreadRuntimeFactory containerFactory;
    // Written by gRPC handler threads (InstanceControlImpl) and read by the
    // scheduled health-check thread, hence volatile. Null until the control
    // service has been instantiated.
    private volatile Long lastHealthCheckTs = null;
    private HTTPServer metricsServer;
    private ScheduledFuture<?> healthCheckTimer;

    public JavaInstanceMain() { }

    /**
     * Starts the function instance: parses the function details, builds the
     * thread runtime, exposes the gRPC control service and the Prometheus
     * metrics endpoint, then blocks until the runtime exits.
     *
     * @throws Exception if the function details cannot be parsed or any of the
     *         servers fails to start
     */
    public void start() throws Exception {
        InstanceConfig instanceConfig = new InstanceConfig();
        instanceConfig.setFunctionId(functionId);
        instanceConfig.setFunctionVersion(functionVersion);
        instanceConfig.setInstanceId(instanceId);
        instanceConfig.setMaxBufferedTuples(maxBufferedTuples);
        instanceConfig.setClusterName(clusterName);
        FunctionDetails.Builder functionDetailsBuilder = FunctionDetails.newBuilder();
        // Some shells pass the json wrapped in single quotes; strip them before parsing.
        functionDetailsJsonString = stripSurroundingSingleQuotes(functionDetailsJsonString);
        JsonFormat.parser().merge(functionDetailsJsonString, functionDetailsBuilder);
        FunctionDetails functionDetails = functionDetailsBuilder.build();
        instanceConfig.setFunctionDetails(functionDetails);
        instanceConfig.setPort(port);

        Map<String, String> secretsProviderConfigMap = null;
        if (!StringUtils.isEmpty(secretsProviderConfig)) {
            secretsProviderConfig = stripSurroundingSingleQuotes(secretsProviderConfig);
            Type type = new TypeToken<Map<String, String>>() {}.getType();
            secretsProviderConfigMap = new Gson().fromJson(secretsProviderConfig, type);
        }

        // Default to the clear-text provider when none was configured.
        if (StringUtils.isEmpty(secretsProviderClassName)) {
            secretsProviderClassName = ClearTextSecretsProvider.class.getName();
        }
        SecretsProvider secretsProvider;
        try {
            secretsProvider = (SecretsProvider) Reflections.createInstance(secretsProviderClassName,
                    ClassLoader.getSystemClassLoader());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        secretsProvider.init(secretsProviderConfigMap);

        // Collector Registry for prometheus metrics
        CollectorRegistry collectorRegistry = new CollectorRegistry();

        containerFactory = new ThreadRuntimeFactory("LocalRunnerThreadGroup", pulsarServiceUrl,
                stateStorageServiceUrl,
                AuthenticationConfig.builder().clientAuthenticationPlugin(clientAuthenticationPlugin)
                        .clientAuthenticationParameters(clientAuthenticationParameters).useTls(isTrue(useTls))
                        .tlsAllowInsecureConnection(isTrue(tlsAllowInsecureConnection))
                        .tlsHostnameVerificationEnable(isTrue(tlsHostNameVerificationEnabled))
                        .tlsTrustCertsFilePath(tlsTrustCertFilePath).build(),
                secretsProvider, collectorRegistry);

        runtimeSpawner = new RuntimeSpawner(
                instanceConfig,
                jarFile,
                null, // we really dont use this in thread container
                containerFactory,
                expectedHealthCheckInterval * 1000);

        server = ServerBuilder.forPort(port)
                .addService(new InstanceControlImpl(runtimeSpawner))
                .build()
                .start();
        // Fixed typo: "JaveInstance" -> "JavaInstance".
        log.info("JavaInstance Server started, listening on " + port);

        java.lang.Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                // Use stderr here since the logger may have been reset by its JVM shutdown hook.
                try {
                    close();
                } catch (Exception ex) {
                    System.err.println(ex);
                }
            }
        });

        log.info("Starting runtimeSpawner");
        runtimeSpawner.start();

        // starting metrics server
        log.info("Starting metrics server on port {}", metrics_port);
        metricsServer = new HTTPServer(new InetSocketAddress(metrics_port), collectorRegistry, true);

        if (expectedHealthCheckInterval > 0) {
            // Use long arithmetic so large intervals cannot overflow int math.
            long intervalMs = expectedHealthCheckInterval * 1000L;
            healthCheckTimer = InstanceCache.getInstanceCache().getScheduledExecutorService().scheduleAtFixedRate(() -> {
                try {
                    // Copy to a local so a concurrent write cannot race the null
                    // check, and guard against unboxing a null Long.
                    Long lastTs = lastHealthCheckTs;
                    if (lastTs != null && System.currentTimeMillis() - lastTs > 3 * intervalMs) {
                        log.info("Haven't received health check from spawner in a while. Stopping instance...");
                        close();
                    }
                } catch (Exception e) {
                    log.error("Error occurred when checking for latest health check", e);
                }
            }, intervalMs, intervalMs, TimeUnit.MILLISECONDS);
        }

        runtimeSpawner.join();
        log.info("RuntimeSpawner quit, shutting down JavaInstance");
        close();
    }

    /**
     * Strips a single leading and/or trailing single-quote character, as added
     * by some shells around json arguments. Safe on empty strings (the old
     * inline code would throw StringIndexOutOfBoundsException).
     */
    private static String stripSurroundingSingleQuotes(String s) {
        String result = s;
        if (!result.isEmpty() && result.charAt(0) == '\'') {
            result = result.substring(1);
        }
        if (!result.isEmpty() && result.charAt(result.length() - 1) == '\'') {
            result = result.substring(0, result.length() - 1);
        }
        return result;
    }

    private static boolean isTrue(String param) {
        return Boolean.TRUE.toString().equals(param);
    }

    public static void main(String[] args) throws Exception {
        JavaInstanceMain javaInstanceMain = new JavaInstanceMain();
        JCommander jcommander = new JCommander(javaInstanceMain);
        jcommander.setProgramName("JavaInstanceMain");
        // parse args by JCommander
        jcommander.parse(args);
        javaInstanceMain.start();
    }

    /**
     * Shuts everything down; each step is guarded so a null (not-yet-started)
     * component is simply skipped.
     */
    @Override
    public void close() {
        try {
            // Use stderr here since the logger may have been reset by its JVM shutdown hook.
            if (server != null) {
                server.shutdown();
            }
            if (runtimeSpawner != null) {
                runtimeSpawner.close();
            }
            if (healthCheckTimer != null) {
                healthCheckTimer.cancel(false);
            }
            if (containerFactory != null) {
                containerFactory.close();
            }
            if (metricsServer != null) {
                metricsServer.stop();
            }
            InstanceCache.shutdown();
        } catch (Exception ex) {
            System.err.println(ex);
        }
    }

    /**
     * gRPC control service: status, metrics and health-check endpoints backed
     * by the runtime spawner. Each successful health check refreshes
     * {@link #lastHealthCheckTs}.
     */
    class InstanceControlImpl extends InstanceControlGrpc.InstanceControlImplBase {
        private final RuntimeSpawner runtimeSpawner;

        public InstanceControlImpl(RuntimeSpawner runtimeSpawner) {
            this.runtimeSpawner = runtimeSpawner;
            // Seed the health-check timestamp so the watchdog does not fire
            // before the spawner's first check arrives.
            lastHealthCheckTs = System.currentTimeMillis();
        }

        @Override
        public void getFunctionStatus(Empty request, StreamObserver<InstanceCommunication.FunctionStatus> responseObserver) {
            try {
                InstanceCommunication.FunctionStatus response =
                        runtimeSpawner.getFunctionStatus(runtimeSpawner.getInstanceConfig().getInstanceId()).get();
                responseObserver.onNext(response);
                responseObserver.onCompleted();
            } catch (Exception e) {
                log.error("Exception in JavaInstance doing getFunctionStatus", e);
                throw new RuntimeException(e);
            }
        }

        @Override
        public void getAndResetMetrics(com.google.protobuf.Empty request,
                io.grpc.stub.StreamObserver<org.apache.pulsar.functions.proto.InstanceCommunication.MetricsData> responseObserver) {
            Runtime runtime = runtimeSpawner.getRuntime();
            if (runtime != null) {
                try {
                    InstanceCommunication.MetricsData metrics = runtime.getAndResetMetrics().get();
                    responseObserver.onNext(metrics);
                    responseObserver.onCompleted();
                } catch (InterruptedException | ExecutionException e) {
                    log.error("Exception in JavaInstance doing getAndResetMetrics", e);
                    throw new RuntimeException(e);
                }
            }
        }

        @Override
        public void getMetrics(com.google.protobuf.Empty request,
                io.grpc.stub.StreamObserver<org.apache.pulsar.functions.proto.InstanceCommunication.MetricsData> responseObserver) {
            Runtime runtime = runtimeSpawner.getRuntime();
            if (runtime != null) {
                try {
                    InstanceCommunication.MetricsData metrics = runtime.getMetrics(instanceId).get();
                    responseObserver.onNext(metrics);
                    responseObserver.onCompleted();
                } catch (InterruptedException | ExecutionException e) {
                    // Fixed copy-pasted message: this is getMetrics, not getAndResetMetrics.
                    log.error("Exception in JavaInstance doing getMetrics", e);
                    throw new RuntimeException(e);
                }
            }
        }

        @Override
        public void resetMetrics(com.google.protobuf.Empty request,
                io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
            Runtime runtime = runtimeSpawner.getRuntime();
            if (runtime != null) {
                try {
                    runtime.resetMetrics().get();
                    responseObserver.onNext(com.google.protobuf.Empty.getDefaultInstance());
                    responseObserver.onCompleted();
                } catch (InterruptedException | ExecutionException e) {
                    log.error("Exception in JavaInstance doing resetMetrics", e);
                    throw new RuntimeException(e);
                }
            }
        }

        @Override
        public void healthCheck(com.google.protobuf.Empty request,
                io.grpc.stub.StreamObserver<org.apache.pulsar.functions.proto.InstanceCommunication.HealthCheckResult> responseObserver) {
            // Fixed typo: "Recieved" -> "Received".
            log.debug("Received health check request...");
            InstanceCommunication.HealthCheckResult healthCheckResult
                    = InstanceCommunication.HealthCheckResult.newBuilder().setSuccess(true).build();
            responseObserver.onNext(healthCheckResult);
            responseObserver.onCompleted();
            lastHealthCheckTs = System.currentTimeMillis();
        }
    }
}
| |
package com.example.androidgame1;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.HashMap;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.app.Activity;
import android.content.res.Resources;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.SystemClock;
import android.util.Log;
/**
 * Loads, compiles and links a textured vertex/fragment shader pair and offers
 * draw helpers that bind position, color and texture-coordinate data.
 * All GL calls must happen on the GL thread.
 */
public class SimpleShader {
    /** How many bytes per float. */
    public static final int BYTES_PER_FLOAT = 4;
    /** Size of the position data in elements. */
    public static final int NUMBER_POSITION_ELEMENTS = 3;
    /** Size of the color data in elements. */
    public static final int NUMBER_COLOR_ELEMENTS = 4;
    /** Size of the texture data in elements. */
    public static final int NUMBER_TEXTURE_ELEMENTS = 2;

    private int programHandle = -1;

    public static final String SHADER_UNIFORM_MVPMATRIX = "u_MVPMatrix";
    public static final String SHADER_UNIFORM_TEXTURE = "u_Texture";
    public static final String SHADER_ATTRIBUTE_POSITION = "a_Position";
    public static final String SHADER_ATTRIBUTE_COLOR = "a_Color";
    public static final String SHADER_ATTRIBUTE_TEXTURE = "a_TexCoord";

    public static final int VERTEX_SHADER_RESOURCE = R.raw.texturevertexshader;
    public static final int FRAGMENT_SHADER_RESOURCE = R.raw.texturefragmentshader;

    /**
     * Stores the locations of the shader variables
     */
    private HashMap<String, Integer> locations;
    private Resources resources;
    private String vertexShaderSource;
    private String fragmentShaderSource;
    private int vertexShaderHandle;
    private int fragmentShaderHandle;
    private boolean isInitialized = false;

    public SimpleShader(Resources resources) {
        this.resources = resources;
        locations = new HashMap<String, Integer>();
    }

    /**
     * Reads both shader sources, compiles them, links the program and caches
     * the uniform/attribute locations.
     *
     * @throws Exception if a shader resource cannot be read, or compiling or
     *         linking fails
     */
    public void initializeResources() throws Exception {
        vertexShaderSource = readRawResource(VERTEX_SHADER_RESOURCE, "vshader");
        fragmentShaderSource = readRawResource(FRAGMENT_SHADER_RESOURCE, "fshader");

        vertexShaderHandle = compileShader(GLES20.GL_VERTEX_SHADER, vertexShaderSource);
        if (vertexShaderHandle == 0) {
            throw new Exception("Error creating vertex shader.");
        }
        fragmentShaderHandle = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderSource);
        if (fragmentShaderHandle == 0) {
            throw new Exception("Error creating fragment shader.");
        }

        // Create a program object and store the handle to it.
        this.programHandle = GLES20.glCreateProgram();
        if (programHandle != 0) {
            GLES20.glAttachShader(programHandle, vertexShaderHandle);
            GLES20.glAttachShader(programHandle, fragmentShaderHandle);
            // Bind attribute indices before linking so they are stable.
            GLES20.glBindAttribLocation(programHandle, 0, SHADER_ATTRIBUTE_POSITION);
            GLES20.glBindAttribLocation(programHandle, 1, SHADER_ATTRIBUTE_COLOR);
            GLES20.glBindAttribLocation(programHandle, 2, SHADER_ATTRIBUTE_TEXTURE);
            // Link the two shaders together into a program.
            GLES20.glLinkProgram(programHandle);
            final int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
            // If the link failed, delete the program.
            if (linkStatus[0] == 0) {
                GLES20.glDeleteProgram(programHandle);
                programHandle = 0;
            }
        }
        if (programHandle == 0) {
            throw new Exception("Error creating program.");
        }

        // Tell OpenGL to use this program when rendering.
        GLES20.glUseProgram(programHandle);
        // Cache uniform/attribute locations for use at draw time.
        locations.put(SHADER_UNIFORM_MVPMATRIX, GLES20.glGetUniformLocation(programHandle, SHADER_UNIFORM_MVPMATRIX));
        locations.put(SHADER_UNIFORM_TEXTURE, GLES20.glGetUniformLocation(programHandle, SHADER_UNIFORM_TEXTURE));
        locations.put(SHADER_ATTRIBUTE_POSITION, GLES20.glGetAttribLocation(programHandle, SHADER_ATTRIBUTE_POSITION));
        locations.put(SHADER_ATTRIBUTE_COLOR, GLES20.glGetAttribLocation(programHandle, SHADER_ATTRIBUTE_COLOR));
        locations.put(SHADER_ATTRIBUTE_TEXTURE, GLES20.glGetAttribLocation(programHandle, SHADER_ATTRIBUTE_TEXTURE));
        isInitialized = true;
    }

    /**
     * Reads a raw resource fully into a String. The stream is always closed,
     * and the content is read with a loop rather than trusting
     * InputStream.available(), which is not guaranteed to report the full
     * resource length.
     *
     * @param resourceId raw resource to read
     * @param label      short name used in the error message ("vshader"/"fshader")
     * @throws Exception if the resource cannot be read
     */
    private String readRawResource(int resourceId, String label) throws Exception {
        InputStream inputStream = null;
        try {
            inputStream = resources.openRawResource(resourceId);
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            int read;
            while ((read = inputStream.read(chunk)) != -1) {
                buffer.write(chunk, 0, read);
            }
            return buffer.toString();
        } catch (Exception e) {
            Log.d("INIT", "Cannot read " + label + " resource!");
            throw new Exception("Cannot read " + label + " resource!");
        } finally {
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (Exception ignored) {
                    // Best-effort close; nothing useful can be done here.
                }
            }
        }
    }

    /**
     * Compiles a shader of the given type.
     *
     * @return the shader handle, or 0 if creation or compilation failed
     */
    private static int compileShader(int shaderType, String source) {
        int handle = GLES20.glCreateShader(shaderType);
        if (handle != 0) {
            GLES20.glShaderSource(handle, source);
            GLES20.glCompileShader(handle);
            final int[] compileStatus = new int[1];
            GLES20.glGetShaderiv(handle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
            // If the compilation failed, delete the shader.
            if (compileStatus[0] == 0) {
                GLES20.glDeleteShader(handle);
                handle = 0;
            }
        }
        return handle;
    }

    /** Makes this shader's program the active one for subsequent draws. */
    public void useProgram() {
        GLES20.glUseProgram(programHandle);
    }

    /**
     *
     * @param mvpMatrix
     * @param positionBuffer Buffer of triangle points, Gets bound to vertex attribute pointer related to position
     * @param colorBuffer
     * @param textureHandle GlBindTexture on this handle
     * @param textureBuffer Gets bound to shader uniform
     * @param mode
     * @param count
     */
    public void draw(float[] mvpMatrix, VertexBuffer positionBuffer, VertexBuffer colorBuffer, int textureHandle, VertexBuffer textureBuffer, int mode, int count) {
        // Pass in mvpMatrix
        GLES20.glUniformMatrix4fv(locations.get(SHADER_UNIFORM_MVPMATRIX), 1, false, mvpMatrix, 0);
        // Pass in the position information
        positionBuffer.bindBuffer();
        GLES20.glVertexAttribPointer(locations.get(SHADER_ATTRIBUTE_POSITION),
                NUMBER_POSITION_ELEMENTS,
                GLES20.GL_FLOAT, false,
                0, 0);
        GLES20.glEnableVertexAttribArray(locations.get(SHADER_ATTRIBUTE_POSITION));
        // Pass in the color information
        colorBuffer.bindBuffer();
        GLES20.glVertexAttribPointer(locations.get(SHADER_ATTRIBUTE_COLOR),
                NUMBER_COLOR_ELEMENTS,
                GLES20.GL_FLOAT, false,
                0, 0);
        GLES20.glEnableVertexAttribArray(locations.get(SHADER_ATTRIBUTE_COLOR));
        // Textures
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
        // Tell texture uniform sampler to use texture in shader
        GLES20.glUniform1i(locations.get(SHADER_UNIFORM_TEXTURE), 0);
        // Set Texture coords
        textureBuffer.bindBuffer();
        GLES20.glVertexAttribPointer(locations.get(SHADER_ATTRIBUTE_TEXTURE),
                NUMBER_TEXTURE_ELEMENTS,
                GLES20.GL_FLOAT, false,
                0, 0);
        GLES20.glEnableVertexAttribArray(locations.get(SHADER_ATTRIBUTE_TEXTURE));
        GLES20.glDrawArrays(mode, 0, count);
    }

    /**
     * Calls draw arrays after setting up shader variables
     * @param mvpMatrix
     * @param positionBuffer Remember to set position() to draw from correct indices
     * @param colorBuffer Remember to set position() to draw from correct indices
     * @param textureHandle
     * @param textureBuffer Remember to set position() to draw from correct indices
     * @param mode GLES20.GL_TRIANGLE_STRIP for example
     * @param count The number of indices to render
     * <b>Preconditions:</b> Must have called initializedResources first
     */
    public void draw(float[] mvpMatrix, FloatBuffer positionBuffer, FloatBuffer colorBuffer, int textureHandle, FloatBuffer textureBuffer, int mode, int count) {
        // Pass in mvpMatrix
        GLES20.glUniformMatrix4fv(locations.get(SHADER_UNIFORM_MVPMATRIX), 1, false, mvpMatrix, 0);
        // Pass in the position information
        positionBuffer.position(0);
        GLES20.glVertexAttribPointer(locations.get(SHADER_ATTRIBUTE_POSITION),
                NUMBER_POSITION_ELEMENTS,
                GLES20.GL_FLOAT, false,
                0, positionBuffer);
        GLES20.glEnableVertexAttribArray(locations.get(SHADER_ATTRIBUTE_POSITION));
        // Pass in the color information
        colorBuffer.position(0);
        GLES20.glVertexAttribPointer(locations.get(SHADER_ATTRIBUTE_COLOR),
                NUMBER_COLOR_ELEMENTS,
                GLES20.GL_FLOAT, false,
                0, colorBuffer);
        GLES20.glEnableVertexAttribArray(locations.get(SHADER_ATTRIBUTE_COLOR));
        // Textures
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
        // Tell texture uniform sampler to use texture in shader
        GLES20.glUniform1i(locations.get(SHADER_UNIFORM_TEXTURE), 0);
        // Set Texture coords
        textureBuffer.position(0);
        GLES20.glVertexAttribPointer(locations.get(SHADER_ATTRIBUTE_TEXTURE),
                NUMBER_TEXTURE_ELEMENTS,
                GLES20.GL_FLOAT, false,
                0, textureBuffer);
        GLES20.glEnableVertexAttribArray(locations.get(SHADER_ATTRIBUTE_TEXTURE));
        GLES20.glDrawArrays(mode, 0, count);
    }

    /** @return true once initializeResources() has completed successfully. */
    public boolean isInitialized() {
        return isInitialized;
    }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.util.filesystem;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.util.filesystem.FileSystemMap.Entry;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.Test;
public class FileSystemMapTest {
private FileSystemMap.ValueLoader<Boolean> loader = path -> true;
private FakeProjectFilesystem filesystem = new FakeProjectFilesystem();
@Test
public void testPutLeafNodeWithEmptyTrie() {
  // Inserting a leaf into an empty trie must materialize every intermediate level.
  Path foo = Paths.get("foo");
  Path fooBar = Paths.get("foo/bar");
  Path leaf = Paths.get("foo/bar/HelloWorld.java");
  FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
  fsMap.put(leaf, true);
  FileSystemMap.Entry<Boolean> fooEntry = fsMap.root.subLevels.get(foo);
  assertNotNull(fooEntry);
  FileSystemMap.Entry<Boolean> barEntry = fooEntry.subLevels.get(fooBar);
  assertNotNull(barEntry);
  FileSystemMap.Entry<Boolean> leafEntry = barEntry.subLevels.get(leaf);
  assertNotNull(leafEntry);
  assertTrue(leafEntry.getWithoutLoading());
  // The flat map indexes only the leaf.
  assertEquals(1, fsMap.map.size());
  assertTrue(fsMap.map.get(leaf).getWithoutLoading());
}
@Test
public void testPutLeafNodeWithNonEmptyTrie() {
  Path foo = Paths.get("foo");
  Path fooBar = Paths.get("foo/bar");
  Path usr = Paths.get("usr");
  Path leaf = Paths.get("foo/bar/HelloWorld.java");
  FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
  // Seed the trie with an unrelated top-level entry and verify it landed.
  fsMap.put(usr, true);
  assertNotNull(fsMap.root.subLevels.get(usr));
  // Insert the new leaf and verify both the trie and the flat map.
  fsMap.put(leaf, true);
  assertEquals(0, fsMap.root.subLevels.get(usr).size());
  Entry<Boolean> leafEntry =
      fsMap.root.subLevels.get(foo).subLevels.get(fooBar).subLevels.get(leaf);
  assertTrue(leafEntry.getWithoutLoading());
  assertEquals(2, fsMap.map.size());
  assertTrue(fsMap.map.get(leaf).getWithoutLoading());
}
@Test
public void testPutLeafNodeAlreadyInserted() {
  Path parent = Paths.get("usr");
  Path path = Paths.get("usr/HelloWorld.java");
  FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
  // Insert the entry into the map, verify resulting state.
  fsMap.put(path, true);
  FileSystemMap.Entry<Boolean> usr = fsMap.root.subLevels.get(parent);
  Entry<Boolean> helloWorld = fsMap.map.get(path);
  assertTrue(helloWorld.getWithoutLoading());
  assertSame(helloWorld, fsMap.root.subLevels.get(parent).subLevels.get(path));
  // Insert the entry again with a different value.
  fsMap.put(path, false);
  // We check that the object hasn't been reinstantiated => reference is the same.
  assertSame(fsMap.root.subLevels.get(parent), usr);
  assertSame(usr.subLevels.get(path), helloWorld);
  Entry<Boolean> helloWorldEntry = usr.subLevels.get(path);
  assertNotNull(helloWorldEntry);
  assertFalse(helloWorldEntry.getWithoutLoading());
  // Fixed: JUnit's assertEquals takes (expected, actual); the arguments were
  // swapped here, which produces a misleading failure message.
  assertEquals(1, fsMap.map.size());
  assertFalse(fsMap.map.get(path).getWithoutLoading());
}
@Test
public void testPutLeafNodePathPartiallyInserted() {
  Path usr = Paths.get("usr");
  Path leaf = Paths.get("usr/HelloWorld.java");
  FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
  // Pre-populate a sibling so the "usr" level already exists.
  fsMap.put(Paths.get("usr/OtherPath"), false);
  FileSystemMap.Entry<Boolean> usrEntry = fsMap.root.subLevels.get(usr);
  // Now insert the new leaf under the existing intermediate node.
  fsMap.put(leaf, true);
  // The intermediate entry must be reused, not reinstantiated.
  assertSame(fsMap.root.subLevels.get(usr), usrEntry);
  Entry<Boolean> leafEntry = usrEntry.subLevels.get(leaf);
  assertNotNull(leafEntry);
  assertTrue(leafEntry.getWithoutLoading());
  assertEquals(2, fsMap.map.size());
  assertTrue(fsMap.map.get(leaf).getWithoutLoading());
}
@Test
public void testRemovePathThatExistsAndIntermediateNodesAreRemovedToo() {
  Path usr = Paths.get("usr");
  Path leaf = Paths.get("usr/HelloWorld.java");
  FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
  // Insert a single leaf; removing it should also prune the empty "usr" level.
  fsMap.put(leaf, true);
  assertTrue(fsMap.root.subLevels.get(usr).subLevels.get(leaf).getWithoutLoading());
  fsMap.remove(leaf);
  boolean usrStillPresent =
      fsMap.root.subLevels != null && fsMap.root.subLevels.containsKey(usr);
  assertFalse(usrStillPresent);
  assertEquals(0, fsMap.map.size());
}
@Test
public void testRemovePathThatExistsAndIntermediateIsNotRemovedButValueIsRemoved() {
Path parent = Paths.get("usr");
Path path1 = Paths.get("usr/HelloWorld.java");
Path path2 = Paths.get("usr/Yo.java");
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
fsMap.put(parent, true);
fsMap.put(path1, true);
fsMap.put(path2, true);
fsMap.remove(path1);
assertNull(fsMap.root.subLevels.get(parent).getWithoutLoading());
assertFalse(fsMap.root.subLevels.get(parent).subLevels.containsKey(path1));
assertTrue(fsMap.root.subLevels.get(parent).subLevels.containsKey(path2));
assertEquals(2, fsMap.map.size());
assertTrue(fsMap.map.get(path2).getWithoutLoading());
}
  @Test
  public void testRemovePathThatDoesntExist() {
    Path parent = Paths.get("usr");
    Path path = Paths.get("usr/HelloWorld.java");
    FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
    fsMap.put(parent, true);
    // Remove a path that was never inserted.
    fsMap.remove(path);
    // NOTE(review): this asserts that removing a non-existent child also
    // evicts the existing "usr" entry itself, i.e. remove() invalidates the
    // whole subtree rooted at the closest ancestor. Confirm this is the
    // intended contract of FileSystemMap.remove rather than a side effect.
    assertFalse(fsMap.root.subLevels != null && fsMap.root.subLevels.containsKey(parent));
    assertEquals(0, fsMap.map.size());
    assertFalse(fsMap.map.containsKey(path));
  }
@Test
public void testRemoveIntermediateNode() {
Path parent = Paths.get("usr");
Path path1 = Paths.get("usr/HelloWorld.java");
Path path2 = Paths.get("usr/Yo.java");
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
fsMap.put(parent, true);
fsMap.put(path1, true);
fsMap.put(path2, true);
assertEquals(3, fsMap.map.size());
fsMap.remove(parent);
assertFalse(fsMap.root.subLevels != null && fsMap.root.subLevels.containsKey(parent));
assertFalse(fsMap.map.containsKey(parent));
assertFalse(fsMap.map.containsKey(path1));
assertFalse(fsMap.map.containsKey(path2));
}
@Test
public void testRemoveAll() {
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
fsMap.put(Paths.get("usr/HelloWorld.java"), true);
fsMap.put(Paths.get("usr/Yo.java"), true);
assertEquals(1, fsMap.root.size());
assertEquals(2, fsMap.map.size());
fsMap.removeAll();
assertEquals(0, fsMap.root.size());
assertEquals(0, fsMap.map.size());
}
@Test
public void testRemoveAllWithEmptyTrie() {
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
fsMap.removeAll();
assertEquals(fsMap.root.size(), 0);
assertEquals(fsMap.map.size(), 0);
}
@Test
public void testGetWithPathThatExists() {
Path path = Paths.get("usr/HelloWorld.java");
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
fsMap.put(path, true);
assertTrue(fsMap.get(path));
assertEquals(fsMap.root.size(), 1);
assertTrue(fsMap.map.get(path).getWithoutLoading());
}
@Test
public void testGetAtRootLevelWithPathThatExists() {
Path path = Paths.get("HelloWorld.java");
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
fsMap.put(path, true);
assertTrue(fsMap.get(path));
assertEquals(fsMap.root.size(), 1);
assertTrue(fsMap.map.get(path).getWithoutLoading());
}
@Test
public void testGetWithPathDoesntExist() {
Path path = Paths.get("usr/GoodbyeCruelWorld.java");
FileSystemMap<Boolean> fsMap = new FileSystemMap<>(loader, filesystem);
// Put a path that does exist.
fsMap.put(Paths.get("usr/HelloWorld.java"), true);
// Fetch a value that does not exist, see that it is loaded and cached in the map.
Boolean entry = fsMap.get(path);
assertNotNull(entry);
assertTrue(entry);
assertEquals(fsMap.map.size(), 2);
assertTrue(fsMap.map.get(path).getWithoutLoading());
}
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*/
package bpsim.impl;
import bpsim.BpsimPackage;
import bpsim.EnumParameterType;
import bpsim.ParameterValue;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.BasicFeatureMap;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Enum Parameter Type</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link bpsim.impl.EnumParameterTypeImpl#getGroup <em>Group</em>}</li>
* <li>{@link bpsim.impl.EnumParameterTypeImpl#getParameterValueGroup <em>Parameter Value Group</em>}</li>
* <li>{@link bpsim.impl.EnumParameterTypeImpl#getParameterValue <em>Parameter Value</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class EnumParameterTypeImpl extends ParameterValueImpl implements EnumParameterType {
	/**
	 * The cached value of the '{@link #getGroup() <em>Group</em>}' attribute list.
	 * <!-- begin-user-doc -->
	 * Lazily created by {@link #getGroup()}; {@code null} until first accessed.
	 * <!-- end-user-doc -->
	 * @see #getGroup()
	 * @generated
	 * @ordered
	 */
	protected FeatureMap group;
	/**
	 * <!-- begin-user-doc -->
	 * Instances are normally created through the generated EMF factory, not directly.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EnumParameterTypeImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the static EClass metadata object backing this implementation.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return BpsimPackage.Literals.ENUM_PARAMETER_TYPE;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Lazily creates and returns the backing feature map for the group attribute.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public FeatureMap getGroup() {
		if (group == null) {
			group = new BasicFeatureMap(this, BpsimPackage.ENUM_PARAMETER_TYPE__GROUP);
		}
		return group;
	}
	/**
	 * <!-- begin-user-doc -->
	 * View of {@link #getGroup()} restricted to the parameter-value group feature.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public FeatureMap getParameterValueGroup() {
		return (FeatureMap)getGroup().<FeatureMap.Entry>list(BpsimPackage.Literals.ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Typed list view over the parameter values contained in the group.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<ParameterValue> getParameterValue() {
		return getParameterValueGroup().list(BpsimPackage.Literals.ENUM_PARAMETER_TYPE__PARAMETER_VALUE);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective containment removal used by the EMF runtime when an inverse
	 * reference is cleared.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case BpsimPackage.ENUM_PARAMETER_TYPE__GROUP:
				return ((InternalEList<?>)getGroup()).basicRemove(otherEnd, msgs);
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP:
				return ((InternalEList<?>)getParameterValueGroup()).basicRemove(otherEnd, msgs);
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE:
				return ((InternalEList<?>)getParameterValue()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective getter; {@code coreType} selects the raw feature map over its wrapper.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case BpsimPackage.ENUM_PARAMETER_TYPE__GROUP:
				if (coreType) return getGroup();
				return ((FeatureMap.Internal)getGroup()).getWrapper();
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP:
				if (coreType) return getParameterValueGroup();
				return ((FeatureMap.Internal)getParameterValueGroup()).getWrapper();
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE:
				return getParameterValue();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective setter; list features are replaced wholesale (clear then addAll).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case BpsimPackage.ENUM_PARAMETER_TYPE__GROUP:
				((FeatureMap.Internal)getGroup()).set(newValue);
				return;
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP:
				((FeatureMap.Internal)getParameterValueGroup()).set(newValue);
				return;
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE:
				getParameterValue().clear();
				getParameterValue().addAll((Collection<? extends ParameterValue>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective unset; restores each feature to its default (empty) state.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case BpsimPackage.ENUM_PARAMETER_TYPE__GROUP:
				getGroup().clear();
				return;
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP:
				getParameterValueGroup().clear();
				return;
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE:
				getParameterValue().clear();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective check whether a feature differs from its default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case BpsimPackage.ENUM_PARAMETER_TYPE__GROUP:
				return group != null && !group.isEmpty();
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE_GROUP:
				return !getParameterValueGroup().isEmpty();
			case BpsimPackage.ENUM_PARAMETER_TYPE__PARAMETER_VALUE:
				return !getParameterValue().isEmpty();
		}
		return super.eIsSet(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Debug representation including the raw group feature map.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (group: ");
		result.append(group);
		result.append(')');
		return result.toString();
	}
} //EnumParameterTypeImpl
| |
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.drivers.ciena.c5170.netconf;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.onlab.packet.ChassisId;
import org.onosproject.drivers.netconf.TemplateManager;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Link;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.behaviour.LinkDiscovery;
import org.onosproject.net.device.DefaultDeviceDescription;
import org.onosproject.net.device.DefaultPortDescription;
import org.onosproject.net.device.DefaultPortStatistics;
import org.onosproject.net.device.DeviceDescription;
import org.onosproject.net.device.DeviceDescriptionDiscovery;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.device.PortDescription;
import org.onosproject.net.device.PortStatistics;
import org.onosproject.net.device.PortStatisticsDiscovery;
import org.onosproject.net.driver.AbstractHandlerBehaviour;
import org.onosproject.net.link.DefaultLinkDescription;
import org.onosproject.net.link.LinkDescription;
import org.onosproject.netconf.NetconfController;
import org.onosproject.netconf.NetconfException;
import org.onosproject.netconf.NetconfSession;
import org.slf4j.Logger;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Discovers the ports from a Ciena WaveServer Rest device.
*/
public class Ciena5170DeviceDescription extends AbstractHandlerBehaviour
implements DeviceDescriptionDiscovery, PortStatisticsDiscovery, LinkDiscovery {
static final TemplateManager TEMPLATE_MANAGER = new TemplateManager();
static {
TEMPLATE_MANAGER.load(Ciena5170DeviceDescription.class, "/templates/requests/%s.j2", "systemInfo",
"chassis-mac", "softwareVersion", "logicalPorts", "port-stats", "link-info");
}
private static final Logger log = getLogger(Ciena5170DeviceDescription.class);
@Override
public DeviceDescription discoverDeviceDetails() {
DeviceId deviceId = handler().data().deviceId();
NetconfController controller = checkNotNull(handler().get(NetconfController.class));
NetconfSession session = controller.getDevicesMap().get(handler().data().deviceId()).getSession();
try {
Node systemInfo = TEMPLATE_MANAGER.doRequest(session, "systemInfo");
Node chassisMac = TEMPLATE_MANAGER.doRequest(session, "chassis-mac");
Node softwareVersion = TEMPLATE_MANAGER.doRequest(session, "softwareVersion");
XPath xp = XPathFactory.newInstance().newXPath();
String mac = xp.evaluate("lldp-global-operational/chassis-id/text()", chassisMac).toUpperCase();
return new DefaultDeviceDescription(deviceId.uri(), Device.Type.SWITCH, "Ciena",
xp.evaluate("components/component/name/text()", systemInfo),
xp.evaluate("software-state/running-package/package-version/text()", softwareVersion), mac,
new ChassisId(Long.valueOf(mac, 16)));
} catch (XPathExpressionException | NetconfException ne) {
log.error("failed to query system info from device {} : {}", handler().data().deviceId(), ne.getMessage(),
ne);
}
return new DefaultDeviceDescription(deviceId.uri(), Device.Type.SWITCH, "Ciena", "5170", "Unknown", "Unknown",
new ChassisId());
}
/**
* Convert the specification of port speed in the of of #unit, i.e. {@10G} to MB
* as represented by a Long.
*
* @param ps
* specification of port speed
* @return port speed as MBs
*/
private Long portSpeedToLong(String ps) {
String value = ps.trim();
StringBuilder digits = new StringBuilder();
String unit = "";
for (int i = 0; i < value.length(); i += 1) {
final char c = value.charAt(i);
if (Character.isDigit(c)) {
digits.append(c);
} else {
unit = value.substring(i).toUpperCase().trim();
break;
}
}
switch (unit) {
case "G":
case "GB":
return Long.valueOf(digits.toString()) * 1000;
case "M":
case "MB":
default:
return Long.valueOf(digits.toString());
}
}
@Override
public List<PortDescription> discoverPortDetails() {
List<PortDescription> ports = new ArrayList<PortDescription>();
DeviceId deviceId = handler().data().deviceId();
NetconfController controller = checkNotNull(handler().get(NetconfController.class));
if (controller == null || controller.getDevicesMap() == null
|| controller.getDevicesMap().get(deviceId) == null) {
log.warn("NETCONF session to device {} not yet established, will be retried", deviceId);
return ports;
}
NetconfSession session = controller.getDevicesMap().get(deviceId).getSession();
try {
Node logicalPorts = TEMPLATE_MANAGER.doRequest(session, "logicalPorts");
XPath xp = XPathFactory.newInstance().newXPath();
NodeList nl = (NodeList) xp.evaluate("interfaces/interface/config", logicalPorts, XPathConstants.NODESET);
int count = nl.getLength();
Node node;
for (int i = 0; i < count; i += 1) {
node = nl.item(i);
if (xp.evaluate("type/text()", node).equals("ettp")) {
ports.add(DefaultPortDescription.builder()
.withPortNumber(PortNumber.portNumber(xp.evaluate("name/text()", node)))
.isEnabled(Boolean.valueOf(xp.evaluate("admin-status/text()", node)))
.portSpeed(portSpeedToLong(xp.evaluate("port-speed/text()", node))).type(Port.Type.PACKET)
.build());
}
}
} catch (NetconfException | XPathExpressionException e) {
log.error("Unable to retrieve port information for device {}, {}", deviceId, e);
}
return ports;
}
@Override
public Collection<PortStatistics> discoverPortStatistics() {
List<PortStatistics> stats = new ArrayList<PortStatistics>();
DeviceId deviceId = handler().data().deviceId();
NetconfController controller = checkNotNull(handler().get(NetconfController.class));
if (controller == null || controller.getDevicesMap() == null
|| controller.getDevicesMap().get(deviceId) == null) {
log.warn("NETCONF session to device {} not yet established, will be retried", deviceId);
return stats;
}
NetconfSession session = controller.getDevicesMap().get(deviceId).getSession();
try {
Node data = TEMPLATE_MANAGER.doRequest(session, "port-stats");
XPath xp = XPathFactory.newInstance().newXPath();
NodeList interfaces = (NodeList) xp.evaluate("interfaces/interface", data, XPathConstants.NODESET);
int count = interfaces.getLength();
for (int i = 0; i < count; i += 1) {
Node iface = interfaces.item(i);
if (xp.evaluate("config/type/text()", iface).equals("ettp")) {
stats.add(DefaultPortStatistics.builder().setDeviceId(deviceId)
.setPort(PortNumber.portNumber(xp.evaluate("name/text()", iface)))
.setBytesReceived(Long.valueOf(xp.evaluate("state/counters/in-octets/text()", iface)))
.setBytesSent(Long.valueOf(xp.evaluate("state/counters/out-octets/text()", iface)))
.setPacketsReceived(Long.valueOf(xp.evaluate("state/counters/in-pkts/text()", iface)))
.setPacketsSent(Long.valueOf(xp.evaluate("state/counters/out-pkts/text()", iface)))
.setPacketsTxErrors(Long.valueOf(xp.evaluate("state/counters/out-errors/text()", iface)))
.setPacketsRxErrors(Long.valueOf(xp.evaluate("state/counters/in-errors/text()", iface)))
.build());
}
}
} catch (NetconfException | XPathExpressionException e) {
log.error("Unable to retrieve port statistics for device {}, {}", deviceId, e);
}
return stats;
}
@Override
public Set<LinkDescription> getLinks() {
log.debug("LINKS CHECKING ...");
Set<LinkDescription> links = new HashSet<LinkDescription>();
DeviceId deviceId = handler().data().deviceId();
NetconfController controller = checkNotNull(handler().get(NetconfController.class));
if (controller == null || controller.getDevicesMap() == null
|| controller.getDevicesMap().get(deviceId) == null) {
log.warn("NETCONF session to device {} not yet established, cannot load links, will be retried", deviceId);
return links;
}
NetconfSession session = controller.getDevicesMap().get(deviceId).getSession();
try {
DeviceService deviceService = this.handler().get(DeviceService.class);
Iterable<Device> devices = deviceService.getAvailableDevices();
Map<String, Device> lookup = new HashMap<String, Device>();
for (Device d : devices) {
lookup.put(d.chassisId().toString().toUpperCase(), d);
}
Node logicalPorts = TEMPLATE_MANAGER.doRequest(session, "link-info");
XPath xp = XPathFactory.newInstance().newXPath();
NodeList ifaces = (NodeList) xp.evaluate("interfaces/interface", logicalPorts, XPathConstants.NODESET);
int count = ifaces.getLength();
Node iface;
Node destChassis;
for (int i = 0; i < count; i += 1) {
iface = ifaces.item(i);
if (xp.evaluate("config/type/text()", iface).equals("ettp")) {
destChassis = (Node) xp.evaluate("state/lldp-remote-port-operational/chassis-id", iface,
XPathConstants.NODE);
if (destChassis != null) {
Device dest = lookup.get(destChassis.getTextContent().toUpperCase());
if (dest != null) {
links.add(new DefaultLinkDescription(
new ConnectPoint(dest.id(),
PortNumber.portNumber(xp.evaluate(
"state/lldp-remote-port-operational/port-id/text()", iface))),
new ConnectPoint(deviceId,
PortNumber.portNumber(xp.evaluate("name/text()", iface))),
Link.Type.DIRECT, true));
} else {
log.warn("DEST chassisID not found: chassis {} port {}",
destChassis.getTextContent().toUpperCase(), xp.evaluate("name/text()", iface));
}
} else {
log.debug("NO LINK for {}", xp.evaluate("name/text()", iface));
}
}
}
} catch (NetconfException | XPathExpressionException e) {
log.error("Unable to retrieve links for device {}, {}", deviceId, e);
}
return links;
}
}
| |
/*
* Copyright (c) 2012-2020 Snowflake Computing Inc. All rights reserved.
*/
package net.snowflake.client.core;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import com.sun.jna.Memory;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.PointerByReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import net.snowflake.client.ConditionalIgnoreRule;
import net.snowflake.client.RunningNotOnLinux;
import net.snowflake.client.RunningNotOnWinMac;
import org.junit.Rule;
import org.junit.Test;
/** In-memory mock of the Windows advapi32 credential API. */
class MockAdvapi32Lib implements SecureStorageWindowsManager.Advapi32Lib {
  /**
   * Looks up a credential by target name in the in-memory store.
   *
   * @return true when the credential exists; false otherwise (pcred is set to null in that case)
   */
  @Override
  public boolean CredReadW(String targetName, int type, int flags, PointerByReference pcred) {
    Pointer target = MockWindowsCredentialManager.getCredential(targetName);
    pcred.setValue(target);
    // Fixed: collapsed the redundant `cond ? false : true` ternary.
    return target != null;
  }

  /** Stores the credential in the in-memory store; always reports success. */
  @Override
  public boolean CredWriteW(
      SecureStorageWindowsManager.SecureStorageWindowsCredential cred, int flags) {
    MockWindowsCredentialManager.addCredential(cred);
    return true;
  }

  /** Deletes the credential from the in-memory store; always reports success. */
  @Override
  public boolean CredDeleteW(String targetName, int type, int flags) {
    MockWindowsCredentialManager.deleteCredential(targetName);
    return true;
  }

  /** No-op: the mock store owns no native memory that needs freeing. */
  @Override
  public void CredFree(Pointer cred) {
    // mock function
  }
}
/** In-memory mock of the macOS Security framework keychain API. */
class MockSecurityLib implements SecureStorageAppleManager.SecurityLib {
  /**
   * Looks up a keychain item by service/account name. Output arrays are only
   * written when the caller supplied a non-empty array for them.
   */
  @Override
  public int SecKeychainFindGenericPassword(
      Pointer keychainOrArray,
      int serviceNameLength,
      byte[] serviceName,
      int accountNameLength,
      byte[] accountName,
      int[] passwordLength,
      Pointer[] passwordData,
      Pointer[] itemRef) {
    MockMacKeychainManager.MockMacKeychainItem item =
        MockMacKeychainManager.getCredential(serviceName, accountName);
    if (item == null) {
      return SecureStorageAppleManager.SecurityLib.ERR_SEC_ITEM_NOT_FOUND;
    }
    boolean wantsLength = passwordLength != null && passwordLength.length > 0;
    if (wantsLength) {
      passwordLength[0] = item.getLength();
    }
    boolean wantsData = passwordData != null && passwordData.length > 0;
    if (wantsData) {
      passwordData[0] = item.getPointer();
    }
    boolean wantsRef = itemRef != null && itemRef.length > 0;
    if (wantsRef) {
      itemRef[0] = item.getPointer();
    }
    return SecureStorageAppleManager.SecurityLib.ERR_SEC_SUCCESS;
  }

  /** Adds a credential to the mock keychain; always succeeds. */
  @Override
  public int SecKeychainAddGenericPassword(
      Pointer keychain,
      int serviceNameLength,
      byte[] serviceName,
      int accountNameLength,
      byte[] accountName,
      int passwordLength,
      byte[] passwordData,
      Pointer[] itemRef) {
    MockMacKeychainManager.addCredential(serviceName, accountName, passwordLength, passwordData);
    return SecureStorageAppleManager.SecurityLib.ERR_SEC_SUCCESS;
  }

  /** Replaces the payload of an existing keychain item; always succeeds. */
  @Override
  public int SecKeychainItemModifyContent(
      Pointer itemRef, Pointer attrList, int length, byte[] data) {
    MockMacKeychainManager.replaceCredential(itemRef, length, data);
    return SecureStorageAppleManager.SecurityLib.ERR_SEC_SUCCESS;
  }

  /** Removes a keychain item by its pointer; always succeeds. */
  @Override
  public int SecKeychainItemDelete(Pointer itemRef) {
    MockMacKeychainManager.deleteCredential(itemRef);
    return SecureStorageAppleManager.SecurityLib.ERR_SEC_SUCCESS;
  }

  /** No-op: the mock keychain owns no native memory that needs freeing. */
  @Override
  public int SecKeychainItemFreeContent(Pointer[] attrList, Pointer data) {
    // mock function
    return SecureStorageAppleManager.SecurityLib.ERR_SEC_SUCCESS;
  }
}
/** In-memory stand-in for the Windows Credential Manager, keyed by target name. */
class MockWindowsCredentialManager {
  private static final Map<String, Pointer> credentialManager = new HashMap<>();

  /** Serializes the credential to native memory and records its pointer under its target name. */
  static void addCredential(SecureStorageWindowsManager.SecureStorageWindowsCredential cred) {
    cred.write();
    String key = cred.TargetName.toString();
    credentialManager.put(key, cred.getPointer());
  }

  /** Returns the stored pointer for the target, or null when absent. */
  static Pointer getCredential(String target) {
    return credentialManager.get(target);
  }

  /** Removes the credential for the target; no-op when absent. */
  static void deleteCredential(String target) {
    credentialManager.remove(target);
  }
}
/**
 * In-memory stand-in for the macOS keychain: a two-level map of
 * service (target) name -> account (user) name -> stored item.
 */
class MockMacKeychainManager {
  private static final Map<String, Map<String, MockMacKeychainItem>> keychainManager =
      new HashMap<>();

  /** Stores (or overwrites) a credential under target/user. */
  static void addCredential(byte[] targetName, byte[] userName, int credLength, byte[] credData) {
    String target = new String(targetName);
    String user = new String(userName);
    // Fixed: chain computeIfAbsent directly. The old code named the lambda
    // parameter "newMap" even though it receives the key, and then performed
    // a redundant get() to fetch the map computeIfAbsent already returns.
    keychainManager
        .computeIfAbsent(target, key -> new HashMap<>())
        .put(user, buildMacKeychainItem(credLength, credData));
  }

  /** Returns the stored item for target/user, or null when either level is absent. */
  static MockMacKeychainItem getCredential(byte[] targetName, byte[] userName) {
    Map<String, MockMacKeychainItem> targetMap = keychainManager.get(new String(targetName));
    return targetMap != null ? targetMap.get(new String(userName)) : null;
  }

  /** Replaces the item identified by its pointer with a freshly built one; no-op when not found. */
  static void replaceCredential(Pointer itemRef, int credLength, byte[] credData) {
    for (Map.Entry<String, Map<String, MockMacKeychainItem>> elem : keychainManager.entrySet()) {
      Map<String, MockMacKeychainItem> targetMap = elem.getValue();
      for (Map.Entry<String, MockMacKeychainItem> elem0 : targetMap.entrySet()) {
        // Items are matched by pointer identity (compared via toString, since
        // Pointer does not override equals usefully for this mock).
        if (elem0.getValue().getPointer().toString().equals(itemRef.toString())) {
          targetMap.put(elem0.getKey(), buildMacKeychainItem(credLength, credData));
          return;
        }
      }
    }
  }

  /** Deletes the item identified by its pointer; no-op when not found. */
  static void deleteCredential(Pointer itemRef) {
    Iterator<Map.Entry<String, Map<String, MockMacKeychainItem>>> targetIter =
        keychainManager.entrySet().iterator();
    while (targetIter.hasNext()) {
      Map.Entry<String, Map<String, MockMacKeychainItem>> targetMap = targetIter.next();
      Iterator<Map.Entry<String, MockMacKeychainItem>> userIter =
          targetMap.getValue().entrySet().iterator();
      while (userIter.hasNext()) {
        Map.Entry<String, MockMacKeychainItem> cred = userIter.next();
        if (cred.getValue().getPointer().toString().equals(itemRef.toString())) {
          // Remove via the iterator to avoid ConcurrentModificationException.
          userIter.remove();
          return;
        }
      }
    }
  }

  /** Copies the credential bytes into freshly allocated native memory. */
  static MockMacKeychainItem buildMacKeychainItem(int itemLength, byte[] itemData) {
    Memory itemMem = new Memory(itemLength);
    itemMem.write(0, itemData, 0, itemLength);
    return new MockMacKeychainItem(itemLength, itemMem);
  }

  /** A stored keychain payload: its byte length and a pointer to native memory. */
  static class MockMacKeychainItem {
    // Length in bytes of the stored credential data.
    private int length;
    // Native memory holding the credential bytes.
    private Pointer pointer;

    MockMacKeychainItem(int length, Pointer pointer) {
      this.length = length;
      this.pointer = pointer;
    }

    void setLength(int length) {
      this.length = length;
    }

    int getLength() {
      return length;
    }

    void setPointer(Pointer pointer) {
      this.pointer = pointer;
    }

    Pointer getPointer() {
      return pointer;
    }
  }
}
/**
 * Exercises the platform-specific SecureStorageManager implementations
 * (Windows/macOS via mocked native libraries, Linux via the real file cache).
 */
public class SecureStorageManagerTest {
  // This is required to use ConditionalIgnore annotation
  @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule();
  private static final String host = "fakeHost";
  private static final String user = "fakeUser";
  private static final String idToken = "fakeIdToken";
  private static final String idToken0 = "fakeIdToken0";
  private static final String mfaToken = "fakeMfaToken";
  private static final String ID_TOKEN = "ID_TOKEN";
  private static final String MFA_TOKEN = "MFATOKEN";
  @Test
  @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnWinMac.class)
  public void testLoadNativeLibrary() {
    // Make sure the loading of native platform library won't break.
    if (Constants.getOS() == Constants.OS.MAC) {
      // NOTE(review): asserting nullValue() here looks inconsistent with the
      // Windows branch below, which asserts notNullValue(). Confirm whether
      // the Security lib instance is genuinely expected to be null on macOS
      // in this test environment, or whether this should be notNullValue().
      assertThat(SecureStorageAppleManager.SecurityLibManager.getInstance(), is(nullValue()));
    }
    if (Constants.getOS() == Constants.OS.WINDOWS) {
      assertThat(SecureStorageWindowsManager.Advapi32LibManager.getInstance(), is(notNullValue()));
    }
  }
  @Test
  public void testWindowsManager() {
    // Swap in the mock advapi32 so no real Windows credential store is touched.
    SecureStorageWindowsManager.Advapi32LibManager.setInstance(new MockAdvapi32Lib());
    SecureStorageManager manager = SecureStorageWindowsManager.builder();
    testBody(manager);
    SecureStorageWindowsManager.Advapi32LibManager.resetInstance();
  }
  @Test
  public void testMacManager() {
    // Swap in the mock Security lib so no real macOS keychain is touched.
    SecureStorageAppleManager.SecurityLibManager.setInstance(new MockSecurityLib());
    SecureStorageManager manager = SecureStorageAppleManager.builder();
    testBody(manager);
    SecureStorageAppleManager.SecurityLibManager.resetInstance();
  }
  @Test
  @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnLinux.class)
  public void testLinuxManager() {
    // Linux uses the real local-file cache implementation (no native mock).
    SecureStorageManager manager = SecureStorageLinuxManager.getInstance();
    testBody(manager);
    testDeleteLinux(manager);
  }
  // Shared create/read/update/delete round-trip for any manager implementation.
  private void testBody(SecureStorageManager manager) {
    // first delete possible old credential
    assertThat(
        manager.deleteCredential(host, user, ID_TOKEN),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    // ensure no old credential exists
    assertThat(manager.getCredential(host, user, ID_TOKEN), is(nullValue()));
    // set token
    assertThat(
        manager.setCredential(host, user, ID_TOKEN, idToken),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    assertThat(manager.getCredential(host, user, ID_TOKEN), equalTo(idToken));
    // update token
    assertThat(
        manager.setCredential(host, user, ID_TOKEN, idToken0),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    assertThat(manager.getCredential(host, user, ID_TOKEN), equalTo(idToken0));
    // delete token
    assertThat(
        manager.deleteCredential(host, user, ID_TOKEN),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    assertThat(manager.getCredential(host, user, ID_TOKEN), is(nullValue()));
  }
  private void testDeleteLinux(SecureStorageManager manager) {
    // The old delete api of local file cache on Linux was to remove the whole file, where we can't
    // partially remove some credentials
    // This test aims to test the new delete api
    // first create two credentials
    assertThat(
        manager.setCredential(host, user, ID_TOKEN, idToken),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    assertThat(
        manager.setCredential(host, user, MFA_TOKEN, mfaToken),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    assertThat(manager.getCredential(host, user, ID_TOKEN), equalTo(idToken));
    assertThat(manager.getCredential(host, user, MFA_TOKEN), equalTo(mfaToken));
    // delete one of them
    assertThat(
        manager.deleteCredential(host, user, ID_TOKEN),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
    assertThat(manager.getCredential(host, user, ID_TOKEN), equalTo(null));
    // check another one
    assertThat(manager.getCredential(host, user, MFA_TOKEN), equalTo(mfaToken));
    assertThat(
        manager.deleteCredential(host, user, MFA_TOKEN),
        equalTo(SecureStorageManager.SecureStorageStatus.SUCCESS));
  }
}
| |
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Comparator;
import org.apache.lucene.search.LeafFieldComparator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.TimSorter;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
/**
* Sorts documents of a given index by returning a permutation on the document
* IDs.
* @lucene.experimental
*/
final class Sorter {
final Sort sort;
  /** Creates a new Sorter to sort the index with {@code sort} */
  Sorter(Sort sort) {
    // Relevance scores are not available at index-sorting time, so a Sort
    // that requires them cannot be honored here.
    if (sort.needsScores()) {
      throw new IllegalArgumentException("Cannot sort an index with a Sort that refers to the relevance score");
    }
    this.sort = sort;
  }
  /**
   * A permutation of doc IDs. For every document ID between <tt>0</tt> and
   * {@link IndexReader#maxDoc()}, <code>oldToNew(newToOld(docID))</code> must
   * return <code>docID</code>.
   */
  static abstract class DocMap {
    /** Given a doc ID from the original index, return its ordinal in the
     * sorted index. */
    abstract int oldToNew(int docID);
    /** Given the ordinal of a doc ID, return its doc ID in the original index. */
    abstract int newToOld(int docID);
    /** Return the number of documents in this map. This must be equal to the
     * {@link org.apache.lucene.index.LeafReader#maxDoc() number of documents} of the
     * {@link org.apache.lucene.index.LeafReader} which is sorted. */
    abstract int size();
  }
/** Check consistency of a {@link DocMap}, useful for assertions. */
static boolean isConsistent(DocMap docMap) {
final int maxDoc = docMap.size();
for (int i = 0; i < maxDoc; ++i) {
final int newID = docMap.oldToNew(i);
final int oldID = docMap.newToOld(newID);
assert newID >= 0 && newID < maxDoc : "doc IDs must be in [0-" + maxDoc + "[, got " + newID;
assert i == oldID : "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID;
if (i != oldID || newID < 0 || newID >= maxDoc) {
return false;
}
}
return true;
}
/** A comparator of doc IDs. */
static abstract class DocComparator {
/** Compare docID1 against docID2. The contract for the return value is the
* same as {@link Comparator#compare(Object, Object)}. */
public abstract int compare(int docID1, int docID2);
}
private static final class DocValueSorter extends TimSorter {
private final int[] docs;
private final Sorter.DocComparator comparator;
private final int[] tmp;
DocValueSorter(int[] docs, Sorter.DocComparator comparator) {
super(docs.length / 64);
this.docs = docs;
this.comparator = comparator;
tmp = new int[docs.length / 64];
}
@Override
protected int compare(int i, int j) {
return comparator.compare(docs[i], docs[j]);
}
@Override
protected void swap(int i, int j) {
int tmpDoc = docs[i];
docs[i] = docs[j];
docs[j] = tmpDoc;
}
@Override
protected void copy(int src, int dest) {
docs[dest] = docs[src];
}
@Override
protected void save(int i, int len) {
System.arraycopy(docs, i, tmp, 0, len);
}
@Override
protected void restore(int i, int j) {
docs[j] = tmp[i];
}
@Override
protected int compareSaved(int i, int j) {
return comparator.compare(tmp[i], docs[j]);
}
}
/** Computes the old-to-new permutation over the given comparator. */
private static Sorter.DocMap sort(final int maxDoc, DocComparator comparator) {
// check if the index is sorted
boolean sorted = true;
for (int i = 1; i < maxDoc; ++i) {
if (comparator.compare(i-1, i) > 0) {
sorted = false;
break;
}
}
if (sorted) {
return null;
}
// sort doc IDs
final int[] docs = new int[maxDoc];
for (int i = 0; i < maxDoc; i++) {
docs[i] = i;
}
DocValueSorter sorter = new DocValueSorter(docs, comparator);
// It can be common to sort a reader, add docs, sort it again, ... and in
// that case timSort can save a lot of time
sorter.sort(0, docs.length); // docs is now the newToOld mapping
// The reason why we use MonotonicAppendingLongBuffer here is that it
// wastes very little memory if the index is in random order but can save
// a lot of memory if the index is already "almost" sorted
final PackedLongValues.Builder newToOldBuilder = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
for (int i = 0; i < maxDoc; ++i) {
newToOldBuilder.add(docs[i]);
}
final PackedLongValues newToOld = newToOldBuilder.build();
for (int i = 0; i < maxDoc; ++i) {
docs[(int) newToOld.get(i)] = i;
} // docs is now the oldToNew mapping
final PackedLongValues.Builder oldToNewBuilder = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
for (int i = 0; i < maxDoc; ++i) {
oldToNewBuilder.add(docs[i]);
}
final PackedLongValues oldToNew = oldToNewBuilder.build();
return new Sorter.DocMap() {
@Override
public int oldToNew(int docID) {
return (int) oldToNew.get(docID);
}
@Override
public int newToOld(int docID) {
return (int) newToOld.get(docID);
}
@Override
public int size() {
return maxDoc;
}
};
}
/**
* Returns a mapping from the old document ID to its new location in the
* sorted index. Implementations can use the auxiliary
* {@link #sort(int, DocComparator)} to compute the old-to-new permutation
* given a list of documents and their corresponding values.
* <p>
* A return value of <tt>null</tt> is allowed and means that
* <code>reader</code> is already sorted.
* <p>
* <b>NOTE:</b> deleted documents are expected to appear in the mapping as
* well, they will however be marked as deleted in the sorted view.
*/
DocMap sort(LeafReader reader) throws IOException {
SortField fields[] = sort.getSort();
final int reverseMul[] = new int[fields.length];
final LeafFieldComparator comparators[] = new LeafFieldComparator[fields.length];
for (int i = 0; i < fields.length; i++) {
reverseMul[i] = fields[i].getReverse() ? -1 : 1;
comparators[i] = fields[i].getComparator(1, i).getLeafComparator(reader.getContext());
comparators[i].setScorer(FAKESCORER);
}
final DocComparator comparator = new DocComparator() {
@Override
public int compare(int docID1, int docID2) {
try {
for (int i = 0; i < comparators.length; i++) {
// TODO: would be better if copy() didnt cause a term lookup in TermOrdVal & co,
// the segments are always the same here...
comparators[i].copy(0, docID1);
comparators[i].setBottom(0);
int comp = reverseMul[i] * comparators[i].compareBottom(docID2);
if (comp != 0) {
return comp;
}
}
return Integer.compare(docID1, docID2); // docid order tiebreak
} catch (IOException e) {
throw new RuntimeException(e);
}
}
};
return sort(reader.maxDoc(), comparator);
}
/**
* Returns the identifier of this {@link Sorter}.
* <p>This identifier is similar to {@link Object#hashCode()} and should be
* chosen so that two instances of this class that sort documents likewise
* will have the same identifier. On the contrary, this identifier should be
* different on different {@link Sort sorts}.
*/
public String getID() {
return sort.toString();
}
@Override
public String toString() {
return getID();
}
static final Scorer FAKESCORER = new Scorer(null) {
float score;
int doc = -1;
int freq = 1;
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
@Override
public int freq() throws IOException {
return freq;
}
@Override
public float score() throws IOException {
return score;
}
};
}
| |
/*
* Copyright 2015 Jeff Hain
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.jadecy.cmd;
import java.util.Arrays;
import net.jadecy.utils.MemPrintStream;
public class JdcmComp_DEPSTO_Test extends AbstractJdcmTezt {

    //--------------------------------------------------------------------------
    // PUBLIC METHODS
    //--------------------------------------------------------------------------

    /*
     * Basic computations.
     */

    public void test_classes() {
        runAndCheck(
                "-depsto " + C4N,
                "",
                "depending classes and their byte size:",
                C1N + ": " + C1BS,
                C2N + ": " + C2BS,
                C3N + ": " + C3BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 6",
                "",
                "total byte size: " + (C1BS + C2BS + C3BS + C5BS + C6BS + C7BS));
    }

    public void test_classes_incl() {
        runAndCheck(
                "-depsto " + C4N + " -incl",
                "",
                "depending classes and their byte size:",
                C1N + ": " + C1BS,
                C2N + ": " + C2BS,
                C3N + ": " + C3BS,
                C4N + ": " + C4BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 7",
                "",
                "total byte size: " + (C1BS + C2BS + C3BS + C4BS + C5BS + C6BS + C7BS));
    }

    public void test_classes_steps() {
        runAndCheck(
                "-depsto " + C4N + " -steps",
                "",
                "depending classes and their byte size:",
                "",
                "step 0:",
                "",
                "step 1:",
                C3N + ": " + C3BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                "",
                "step 2:",
                C1N + ": " + C1BS,
                C2N + ": " + C2BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 6",
                "",
                "total byte size: " + (C1BS + C2BS + C3BS + C5BS + C6BS + C7BS));
    }

    public void test_packages() {
        runAndCheck(
                "-depsto " + P2N + " -packages",
                "",
                "depending packages and their byte size:",
                P1N + ": " + P1BS,
                "",
                "number of depending packages: 1",
                "",
                "total byte size: " + P1BS);
    }

    public void test_packages_incl() {
        runAndCheck(
                "-depsto " + P2N + " -packages" + " -incl",
                "",
                "depending packages and their byte size:",
                P1N + ": " + P1BS,
                P2N + ": " + P2BS,
                "",
                "number of depending packages: 2",
                "",
                "total byte size: " + (P1BS + P2BS));
    }

    /*
     * Advanced computations (only testing with classes).
     */

    public void test_classes_minusto() {
        runAndCheck(
                "-depsto " + C4N + " -minusto " + C3N,
                "",
                "depending classes and their byte size:",
                C2N + ": " + C2BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 4",
                "",
                "total byte size: " + (C2BS + C5BS + C6BS + C7BS));
    }

    public void test_classes_incl_minusto() {
        runAndCheck(
                "-depsto " + C4N + " -incl" + " -minusto " + C3N,
                "",
                "depending classes and their byte size:",
                C2N + ": " + C2BS,
                C4N + ": " + C4BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 5",
                "",
                "total byte size: " + (C2BS + C4BS + C5BS + C6BS + C7BS));
    }

    public void test_classes_from() {
        runAndCheck(
                "-depsto " + C4N + " -from " + P2N + ".*",
                "",
                "depending classes and their byte size:",
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 3",
                "",
                "total byte size: " + (C5BS + C6BS + C7BS));
    }

    public void test_classes_incl_from() {
        runAndCheck(
                "-depsto " + C4N + " -incl" + " -from " + P2N + ".*",
                "",
                "depending classes and their byte size:",
                C4N + ": " + C4BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 4",
                "",
                "total byte size: " + (C4BS + C5BS + C6BS + C7BS));
    }

    public void test_classes_minusto_from() {
        runAndCheck(
                "-depsto " + C4N + " -minusto " + C3N + " -from " + P2N + ".*",
                "",
                "depending classes and their byte size:",
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 3",
                "",
                "total byte size: " + (C5BS + C6BS + C7BS));
    }

    public void test_classes_incl_minusto_from() {
        runAndCheck(
                "-depsto " + C4N + " -incl" + " -minusto " + C3N + " -from " + P2N + ".*",
                "",
                "depending classes and their byte size:",
                C4N + ": " + C4BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS,
                "",
                "number of depending classes: 4",
                "",
                "total byte size: " + (C4BS + C5BS + C6BS + C7BS));
    }

    public void test_classes_maxsteps_noOrHugeLimit() {
        for (int maxSteps : new int[]{-1,Integer.MAX_VALUE}) {
            runAndCheck(
                    "-depsto " + C4N + " -maxsteps " + maxSteps,
                    "",
                    "depending classes and their byte size:",
                    C1N + ": " + C1BS,
                    C2N + ": " + C2BS,
                    C3N + ": " + C3BS,
                    C5N + ": " + C5BS,
                    C6N + ": " + C6BS,
                    C7N + ": " + C7BS,
                    "",
                    "number of depending classes: 6",
                    "",
                    "total byte size: " + (C1BS + C2BS + C3BS + C5BS + C6BS + C7BS));
        }
    }

    public void test_classes_maxsteps_0() {
        runAndCheck(
                "-depsto " + C4N + " -maxsteps 0",
                "",
                "depending classes and their byte size:",
                "",
                "number of depending classes: 0",
                "",
                "total byte size: 0");
    }

    public void test_classes_incl_maxsteps_0() {
        runAndCheck(
                "-depsto " + C4N + " -incl" + " -maxsteps 0",
                "",
                "depending classes and their byte size:",
                C4N + ": " + C4BS,
                "",
                "number of depending classes: 1",
                "",
                "total byte size: " + C4BS);
    }

    public void test_classes_maxsteps_1() {
        runAndCheck(
                "-depsto " + C4N + " -maxsteps 1",
                "",
                "depending classes and their byte size:",
                C3N + ": " + C3BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                "",
                "number of depending classes: 3",
                "",
                "total byte size: " + (C3BS + C5BS + C6BS));
    }

    public void test_classes_allCptOptions() {
        runAndCheck(
                "-depsto " + C4N + " -incl" + " -from " + P1N + ".*" + " -minusto " + C1N + " -steps" + " -maxsteps -1",
                "",
                "depending classes and their byte size:",
                "",
                "step 0:",
                C4N + ": " + C4BS,
                "",
                "step 1:",
                C3N + ": " + C3BS,
                "",
                "step 2:",
                C2N + ": " + C2BS,
                "",
                "number of depending classes: 3",
                "",
                "total byte size: " + (C4BS + C3BS + C2BS));
    }

    public void test_packages_allCptOptions() {
        runAndCheck(
                "-depsto " + P2N + " -packages" + " -incl" + " -from " + P1N + ".*" + " -minusto " + C1N + " -steps" + " -maxsteps -1",
                "",
                "depending packages and their byte size:",
                "",
                "step 0:",
                P2N + ": " + P2BS,
                "",
                "step 1:",
                P1N + ": " + P1BS,
                "",
                "number of depending packages: 2",
                "",
                "total byte size: " + (P2BS + P1BS));
    }

    /*
     * Output options.
     */

    public void test_classes_nostats() {
        runAndCheck(
                "-depsto " + C4N + " -nostats",
                "",
                "depending classes and their byte size:",
                C1N + ": " + C1BS,
                C2N + ": " + C2BS,
                C3N + ": " + C3BS,
                C5N + ": " + C5BS,
                C6N + ": " + C6BS,
                C7N + ": " + C7BS);
    }

    public void test_packages_nostats() {
        runAndCheck(
                "-depsto " + P2N + " -packages" + " -nostats",
                "",
                "depending packages and their byte size:",
                P1N + ": " + P1BS);
    }

    public void test_classes_onlystats() {
        for (boolean steps : new boolean[]{false,true}) {
            final String command;
            if (steps) {
                // -steps doesn't count here.
                command = "-depsto " + C4N + " -onlystats" + " -steps";
            } else {
                command = "-depsto " + C4N + " -onlystats";
            }
            runAndCheck(
                    command,
                    "",
                    "number of depending classes: 6",
                    "",
                    "total byte size: " + (C1BS + C2BS + C3BS + C5BS + C6BS + C7BS));
        }
    }

    public void test_packages_onlystats() {
        for (boolean steps : new boolean[]{false,true}) {
            final String command;
            if (steps) {
                // -steps doesn't count here.
                command = "-depsto " + P2N + " -packages" + " -onlystats" + " -steps";
            } else {
                command = "-depsto " + P2N + " -packages" + " -onlystats";
            }
            runAndCheck(
                    command,
                    "",
                    "number of depending packages: 1",
                    "",
                    "total byte size: " + P1BS);
        }
    }

    //--------------------------------------------------------------------------
    // PRIVATE METHODS
    //--------------------------------------------------------------------------

    /**
     * Builds the args from the command line, runs them with virtual deps,
     * and checks that the output is the "args: ..." line followed by the
     * given expected lines.
     *
     * @param commandLine the command line passed to getArgs.
     * @param expectedLinesAfterArgsLine expected output lines, not counting
     *        the leading "args: ..." line which is derived from the args.
     */
    private void runAndCheck(String commandLine, String... expectedLinesAfterArgsLine) {
        final String[] args = getArgs(commandLine);
        final MemPrintStream defaultStream = new MemPrintStream();
        runArgsWithVirtualDeps(args, defaultStream);
        final String[] expectedLines = new String[1 + expectedLinesAfterArgsLine.length];
        expectedLines[0] = "args: " + Arrays.toString(args);
        System.arraycopy(expectedLinesAfterArgsLine, 0, expectedLines, 1, expectedLinesAfterArgsLine.length);
        checkEqual(expectedLines, defaultStream);
    }
}
| |
package com.thinkbiganalytics.util;
/*-
* #%L
* thinkbig-nifi-core-processors
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.hive.util.HiveUtils;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.stream.Collectors;

import javax.annotation.Nonnull;
/**
* Represents a partition specification for a target table
*/
public class PartitionSpec implements Cloneable {

    private static final Logger log = LoggerFactory.getLogger(PartitionSpec.class);

    // Ordered partition keys of the target table; empty means non-partitioned.
    private List<PartitionKey> keys;

    /**
     * Creates a partition specification from explicit keys.
     *
     * @param partitionKeys the partition keys in partition-column order
     */
    public PartitionSpec(PartitionKey... partitionKeys) {
        super();
        keys = Arrays.asList(partitionKeys);
    }

    /**
     * Creates partition keys from a string specification in format: field|type|formula\n
     * format, e.g.
     * year|string|year(hired)
     * month|int|month(hired)
     * country|int|country
     *
     * @param spec newline-separated key specification; null or empty yields no keys
     * @throws RuntimeException if the specification cannot be read
     **/
    public PartitionSpec(String spec) {
        super();
        // ArrayList replaces the legacy Vector; no concurrent access occurs here.
        keys = new ArrayList<>();
        if (!StringUtils.isEmpty(spec)) {
            try (BufferedReader br = new BufferedReader(new StringReader(spec))) {
                String line;
                while ((line = br.readLine()) != null) {
                    PartitionKey partitionKey = PartitionKey.createFromString(line);
                    if (partitionKey != null) {
                        keys.add(partitionKey);
                    }
                }
            } catch (IOException e) {
                // Chain the cause so the original failure is not lost.
                throw new RuntimeException("Failed to process specification [" + spec + "]", e);
            }
        }
    }

    /**
     * Demonstrates how the generated clauses combine into a merge-style insert.
     */
    public static void main(String[] args) {
        PartitionKey key1 = new PartitionKey("country", "string", "country");
        PartitionKey key2 = new PartitionKey("year", "int", "year(hired)");
        PartitionKey key3 = new PartitionKey("month", "int", "month(hired)");
        PartitionSpec spec = new PartitionSpec(key1, key2, key3);
        String[] selectFields = new String[]{"id", "name", "company", "zip", "phone", "email", "hired"};
        String selectSQL = StringUtils.join(selectFields, ",");
        String[] values = new String[]{"USA", "2015", "4"};
        String targetSqlWhereClause = spec.toTargetSQLWhere(values);
        String sourceSqlWhereClause = spec.toSourceSQLWhere(values);
        String partitionClause = spec.toPartitionSpec(values);
        /*
        insert overwrite table employee partition (year=2015,month=10,country='USA')
        select id, name, company, zip, phone, email, hired from employee_feed
        where year(hired)=2015 and month(hired)=10 and country='USA'
        union distinct
        select id, name, company, zip, phone, email, hired from employee
        where year=2015 and month=10 and country='USA'
        */
        String targetTable = "employee";
        String sourceTable = "employee_feed";
        // StringBuilder over StringBuffer: single-threaded, no synchronization needed.
        StringBuilder sb = new StringBuilder();
        sb.append("insert overwrite table ").append(targetTable).append(" ")
            .append(partitionClause)
            .append(" select ").append(selectSQL)
            .append(" from ").append(sourceTable).append(" ")
            .append(" where ")
            .append(sourceSqlWhereClause)
            .append(" union distinct ")
            .append(" select ").append(selectSQL)
            .append(" from ").append(targetTable).append(" ")
            .append(" where ")
            .append(targetSqlWhereClause);
        log.info(sb.toString());
    }

    /**
     * Returns the set of partition key (column) names.
     */
    public Set<String> getKeyNames() {
        HashSet<String> keySet = new HashSet<>();
        for (PartitionKey partitionKey : keys) {
            keySet.add(partitionKey.getKey());
        }
        return keySet;
    }

    /**
     * Returns true if the target table has no partition keys.
     */
    public boolean isNonPartitioned() {
        return keys.isEmpty();
    }

    /**
     * Generates a where clause against the target table using the partition keys
     *
     * @param values partition values, one per key, in key order
     */
    public String toTargetSQLWhere(String[] values) {
        String[] parts = new String[keys.size()];
        for (int i = 0; i < keys.size(); i++) {
            parts[i] = keys.get(i).toTargetSQLWhere(values[i]);
        }
        return StringUtils.join(parts, " and ");
    }

    /**
     * Generates a where clause against the source table using the partition formulas.
     *
     * @param values partition values, one per key, in key order
     */
    public String toSourceSQLWhere(String[] values) {
        String[] parts = new String[keys.size()];
        for (int i = 0; i < keys.size(); i++) {
            parts[i] = keys.get(i).toSourceSQLWhere(values[i]);
        }
        return StringUtils.join(parts, " and ");
    }

    /**
     * Generates a static Hive partition clause, e.g. {@code partition (year=2015,month=10)}.
     *
     * @param values partition values, one per key, in key order
     */
    public String toPartitionSpec(String[] values) {
        String[] parts = new String[keys.size()];
        for (int i = 0; i < keys.size(); i++) {
            parts[i] = keys.get(i).toPartitionNameValue(values[i]);
        }
        return "partition (" + StringUtils.join(parts, ",") + ")";
    }

    /**
     * Generates a dynamic Hive partition clause from the aliased key names.
     */
    public String toDynamicPartitionSpec() {
        // Delegates directly; the previous implementation also built an
        // identical array that was never used.
        return "partition (" + toPartitionSelectSQL() + ")";
    }

    /**
     * Generates the comma-separated list of aliased partition key names.
     */
    public String toPartitionSelectSQL() {
        String[] parts = new String[keys.size()];
        for (int i = 0; i < keys.size(); i++) {
            parts[i] = keys.get(i).getKeyWithAlias();
        }
        return StringUtils.join(parts, ",");
    }

    /**
     * Generates the select-list fragment pairing each partition formula with its key name.
     */
    public String toDynamicSelectSQLSpec() {
        String[] parts = new String[keys.size()];
        for (int i = 0; i < keys.size(); i++) {
            parts[i] = keys.get(i).getFormulaWithAlias() + " " + keys.get(i).getKeyForSql();
        }
        return StringUtils.join(parts, ",");
    }

    /**
     * Generates a select statement that will find all unique data partitions in the source table.
     *
     * @param sourceSchema the schema or database name of the source table
     * @param sourceTable the source table name
     * @param feedPartitionValue the source processing partition value
     */
    public String toDistinctSelectSQL(@Nonnull final String sourceSchema, @Nonnull final String sourceTable, @Nonnull final String feedPartitionValue) {
        final String keysWithAliases = keys.stream()
            .map(PartitionKey::getFormulaWithAlias)
            .collect(Collectors.joining(", "));
        return "select " + keysWithAliases + ", count(0) as `tb_cnt` from " + HiveUtils.quoteIdentifier(sourceSchema, sourceTable) +
               " where `processing_dttm` = " + HiveUtils.quoteString(feedPartitionValue) +
               " group by " + keysWithAliases;
    }

    /**
     * Returns a copy of this spec whose keys are qualified with the given table alias.
     *
     * @param alias the table alias to apply to each key
     */
    public PartitionSpec newForAlias(String alias) {
        return new PartitionSpec(PartitionKey.partitionKeysForTableAlias(this.keys.toArray(new PartitionKey[0]), alias));
    }
}
| |
/**
* See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Board of Regents of the University of Wisconsin System
* licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.microsoft.exchange;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.bind.JAXBElement;
import javax.xml.datatype.XMLGregorianCalendar;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.util.CollectionUtils;
import com.microsoft.exchange.messages.CreateFolder;
import com.microsoft.exchange.messages.CreateItem;
import com.microsoft.exchange.messages.DeleteFolder;
import com.microsoft.exchange.messages.DeleteItem;
import com.microsoft.exchange.messages.EmptyFolder;
import com.microsoft.exchange.messages.FindFolder;
import com.microsoft.exchange.messages.FindItem;
import com.microsoft.exchange.messages.GetFolder;
import com.microsoft.exchange.messages.GetItem;
import com.microsoft.exchange.messages.GetServerTimeZones;
import com.microsoft.exchange.messages.GetUserAvailabilityRequest;
import com.microsoft.exchange.messages.GetUserConfiguration;
import com.microsoft.exchange.messages.ResolveNames;
import com.microsoft.exchange.messages.UpdateFolder;
import com.microsoft.exchange.types.AcceptItemType;
import com.microsoft.exchange.types.AffectedTaskOccurrencesType;
import com.microsoft.exchange.types.AndType;
import com.microsoft.exchange.types.ArrayOfCalendarPermissionsType;
import com.microsoft.exchange.types.ArrayOfMailboxData;
import com.microsoft.exchange.types.BaseFolderIdType;
import com.microsoft.exchange.types.BaseFolderType;
import com.microsoft.exchange.types.BaseItemIdType;
import com.microsoft.exchange.types.BasePathToElementType;
import com.microsoft.exchange.types.BodyTypeResponseType;
import com.microsoft.exchange.types.CalendarFolderType;
import com.microsoft.exchange.types.CalendarItemCreateOrDeleteOperationType;
import com.microsoft.exchange.types.CalendarPermissionLevelType;
import com.microsoft.exchange.types.CalendarPermissionReadAccessType;
import com.microsoft.exchange.types.CalendarPermissionSetType;
import com.microsoft.exchange.types.CalendarPermissionType;
import com.microsoft.exchange.types.CalendarViewType;
import com.microsoft.exchange.types.ConstantValueType;
import com.microsoft.exchange.types.DefaultShapeNamesType;
import com.microsoft.exchange.types.DeleteFolderFieldType;
import com.microsoft.exchange.types.DisposalType;
import com.microsoft.exchange.types.DistinguishedFolderIdNameType;
import com.microsoft.exchange.types.DistinguishedFolderIdType;
import com.microsoft.exchange.types.ExtendedPropertyType;
import com.microsoft.exchange.types.FieldOrderType;
import com.microsoft.exchange.types.FieldURIOrConstantType;
import com.microsoft.exchange.types.FolderChangeDescriptionType;
import com.microsoft.exchange.types.FolderChangeType;
import com.microsoft.exchange.types.FolderIdType;
import com.microsoft.exchange.types.FolderQueryTraversalType;
import com.microsoft.exchange.types.FolderResponseShapeType;
import com.microsoft.exchange.types.FolderType;
import com.microsoft.exchange.types.FreeBusyViewOptions;
import com.microsoft.exchange.types.IndexBasePointType;
import com.microsoft.exchange.types.IndexedPageViewType;
import com.microsoft.exchange.types.IsGreaterThanOrEqualToType;
import com.microsoft.exchange.types.IsLessThanOrEqualToType;
import com.microsoft.exchange.types.ItemIdType;
import com.microsoft.exchange.types.ItemQueryTraversalType;
import com.microsoft.exchange.types.ItemResponseShapeType;
import com.microsoft.exchange.types.ItemType;
import com.microsoft.exchange.types.MailboxData;
import com.microsoft.exchange.types.MessageDispositionType;
import com.microsoft.exchange.types.NonEmptyArrayOfAllItemsType;
import com.microsoft.exchange.types.NonEmptyArrayOfBaseFolderIdsType;
import com.microsoft.exchange.types.NonEmptyArrayOfBaseItemIdsType;
import com.microsoft.exchange.types.NonEmptyArrayOfFieldOrdersType;
import com.microsoft.exchange.types.NonEmptyArrayOfFolderChangeDescriptionsType;
import com.microsoft.exchange.types.NonEmptyArrayOfFolderChangesType;
import com.microsoft.exchange.types.NonEmptyArrayOfFoldersType;
import com.microsoft.exchange.types.NonEmptyArrayOfPathsToElementType;
import com.microsoft.exchange.types.ObjectFactory;
import com.microsoft.exchange.types.PathToExtendedFieldType;
import com.microsoft.exchange.types.PathToUnindexedFieldType;
import com.microsoft.exchange.types.PermissionActionType;
import com.microsoft.exchange.types.ResolveNamesSearchScopeType;
import com.microsoft.exchange.types.RestrictionType;
import com.microsoft.exchange.types.SearchFolderTraversalType;
import com.microsoft.exchange.types.SearchFolderType;
import com.microsoft.exchange.types.SearchParametersType;
import com.microsoft.exchange.types.SetFolderFieldType;
import com.microsoft.exchange.types.SortDirectionType;
import com.microsoft.exchange.types.SuggestionsViewOptions;
import com.microsoft.exchange.types.TargetFolderIdType;
import com.microsoft.exchange.types.TasksFolderType;
import com.microsoft.exchange.types.TimeZone;
import com.microsoft.exchange.types.UnindexedFieldURIType;
import com.microsoft.exchange.types.UserConfigurationNameType;
import com.microsoft.exchange.types.UserIdType;
public class ExchangeRequestFactory {
    protected final Log log = LogFactory.getLog(this.getClass());

    // NOTE(review): presumably the starting offset for indexed page views —
    // confirm against the paging methods that reference it.
    private static final int INIT_BASE_OFFSET = 0;

    /**
     * @see <a href="http://msdn.microsoft.com/en-us/library/office/jj945066(v=exchg.150).aspx">EWS throttling in Exchange</a>
     */
    private static final int EWSFindCountLimit = 1000;

    // Default page size used by the FindItem requests built by this factory;
    // adjustable via setMaxFindItems.
    private int maxFindItems = 500;
    /**
     * Returns the maximum number of items requested per find call.
     *
     * @return the current limit (defaults to 500)
     */
    public int getMaxFindItems() {
        return maxFindItems;
    }
    /**
     * Sets the maximum number of items requested per find call.
     *
     * No validation is performed; NOTE(review): values above the EWS
     * throttling limit (EWSFindCountLimit = 1000) may be rejected by the
     * server — confirm desired behavior.
     *
     * @param maxFindItems the new limit
     */
    public void setMaxFindItems(int maxFindItems) {
        this.maxFindItems = maxFindItems;
    }
public EmptyFolder constructEmptyFolder(boolean deleteSubFolders, DisposalType disposalType, Collection<? extends BaseFolderIdType> folderIds){
EmptyFolder request = new EmptyFolder();
request.setDeleteSubFolders(deleteSubFolders);
request.setDeleteType(disposalType);
NonEmptyArrayOfBaseFolderIdsType nonEmptyArrayOfBaseFolderIds = new NonEmptyArrayOfBaseFolderIdsType();
nonEmptyArrayOfBaseFolderIds.getFolderIdsAndDistinguishedFolderIds().addAll(folderIds);
request.setFolderIds(nonEmptyArrayOfBaseFolderIds);
return request;
}
/**
 * Builds a GetServerTimeZones request, optionally restricted to a single
 * time-zone id.
 *
 * @param tzid time-zone id to request; when blank, all zones are returned
 * @param returnFullTimeZoneData whether full time-zone definitions are returned
 * @return the populated request
 */
public GetServerTimeZones constructGetServerTimeZones(String tzid, boolean returnFullTimeZoneData){
    GetServerTimeZones request = new GetServerTimeZones();
    if(StringUtils.isNotBlank(tzid)){
        // JAXB lazily initializes List getters but NOT object-valued fields:
        // request.getIds() is null until set, so the original
        // request.getIds().getIds().add(tzid) threw an NPE here.
        // NOTE(review): array type name taken from the EWS schema — confirm
        // against the generated binding.
        NonEmptyArrayOfTimeZoneIdType ids = new NonEmptyArrayOfTimeZoneIdType();
        ids.getIds().add(tzid);
        request.setIds(ids);
    }
    request.setReturnFullTimeZoneData(returnFullTimeZoneData);
    return request;
}
/**
 * Resolves an alias against Active Directory contacts, requesting full
 * contact data with all properties.
 */
public ResolveNames constructResolveNames(String alias) {
    return constructResolveNames(alias, true,
            ResolveNamesSearchScopeType.ACTIVE_DIRECTORY_CONTACTS,
            DefaultShapeNamesType.ALL_PROPERTIES);
}
/**
 * Builds a ResolveNames request for the given unresolved entry.
 *
 * @param alias the unresolved entry to look up
 * @param returnFullContactData whether full contact data is returned
 * @param searchScope where the resolution is performed
 * @param contactDataShape shape of the returned contact data
 */
private ResolveNames constructResolveNames(String alias, boolean returnFullContactData, ResolveNamesSearchScopeType searchScope, DefaultShapeNamesType contactDataShape) {
    ResolveNames request = new ResolveNames();
    request.setUnresolvedEntry(alias);
    request.setSearchScope(searchScope);
    request.setReturnFullContactData(returnFullContactData);
    request.setContactDataShape(contactDataShape);
    return request;
}
/**
 * Builds a GetUserConfiguration request for the named configuration object
 * stored under the given well-known folder.
 *
 * @param name name of the user configuration object
 * @param distinguishedFolderIdType well-known folder that holds it
 */
public GetUserConfiguration constructGetUserConfiguration(String name,
        DistinguishedFolderIdType distinguishedFolderIdType) {
    UserConfigurationNameType configName = new UserConfigurationNameType();
    configName.setName(name);
    configName.setDistinguishedFolderId(distinguishedFolderIdType);
    GetUserConfiguration request = new GetUserConfiguration();
    request.setUserConfigurationName(configName);
    return request;
}
/**
 * Builds a GetUserAvailability request; each optional section (mailboxes,
 * free/busy view, suggestions view, time zone) is only set when supplied.
 *
 * @param mailboxData mailboxes to query; skipped when null or empty
 * @param freeBusyView free/busy view options; skipped when null
 * @param suggestionsView meeting-suggestion view options; skipped when null
 * @param timeZone time zone for the request; skipped when null
 */
public GetUserAvailabilityRequest constructGetUserAvailabilityRequest(Collection<? extends MailboxData> mailboxData, FreeBusyViewOptions freeBusyView, SuggestionsViewOptions suggestionsView, TimeZone timeZone){
    GetUserAvailabilityRequest request = new GetUserAvailabilityRequest();
    if (!CollectionUtils.isEmpty(mailboxData)) {
        ArrayOfMailboxData mailboxes = new ArrayOfMailboxData();
        mailboxes.getMailboxDatas().addAll(mailboxData);
        request.setMailboxDataArray(mailboxes);
    }
    if (suggestionsView != null) {
        request.setSuggestionsViewOptions(suggestionsView);
    }
    if (freeBusyView != null) {
        request.setFreeBusyViewOptions(freeBusyView);
    }
    if (timeZone != null) {
        request.setTimeZone(timeZone);
    }
    return request;
}
/**
 * Creates calendar items under the given folder (or the well-known CALENDAR
 * folder when no usable folder id is supplied).
 */
public CreateItem constructCreateCalendarItem(
        List<? extends ItemType> list,
        CalendarItemCreateOrDeleteOperationType sendTo,
        FolderIdType folderIdType) {
    return constructCreateItem(list, DistinguishedFolderIdNameType.CALENDAR,
            sendTo, folderIdType);
}
/**
 * Creates calendar items from a set, sending invitations per {@code sendTo}.
 */
public CreateItem constructCreateCalendarItem(Set<? extends ItemType> set,
        CalendarItemCreateOrDeleteOperationType sendTo,
        FolderIdType folderIdType) {
    return constructCreateItem(set, DistinguishedFolderIdNameType.CALENDAR,
            sendTo, folderIdType);
}
/**
 * Creates calendar items from a set without specifying an invitation mode.
 */
public CreateItem constructCreateCalendarItem(Set<? extends ItemType> set,
        FolderIdType folderIdType) {
    return constructCreateItem(set, DistinguishedFolderIdNameType.CALENDAR,
            null, folderIdType);
}
/**
 * Creates task items under the given folder (or the well-known TASKS folder
 * when no usable folder id is supplied).
 */
public CreateItem constructCreateTaskItem(List<? extends ItemType> list,
        FolderIdType folderIdType) {
    return constructCreateItem(list, DistinguishedFolderIdNameType.TASKS,
            null, folderIdType);
}
/**
 * Creates message items that are sent immediately (SEND_ONLY disposition,
 * invitations sent to all) under the INBOX parent.
 */
public CreateItem constructCreateMessageItem(List<? extends ItemType> list,
        FolderIdType folderIdType) {
    return constructCreateItem(list, DistinguishedFolderIdNameType.INBOX,
            MessageDispositionType.SEND_ONLY,
            CalendarItemCreateOrDeleteOperationType.SEND_ONLY_TO_ALL,
            folderIdType);
}
/**
 * Builds a CreateItem request that accepts the referenced meeting request,
 * sending the acceptance and saving a copy.
 *
 * @param itemId id of the meeting request being accepted
 */
public CreateItem constructCreateAcceptItem(ItemIdType itemId) {
    AcceptItemType acceptance = new AcceptItemType();
    acceptance.setReferenceItemId(itemId);
    NonEmptyArrayOfAllItemsType items = new NonEmptyArrayOfAllItemsType();
    items.getItemsAndMessagesAndCalendarItems().add(acceptance);
    CreateItem request = new CreateItem();
    request.setMessageDisposition(MessageDispositionType.SEND_AND_SAVE_COPY);
    request.setItems(items);
    return request;
}
/**
 * Core CreateItem builder shared by the public item-creation helpers.
 * Saves the new items in {@code folderIdType} when it carries a usable id,
 * otherwise in the well-known {@code parent} folder.
 *
 * @param list items to create
 * @param parent fallback well-known parent folder
 * @param dispositionType message disposition; only applies to e-mail messages
 * @param sendTo meeting-invitation handling; skipped when null
 * @param folderIdType explicit target folder; may be null or blank
 */
private CreateItem constructCreateItem(List<? extends ItemType> list,
        DistinguishedFolderIdNameType parent,
        MessageDispositionType dispositionType,
        CalendarItemCreateOrDeleteOperationType sendTo,
        FolderIdType folderIdType) {
    NonEmptyArrayOfAllItemsType items = new NonEmptyArrayOfAllItemsType();
    items.getItemsAndMessagesAndCalendarItems().addAll(list);
    CreateItem request = new CreateItem();
    request.setItems(items);
    // When set on CreateItem, MessageDispositionType only applies to e-mail messages.
    if (dispositionType != null) {
        request.setMessageDisposition(dispositionType);
    }
    TargetFolderIdType targetFolderId = new TargetFolderIdType();
    // isBlank also covers null, so a single check suffices.
    if (folderIdType == null || StringUtils.isBlank(folderIdType.getId())) {
        DistinguishedFolderIdType parentDistinguishedFolderId = getParentDistinguishedFolderId(parent);
        log.debug("calendarId is null or empty. tagetFolderId = "
                + parentDistinguishedFolderId);
        targetFolderId.setDistinguishedFolderId(parentDistinguishedFolderId);
    } else {
        // Deliberately drop the changeKey: it may be stale after a prior operation.
        FolderIdType freshId = new FolderIdType();
        freshId.setId(folderIdType.getId());
        targetFolderId.setFolderId(freshId);
    }
    request.setSavedItemFolderId(targetFolderId);
    if (sendTo != null) {
        request.setSendMeetingInvitations(sendTo);
    }
    return request;
}
/**
 * Set-based variant of the core CreateItem builder.
 * The original body was a 30-line verbatim copy of the List overload; it now
 * delegates so the folder-resolution and disposition logic lives in one place.
 *
 * @param list items to create
 * @param parent fallback well-known parent folder
 * @param dispositionType message disposition; only applies to e-mail messages
 * @param sendTo meeting-invitation handling; skipped when null
 * @param folderIdType explicit target folder; may be null or blank
 */
private CreateItem constructCreateItem(Set<? extends ItemType> list,
        DistinguishedFolderIdNameType parent,
        MessageDispositionType dispositionType,
        CalendarItemCreateOrDeleteOperationType sendTo,
        FolderIdType folderIdType) {
    // Copy preserves the set's iteration order into the request array.
    return constructCreateItem(new ArrayList<ItemType>(list), parent,
            dispositionType, sendTo, folderIdType);
}
/** List variant without an explicit message disposition. */
private CreateItem constructCreateItem(List<? extends ItemType> list,
        DistinguishedFolderIdNameType parent,
        CalendarItemCreateOrDeleteOperationType sendTo,
        FolderIdType folderIdType) {
    return constructCreateItem(list, parent, null, sendTo, folderIdType);
}
/** Set variant without an explicit message disposition. */
private CreateItem constructCreateItem(Set<? extends ItemType> set,
        DistinguishedFolderIdNameType parent,
        CalendarItemCreateOrDeleteOperationType sendTo,
        FolderIdType folderIdType) {
    return constructCreateItem(set, parent, null, sendTo, folderIdType);
}
/**
 * FOLDER OPERATIONS
 *
 * Builds a CreateFolder request that installs a deep-traversal search folder
 * under the well-known SEARCHFOLDERS root.
 *
 * @param displayName display name for the new search folder
 * @param searchRoot well-known folder the search starts from
 * @param restriction filter evaluated by the search folder
 */
public CreateFolder constructCreateSearchFolder(String displayName,
        DistinguishedFolderIdNameType searchRoot,
        RestrictionType restriction) {
    // Base of the search: the supplied well-known root, searched recursively.
    NonEmptyArrayOfBaseFolderIdsType baseFolderIds = new NonEmptyArrayOfBaseFolderIdsType();
    baseFolderIds.getFolderIdsAndDistinguishedFolderIds().add(
            getParentDistinguishedFolderId(searchRoot));
    SearchParametersType searchParameters = new SearchParametersType();
    searchParameters.setTraversal(SearchFolderTraversalType.DEEP);
    searchParameters.setBaseFolderIds(baseFolderIds);
    searchParameters.setRestriction(restriction);
    // The search folder itself: parameters plus display name.
    SearchFolderType searchFolder = new SearchFolderType();
    searchFolder.setSearchParameters(searchParameters);
    searchFolder.setDisplayName(displayName);
    // Wrap it in the CreateFolder request under SEARCHFOLDERS.
    NonEmptyArrayOfFoldersType folders = new NonEmptyArrayOfFoldersType();
    folders.getFoldersAndCalendarFoldersAndContactsFolders().add(searchFolder);
    CreateFolder createFolder = new CreateFolder();
    createFolder.setFolders(folders);
    createFolder.setParentFolderId(
            getParentTargetFolderId(DistinguishedFolderIdNameType.SEARCHFOLDERS));
    return createFolder;
}
/**
 * Builds a CreateFolder request for a new calendar folder under the
 * well-known CALENDAR parent, optionally tagged with extended properties.
 *
 * @param displayName non-blank display name for the new folder
 * @param exProps extended properties to attach; skipped when null or empty
 * @throws IllegalArgumentException if displayName is blank
 */
public CreateFolder constructCreateCalendarFolder(String displayName,
        Collection<ExtendedPropertyType> exProps) {
    Validate.isTrue(StringUtils.isNotBlank(displayName),"displayName argument cannot be empty");
    BaseFolderType calendarFolder = new CalendarFolderType();
    calendarFolder.setDisplayName(displayName);
    if (!CollectionUtils.isEmpty(exProps)) {
        calendarFolder.getExtendedProperties().addAll(exProps);
    }
    return constructCreateFolder(DistinguishedFolderIdNameType.CALENDAR, calendarFolder);
}
/**
 * Builds a CreateFolder request for a new tasks folder under the well-known
 * TASKS parent, optionally tagged with extended properties.
 *
 * @param displayName non-blank display name for the new folder
 * @param exProps extended properties to attach; skipped when null or empty
 * @throws IllegalArgumentException if displayName is blank
 */
public CreateFolder constructCreateTaskFolder(String displayName,
        Collection<ExtendedPropertyType> exProps) {
    Validate.isTrue(StringUtils.isNotBlank(displayName),"displayName argument cannot be empty");
    BaseFolderType taskFolder = new TasksFolderType();
    taskFolder.setDisplayName(displayName);
    if (!CollectionUtils.isEmpty(exProps)) {
        taskFolder.getExtendedProperties().addAll(exProps);
    }
    return constructCreateFolder(DistinguishedFolderIdNameType.TASKS, taskFolder);
}
/**
 * Attempt to create a calendar group, i.e. a folder that may contain a number
 * of sub calendars. I don't think you can create a calendar group using EWS.
 *
 * @param upn primary SMTP address granted owner permissions on the folder
 * @param displayName display name for the group folder
 * @deprecated EWS does not appear to support calendar-group creation
 */
@Deprecated
public CreateFolder constructCreateCalendarFolderGroup(String upn,
        String displayName) {
    // Owner-level permission entry for the supplied user.
    UserIdType owner = new UserIdType();
    owner.setPrimarySmtpAddress(upn);
    CalendarPermissionType ownerPermission = new CalendarPermissionType();
    ownerPermission.setUserId(owner);
    ownerPermission.setCanCreateSubFolders(true);
    ownerPermission.setIsFolderOwner(true);
    ownerPermission.setIsFolderContact(true);
    ownerPermission.setIsFolderVisible(true);
    ownerPermission.setEditItems(PermissionActionType.ALL);
    ownerPermission.setDeleteItems(PermissionActionType.ALL);
    ownerPermission.setReadItems(CalendarPermissionReadAccessType.FULL_DETAILS);
    ownerPermission.setCalendarPermissionLevel(CalendarPermissionLevelType.OWNER);
    // Wrap the single entry into the folder's permission set.
    ArrayOfCalendarPermissionsType permissionArray = new ArrayOfCalendarPermissionsType();
    permissionArray.getCalendarPermissions().add(ownerPermission);
    CalendarPermissionSetType permissionSet = new CalendarPermissionSetType();
    permissionSet.setCalendarPermissions(permissionArray);
    CalendarFolderType calendarFolder = new CalendarFolderType();
    calendarFolder.setDisplayName(displayName);
    calendarFolder.setPermissionSet(permissionSet);
    return constructCreateFolder(DistinguishedFolderIdNameType.CALENDAR,
            calendarFolder);
}
/** Single-folder convenience wrapper for the collection-based overload. */
public CreateFolder constructCreateFolder(
        DistinguishedFolderIdNameType parent, BaseFolderType folder) {
    return constructCreateFolder(parent, Collections.singletonList(folder));
}
/**
 * Builds a CreateFolder request placing {@code folders} under the well-known
 * {@code parent}.
 */
public CreateFolder constructCreateFolder(
        DistinguishedFolderIdNameType parent,
        Collection<? extends BaseFolderType> folders) {
    NonEmptyArrayOfFoldersType folderArray = new NonEmptyArrayOfFoldersType();
    folderArray.getFoldersAndCalendarFoldersAndContactsFolders().addAll(folders);
    CreateFolder request = new CreateFolder();
    request.setParentFolderId(getParentTargetFolderId(parent));
    request.setFolders(folderArray);
    return request;
}
/**
 * Builds a CreateFolder request placing {@code folder} under an explicit
 * parent folder id (rather than a well-known name).
 */
public CreateFolder constructCreateFolder(FolderIdType folderIdType,
        BaseFolderType folder) {
    TargetFolderIdType parentId = new TargetFolderIdType();
    parentId.setFolderId(folderIdType);
    NonEmptyArrayOfFoldersType folderArray = new NonEmptyArrayOfFoldersType();
    folderArray.getFoldersAndCalendarFoldersAndContactsFolders().add(folder);
    CreateFolder request = new CreateFolder();
    request.setParentFolderId(parentId);
    request.setFolders(folderArray);
    return request;
}
/** Fetches a well-known folder by its distinguished name. */
public GetFolder constructGetFolderByName(
        DistinguishedFolderIdNameType parent) {
    return constructGetFolderById(getParentDistinguishedFolderId(parent));
}
/**
 * Builds a GetFolder request returning ALL_PROPERTIES for the given folder id.
 */
public GetFolder constructGetFolderById(BaseFolderIdType folderIdType) {
    NonEmptyArrayOfBaseFolderIdsType folderIds = new NonEmptyArrayOfBaseFolderIdsType();
    folderIds.getFolderIdsAndDistinguishedFolderIds().add(folderIdType);
    FolderResponseShapeType shape = new FolderResponseShapeType();
    shape.setBaseShape(DefaultShapeNamesType.ALL_PROPERTIES);
    GetFolder request = new GetFolder();
    request.setFolderIds(folderIds);
    request.setFolderShape(shape);
    return request;
}
/** Restriction-free convenience wrapper for the four-argument overload. */
public FindFolder constructFindFolder(DistinguishedFolderIdNameType parent, DefaultShapeNamesType folderShape, FolderQueryTraversalType folderQueryTraversalType) {
    return constructFindFolder(parent, folderShape, folderQueryTraversalType, null);
}
/**
 * Builds a FindFolder request rooted at a well-known folder, paged from
 * offset zero up to the EWS policy limit.
 *
 * @param parent well-known folder to search under (required)
 * @param folderShape base shape of the returned folders (required)
 * @param folderQueryTraversalType shallow/deep/soft-deleted traversal (required)
 * @param restriction optional filter; skipped when null
 */
public FindFolder constructFindFolder(DistinguishedFolderIdNameType parent,
        DefaultShapeNamesType folderShape,
        FolderQueryTraversalType folderQueryTraversalType,
        RestrictionType restriction) {
    Validate.notNull(parent, "parent cannot be null");
    Validate.notNull(folderQueryTraversalType,
            "traversal type cannot be null");
    Validate.notNull(folderShape, "baseShape cannot be null");
    FolderResponseShapeType shape = new FolderResponseShapeType();
    shape.setBaseShape(folderShape);
    NonEmptyArrayOfBaseFolderIdsType parentIds = new NonEmptyArrayOfBaseFolderIdsType();
    parentIds.getFolderIdsAndDistinguishedFolderIds().add(
            getParentDistinguishedFolderId(parent));
    FindFolder request = new FindFolder();
    request.setTraversal(folderQueryTraversalType);
    request.setFolderShape(shape);
    request.setIndexedPageFolderView(
            constructIndexedPageView(INIT_BASE_OFFSET, EWSFindCountLimit, false));
    request.setParentFolderIds(parentIds);
    if (restriction != null) {
        request.setRestriction(restriction);
    }
    return request;
}
/**
 * Builds an UpdateFolder request that sets the folder's display name.
 *
 * @param newName replacement display name
 * @param folderId folder to rename
 */
public UpdateFolder constructRenameFolder(String newName,
        FolderIdType folderId) {
    FolderType folder = new FolderType();
    folder.setDisplayName(newName);
    PathToUnindexedFieldType displayNamePath = new PathToUnindexedFieldType();
    displayNamePath.setFieldURI(UnindexedFieldURIType.FOLDER_DISPLAY_NAME);
    return constructUpdateFolderSetField(folder,
            new ObjectFactory().createPath(displayNamePath), folderId);
}
/** Builds an UpdateFolder request that removes one extended property. */
protected UpdateFolder constructUpdateFolderDeleteExtendedProperty(
        FolderIdType folderId, ExtendedPropertyType exProp) {
    return constructUpdateFolderDeleteField(
            getPathForExtendedPropertyType(exProp), folderId);
}
/** Builds an UpdateFolder request that sets {@code path} on the folder. */
protected UpdateFolder constructUpdateFolderSetField(FolderType folder,
        JAXBElement<? extends BasePathToElementType> path,
        FolderIdType folderId) {
    SetFolderFieldType setField = new SetFolderFieldType();
    setField.setPath(path);
    setField.setFolder(folder);
    return constructUpdateFolderInternal(setField, folderId);
}
/** Builds an UpdateFolder request that deletes the field at {@code path}. */
protected UpdateFolder constructUpdateFolderDeleteField(
        JAXBElement<? extends BasePathToElementType> path,
        FolderIdType folderId) {
    DeleteFolderFieldType deleteField = new DeleteFolderFieldType();
    deleteField.setPath(path);
    return constructUpdateFolderInternal(deleteField, folderId);
}
/**
 * Wraps a single change description into a complete UpdateFolder request
 * targeting {@code folderId}.
 */
private UpdateFolder constructUpdateFolderInternal(
        FolderChangeDescriptionType changeDescription, FolderIdType folderId) {
    NonEmptyArrayOfFolderChangeDescriptionsType updates = new NonEmptyArrayOfFolderChangeDescriptionsType();
    updates.getAppendToFolderFieldsAndSetFolderFieldsAndDeleteFolderFields()
            .add(changeDescription);
    FolderChangeType change = new FolderChangeType();
    change.setFolderId(folderId);
    change.setUpdates(updates);
    NonEmptyArrayOfFolderChangesType changes = new NonEmptyArrayOfFolderChangesType();
    changes.getFolderChanges().add(change);
    UpdateFolder request = new UpdateFolder();
    request.setFolderChanges(changes);
    return request;
}
/**
 * Builds a GetItem request returning only item ids (plus the factory's
 * extended properties) for the given items.
 *
 * @throws IllegalArgumentException if itemIds is null or empty
 */
public GetItem constructGetItemIds(Collection<ItemIdType> itemIds) {
    Validate.isTrue(!CollectionUtils.isEmpty(itemIds),"itemIds cannot be empty");
    ItemResponseShapeType responseShape = constructTextResponseShape(
            DefaultShapeNamesType.ID_ONLY, getExtendedPropertyPaths());
    return constructGetItem(itemIds, responseShape);
}
/**
 * Builds a GetItem request returning all properties (plus the factory's
 * extended properties) for the given items.
 *
 * @throws IllegalArgumentException if itemIds is null or empty
 */
public GetItem constructGetItems(Collection<ItemIdType> itemIds) {
    Validate.isTrue(!CollectionUtils.isEmpty(itemIds),"itemIds cannot be empty");
    ItemResponseShapeType responseShape = constructTextResponseShape(
            DefaultShapeNamesType.ALL_PROPERTIES, getExtendedPropertyPaths());
    return constructGetItem(itemIds, responseShape);
}
/**
 * Builds a GetItem request for the given item ids with the supplied shape.
 *
 * @param itemIds ids of the items to fetch
 * @param responseShape shape applied to the returned items
 * @return the populated request
 */
protected GetItem constructGetItem(Collection<ItemIdType> itemIds,
        ItemResponseShapeType responseShape) {
    NonEmptyArrayOfBaseItemIdsType idArray = new NonEmptyArrayOfBaseItemIdsType();
    idArray.getItemIdsAndOccurrenceItemIdsAndRecurringMasterItemIds()
            .addAll(itemIds);
    GetItem request = new GetItem();
    request.setItemIds(idArray);
    request.setItemShape(responseShape);
    return request;
}
/**
 * Builds a DeleteItem request.
 *
 * @param itemIds items, occurrence items, and recurring master items to
 *            delete; the DeleteItem operation works on any item type
 * @param disposalType how items are deleted (required)
 * @param sendTo whether a calendar-item deletion is communicated to
 *            attendees; required for calendar items, optional otherwise
 * @param affectedTaskOccurrencesType whether a task instance or task master
 *            is deleted; required for tasks, optional otherwise
 * @return the populated request
 * @throws IllegalArgumentException if itemIds is empty or disposalType is null
 */
protected DeleteItem constructDeleteItem(
        Collection<? extends BaseItemIdType> itemIds,
        DisposalType disposalType,
        CalendarItemCreateOrDeleteOperationType sendTo,
        AffectedTaskOccurrencesType affectedTaskOccurrencesType) {
    Validate.notEmpty(itemIds, "must specify at least one itemId.");
    Validate.notNull(disposalType, "disposalType cannot be null");
    NonEmptyArrayOfBaseItemIdsType idArray = new NonEmptyArrayOfBaseItemIdsType();
    idArray.getItemIdsAndOccurrenceItemIdsAndRecurringMasterItemIds()
            .addAll(itemIds);
    DeleteItem request = new DeleteItem();
    if (affectedTaskOccurrencesType != null) {
        request.setAffectedTaskOccurrences(affectedTaskOccurrencesType);
    }
    request.setDeleteType(disposalType);
    request.setItemIds(idArray);
    if (sendTo != null) {
        request.setSendMeetingCancellations(sendTo);
    }
    return request;
}
/**
 * Deletes calendar items; {@code sendTo} is mandatory because attendee
 * notification must be specified for calendar deletions.
 */
public DeleteItem constructDeleteCalendarItems(
        Collection<? extends BaseItemIdType> itemIds,
        DisposalType disposalType,
        CalendarItemCreateOrDeleteOperationType sendTo) {
    Validate.notNull(sendTo, "sendTo must be specified");
    return constructDeleteItem(itemIds, disposalType, sendTo, null);
}
/** Single-item convenience wrapper around constructDeleteCalendarItems. */
public DeleteItem constructDeleteCalendarItem(BaseItemIdType itemId,
        DisposalType disposalType,
        CalendarItemCreateOrDeleteOperationType sendTo) {
    return constructDeleteCalendarItems(Collections.singletonList(itemId),
            disposalType, sendTo);
}
/**
 * Deletes task items; {@code affectedTaskOccurrencesType} is mandatory
 * because task deletions must say whether the instance or master is removed.
 */
public DeleteItem constructDeleteTaskItems(
        Collection<? extends BaseItemIdType> itemIds,
        DisposalType disposalType,
        AffectedTaskOccurrencesType affectedTaskOccurrencesType) {
    Validate.notNull(affectedTaskOccurrencesType,
            "affectedTaskOccurrencesType must be specified");
    return constructDeleteItem(itemIds, disposalType, null,
            affectedTaskOccurrencesType);
}
/** Single-folder convenience wrapper for the collection-based overload. */
public DeleteFolder constructDeleteFolder(BaseFolderIdType folderId, DisposalType disposalType) {
    return constructDeleteFolder(Collections.singleton(folderId), disposalType);
}
/**
 * Builds a DeleteFolder request for the given folder ids.
 *
 * @throws IllegalArgumentException if folderIds is null or empty
 */
public DeleteFolder constructDeleteFolder(
        Collection<? extends BaseFolderIdType> folderIds,
        DisposalType disposalType) {
    Validate.notEmpty(folderIds, "folderIds cannot be empty");
    NonEmptyArrayOfBaseFolderIdsType idArray = new NonEmptyArrayOfBaseFolderIdsType();
    idArray.getFolderIdsAndDistinguishedFolderIds().addAll(folderIds);
    DeleteFolder request = new DeleteFolder();
    request.setDeleteType(disposalType);
    request.setFolderIds(idArray);
    return request;
}
/**
 * FindItem operations
 *
 * Builds a FindItem request over an indexed page view.
 *
 * @param view indexed page view controlling offset/size
 * @param responseShape shape applied to the returned items
 * @param traversal Shallow - search only the identified folder, returning
 *            ids for items that have not been deleted. Deep - search all
 *            child folders of the identified parent as well. SoftDeleted -
 *            shallow traversal over deleted items.
 * @param restriction optional filter; skipped when null
 * @param sortOrderList optional sort order; skipped when null or empty
 * @param folderIds optional parent folders; skipped when null or empty
 * @return the populated request
 */
protected FindItem constructIndexedPageViewFindItem(
        IndexedPageViewType view, ItemResponseShapeType responseShape,
        ItemQueryTraversalType traversal, RestrictionType restriction,
        Collection<FieldOrderType> sortOrderList,
        Collection<? extends BaseFolderIdType> folderIds) {
    FindItem request = new FindItem();
    request.setIndexedPageItemView(view);
    request.setItemShape(responseShape);
    request.setTraversal(traversal);
    if (restriction != null) {
        request.setRestriction(restriction);
    }
    if (!CollectionUtils.isEmpty(sortOrderList)) {
        NonEmptyArrayOfFieldOrdersType sortOrder = new NonEmptyArrayOfFieldOrdersType();
        sortOrder.getFieldOrders().addAll(sortOrderList);
        request.setSortOrder(sortOrder);
    }
    if (!CollectionUtils.isEmpty(folderIds)) {
        NonEmptyArrayOfBaseFolderIdsType parentFolderIds = new NonEmptyArrayOfBaseFolderIdsType();
        parentFolderIds.getFolderIdsAndDistinguishedFolderIds().addAll(folderIds);
        request.setParentFolderIds(parentFolderIds);
    }
    return request;
}
/**
 * Builds a FindItem request over a calendar view spanning the given dates.
 */
protected FindItem constructCalendarViewFindItem(Date startTime, Date endTime, ItemResponseShapeType responseShape, ItemQueryTraversalType traversal,Collection<? extends BaseFolderIdType> folderIds) {
    NonEmptyArrayOfBaseFolderIdsType parents = new NonEmptyArrayOfBaseFolderIdsType();
    parents.getFolderIdsAndDistinguishedFolderIds().addAll(folderIds);
    FindItem request = new FindItem();
    request.setCalendarView(constructCalendarView(startTime, endTime));
    request.setItemShape(responseShape);
    request.setTraversal(traversal);
    request.setParentFolderIds(parents);
    return request;
}
/**
 * @deprecated restriction and sort order cannot be applied to a CalendarView;
 *             use the overload without them.
 */
@Deprecated
protected FindItem constructCalendarViewFindItem(Date startTime,
        Date endTime, ItemResponseShapeType responseShape,
        ItemQueryTraversalType traversal, RestrictionType restriction,
        Collection<FieldOrderType> sortOrderList,
        Collection<? extends BaseFolderIdType> folderIds) {
    log.warn("Restrictions and sort order may not be specified for a CalendarView AND WILL BE OMITTED FROM THIS REQUEST!!!");
    return constructCalendarViewFindItem(startTime, endTime, responseShape,
            traversal, folderIds);
}
/**
 * Builds a FindItem request over a pre-built calendar view.
 *
 * Fixes over the previous version: a null {@code sortOrderList} no longer
 * throws an NPE, an empty sort list is no longer serialized as an invalid
 * empty NonEmptyArrayOfFieldOrdersType, and a supplied {@code restriction}
 * is flagged in the log instead of being dropped silently.
 *
 * @param calendarView the calendar view to page over
 * @param responseShape shape applied to the returned items
 * @param traversal item traversal mode
 * @param restriction ignored — EWS does not allow restrictions with a
 *            CalendarView; a warning is logged when one is supplied
 * @param sortOrderList optional sort order; skipped when null or empty
 */
public FindItem constructCalendarViewFindItem(
        CalendarViewType calendarView, ItemResponseShapeType responseShape,
        ItemQueryTraversalType traversal, RestrictionType restriction,
        List<FieldOrderType> sortOrderList) {
    FindItem findItem = new FindItem();
    findItem.setCalendarView(calendarView);
    findItem.setItemShape(responseShape);
    findItem.setTraversal(traversal);
    if (null != restriction) {
        // Matches the behavior documented on the deprecated Date-based overload.
        log.warn("Restrictions may not be specified for a CalendarView AND WILL BE OMITTED FROM THIS REQUEST!!!");
    }
    if (!CollectionUtils.isEmpty(sortOrderList)) {
        NonEmptyArrayOfFieldOrdersType sortOrder = new NonEmptyArrayOfFieldOrdersType();
        sortOrder.getFieldOrders().addAll(sortOrderList);
        findItem.setSortOrder(sortOrder);
    }
    return findItem;
}
/**
 * Builds a CalendarView spanning [startTime, endTime], capped at the
 * configured max find-item count.
 * see: http://msdn.microsoft.com/en-us/library/aa564515(v=exchg.140).aspx
 *
 * @param startTime start of the window
 * @param endTime end of the window
 * @return the populated view
 */
public CalendarViewType constructCalendarView(Date startTime, Date endTime) {
    CalendarViewType view = new CalendarViewType();
    view.setMaxEntriesReturned(getMaxFindItems());
    view.setStartDate(DateHelp.convertDateToXMLGregorianCalendar(startTime));
    view.setEndDate(DateHelp.convertDateToXMLGregorianCalendar(endTime));
    return view;
}
/**
 * Builds an IndexedPageView starting at {@code start} returning up to
 * {@code length} entries.
 *
 * @param start page offset from the base point
 * @param length maximum entries returned
 * @param reverse when TRUE, pages backwards from the END base point; null is
 *            treated as false (the previous unboxing {@code if (reverse)}
 *            threw an NPE on null)
 */
public IndexedPageViewType constructIndexedPageView(Integer start,
        Integer length, Boolean reverse) {
    IndexedPageViewType view = new IndexedPageViewType();
    view.setMaxEntriesReturned(length);
    view.setOffset(start);
    // Boolean.TRUE.equals is null-safe; null now means "forward".
    view.setBasePoint(Boolean.TRUE.equals(reverse)
            ? IndexBasePointType.END
            : IndexBasePointType.BEGINNING);
    return view;
}
/**
 * Builds an ItemResponseShapeType from extended properties.
 *
 * @param baseShape one of DefaultShapeNamesType.ALL_PROPERTIES,
 *            DefaultShapeNamesType.DEFAULT, DefaultShapeNamesType.ID_ONLY
 * @param bodyType body format; skipped when null
 * @param htmlToUtf8 convert HTML code page to UTF-8; skipped when null
 * @param filterHtml filter HTML content; skipped when null
 * @param includeMime include MIME content; skipped when null
 * @param exProps extended properties whose paths are requested; skipped when null
 * @return the populated shape
 */
public ItemResponseShapeType constructResponseShapeExProps(
        DefaultShapeNamesType baseShape, BodyTypeResponseType bodyType,
        Boolean htmlToUtf8, Boolean filterHtml, Boolean includeMime,
        Collection<ExtendedPropertyType> exProps) {
    ItemResponseShapeType shape = new ItemResponseShapeType();
    shape.setBaseShape(baseShape);
    if (bodyType != null) {
        shape.setBodyType(bodyType);
    }
    if (htmlToUtf8 != null) {
        shape.setConvertHtmlCodePageToUTF8(htmlToUtf8);
    }
    if (filterHtml != null) {
        shape.setFilterHtmlContent(filterHtml);
    }
    if (includeMime != null) {
        shape.setIncludeMimeContent(includeMime);
    }
    if (exProps != null) {
        shape.setAdditionalProperties(getPathsFromExtendedProps(exProps));
    }
    return shape;
}
/**
 * Builds an ItemResponseShapeType from extended-field paths by wrapping each
 * path in a JAXB ExtendedFieldURI element and delegating.
 */
public ItemResponseShapeType constructResponseShape(
        DefaultShapeNamesType baseShape, BodyTypeResponseType bodyType,
        Boolean htmlToUtf8, Boolean filterHtml, Boolean includeMime,
        Collection<PathToExtendedFieldType> exPaths) {
    NonEmptyArrayOfPathsToElementType additionalProperties = new NonEmptyArrayOfPathsToElementType();
    if (!CollectionUtils.isEmpty(exPaths)) {
        ObjectFactory objectFactory = new ObjectFactory();
        for (PathToExtendedFieldType exPath : exPaths) {
            additionalProperties.getPaths().add(
                    objectFactory.createExtendedFieldURI(exPath));
        }
    }
    return constructResponseShape(baseShape, bodyType, htmlToUtf8,
            filterHtml, includeMime, additionalProperties);
}
/**
 * Builds an ItemResponseShapeType from a pre-built path array; each optional
 * flag is only set when non-null, and the path array is only attached when
 * it actually contains paths.
 */
public ItemResponseShapeType constructResponseShape(
        DefaultShapeNamesType baseShape, BodyTypeResponseType bodyType,
        Boolean htmlToUtf8, Boolean filterHtml, Boolean includeMime,
        NonEmptyArrayOfPathsToElementType exProps) {
    ItemResponseShapeType shape = new ItemResponseShapeType();
    shape.setBaseShape(baseShape);
    if (bodyType != null) {
        shape.setBodyType(bodyType);
    }
    if (htmlToUtf8 != null) {
        shape.setConvertHtmlCodePageToUTF8(htmlToUtf8);
    }
    if (filterHtml != null) {
        shape.setFilterHtmlContent(filterHtml);
    }
    if (includeMime != null) {
        shape.setIncludeMimeContent(includeMime);
    }
    if (exProps != null && !CollectionUtils.isEmpty(exProps.getPaths())) {
        shape.setAdditionalProperties(exProps);
    }
    return shape;
}
/** Collects the JAXB path element of every extended property into one array. */
private NonEmptyArrayOfPathsToElementType getPathsFromExtendedProps(
        Collection<ExtendedPropertyType> exProps) {
    NonEmptyArrayOfPathsToElementType paths = new NonEmptyArrayOfPathsToElementType();
    for (ExtendedPropertyType exProp : exProps) {
        paths.getPaths().add(getPathForExtendedPropertyType(exProp));
    }
    return paths;
}
// TODO one of these should be deprecated.
/** TEXT-body shape: HTML converted to UTF-8, HTML filtered, no MIME. */
public ItemResponseShapeType constructTextResponseShape(DefaultShapeNamesType baseShape,
        NonEmptyArrayOfPathsToElementType exProps) {
    return constructResponseShape(baseShape, BodyTypeResponseType.TEXT, true, true, false, exProps);
}
/** TEXT-body shape from extended-field paths (see array-based overload). */
public ItemResponseShapeType constructTextResponseShape(DefaultShapeNamesType baseShape,
        Collection<PathToExtendedFieldType> exProps) {
    return constructResponseShape(baseShape, BodyTypeResponseType.TEXT, true, true, false, exProps);
}
/** Shape with defaults for every optional flag, from extended-field paths. */
public ItemResponseShapeType constructResponseShape(
        DefaultShapeNamesType baseShape,
        Collection<PathToExtendedFieldType> exPaths) {
    return constructResponseShape(baseShape, null, null, null, null, exPaths);
}
/** Shape with defaults for every optional flag, from a pre-built path array. */
public ItemResponseShapeType constructResponseShape(
        DefaultShapeNamesType baseShape,
        NonEmptyArrayOfPathsToElementType exProps) {
    return constructResponseShape(baseShape, null, null, null, null, exProps);
}
/** Shape with defaults for every optional flag, from extended properties. */
public ItemResponseShapeType constructResponseShapeWithExProps(
        DefaultShapeNamesType baseShape,
        Collection<ExtendedPropertyType> exProps) {
    return constructResponseShapeExProps(baseShape, null, null, null, null, exProps);
}
/** Bare shape: base shape only, no options, no extended properties. */
public ItemResponseShapeType constructResponseShape(
        DefaultShapeNamesType baseShape) {
    // Typed null keeps the call unambiguous and documents the intent.
    Collection<ExtendedPropertyType> noExProps = null;
    return constructResponseShapeExProps(baseShape, null, null, null, null,
            noExProps);
}
/**
 * PARENT
 *
 * getParentTargetFolderId and getParentDistinguishedFolderId both accept
 * DistinguishedFolderIdNameType:
 *
 * ARCHIVEDELETEDITEMS ARCHIVEMSGFOLDERROOT ARCHIVERECOVERABLEITEMSDELETIONS
 * ARCHIVERECOVERABLEITEMSPURGES ARCHIVERECOVERABLEITEMSROOT
 * ARCHIVERECOVERABLEITEMSVERSIONS ARCHIVEROOT CALENDAR CONTACTS
 * DELETEDITEMS DRAFTS INBOX JOURNAL JUNKEMAIL MSGFOLDERROOT NOTES OUTBOX
 * PUBLICFOLDERSROOT RECOVERABLEITEMSDELETIONS RECOVERABLEITEMSPURGES
 * RECOVERABLEITEMSROOT RECOVERABLEITEMSVERSIONS SEARCHFOLDERS SENTITEMS
 * TASKS VOICEMAIL
 *
 * @param parent well-known folder name
 * @return a target-folder wrapper around the distinguished folder id
 */
protected TargetFolderIdType getParentTargetFolderId(
        DistinguishedFolderIdNameType parent) {
    TargetFolderIdType target = new TargetFolderIdType();
    target.setDistinguishedFolderId(getParentDistinguishedFolderId(parent));
    return target;
}
/**
 * Wraps a well-known folder name in a DistinguishedFolderIdType.
 *
 * @param parent well-known folder name
 * @return the wrapping id
 */
protected DistinguishedFolderIdType getParentDistinguishedFolderId(
        DistinguishedFolderIdNameType parent) {
    DistinguishedFolderIdType id = new DistinguishedFolderIdType();
    id.setId(parent);
    return id;
}
/** @return the distinguished id of the primary calendar folder. */
public DistinguishedFolderIdType getPrimaryCalendarDistinguishedFolderId() {
    return getParentDistinguishedFolderId(DistinguishedFolderIdNameType.CALENDAR);
}
/** @return the distinguished id of the primary contacts folder. */
public DistinguishedFolderIdType getPrimaryContactsDistinguishedFolderId() {
    return getParentDistinguishedFolderId(DistinguishedFolderIdNameType.CONTACTS);
}
/** @return the distinguished id of the primary tasks folder. */
public DistinguishedFolderIdType getPrimaryTasksDistinguishedFolderId() {
    return getParentDistinguishedFolderId(DistinguishedFolderIdNameType.TASKS);
}
/** @return the distinguished id of the primary notes folder. */
public DistinguishedFolderIdType getPrimaryNotesDistinguishedFolderId() {
    return getParentDistinguishedFolderId(DistinguishedFolderIdNameType.NOTES);
}
/** @return the distinguished id of the primary journal folder. */
public DistinguishedFolderIdType getPrimaryJournalDistinguishedFolderId() {
    return getParentDistinguishedFolderId(DistinguishedFolderIdNameType.JOURNAL);
}
/**
 * Wraps an extended property's field URI in the JAXB ExtendedFieldURI element
 * expected by response-shape path arrays.
 *
 * @param extendedPropertyType property whose field URI is wrapped
 * @return the JAXB path element
 */
public JAXBElement<PathToExtendedFieldType> getPathForExtendedPropertyType(
        ExtendedPropertyType extendedPropertyType) {
    return new ObjectFactory().createExtendedFieldURI(
            extendedPropertyType.getExtendedFieldURI());
}
/**
 * First-page variant of the shallow, ID-only FindItem: starts at
 * INIT_BASE_OFFSET with the configured page size (getMaxFindItems()).
 *
 * @param restriction search restriction, or null for no restriction
 * @param exProps additional extended-property paths to return
 * @param folderIds folders to search
 * @return the FindItem request for the first page
 */
protected FindItem constructIndexedPageViewFindFirstItemIdsShallow(RestrictionType restriction, NonEmptyArrayOfPathsToElementType exProps, Collection<? extends BaseFolderIdType> folderIds) {
    return constructIndexedPageViewFindItemIdsShallow(INIT_BASE_OFFSET, getMaxFindItems(), restriction, exProps, folderIds);
}
/**
 * Shallow, ID-only FindItem over an indexed page view.
 *
 * @param offset zero-based index of the first item to return
 * @param maxItems page size
 * @param restriction search restriction, or null for no restriction
 * @param exProps additional extended-property paths to return
 * @param folderIds folders to search
 * @return the FindItem request
 */
private FindItem constructIndexedPageViewFindItemIdsShallow(int offset, int maxItems, RestrictionType restriction, NonEmptyArrayOfPathsToElementType exProps, Collection<? extends BaseFolderIdType> folderIds) {
    return constructIndexedPageViewFindItem(offset, maxItems, DefaultShapeNamesType.ID_ONLY, ItemQueryTraversalType.SHALLOW, restriction, exProps, folderIds);
}
/**
 * Builds an indexed-page-view FindItem request.
 *
 * <p>An indexed view (rather than a calendar view) is used because Exchange
 * does not allow restrictions on calendar views. Results are sorted ascending
 * by item id (see {@link #constructSortOrder()}) so that paging is stable.
 *
 * @param offset zero-based index of the first item to return
 * @param maxItems page size; values above EWSFindCountLimit have no effect and are logged
 * @param baseShape base response shape (e.g. ID_ONLY)
 * @param traversalType item traversal (e.g. SHALLOW)
 * @param restriction search restriction, or null for no restriction
 * @param exProps additional extended-property paths to return
 * @param folderIds folders to search
 * @return the FindItem request
 */
private FindItem constructIndexedPageViewFindItem(int offset, int maxItems, DefaultShapeNamesType baseShape, ItemQueryTraversalType traversalType, RestrictionType restriction, NonEmptyArrayOfPathsToElementType exProps, Collection<? extends BaseFolderIdType> folderIds) {
    if (maxItems > EWSFindCountLimit) {
        log.warn("The default policy in Exchange limits the page size to 1000 items. Setting the page size to a value that is greater than this number has no practical effect. --http://msdn.microsoft.com/en-us/library/office/jj945066(v=exchg.150).aspx#bk_PolicyParameters");
    }
    //use indexed view as restrictions cannot be applied to calendar view
    IndexedPageViewType view = constructIndexedPageView(offset, maxItems, false);
    //only return id, note you can return a limited set of additional properties
    // see:http://msdn.microsoft.com/en-us/library/exchange/aa563810(v=exchg.140).aspx
    ItemResponseShapeType responseShape = constructResponseShape(baseShape, exProps);
    FieldOrderType sortOrder = constructSortOrder();
    List<FieldOrderType> sortOrderList = Collections.singletonList(sortOrder);
    return constructIndexedPageViewFindItem(view, responseShape, traversalType, restriction, sortOrderList, folderIds);
}
/**
 * Builds the sort order applied to FindItem results: ascending by
 * ITEM_ITEM_ID, i.e. earliest items first, which keeps paging stable.
 *
 * @return the ascending item-id sort order
 */
protected FieldOrderType constructSortOrder() {
    // Sort key: the unindexed item-id field.
    PathToUnindexedFieldType idField = new PathToUnindexedFieldType();
    idField.setFieldURI(UnindexedFieldURIType.ITEM_ITEM_ID);
    // Direction: ascending (earliest items first).
    FieldOrderType order = new FieldOrderType();
    order.setOrder(SortDirectionType.ASCENDING);
    order.setPath(getObjectFactory().createFieldURI(idField));
    return order;
}
/**
 * @return a fresh JAXB {@code ObjectFactory}; a new instance is created on
 *     every call.
 */
protected ObjectFactory getObjectFactory() {
    return new ObjectFactory();
}
/**
 * Extended-property paths to request in addition to the base shape.
 * Default implementation returns a new empty set; subclasses override to
 * contribute paths (consumed by {@link #getAdditionalExtendedProperties()}).
 *
 * @return a mutable, initially empty set of paths
 */
public Set<PathToExtendedFieldType> getExtendedPropertyPaths() {
    return new HashSet<PathToExtendedFieldType>();
}
/**
 * Wraps every path from {@link #getExtendedPropertyPaths()} as a JAXB
 * {@code ExtendedFieldURI} element and collects them into a
 * {@code NonEmptyArrayOfPathsToElementType}.
 *
 * @return the (possibly empty) array of additional property paths
 */
public NonEmptyArrayOfPathsToElementType getAdditionalExtendedProperties() {
    NonEmptyArrayOfPathsToElementType result = new NonEmptyArrayOfPathsToElementType();
    Set<PathToExtendedFieldType> paths = getExtendedPropertyPaths();
    if (CollectionUtils.isEmpty(paths)) {
        return result;
    }
    for (PathToExtendedFieldType path : paths) {
        result.getPaths().add(getObjectFactory().createExtendedFieldURI(path));
    }
    return result;
}
/**
 * Builds the "Calendar:End &lt;= endTime" search expression used as the upper
 * bound of a calendar-item date-range restriction.
 *
 * @param endTime upper bound for the item end time
 * @return a JAXB-wrapped IsLessThanOrEqualTo expression over calendar:End
 */
protected JAXBElement<IsLessThanOrEqualToType> getCalendarItemEndRestriction(Date endTime) {
    ObjectFactory factory = getObjectFactory();
    // Left-hand side: the calendar:End unindexed field.
    PathToUnindexedFieldType endField = new PathToUnindexedFieldType();
    endField.setFieldURI(UnindexedFieldURIType.CALENDAR_END);
    // Right-hand side: the bound serialized as an XML date-time constant.
    ConstantValueType bound = new ConstantValueType();
    bound.setValue(DateHelp.convertDateToXMLGregorianCalendar(endTime).toXMLFormat());
    FieldURIOrConstantType boundWrapper = new FieldURIOrConstantType();
    boundWrapper.setConstant(bound);
    // Combine both sides into the <= comparison.
    IsLessThanOrEqualToType comparison = new IsLessThanOrEqualToType();
    comparison.setPath(factory.createFieldURI(endField));
    comparison.setFieldURIOrConstant(boundWrapper);
    return factory.createIsLessThanOrEqualTo(comparison);
}
/**
 * Builds the "Calendar:Start &gt;= startTime" search expression used as the
 * lower bound of a calendar-item date-range restriction.
 *
 * @param startTime lower bound for the item start time
 * @return a JAXB-wrapped IsGreaterThanOrEqualTo expression over calendar:Start
 */
protected JAXBElement<IsGreaterThanOrEqualToType> getCalendarItemStartRestriction(Date startTime) {
    ObjectFactory factory = getObjectFactory();
    // Left-hand side: the calendar:Start unindexed field.
    PathToUnindexedFieldType startField = new PathToUnindexedFieldType();
    startField.setFieldURI(UnindexedFieldURIType.CALENDAR_START);
    // Right-hand side: the bound serialized as an XML date-time constant.
    ConstantValueType bound = new ConstantValueType();
    bound.setValue(DateHelp.convertDateToXMLGregorianCalendar(startTime).toXMLFormat());
    FieldURIOrConstantType boundWrapper = new FieldURIOrConstantType();
    boundWrapper.setConstant(bound);
    // Combine both sides into the >= comparison.
    IsGreaterThanOrEqualToType comparison = new IsGreaterThanOrEqualToType();
    comparison.setPath(factory.createFieldURI(startField));
    comparison.setFieldURIOrConstant(boundWrapper);
    return factory.createIsGreaterThanOrEqualTo(comparison);
}
/**
 * First-page, unrestricted FindItem over the given folders (ids only).
 *
 * @param folderIds folders to search
 * @return the FindItem request for the first page of all item ids
 */
public FindItem constructFindFirstItemIdSet(Collection<FolderIdType> folderIds) {
    return constructFindAllItemIds(INIT_BASE_OFFSET, getMaxFindItems(), folderIds);
}
/**
 * Next-page, unrestricted FindItem over the given folders (ids only).
 *
 * @param offset zero-based index of the first item to return
 * @param folderIds folders to search
 * @return the FindItem request for the next page of all item ids
 */
public FindItem constructFindNextItemIdSet(int offset, Collection<FolderIdType> folderIds) {
    return constructFindAllItemIds(offset, getMaxFindItems(), folderIds);
}
/**
 * Unrestricted, shallow, ID-only FindItem over the given folders.
 *
 * @param offset zero-based index of the first item to return
 * @param maxItems page size
 * @param folderIds folders to search
 * @return the FindItem request
 */
public FindItem constructFindAllItemIds(int offset, int maxItems, Collection<FolderIdType> folderIds) {
    // "Find all" == no restriction; request no extra extended-property paths.
    return constructIndexedPageViewFindItemIdsShallow(offset, maxItems, null,
            new NonEmptyArrayOfPathsToElementType(), folderIds);
}
/**
 * Builds a restriction matching calendar items with
 * {@code Start >= startTime AND End <= endTime}.
 *
 * @param startTime lower bound for the item start time
 * @param endTime upper bound for the item end time
 * @return the combined date-range restriction
 */
protected RestrictionType constructFindCalendarItemsByDateRangeRestriction(Date startTime, Date endTime) {
    ObjectFactory factory = getObjectFactory();
    // AND the lower and upper bounds together.
    AndType and = new AndType();
    and.getSearchExpressions().add(getCalendarItemStartRestriction(startTime));
    and.getSearchExpressions().add(getCalendarItemEndRestriction(endTime));
    // Wrap the AND expression in a restriction.
    RestrictionType restriction = new RestrictionType();
    restriction.setSearchExpression(factory.createAnd(and));
    return restriction;
}
/**
 * FindItem request for item ids within a date range, via an indexed page view
 * with a Start/End restriction.
 *
 * @param startTime lower bound for the item start time
 * @param endTime upper bound for the item end time
 * @param folderIds folders to search; when empty/null the primary CALENDAR
 *     distinguished folder is used
 * @return the FindItem request for the first page
 */
public FindItem constructFindItemIdsByDateRange(Date startTime, Date endTime, Collection<FolderIdType> folderIds) {
    Collection<? extends BaseFolderIdType> targets = folderIds;
    if (CollectionUtils.isEmpty(targets)) {
        // No folders supplied: fall back to the primary calendar folder.
        DistinguishedFolderIdType calendar = new DistinguishedFolderIdType();
        calendar.setId(DistinguishedFolderIdNameType.CALENDAR);
        targets = Collections.singleton(calendar);
    }
    RestrictionType dateRange = constructFindCalendarItemsByDateRangeRestriction(startTime, endTime);
    return constructIndexedPageViewFindFirstItemIdsShallow(dateRange, getAdditionalExtendedProperties(), targets);
}
/**
 * FindItem request for calendar-item ids within a date range, via a calendar
 * view (no restriction; the view itself bounds the range).
 *
 * @param startTime start of the calendar view window
 * @param endTime end of the calendar view window
 * @param folderIds folders to search; when empty/null the primary CALENDAR
 *     distinguished folder is used
 * @return the FindItem request
 */
public FindItem constructFindCalendarItemIdsByDateRange(Date startTime, Date endTime, Collection<FolderIdType> folderIds) {
    Collection<? extends BaseFolderIdType> targets = folderIds;
    if (CollectionUtils.isEmpty(targets)) {
        // No folders supplied: fall back to the primary calendar folder.
        DistinguishedFolderIdType calendar = new DistinguishedFolderIdType();
        calendar.setId(DistinguishedFolderIdNameType.CALENDAR);
        targets = Collections.singleton(calendar);
    }
    ItemResponseShapeType idOnlyShape =
            constructResponseShape(DefaultShapeNamesType.ID_ONLY, getAdditionalExtendedProperties());
    return constructCalendarViewFindItem(startTime, endTime, idOnlyShape, ItemQueryTraversalType.SHALLOW, targets);
}
/**
 * Additional property paths for response shapes; subclasses are expected to
 * override (see TODO).
 *
 * NOTE(review): the default returns null, so callers must tolerate null.
 * Returning an empty array instead might break callers that null-check, so
 * the behavior is left unchanged — confirm call sites before altering.
 *
 * @return null by default
 */
public NonEmptyArrayOfPathsToElementType getAdditionalProperties() {
    // TODO strongly suggest you override this
    return null;
}
/**
 * Extended properties to attach to items; subclasses are expected to override.
 *
 * @return a new, mutable, initially empty list
 */
public Collection<ExtendedPropertyType> getExtendedProperties() {
    // TODO strongly suggest you override this
    return new ArrayList<ExtendedPropertyType>();
}
}
| |
package dk.itu.ejuuragr.fitness;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import com.anji_ahni.integration.Activator;
/**
 * Various static helper methods used throughout
 * the project.
 *
 */
public class Utilities {

    /**
     * Will instantiate an object using a constructor with the given parameter types and values.
     * @param className The full qualifying name of the Class to instantiate.
     * @param params The list of parameters for the constructor, or null to use the no-args
     * constructor.
     * @param constructor The actual types required by the constructor (the given params should
     * be of these types or subtypes thereof). Can be left as null if the wanted constructor
     * matches the actual types of the given parameters.
     * @return The instantiated object which can then be cast to its actual type, or null
     * if instantiation failed (the failure is printed to stderr).
     */
    public static Object instantiateObject(String className, Object[] params, Class<?>[] constructor) {
        Object result = null;
        try {
            if (params == null || params.length == 0) {
                result = Class.forName(className).newInstance();
            } else {
                Constructor<?> con = Class.forName(className)
                        .getDeclaredConstructor(constructor == null ? getClasses(params) : constructor);
                result = con.newInstance(params);
            }
        } catch (NoSuchMethodException | SecurityException
                | ClassNotFoundException | InstantiationException
                | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            // Best-effort: report and return null so the caller decides how to proceed.
            e.printStackTrace();
        }
        return result;
    }

    /** Maps each object to its runtime class (used to look up a matching constructor). */
    private static Class<?>[] getClasses(Object[] objects) {
        Class<?>[] result = new Class[objects.length];
        for (int i = 0; i < objects.length; i++)
            result[i] = objects[i].getClass();
        return result;
    }

    /**
     * Deep-copies a 2D array (each row is copied, not aliased).
     * @param original The array to copy; may be null.
     * @return An independent copy, or null if the input was null.
     */
    public static double[][] deepCopy(double[][] original) {
        if (original == null) {
            return null;
        }
        final double[][] result = new double[original.length][];
        for (int i = 0; i < original.length; i++) {
            result[i] = Arrays.copyOf(original[i], original[i].length);
        }
        return result;
    }

    /**
     * Takes a 2D array and returns the same elements
     * in a 1D array structure.
     * @param arrays The 2D array to flatten.
     * @return A 1D array of those arrays appended.
     */
    public static double[] flatten(double[][] arrays) {
        int offset = 0;
        double[] result = new double[totalLength(arrays)];
        for (int i = 0; i < arrays.length; i++) {
            System.arraycopy(arrays[i], 0, result, offset, arrays[i].length);
            offset += arrays[i].length;
        }
        return result;
    }

    /**
     * Copies everything from fromArray to toArray starting.
     * @param fromArray The array to copy every element from.
     * @param toArray The array to insert the elements into.
     * @param offset The index to start at in the toArray.
     */
    public static void copy(double[] fromArray, double[] toArray, int offset) {
        System.arraycopy(fromArray, 0, toArray, offset, fromArray.length);
    }

    /**
     * Copies a subsection of the given array into a new
     * array.
     * @param fromArray The array to copy from.
     * @param start The first index to copy (inclusive).
     * @param end The index to copy to (exclusive).
     * @return A new array with the specified content copied into.
     */
    public static double[] copy(double[] fromArray, int start, int end) {
        double[] result = new double[end - start];
        System.arraycopy(fromArray, start, result, 0, result.length);
        return result;
    }

    /**
     * Makes a String representation of the given array using
     * the given format for each element.
     * @param array The array to make a String of.
     * @param format The way to format each double, e.g. "%.2f"
     * @return The string...
     */
    public static String toString(double[] array, String format) {
        StringBuilder b = new StringBuilder();
        b.append('[');
        for (int i = 0; i < array.length; i++) {
            b.append(String.format(format, array[i]));
            if (i < array.length - 1)
                b.append(", ");
        }
        b.append(']');
        return b.toString();
    }

    /**
     * Makes a String of the given array with two digits precision.
     * @param array The array to make a String of.
     * @return The String...
     */
    public static String toString(double[] array) {
        return Utilities.toString(array, "%.2f");
    }

    /**
     * Makes a String representation of the given 2D array using
     * the given format for each element.
     * @param array The 2D array to make a String of.
     * @param format The way to format each double, e.g. "%.2f"
     * @return The String...
     */
    public static String toString(double[][] array, String format) {
        StringBuilder b = new StringBuilder();
        b.append('[');
        for (int i = 0; i < array.length; i++) {
            b.append(Utilities.toString(array[i], format));
            if (i < array.length - 1)
                b.append(", ");
        }
        b.append(']');
        return b.toString();
    }

    /**
     * Makes a pretty String from a 2D double array (two digits precision).
     * @param array The 2D array to make a String of.
     * @return The String...
     */
    public static String toString(double[][] array) {
        return Utilities.toString(array, "%.2f");
    }

    /**
     * Count the total number of elements in a 2 dimensional matrix
     * @param arrays The 2d matrix to count
     * @return The total number of elements in the 2d matrix
     */
    public static int totalLength(double[][] arrays) {
        int count = 0;
        for (int i = 0; i < arrays.length; i++)
            count += arrays[i].length;
        return count;
    }

    /**
     * Find the index of the element with the highest value
     * @param array The array to search through
     * @return The index of the element with the highest value
     */
    public static int maxPos(double[] array) {
        int maxpos = 0;
        // BUGFIX: was Double.MIN_VALUE, which is the smallest POSITIVE double,
        // so arrays containing only negative values always returned index 0.
        double value = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < array.length; i++) {
            if (array[i] > value) {
                maxpos = i;
                value = array[i];
            }
        }
        return maxpos;
    }

    /**
     * Return a new array of the same length where all
     * elements sum to 1.0 and the relation between the
     * original elements are preserved.
     * Note: if the elements sum to 0.0 the result contains NaN/Infinity
     * (division by zero) — callers should avoid all-zero input.
     * @param array The array to normalize.
     * @return The normalized array.
     */
    public static double[] normalize(double[] array) {
        double[] result = new double[array.length];
        double sum = 0.0;
        for (int i = 0; i < array.length; i++)
            sum += array[i];
        for (int i = 0; i < array.length; i++)
            result[i] = array[i] / sum;
        return result;
    }

    /**
     * Normalized manhattan distance:
     * Compares two vectors and calculates a similarity between them.
     * Only works for strictly positive numbers each between 0.0 and 1.0.
     * @param v1 the first vector
     * @param v2 the second vector
     * @return A number between 0.0 and 1.0 of how similar the two vectors
     * are (in the space of each variable being between 0.0 and 1.0).
     */
    public static double emilarity(double[] v1, double[] v2) {
        if (v1.length != v2.length)
            throw new IllegalArgumentException("The arrays must be of the same length");
        double numerator = 0;
        for (int i = 0; i < v1.length; i++)
            numerator += Math.abs(v1[i] - v2[i]);
        return 1.0 - (numerator / v1.length);
    }

    /**
     * Euclidean (L2) distance between two equal-length vectors.
     * @param v1 the first vector
     * @param v2 the second vector
     * @return The Euclidean distance.
     */
    public static double euclideanDistance(double[] v1, double[] v2) {
        double sqSum = 0;
        for (int i = 0; i < v1.length; i++) {
            double diff = v1[i] - v2[i];
            sqSum += diff * diff; // cheaper than Math.pow(diff, 2), same result
        }
        return Math.sqrt(sqSum);
    }

    /**
     * Clamps a value into the inclusive range [min, max].
     * @param value The value to clamp.
     * @param min The lower bound.
     * @param max The upper bound.
     * @return min if value < min, max if value > max, otherwise value.
     */
    public static double clamp(double value, double min, double max) {
        if (value < min)
            return min;
        if (value > max)
            return max;
        return value;
    }

    /**
     * Cosine similarity between two equal-length vectors.
     * The accumulators are seeded with Double.MIN_VALUE so that two all-zero
     * vectors divide to +Infinity, which is then mapped to a similarity of 1.
     * @param v1 the first vector
     * @param v2 the second vector
     * @return The cosine similarity in [-1, 1].
     */
    public static double cosineSimilarity(double[] v1, double[] v2) {
        double sum = Double.MIN_VALUE;
        double usum = Double.MIN_VALUE;
        double vsum = Double.MIN_VALUE;
        for (int i = 0; i < v1.length; i++) {
            sum += v1[i] * v2[i];
            usum += v1[i] * v1[i];
            vsum += v2[i] * v2[i];
        }
        double r = sum / Math.sqrt(usum * vsum);
        return r == Double.POSITIVE_INFINITY ? 1 : r;
    }

    /**
     * Adapts a com.anji_ahni Activator to the com.anji Activator interface
     * by delegating every call to the wrapped substrate.
     */
    public static class ActivatorProxy implements com.anji.integration.Activator {

        private Activator substrate;

        public ActivatorProxy(Activator substrate) {
            this.substrate = substrate;
        }

        @Override
        public String getXmlRootTag() {
            return substrate.getXmlRootTag();
        }

        @Override
        public String getXmld() {
            return substrate.getXmld();
        }

        @Override
        public double[] next() {
            return (double[]) substrate.next();
        }

        @Override
        public double[] next(double[] stimuli) {
            return substrate.next(stimuli);
        }

        @Override
        public double[][] next(double[][] stimuli) {
            return substrate.next(stimuli);
        }

        @Override
        public String toXml() {
            return substrate.toXml();
        }

        @Override
        public void reset() {
            substrate.reset();
        }

        @Override
        public String getName() {
            return substrate.getName();
        }

        @Override
        public double getMinResponse() {
            return substrate.getMinResponse();
        }

        @Override
        public double getMaxResponse() {
            return substrate.getMaxResponse();
        }

        @Override
        public int getInputDimension() {
            return substrate.getInputCount();
        }

        @Override
        public int getOutputDimension() {
            return substrate.getOutputCount();
        }
    }

    /**
     * Just testing some stuff
     * @param args
     */
    public static void main(String[] args) {
        double[] v1 = new double[]{0, 0, 0, 0, 0};
        double[] v2 = new double[]{0.5, 0.5, 0.5, 0.5, 0.5};
        double[] v3 = new double[]{1, 1, 1, 1, 1};
        double[] v4 = new double[]{0, 0.25, 0.5, 0.75, 1};
        double[] v5 = new double[]{1, 0.75, 0.5, 0.25, 0};
        System.out.println(Arrays.toString(v1) + " VS " + Arrays.toString(v2) + ": " + cosineSimilarity(v1, v2));
        System.out.println(Arrays.toString(v1) + " VS " + Arrays.toString(v3) + ": " + cosineSimilarity(v1, v3));
        System.out.println(Arrays.toString(v2) + " VS " + Arrays.toString(v3) + ": " + cosineSimilarity(v2, v3));
        System.out.println(Arrays.toString(v1) + " VS " + Arrays.toString(v1) + ": " + cosineSimilarity(v1, v1));
        System.out.println(Arrays.toString(v3) + " VS " + Arrays.toString(v3) + ": " + cosineSimilarity(v3, v3));
        System.out.println(Arrays.toString(v1) + " VS " + Arrays.toString(v4) + ": " + cosineSimilarity(v1, v4));
        System.out.println(Arrays.toString(v3) + " VS " + Arrays.toString(v4) + ": " + cosineSimilarity(v1, v4));
        System.out.println(Arrays.toString(v4) + " VS " + Arrays.toString(v5) + ": " + cosineSimilarity(v4, v5));
        System.out.println(Arrays.toString(v5) + " VS " + Arrays.toString(v4) + ": " + cosineSimilarity(v5, v4));
    }
}
| |
package org.cagrid.grape.utils;
import gov.nih.nci.cagrid.common.portal.PortalLookAndFeel;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.DateFormat;
import java.util.Vector;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.ScrollPaneConstants;
import javax.swing.SwingUtilities;
import org.cagrid.grape.GridApplication;
import org.cagrid.grape.utils.errors.ErrorContainer;
import org.cagrid.grape.utils.errors.ErrorDialogTable;
import org.cagrid.grape.utils.errors.ErrorDialogTableListener;
/**
* PortalErrorDialog
* Dialog for displaying / queueing up errors and detail messages
*
* @author <A HREF="MAILTO:ervin@bmi.osu.edu">David W. Ervin</A>
*
* @created Oct 2, 2006
* @version $Id: CompositeErrorDialog.java,v 1.1 2007-11-06 15:53:42 hastings Exp $
*/
public class CompositeErrorDialog extends JDialog {
private static Frame ownerFrame = null;
private static Vector<ErrorContainer> errors = null;
private static CompositeErrorDialog dialog = null;
private static String lastFileLocation = null;
private static Object errorAdditionMutex = new Object();
private ErrorContainer currentError = null;
private boolean showingErrorDetails = false;
private boolean showingErrorException = false;
private ErrorDialogTable errorTable = null;
private JScrollPane errorScrollPane = null;
private JTextArea detailTextArea = null;
private JScrollPane detailScrollPane = null;
private JButton clearButton = null;
private JPanel mainPanel = null;
private JButton hideDialogButton = null;
private JButton logErrorsButton = null;
private JPanel buttonPanel = null;
private JSplitPane errorsSplitPane = null;
private CompositeErrorDialog(Frame parentFrame) {
super(parentFrame);
initialize();
}
private void initialize() {
setTitle("Errors");
this.setContentPane(getMainPanel());
pack();
}
public static void setOwnerFrame(Frame frame) {
ownerFrame = frame;
}
private static Frame getOwnerFrame() {
if (ownerFrame == null) {
return GridApplication.getContext().getApplication();
}
return ownerFrame;
}
/**
* Only message is required. Detail will be shown when asked for, exception shown
* when asked for, each only if != null
*
* @param message
* @param detail
* @param error
*/
private static void addError(final String message, final String detail, final Throwable error) {
if (dialog == null) {
dialog = new CompositeErrorDialog(getOwnerFrame());
}
Runnable r = new Runnable() {
public void run() {
synchronized (errorAdditionMutex) {
dialog.setAlwaysOnTop(true);
ErrorContainer container = new ErrorContainer(message, detail, error);
if (errors == null) {
errors = new Vector<ErrorContainer>();
}
errors.add(container);
dialog.getErrorTable().addError(container);
if (!dialog.isVisible()) {
dialog.setModal(true);
// dialog.pack();
dialog.setSize(500, 450);
// attempt to center the dialog
centerDialog();
dialog.setVisible(true);
}
}
}
};
SwingUtilities.invokeLater(r);
}
/**
* Shows an error message from an exception. The message presented will
* be the exception's message, or the exception's class name
* if no message is present
*
* @param ex
*/
public static void showErrorDialog(Throwable ex) {
String message = ex.getMessage();
if (message == null) {
message = ex.getClass().getName();
}
addError(message, null, ex);
}
/**
* Shows an error message with no detail or exception
*
* @param error
*/
public static void showErrorDialog(String error) {
addError(error, null, null);
}
/**
* Shows an error message with details
* @param error
* @param detail
*/
public static void showErrorDialog(String error, String detail) {
addError(error, detail, null);
}
/**
* Shows an error message with multi-line details
*
* @param error
* @param detail
*/
public static void showErrorDialog(String error, String[] detail) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < detail.length; i++) {
builder.append(detail[i]).append("\n");
}
addError(error, builder.toString(), null);
}
/**
* Shows an error message with an exception
*
* @param message
* @param ex
*/
public static void showErrorDialog(String message, Throwable ex) {
addError(message, null, ex);
}
/**
* Shows an error message with a multi-line detail message and exception
*
* @param message
* @param details
* @param ex
*/
public static void showErrorDialog(String message, String[] details, Throwable ex) {
StringBuilder builder = new StringBuilder();
for (int i = 0; details != null && i < details.length; i++) {
builder.append(details[i]).append("\n");
}
addError(message, builder.toString(), ex);
}
/**
* Shows an error message with a detail message and exception
*
* @param message
* @param details
* @param ex
*/
public static void showErrorDialog(String message, String details, Throwable ex) {
addError(message, details, ex);
}
private ErrorDialogTable getErrorTable() {
if (errorTable == null) {
errorTable = new ErrorDialogTable();
errorTable.addErrorTableListener(new ErrorDialogTableListener() {
public void showDetailsClicked(ErrorContainer container) {
if (container == currentError && showingErrorDetails) {
getErrorsSplitPane().setDividerLocation(1.0D);
getDetailTextArea().setText("");
showingErrorDetails = false;
showingErrorException = false;
} else {
currentError = container;
showingErrorDetails = true;
showingErrorException = false;
getErrorsSplitPane().setDividerLocation(0.5D);
getDetailTextArea().setText(container.getDetail());
getDetailTextArea().setCaretPosition(0);
}
}
public void showErrorClicked(ErrorContainer container) {
if (container == currentError && showingErrorException) {
getErrorsSplitPane().setDividerLocation(1.0D);
getDetailTextArea().setText("");
showingErrorDetails = false;
showingErrorException = false;
} else {
currentError = container;
showingErrorException = true;
showingErrorDetails = false;
StringWriter writer = new StringWriter();
PrintWriter printWriter = new PrintWriter(writer);
container.getError().printStackTrace(printWriter);
getErrorsSplitPane().setDividerLocation(0.5D);
getDetailTextArea().setText(writer.getBuffer().toString());
getDetailTextArea().setCaretPosition(0);
}
}
});
}
return errorTable;
}
/**
* This method initializes jScrollPane
*
* @return javax.swing.JScrollPane
*/
private JScrollPane getErrorScrollPane() {
if (errorScrollPane == null) {
errorScrollPane = new JScrollPane();
errorScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(
null, "Errors", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor()));
errorScrollPane.setViewportView(getErrorTable());
errorScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
}
return errorScrollPane;
}
/**
* This method initializes jTextArea
*
* @return javax.swing.JTextArea
*/
private JTextArea getDetailTextArea() {
if (detailTextArea == null) {
detailTextArea = new JTextArea();
detailTextArea.setEditable(false);
detailTextArea.setWrapStyleWord(true);
detailTextArea.setLineWrap(true);
}
return detailTextArea;
}
/**
* This method initializes jScrollPane
*
* @return javax.swing.JScrollPane
*/
private JScrollPane getDetailScrollPane() {
if (detailScrollPane == null) {
detailScrollPane = new JScrollPane();
detailScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
detailScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
detailScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(
null, "Detail", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor()));
detailScrollPane.setViewportView(getDetailTextArea());
}
return detailScrollPane;
}
/**
* This method initializes jButton
*
* @return javax.swing.JButton
*/
private JButton getClearButton() {
if (clearButton == null) {
clearButton = new JButton();
clearButton.setText("Clear");
clearButton.setToolTipText("Clears the dialog of any errors and closes it");
clearButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
errors.clear();
getErrorTable().clearTable();
getDetailTextArea().setText("");
getErrorsSplitPane().setDividerLocation(1.0D);
dispose();
}
});
}
return clearButton;
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
private JPanel getMainPanel() {
if (mainPanel == null) {
GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
gridBagConstraints1.gridx = 0;
gridBagConstraints1.fill = GridBagConstraints.HORIZONTAL;
gridBagConstraints1.gridy = 1;
GridBagConstraints gridBagConstraints = new GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.gridy = 0;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0D;
gridBagConstraints.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints.gridx = 0;
mainPanel = new JPanel();
mainPanel.setLayout(new GridBagLayout());
mainPanel.add(getErrorsSplitPane(), gridBagConstraints);
mainPanel.add(getButtonPanel(), gridBagConstraints1);
}
return mainPanel;
}
/**
* This method initializes jButton
*
* @return javax.swing.JButton
*/
private JButton getHideDialogButton() {
if (hideDialogButton == null) {
hideDialogButton = new JButton();
hideDialogButton.setToolTipText(
"Simply hides the dialog, preserving all displayed errors");
hideDialogButton.setText("Hide");
hideDialogButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
getDetailTextArea().setText("");
getErrorsSplitPane().setDividerLocation(1.0D);
dispose();
}
});
}
return hideDialogButton;
}
private JButton getLogErrorsButton() {
if (logErrorsButton == null) {
logErrorsButton = new JButton();
logErrorsButton.setText("Log Errors");
logErrorsButton.setToolTipText("Allows saving the error dialog's contents to disk");
logErrorsButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
saveLogFile();
}
});
}
return logErrorsButton;
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
private JPanel getButtonPanel() {
if (buttonPanel == null) {
GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
gridBagConstraints3.gridx = 2;
gridBagConstraints3.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints3.gridy = 0;
GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
gridBagConstraints2.gridx = 1;
gridBagConstraints2.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints2.gridy = 0;
GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
gridBagConstraints1.gridx = 0;
gridBagConstraints1.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints1.anchor = GridBagConstraints.WEST;
gridBagConstraints1.weightx = 1.0D;
gridBagConstraints1.gridy = 0;
buttonPanel = new JPanel();
buttonPanel.setLayout(new GridBagLayout());
buttonPanel.add(getLogErrorsButton(), gridBagConstraints1);
buttonPanel.add(getHideDialogButton(), gridBagConstraints2);
buttonPanel.add(getClearButton(), gridBagConstraints3);
}
return buttonPanel;
}
/**
* This method initializes jSplitPane
*
* @return javax.swing.JSplitPane
*/
private JSplitPane getErrorsSplitPane() {
if (errorsSplitPane == null) {
errorsSplitPane = new JSplitPane();
errorsSplitPane.setOrientation(javax.swing.JSplitPane.VERTICAL_SPLIT);
errorsSplitPane.setResizeWeight(1.0D);
errorsSplitPane.setTopComponent(getErrorScrollPane());
errorsSplitPane.setBottomComponent(getDetailScrollPane());
errorsSplitPane.setOneTouchExpandable(true);
}
return errorsSplitPane;
}
private static void centerDialog() {
// Determine the new location of the window
Frame owner = getOwnerFrame();
if (owner != null) {
int w = owner.getSize().width;
int h = owner.getSize().height;
int x = owner.getLocationOnScreen().x;
int y = owner.getLocationOnScreen().y;
Dimension dim = dialog.getSize();
dialog.setLocation(w / 2 + x - dim.width / 2, h / 2 + y - dim.height / 2);
}
}
private void saveLogFile() {
String nl = System.getProperty("line.separator");
JFileChooser chooser = new JFileChooser(lastFileLocation);
int choice = chooser.showSaveDialog(dialog);
if (choice == JFileChooser.APPROVE_OPTION) {
File file = chooser.getSelectedFile();
lastFileLocation = file.getAbsolutePath();
StringBuilder text = new StringBuilder();
synchronized (errors) {
for (ErrorContainer container : errors) {
text.append(container.getMessage()).append(" -- ")
.append(DateFormat.getDateTimeInstance().format(
container.getErrorDate())).append(nl);
if (container.getDetail() != null) {
text.append("DETAILS:").append(nl);
String[] details = container.getDetail().split("\n");
for (String detail : details) {
text.append(detail).append(nl);
}
}
if (container.getError() != null) {
text.append("EXCEPTION:").append(nl);
StringWriter writer = new StringWriter();
PrintWriter printWriter = new PrintWriter(writer);
container.getError().printStackTrace(printWriter);
String[] lines = writer.getBuffer().toString().split("\n");
for (String line : lines) {
text.append(line).append(nl);
}
}
text.append("---- ---- ---- ----").append(nl);
}
}
try {
FileWriter writer = new FileWriter(file);
writer.write(text.toString());
writer.flush();
writer.close();
} catch (IOException ex) {
ex.printStackTrace();
showErrorDialog(ex);
}
}
}
/**
 * Manual smoke test: pops the dialog with various combinations of
 * message, detail lines and exception (including nulls).
 */
public static void main(String[] args) {
    JFrame frame = new JFrame();
    frame.setTitle("HELLO THERE");
    frame.setSize(new Dimension(400, 400));
    frame.setVisible(true);
    CompositeErrorDialog.setOwnerFrame(frame);
    // Build the 30-line sample message with a StringBuilder instead of
    // repeated String concatenation in a loop.
    StringBuilder messageBuilder = new StringBuilder();
    for (int i = 0; i < 30; i++) {
        messageBuilder.append("This is line ").append(i).append('\n');
    }
    String message = messageBuilder.toString();
    CompositeErrorDialog.showErrorDialog("This is an error");
    CompositeErrorDialog.showErrorDialog(new Exception("This is an exception with a short message"));
    CompositeErrorDialog.showErrorDialog(new Exception(message));
    CompositeErrorDialog.showErrorDialog("This is an error with a long message", message.split("\n"));
    CompositeErrorDialog.showErrorDialog("This is an error with a null exception", message.split("\n"), null);
    CompositeErrorDialog.showErrorDialog("This is an error with null message", (Exception) null);
    CompositeErrorDialog.showErrorDialog("This is an error with null message and exception", (String) null, null);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.vector.complex.impl;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.MapVector;
import org.apache.drill.exec.vector.complex.StateTool;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
import com.google.common.base.Preconditions;
/**
 * Root {@link ComplexWriter} implementation. The root of the value being
 * written is either a map or a list; the corresponding writer is created
 * lazily by {@link #rootAsMap()}, {@link #directMap()} or
 * {@link #rootAsList()}, and {@link Mode} tracks which (if any) exists.
 */
public class ComplexWriterImpl extends AbstractFieldWriter implements ComplexWriter {

  /** Lazily created root map writer; non-null only once mode == MAP. */
  private SingleMapWriter mapRoot;
  /** Lazily created root list writer; non-null only once mode == LIST. */
  private SingleListWriter listRoot;
  /** Backing vector holding the root of the complex value. */
  private final MapVector container;

  /** Which root writer (if any) has been materialised so far. */
  Mode mode = Mode.INIT;
  /** Field name for the root; null for an anonymous/direct writer. */
  private final String name;
  private final boolean unionEnabled;

  private enum Mode { INIT, MAP, LIST }

  public ComplexWriterImpl(String name, MapVector container, boolean unionEnabled) {
    super(null);
    this.name = name;
    this.container = container;
    this.unionEnabled = unionEnabled;
  }

  public ComplexWriterImpl(String name, MapVector container) {
    this(name, container, false);
  }

  @Override
  public MaterializedField getField() {
    return container.getField();
  }

  @Override
  public int getValueCapacity() {
    return container.getValueCapacity();
  }

  /** Asserts that the current mode is one of the given modes. */
  private void check(Mode... modes) {
    StateTool.check(mode, modes);
  }

  @Override
  public void reset() {
    setPosition(0);
  }

  @Override
  public void close() throws Exception {
    clear();
    // Both roots are created lazily: guard against NPE when one (or both)
    // was never materialised. The original dereferenced mapRoot
    // unconditionally, failing for list-rooted or never-used writers.
    if (mapRoot != null) {
      mapRoot.close();
    }
    if (listRoot != null) {
      listRoot.close();
    }
  }

  @Override
  public void clear() {
    switch (mode) {
    case MAP:
      mapRoot.clear();
      break;
    case LIST:
      listRoot.clear();
      break;
    default:
      // INIT: nothing materialised yet, nothing to clear.
      break;
    }
  }

  @Override
  public void setValueCount(int count) {
    switch (mode) {
    case MAP:
      mapRoot.setValueCount(count);
      break;
    case LIST:
      listRoot.setValueCount(count);
      break;
    default:
      // INIT: no root writer to update.
      break;
    }
  }

  @Override
  public void setPosition(int index) {
    super.setPosition(index);
    switch (mode) {
    case MAP:
      mapRoot.setPosition(index);
      break;
    case LIST:
      listRoot.setPosition(index);
      break;
    default:
      // INIT: no root writer to reposition.
      break;
    }
  }

  /**
   * Returns a map writer rooted directly at the container. Only valid for
   * anonymous writers (no field name supplied).
   */
  public MapWriter directMap() {
    Preconditions.checkArgument(name == null);
    switch (mode) {
    case INIT:
      // container is already declared as MapVector; the original's cast
      // was redundant.
      mapRoot = new SingleMapWriter(container, this, unionEnabled);
      mapRoot.setPosition(idx());
      mode = Mode.MAP;
      break;
    case MAP:
      break;
    default:
      check(Mode.INIT, Mode.MAP);
    }
    return mapRoot;
  }

  @Override
  public MapWriter rootAsMap() {
    switch (mode) {
    case INIT:
      MapVector map = container.addOrGet(name, Types.required(MinorType.MAP), MapVector.class);
      mapRoot = new SingleMapWriter(map, this, unionEnabled);
      mapRoot.setPosition(idx());
      mode = Mode.MAP;
      break;
    case MAP:
      break;
    default:
      check(Mode.INIT, Mode.MAP);
    }
    return mapRoot;
  }

  @Override
  public void allocate() {
    if (mapRoot != null) {
      mapRoot.allocate();
    } else if (listRoot != null) {
      listRoot.allocate();
    }
  }

  @Override
  public ListWriter rootAsList() {
    switch (mode) {
    case INIT:
      listRoot = new SingleListWriter(name, container, this);
      listRoot.setPosition(idx());
      mode = Mode.LIST;
      break;
    case LIST:
      break;
    default:
      // Fixed copy-paste bug: the original checked (INIT, MAP), which let
      // mode == MAP fall through the check and silently return a null
      // listRoot instead of failing the state assertion.
      check(Mode.INIT, Mode.LIST);
    }
    return listRoot;
  }

  /**
   * Adapter presenting a {@link VectorContainer} as a {@link MapVector} so
   * that a ComplexWriter can write directly into a container.
   */
  private static class VectorAccessibleFacade extends MapVector {

    private final VectorContainer vc;

    public VectorAccessibleFacade(VectorContainer vc) {
      super("", null, null);
      this.vc = vc;
    }

    @Override
    public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
      // Delegate vector creation to the container, then register the
      // child locally so normal MapVector traversal still works.
      final ValueVector v = vc.addOrGet(name, type, clazz);
      putChild(name, v);
      return this.typeify(v, clazz);
    }
  }

  /** Creates a ComplexWriter that writes into the given container. */
  public static ComplexWriter getWriter(String name, VectorContainer container) {
    VectorAccessibleFacade vc = new VectorAccessibleFacade(container);
    return new ComplexWriterImpl(name, vc);
  }
}
| |
package org.alfasoftware.morf.metadata;
import static org.alfasoftware.morf.metadata.DataSetUtils.record;
import static org.alfasoftware.morf.metadata.SchemaUtils.column;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.Date;

import org.alfasoftware.morf.dataset.BaseRecordMatcher;
import org.alfasoftware.morf.dataset.Record;
import org.alfasoftware.morf.metadata.DataSetUtils.RecordBuilder;
import org.alfasoftware.morf.metadata.DataSetUtils.RecordDecorator;
import org.joda.time.LocalDate;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runners.model.Statement;
/**
* Tests {@link DataSetUtils}.
*
* @author Copyright (c) CHP Consulting Ltd. 2017
*/
public class TestDataSetUtils {

  // One column name per supported data type.
  private static final String INTEGER_COLUMN = "A";
  private static final String STRING_COLUMN = "B";
  private static final String BIG_DECIMAL_COLUMN = "C";
  private static final String BOOLEAN_COLUMN = "D";
  private static final String DATE_COLUMN = "E";
  private static final String LOCAL_DATE_COLUMN = "F";
  private static final String LONG_COLUMN = "H";
  private static final String BLOB_COLUMN = "I";
  private static final String UNTYPED_COLUMN = "J";

  // Sample values, one per column above.
  private static final Date DATE = java.sql.Date.valueOf("2010-07-02");
  private static final Boolean BOOLEAN = true;
  private static final BigDecimal BIG_DECIMAL = new BigDecimal("10.000");
  private static final String STRING = "2";
  private static final Integer INTEGER = 1;
  private static final LocalDate LOCAL_DATE = new LocalDate();
  private static final Long LONG = 3333333333333333333L;
  private static final byte[] BYTE_ARRAY = new byte[]{ -127, 127 };
  private static final String VALUE = "erg";

  /**
   * Serialises all tests in this class on the {@link CaseInsensitiveString}
   * class lock. Fixed: without the {@code @Rule} annotation JUnit silently
   * ignores the rule, so it was never applied.
   */
  @Rule
  public TestRule syncronisation = (base, description) -> new Statement() {
    @Override
    public void evaluate() throws Throwable {
      synchronized (CaseInsensitiveString.class) {
        base.evaluate();
      }
    }
  };

  // A record populated through the type-specific setters.
  @SuppressWarnings("deprecation")
  private static final RecordBuilder BASE_RECORD = record()
      .setInteger(INTEGER_COLUMN, INTEGER)
      .setString(STRING_COLUMN, STRING)
      .setBigDecimal(BIG_DECIMAL_COLUMN, BIG_DECIMAL)
      .setBoolean(BOOLEAN_COLUMN, BOOLEAN)
      .setDate(DATE_COLUMN, DATE)
      .setLocalDate(LOCAL_DATE_COLUMN, LOCAL_DATE)
      .setLong(LONG_COLUMN, LONG)
      .setByteArray(BLOB_COLUMN, BYTE_ARRAY)
      .value(UNTYPED_COLUMN, VALUE);

  // The same record populated through the generic setObject setter.
  private static final RecordBuilder BASE_RECORD_BY_OBJECT = record()
      .setObject(INTEGER_COLUMN, INTEGER)
      .setObject(STRING_COLUMN, STRING)
      .setObject(BIG_DECIMAL_COLUMN, BIG_DECIMAL)
      .setObject(BOOLEAN_COLUMN, BOOLEAN)
      .setObject(DATE_COLUMN, DATE)
      .setObject(LOCAL_DATE_COLUMN, LOCAL_DATE)
      .setObject(LONG_COLUMN, LONG)
      .setObject(BLOB_COLUMN, BYTE_ARRAY)
      .setObject(UNTYPED_COLUMN, VALUE);

  /**
   * All set values are reported by {@link Record#getValues()}.
   */
  @Test
  public void testValues() {
    assertThat(
      BASE_RECORD.getValues(),
      containsInAnyOrder(
        new DataValueBean(LOCAL_DATE_COLUMN, LOCAL_DATE),
        new DataValueBean(LONG_COLUMN, LONG),
        new DataValueBean(BLOB_COLUMN.toLowerCase(), BYTE_ARRAY), // Ensure equals is type insensitive
        new DataValueBean(UNTYPED_COLUMN, VALUE),
        new DataValueBean(INTEGER_COLUMN, INTEGER),
        new DataValueBean(STRING_COLUMN, STRING),
        new DataValueBean(BIG_DECIMAL_COLUMN, BIG_DECIMAL),
        new DataValueBean(BOOLEAN_COLUMN, BOOLEAN),
        new DataValueBean(DATE_COLUMN, DATE)
      )
    );
  }

  /**
   * Nested decorators report merged values, with inner overrides honoured.
   */
  @Test
  public void testValuesDecorator() {
    assertThat(
      RecordDecorator.of( // Nested decorators
        RecordDecorator.of(BASE_RECORD)
          .setString(STRING_COLUMN.toLowerCase(), "Overriden") // Ensure overriding honours case insensitivity
          .setBoolean(BOOLEAN_COLUMN, !BOOLEAN))
        .setLong(LONG_COLUMN, 125123L)
        .getValues(),
      containsInAnyOrder(
        new DataValueBean(BIG_DECIMAL_COLUMN, BIG_DECIMAL),
        new DataValueBean(BOOLEAN_COLUMN, !BOOLEAN),
        new DataValueBean(DATE_COLUMN, DATE),
        new DataValueBean(LOCAL_DATE_COLUMN, LOCAL_DATE),
        new DataValueBean(INTEGER_COLUMN, INTEGER),
        new DataValueBean(STRING_COLUMN, "Overriden"),
        new DataValueBean(LONG_COLUMN, 125123L),
        new DataValueBean(BLOB_COLUMN, BYTE_ARRAY),
        new DataValueBean(UNTYPED_COLUMN, VALUE)
      )
    );
  }

  /**
   * Check that getters get the right responses for all the type conversions they should support.
   */
  @Test
  public void testValueAndObjectGetters() {
    assertThat(BASE_RECORD, originalMatcher());
    assertThat(BASE_RECORD_BY_OBJECT, originalMatcher());
  }

  /** Integer values convert to long, double and BigDecimal on read. */
  @Test
  public void testIntegerGetters() {
    assertEquals(-1, record().setInteger(INTEGER_COLUMN, -1).getInteger(INTEGER_COLUMN).intValue());
    assertEquals(0, record().setInteger(INTEGER_COLUMN, 0).getInteger(INTEGER_COLUMN).intValue());
    assertEquals(1, record().setInteger(INTEGER_COLUMN, 1).getInteger(INTEGER_COLUMN).intValue());
    assertEquals(INTEGER, BASE_RECORD.getInteger(INTEGER_COLUMN));
    assertEquals(INTEGER.longValue(), BASE_RECORD.getLong(INTEGER_COLUMN).longValue());
    assertEquals(INTEGER.doubleValue(), BASE_RECORD.getDouble(INTEGER_COLUMN).doubleValue(), 0.00001);
    assertEquals(new BigDecimal(INTEGER), BASE_RECORD.getBigDecimal(INTEGER_COLUMN));
  }

  /** BigDecimal values convert to long, int and double on read. */
  @Test
  public void testBigDecimalGetters() {
    assertEquals(BigDecimal.ZERO, record().setBigDecimal(BIG_DECIMAL_COLUMN, BigDecimal.ZERO).getBigDecimal(BIG_DECIMAL_COLUMN));
    assertEquals(BigDecimal.ONE, record().setBigDecimal(BIG_DECIMAL_COLUMN, BigDecimal.ONE).getBigDecimal(BIG_DECIMAL_COLUMN));
    assertEquals(BIG_DECIMAL.longValue(), BASE_RECORD.getLong(BIG_DECIMAL_COLUMN).longValue());
    assertEquals(BIG_DECIMAL.intValue(), BASE_RECORD.getInteger(BIG_DECIMAL_COLUMN).intValue());
    assertEquals(BIG_DECIMAL.doubleValue(), BASE_RECORD.getDouble(BIG_DECIMAL_COLUMN).doubleValue(), 0.00001);
    assertEquals(BIG_DECIMAL, BASE_RECORD.getBigDecimal(BIG_DECIMAL_COLUMN));
  }

  /** Long values convert to int, double and BigDecimal on read. */
  @Test
  public void testLongGetters() {
    assertEquals(-1L, record().setLong(LONG_COLUMN, -1L).getLong(LONG_COLUMN).longValue());
    assertEquals(0L, record().setLong(LONG_COLUMN, 0L).getLong(LONG_COLUMN).longValue());
    assertEquals(1L, record().setLong(LONG_COLUMN, 1L).getLong(LONG_COLUMN).longValue());
    assertEquals(LONG, BASE_RECORD.getLong(LONG_COLUMN));
    assertEquals(LONG.intValue(), BASE_RECORD.getInteger(LONG_COLUMN).intValue());
    assertEquals(LONG.doubleValue(), BASE_RECORD.getDouble(LONG_COLUMN).doubleValue(), 0.00001);
    assertEquals(new BigDecimal(LONG), BASE_RECORD.getBigDecimal(LONG_COLUMN));
  }

  /** Byte arrays round-trip unchanged. */
  @Test
  public void testByteArrayGetters() {
    assertArrayEquals(BYTE_ARRAY, BASE_RECORD.getByteArray(BLOB_COLUMN));
  }

  /** Double values convert to BigDecimal on read. */
  @Test
  public void testDoubleGetters() {
    String col = "CoL";
    assertEquals(0.1123D, record().setDouble(col, 0.1123D).getDouble(col).doubleValue(), 0.00001);
    assertEquals(new BigDecimal("0.1123"), record().setDouble(col, 0.1123D).getBigDecimal(col).setScale(4, RoundingMode.HALF_UP));
  }

  /** String values convert to every other supported type on read. */
  @Test
  public void testStringGetters() {
    String col = "CoL";
    assertEquals("010.00", record().setString(col, "010.00").getString(col));
    assertEquals(10L, record().setString(col, "10") .getLong(col).longValue());
    assertEquals(10, record().setString(col, "10") .getInteger(col).intValue());
    assertEquals(10D, record().setString(col, "10") .getDouble(col).doubleValue(), 0.00001);
    assertEquals(new BigDecimal("10.00"), record().setString(col, "010.00").getBigDecimal(col));
    assertEquals(new LocalDate(2009, 12, 31), record().setString(col, "2009-12-31").getLocalDate(col));
    assertEquals(java.sql.Date.valueOf("2009-12-31"), record().setString(col, "2009-12-31").getDate(col));
    assertEquals(true, record().setString(col, "true") .getBoolean(col));
    assertEquals(false, record().setString(col, "false").getBoolean(col));
    assertEquals(false, record().setString(col, "tru") .getBoolean(col)); // non-"true" parses as false
    byte[] blobValue = new byte[] { 1, 2, 3, 4, 5 };
    assertEquals(encodeToBase64String(blobValue), record().setString(col, encodeToBase64String(blobValue)).getString(col));
    assertArrayEquals(blobValue, record().setString(col, encodeToBase64String(blobValue)).getByteArray(col));
  }

  /** Booleans round-trip, and convert to their string form. */
  @Test
  public void testBooleanGetters() {
    String col = "CoL";
    assertEquals(true, record().setBoolean(col, true).getBoolean(col));
    assertEquals(false, record().setBoolean(col, false).getBoolean(col));
    assertEquals("true", record().setBoolean(col, true).getString(col));
    assertEquals("false", record().setBoolean(col, false).getString(col));
  }

  /** java.sql.Date values convert to string and LocalDate on read. */
  @Test
  public void testDateGetters() {
    String col = "CoL";
    Date dateValue = java.sql.Date.valueOf("1977-10-10");
    LocalDate localDateValue = new LocalDate(1977, 10, 10);
    assertEquals(dateValue, record().setDate(col, dateValue).getDate(col));
    assertEquals(dateValue.toString(), record().setDate(col, dateValue).getString(col));
    assertEquals(localDateValue, record().setDate(col, dateValue).getLocalDate(col));
  }

  /** LocalDate values convert to string and java.sql.Date on read. */
  @Test
  public void testLocalDateGetters() {
    String col = "CoL";
    Date dateValue = java.sql.Date.valueOf("1977-10-10");
    LocalDate localDateValue = new LocalDate(1977, 10, 10);
    assertEquals(localDateValue, record().setLocalDate(col, localDateValue).getLocalDate(col));
    assertEquals(localDateValue.toString(), record().setLocalDate(col, localDateValue).getString(col));
    assertEquals(dateValue, record().setLocalDate(col, localDateValue).getDate(col));
  }

  /**
   * Check all data types arrive unchanged if unmodified.
   */
  @Test
  public void testIdentityDecorator() {
    assertThat(
      RecordDecorator.of(BASE_RECORD),
      originalMatcher()
    );
  }

  /**
   * Check that a zero capacity decorator is permitted.
   */
  @Test
  public void testIdentityDecoratorWithZeroCapacity() {
    assertThat(
      RecordDecorator.ofWithInitialCapacity(BASE_RECORD, 0),
      originalMatcher()
    );
  }

  /**
   * Check that array resizing of decorators works.
   */
  @Test
  public void testDecoratorExpansion() {
    assertThat(
      RecordDecorator.ofWithInitialCapacity(BASE_RECORD, 1)
        .setString("additional1", "TEST1")
        .setString("additional2", "TEST2")
        .setString("additional3", "TEST3"),
      originalMatcher()
        .withValue("additional1", "TEST1")
        .withValue("ADDITIONAL2", "TEST2")
        .withValue("Additional3", "TEST3")
    );
  }

  /**
   * Ensures that we do indeed get null when there is no value.
   */
  @Test
  public void testNoValueFromDecorator() {
    assertNull(RecordDecorator.of(record()).getInteger(INTEGER_COLUMN));
  }

  /**
   * Ensures that we do indeed get null when null is explicitly specified in the underlying record.
   */
  @Test
  public void testNullValueFromDecorator() {
    assertNull(RecordDecorator.of(record().setInteger(INTEGER_COLUMN, null)).getInteger(INTEGER_COLUMN));
  }

  /**
   * Ensures that we can null-out a record even if the underlying has a value
   */
  @Test
  public void testNullOverrideFromDecorator() {
    assertNull(RecordDecorator.of(record().setInteger(INTEGER_COLUMN, 3)).setInteger(INTEGER_COLUMN, null).getInteger(INTEGER_COLUMN));
  }

  /**
   * Test overrides of all types using string input values.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testOverrideAndExtendUsingValue() {
    assertThat(
      RecordDecorator.of(BASE_RECORD)
        .value(INTEGER_COLUMN, "3")
        .value(BIG_DECIMAL_COLUMN, "4.23")
        .value(BOOLEAN_COLUMN, "false")
        .value(DATE_COLUMN, "1990-01-01")
        .value(LOCAL_DATE_COLUMN, "1990-01-02")
        .value(LONG_COLUMN, "2")
        .value(BLOB_COLUMN, encodeToBase64String(new byte[] { -126, 126 })),
      mutatedMatcher()
    );
  }

  /**
   * Test overrides of all types using typed input values.
   */
  @Test
  public void testOverrideAndExtendUsingTyped() {
    assertThat(
      RecordDecorator.of(BASE_RECORD)
        .setInteger(INTEGER_COLUMN, 3) // Type match
        .setString(BIG_DECIMAL_COLUMN, "4.23") // Type conversion
        .setBoolean(BOOLEAN_COLUMN, false)
        .setLocalDate(DATE_COLUMN, new LocalDate(1990, 1, 1)) // Type conversion
        .setDate(LOCAL_DATE_COLUMN, java.sql.Date.valueOf("1990-01-02")) // Type conversion
        .setInteger(LONG_COLUMN, 2) // Type conversion
        .setByteArray(BLOB_COLUMN, new byte[] { -126, 126 }),
      mutatedMatcher()
    );
  }

  /**
   * Tests that if we specify a value twice, we get the new value.
   */
  @Test
  public void testOverwriteValue() {
    assertEquals("B", record().setString("a", "A").setString("a", "B").getString("a"));
  }

  /**
   * Tests that if we specify a value twice and the second is a null, we get null.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testOverwriteValueWithNull() {
    assertNull(record().setString("a", "A").value("a", null).getString("a"));
  }

  /**
   * Tests that if we specify a value twice where the first was a null, we get the new value.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testOverwriteNullWithValue() {
    assertEquals("B", record().setString("a", null).value("a", "B").getString("a"));
  }

  /**
   * Ensures that internment works correctly when serialization comes into play,
   * with the resulting objects showing equivalence.
   */
  @Test
  public void testSerializationDeserialization() throws IOException, ClassNotFoundException {
    Record copy = serializeAndDeserialize(BASE_RECORD);
    assertThat(copy, equalTo(BASE_RECORD));
  }

  /**
   * Ensures that metadata is correctly interned after deserialization.
   */
  @Test
  public void testInternedMetadataAfterDeserialization() throws ClassNotFoundException, IOException {
    RecordBuilderImpl original = (RecordBuilderImpl) record()
      .setString("one", "1")
      .setString("two", "2")
      .setString("three", "3");
    RecordBuilderImpl copy = (RecordBuilderImpl) serializeAndDeserialize(original);
    // Extend the original
    original.setString("four", "4");
    // We'll get an NPE here if readResolve isn't implemented on DataValueLookupMetadata
    // The result should share the metadata with original
    copy.setString("four", "4");
    assertThat(copy, equalTo(original));
    // This will fail if we've not correctly interned the metadata object (e.g.
    // we've cheated and created a coherent copy)
    assertTrue(original.hasSameMetadata(copy));
  }

  /**
   * Round-trips a record through Java serialization.
   */
  private RecordBuilder serializeAndDeserialize(RecordBuilder record) throws IOException, ClassNotFoundException {
    try (ByteArrayOutputStream bao = new ByteArrayOutputStream();
         ObjectOutputStream oo = new ObjectOutputStream(bao)) {
      oo.writeObject(record);
      oo.flush();
      try (ByteArrayInputStream bai = new ByteArrayInputStream(bao.toByteArray());
           ObjectInputStream oi = new ObjectInputStream(bai)) {
        return (RecordBuilder) oi.readObject();
      }
    }
  }

  /** Matcher for {@link #BASE_RECORD} in its unmodified state. */
  private BaseRecordMatcher originalMatcher() {
    return BaseRecordMatcher.create()
      .withValue(INTEGER_COLUMN, INTEGER.toString())
      .withObject(column(INTEGER_COLUMN, DataType.INTEGER), INTEGER)
      .withValue(STRING_COLUMN, STRING)
      .withObject(column(STRING_COLUMN, DataType.STRING), STRING)
      .withValue(BIG_DECIMAL_COLUMN, "10")
      .withObject(column(BIG_DECIMAL_COLUMN, DataType.DECIMAL, 13, BIG_DECIMAL.scale()), BIG_DECIMAL)
      .withValue(BOOLEAN_COLUMN, BOOLEAN.toString())
      .withObject(column(BOOLEAN_COLUMN, DataType.BOOLEAN), BOOLEAN)
      .withValue(DATE_COLUMN, DATE.toString())
      .withObject(column(DATE_COLUMN, DataType.DATE), DATE)
      .withValue(LOCAL_DATE_COLUMN, LOCAL_DATE.toString())
      .withObject(column(LOCAL_DATE_COLUMN, DataType.DATE), java.sql.Date.valueOf(LOCAL_DATE.toString()))
      .withValue(LONG_COLUMN, LONG.toString())
      .withObject(column(LONG_COLUMN, DataType.BIG_INTEGER), LONG)
      .withValue(BLOB_COLUMN, encodeToBase64String(BYTE_ARRAY))
      .withObject(column(BLOB_COLUMN, DataType.BLOB), BYTE_ARRAY)
      .withValue(UNTYPED_COLUMN, VALUE)
      .withObject(column(UNTYPED_COLUMN, DataType.STRING), VALUE);
  }

  /** Matcher for {@link #BASE_RECORD} after the standard set of overrides. */
  private BaseRecordMatcher mutatedMatcher() {
    return BaseRecordMatcher.create()
      .withValue(INTEGER_COLUMN, "3")
      .withObject(column(INTEGER_COLUMN, DataType.INTEGER), 3)
      .withValue(STRING_COLUMN, STRING)
      .withObject(column(STRING_COLUMN, DataType.STRING), STRING)
      .withValue(BIG_DECIMAL_COLUMN, "4.23")
      .withObject(column(BIG_DECIMAL_COLUMN, DataType.DECIMAL, 13, 2), new BigDecimal("4.23"))
      .withValue(BOOLEAN_COLUMN, "false")
      .withObject(column(BOOLEAN_COLUMN, DataType.BOOLEAN), false)
      .withValue(DATE_COLUMN, "1990-01-01")
      .withObject(column(DATE_COLUMN, DataType.DATE), java.sql.Date.valueOf("1990-01-01"))
      .withValue(LOCAL_DATE_COLUMN, "1990-01-02")
      .withObject(column(LOCAL_DATE_COLUMN, DataType.DATE), java.sql.Date.valueOf("1990-01-02"))
      .withValue(LONG_COLUMN, "2")
      .withObject(column(LONG_COLUMN, DataType.BIG_INTEGER), 2L)
      .withValue(BLOB_COLUMN, encodeToBase64String(new byte[] { -126, 126 }))
      .withObject(column(BLOB_COLUMN, DataType.BLOB), new byte[] { -126, 126 })
      .withValue(UNTYPED_COLUMN, VALUE)
      .withObject(column(UNTYPED_COLUMN, DataType.STRING), VALUE);
  }

  /** Encodes bytes as a Base64 string (uses encodeToString rather than a manual new String). */
  private String encodeToBase64String(byte[] toEncode) {
    return java.util.Base64.getEncoder().encodeToString(toEncode);
  }
}
| |
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import com.xerox.amazonws.sdb.Domain;
import com.xerox.amazonws.sdb.DomainMetadataResult;
import com.xerox.amazonws.sdb.Item;
import com.xerox.amazonws.sdb.ItemAttribute;
import com.xerox.amazonws.sdb.ItemListener;
import com.xerox.amazonws.sdb.ListDomainsResult;
import com.xerox.amazonws.sdb.QueryResult;
import com.xerox.amazonws.sdb.QueryWithAttributesResult;
import com.xerox.amazonws.sdb.SDBException;
import com.xerox.amazonws.sdb.SimpleDB;
/**
* Sample application demonstrating various operations against SDS.
*
*/
public class sdbShell {
static int itemCount;
/**
* Executes specified query against given domain while demonstrating pagination.
*
* @param domain query domain
* @param queryString query string
* @param maxResults maximum number of values to return per page of results
*/
private static void executeQuery(Domain domain, String queryString, int maxResults) {
String nextToken = "";
do {
try {
QueryResult result = domain.listItems(queryString, nextToken, maxResults);
List<Item> items = result.getItemList();
for (Item i : items) {
System.out.println(i.getIdentifier());
}
nextToken = result.getNextToken();
}
catch (SDBException ex) {
System.out.println("Query '" + queryString + "' Failure: ");
ex.printStackTrace();
}
} while (nextToken != null && nextToken.trim().length() > 0);
System.out.println("Done.");
}
/**
* Executes specified query against given domain.
*
* @param domain query domain
* @param queryString query string
*/
private static void executeQuery(Domain domain, String queryString) {
executeQuery(domain, queryString, 0);
}
/**
* Main execution body.
*
* @param args command line arguments (none required or processed)
*/
public static void main(String [] args) {
if (args.length < 2) {
System.err.println("usage: sdbShell <access key> <secret key> [command file]");
System.exit(1);
}
try {
String awsAccessId = args[0];
String awsSecretKey = args[1];
if (awsAccessId == null || awsAccessId.trim().length() == 0)
{
System.out.println("Access key not set");
return;
}
if (awsSecretKey == null || awsSecretKey.trim().length() == 0)
{
System.out.println("Secret key not set");
return;
}
SimpleDB sds = new SimpleDB(awsAccessId, awsSecretKey, true);
sds.setSignatureVersion(2);
InputStream iStr = System.in;
if (args.length > 2) {
iStr = new FileInputStream(args[2]);
}
BufferedReader rdr = new BufferedReader(new InputStreamReader(iStr));
boolean done = false;
Domain dom = null;
while (!done) {
System.out.print("sdbShell> ");
String line = rdr.readLine();
if (line == null) { // exit, if end of input
System.exit(0);
}
StringTokenizer st = new StringTokenizer(line);
if (st.countTokens() == 0) {
continue;
}
String cmd = st.nextToken().toLowerCase();
if (cmd.equals("q") || cmd.equals("quit")) {
done = true;
}
else if (cmd.equals("h") || cmd.equals("?") || cmd.equals("help")) {
showHelp();
}
else if (cmd.equals("d") || cmd.equals("domains")) {
ListDomainsResult result = sds.listDomains();
List<Domain> domains = result.getDomainList();
for (Domain d : domains) {
System.out.println(d.getName());
}
}
else if (cmd.equals("ad") || cmd.equals("adddomain")) {
if (st.countTokens() != 1) {
System.out.println("Error: need domain name.");
continue;
}
Domain d = sds.createDomain(st.nextToken());
}
else if (cmd.equals("dd") || cmd.equals("deletedomain")) {
if (st.countTokens() != 1) {
System.out.println("Error: need domain name.");
continue;
}
sds.deleteDomain(st.nextToken());
}
else if (cmd.equals("sd") || cmd.equals("setdomain")) {
if (st.countTokens() != 1) {
System.out.println("Error: need domain name.");
continue;
}
dom = sds.getDomain(st.nextToken());
}
else if (cmd.equals("dm") || cmd.equals("domainmetadata")) {
if (checkDomain(dom)) {
DomainMetadataResult metadata = dom.getMetadata();
System.out.println("Domain Metadata for : "+dom.getName());
System.out.println(" ItemCount: "+metadata.getItemCount());
System.out.println(" AttributeNameCount: "+metadata.getAttributeNameCount());
System.out.println(" AttributeValueCount: "+metadata.getAttributeValueCount());
System.out.println(" ItemNamesSizeBytes: "+metadata.getItemNamesSizeBytes());
System.out.println(" AttributeNamesSizeBytes: "+metadata.getAttributeNamesSizeBytes());
System.out.println(" AttributeValuesSizeBytes: "+metadata.getAttributeValuesSizeBytes());
System.out.println(" Timestamp: "+metadata.getTimestamp());
}
}
else if (cmd.equals("aa") || cmd.equals("addattr")) {
if (checkDomain(dom)) {
if (st.countTokens() < 3) {
System.out.println("Error: need item id, attribute name and value.");
continue;
}
Item item = dom.getItem(st.nextToken());
List<ItemAttribute> list = new ArrayList<ItemAttribute>();
String key = st.nextToken();
String value = st.nextToken();
if (line.indexOf('"') > -1) {
value = line.substring(line.indexOf('"')+1, line.lastIndexOf('"'));
}
list.add(new ItemAttribute(key, value, false));
item.putAttributes(list);
}
}
else if (cmd.equals("ra") || cmd.equals("replaceattr")) {
if (checkDomain(dom)) {
if (st.countTokens() < 3) {
System.out.println("Error: need item id, attribute name and value.");
continue;
}
Item item = dom.getItem(st.nextToken());
List<ItemAttribute> list = new ArrayList<ItemAttribute>();
String key = st.nextToken();
String value = st.nextToken();
if (line.indexOf('"') > -1) {
value = line.substring(line.indexOf('"')+1, line.lastIndexOf('"'));
}
list.add(new ItemAttribute(key, value, true));
item.putAttributes(list);
}
}
else if (cmd.equals("da") || cmd.equals("deleteattr")) {
if (checkDomain(dom)) {
if (st.countTokens() != 2) {
System.out.println("Error: need item id and attribute name.");
continue;
}
Item item = dom.getItem(st.nextToken());
List<ItemAttribute> list = new ArrayList<ItemAttribute>();
list.add(new ItemAttribute(st.nextToken(), null, true));
item.deleteAttributes(list);
}
}
else if (cmd.equals("di") || cmd.equals("deleteitem")) {
if (checkDomain(dom)) {
if (st.countTokens() != 1) {
System.out.println("Error: need item id.");
continue;
}
dom.deleteItem(st.nextToken());
}
}
else if (cmd.equals("i") || cmd.equals("item")) {
if (checkDomain(dom)) {
if (st.countTokens() != 1) {
System.out.println("Error: need item id.");
continue;
}
Item item = dom.getItem(st.nextToken());
List<ItemAttribute> attrs = item.getAttributes(new ArrayList<String>());
System.out.println("Item : "+item.getIdentifier());
for (ItemAttribute attr : attrs) {
System.out.println(" "+attr.getName()+" = "+attr.getValue());
}
}
}
else if (cmd.equals("gi") || cmd.equals("getitems")) {
if (checkDomain(dom)) {
itemCount = 0;
dom.setMaxThreads(20);
// long start = System.currentTimeMillis();
//dom.listItemsAttributes("", new ItemListener() {
dom.listItemsWithAttributes("", null, new ItemListener() {
public synchronized void itemAvailable(String id, List<ItemAttribute> attrs) {
System.out.println("Item : "+id);
for (ItemAttribute attr : attrs) {
System.out.println(" "+attr.getName()+" = "+filter(attr.getValue()));
}
itemCount++;
}
});
// long end = System.currentTimeMillis();
// System.out.println("Time : "+((int)(end-start)/1000.0));
// System.out.println("Number of items returned : "+itemCount);
}
}
else if (cmd.equals("ga") || cmd.equals("getattributes")) {
if (checkDomain(dom)) {
itemCount = 0;
// long start = System.currentTimeMillis();
String nextToken = null;
do {
QueryWithAttributesResult qwar = dom.listItemsWithAttributes("", null, nextToken, 250);
Map<String, List<ItemAttribute>> items = qwar.getItems();
for (String id : items.keySet()) {
System.out.println("Item : "+id);
for (ItemAttribute attr : items.get(id)) {
System.out.println(" "+attr.getName()+" = "+filter(attr.getValue()));
}
itemCount++;
}
nextToken = qwar.getNextToken();
} while (nextToken != null && !nextToken.trim().equals(""));
// long end = System.currentTimeMillis();
// System.out.println("Time : "+((int)(end-start)/1000.0));
// System.out.println("Number of items returned : "+itemCount);
}
}
else if (cmd.equals("select")) {
if (checkDomain(dom)) {
itemCount = 0;
// long start = System.currentTimeMillis();
String nextToken = null;
do {
QueryWithAttributesResult qwar = dom.selectItems(line, nextToken);
Map<String, List<ItemAttribute>> items = qwar.getItems();
for (String id : items.keySet()) {
System.out.println("Item : "+id);
for (ItemAttribute attr : items.get(id)) {
System.out.println(" "+attr.getName()+" = "+filter(attr.getValue()));
}
itemCount++;
}
nextToken = qwar.getNextToken();
System.out.println("Box Usage :"+qwar.getBoxUsage());
} while (nextToken != null && !nextToken.trim().equals(""));
// long end = System.currentTimeMillis();
// System.out.println("Time : "+((int)(end-start)/1000.0));
// System.out.println("Number of items returned : "+itemCount);
}
}
else if (cmd.equals("l") || cmd.equals("list")) {
if (checkDomain(dom)) executeQuery(dom, null);
}
else {
if (checkDomain(dom)) executeQuery(dom, line);
}
}
} catch (Exception ex) {
ex.printStackTrace();
if (ex.getCause() != null) {
System.err.println("caused by : ");
ex.getCause().printStackTrace();
}
}
}
private static boolean checkDomain(Domain dom) {
if (dom == null) {
System.out.println("domain must be set!");
return false;
}
return true;
}
/**
 * Prints the shell's command reference to stdout, one command per line.
 */
private static void showHelp() {
    final String[] helpLines = {
        "SimpleDB Shell Commands:",
        "adddomain(ad) <domain name> : add new domain",
        "deletedomain(dd) <domain name> : delete domain (not functional in SDS yet)",
        "domains(d) : list domains",
        "setdomain(sd) <domain name> : set current domain",
        "domainmetadata(dm) : show current domain metadata",
        "addattr(aa) <item id> <attr name> <attr value> : add attribute to item in current domain",
        "replaceattr(ra) <item id> <attr name> <attr value> : replace attribute to item in current domain",
        "deleteattr(da) <item id> <attr name> : delete attribute of item in current domain",
        "deleteitem(di) <item id> : delete item in current domain",
        "list(l) or <filter string> : lists items matching filter in current domain",
        "item(i) <item id> : shows item attributes",
        "select <expression> : runs a SQL like query against the domain specified",
        "getitems(gi) : shows attributes for multiple items",
        "help(h,?) : show help",
        "quit(q) : exit the shell"
    };
    for (String line : helpLines) {
        System.out.println(line);
    }
}
private static String filter(String val) {
if (val.length() == 0) return val; // fast exit
StringBuilder ret = new StringBuilder();
char [] chars = new char[val.length()];
val.getChars(0, val.length(), chars, 0);
for (int i=0; i<chars.length; i++) {
if (!(chars[i]>0 && chars[i]<128)) {
ret.append("\\u");
ret.append(Integer.toHexString(chars[i]));
}
else {
ret.append(chars[i]);
}
}
return ret.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop.impl.igfs;
import java.lang.reflect.Field;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.FileSystemConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper;
import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration;
import org.apache.ignite.igfs.IgfsIpcEndpointType;
import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest;
import org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.junit.Test;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.events.EventType.EVT_JOB_MAPPED;
import static org.apache.ignite.events.EventType.EVT_TASK_FAILED;
import static org.apache.ignite.events.EventType.EVT_TASK_FINISHED;
/**
* IPC cache test.
*/
public class IgniteHadoopFileSystemIpcCacheSelfTest extends IgfsCommonAbstractTest {
/** Path to test hadoop configuration. */
private static final String HADOOP_FS_CFG = "modules/core/src/test/config/hadoop/core-site.xml";
/** Group size. */
public static final int GRP_SIZE = 128;
/** Started grid counter. Used to offset the management and IPC ports so every started node listens on unique ports. */
private static int cnt;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
FileSystemConfiguration igfsCfg = new FileSystemConfiguration();
igfsCfg.setName("igfs");
// Offset the management port by the started-grid counter so nodes do not clash.
igfsCfg.setManagementPort(FileSystemConfiguration.DFLT_MGMT_PORT + cnt);
igfsCfg.setDataCacheConfiguration(dataCacheConfiguration());
igfsCfg.setMetaCacheConfiguration(metaCacheConfiguration());
// Expose IGFS over a shared-memory IPC endpoint; the client-side caching of
// these IPC connections is what testIpcCache() exercises.
IgfsIpcEndpointConfiguration endpointCfg = new IgfsIpcEndpointConfiguration();
endpointCfg.setType(IgfsIpcEndpointType.SHMEM);
endpointCfg.setPort(IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT + cnt);
igfsCfg.setIpcEndpointConfiguration(endpointCfg);
igfsCfg.setBlockSize(512 * 1024); // Together with group blocks mapper will yield 64M per node groups.
cfg.setFileSystemConfiguration(igfsCfg);
cfg.setCacheConfiguration(dataCacheConfiguration());
cfg.setIncludeEventTypes(EVT_TASK_FAILED, EVT_TASK_FINISHED, EVT_JOB_MAPPED);
// Disable the communication SPI's own shared-memory port (presumably so the IGFS
// shmem endpoint is the only shared-memory listener -- confirm).
TcpCommunicationSpi commSpi = new TcpCommunicationSpi();
commSpi.setSharedMemoryPort(-1);
cfg.setCommunicationSpi(commSpi);
cnt++;
return cfg;
}
/**
* Gets the data cache configuration (partitioned, transactional, fully synchronous writes).
*
* @return Cache configuration.
*/
private CacheConfiguration dataCacheConfiguration() {
CacheConfiguration ccfg = defaultCacheConfiguration();
ccfg.setName("partitioned");
ccfg.setCacheMode(PARTITIONED);
ccfg.setNearConfiguration(null);
ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
// Group mapper keeps GRP_SIZE consecutive blocks on the same node (see the block size set above).
ccfg.setAffinityMapper(new IgfsGroupDataBlocksKeyMapper(GRP_SIZE));
ccfg.setBackups(0);
ccfg.setAtomicityMode(TRANSACTIONAL);
return ccfg;
}
/**
* Gets the meta cache configuration (replicated, transactional, fully synchronous writes).
*
* @return Cache configuration.
*/
private CacheConfiguration metaCacheConfiguration() {
CacheConfiguration ccfg = defaultCacheConfiguration();
ccfg.setName("replicated");
ccfg.setCacheMode(REPLICATED);
ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
ccfg.setAtomicityMode(TRANSACTIONAL);
return ccfg;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
// Start a 4-node topology once for the whole test class.
startGrids(4);
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
G.stopAll(true);
}
/**
* Test how IPC cache map works.
*
* <p>Verifies that {@code HadoopIgfsIpcIo} instances are cached per endpoint, reference-counted
* via their {@code activeCnt} field, and stopped only when the last file system using them is
* closed. The private state is reached via reflection.</p>
*
* @throws Exception If failed.
*/
@SuppressWarnings("unchecked")
@Test
public void testIpcCache() throws Exception {
// Reach into HadoopIgfsIpcIo's static IO cache and per-instance reference counter.
Field cacheField = HadoopIgfsIpcIo.class.getDeclaredField("ipcCache");
cacheField.setAccessible(true);
Field activeCntField = HadoopIgfsIpcIo.class.getDeclaredField("activeCnt");
activeCntField.setAccessible(true);
Map<String, HadoopIgfsIpcIo> cache = (Map<String, HadoopIgfsIpcIo>)cacheField.get(null);
cache.clear(); // avoid influence of previous tests in the same process.
String name = "igfs:" + getTestIgniteInstanceName(0) + "@";
Configuration cfg = new Configuration();
cfg.addResource(U.resolveIgniteUrl(HADOOP_FS_CFG));
// Disable Hadoop's own FileSystem caching so every FileSystem.get() goes through the IPC cache.
cfg.setBoolean("fs.igfs.impl.disable.cache", true);
cfg.setBoolean(String.format(HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_EMBED, name), true);
// Ensure that existing IO is reused.
FileSystem fs1 = FileSystem.get(new URI("igfs://" + name + "/"), cfg);
assertEquals(1, cache.size());
HadoopIgfsIpcIo io = null;
System.out.println("CACHE: " + cache);
// NOTE(review): "10500" looks like IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT for node 0 -- confirm.
for (String key : cache.keySet()) {
if (key.contains("10500")) {
io = cache.get(key);
break;
}
}
assert io != null;
assertEquals(1, ((AtomicInteger)activeCntField.get(io)).get());
// Ensure that when IO is used by multiple file systems and one of them is closed, IO is not stopped.
FileSystem fs2 = FileSystem.get(new URI("igfs://" + name + "/abc"), cfg);
assertEquals(1, cache.size());
assertEquals(2, ((AtomicInteger)activeCntField.get(io)).get());
fs2.close();
assertEquals(1, cache.size());
assertEquals(1, ((AtomicInteger)activeCntField.get(io)).get());
Field stopField = HadoopIgfsIpcIo.class.getDeclaredField("stopping");
stopField.setAccessible(true);
assert !(Boolean)stopField.get(io);
// Ensure that IO is stopped when nobody else needs it.
fs1.close();
assert cache.isEmpty();
assert (Boolean)stopField.get(io);
}
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
public class JobEntrySerializationHelper implements Serializable {
  private static final long serialVersionUID = -3924431164206698711L;

  /** Unit of indentation emitted per nesting level by {@link #indent(StringBuffer, int)}. */
  private static final String INDENT_STRING = " ";

  /**
   * This method will perform the work that used to be done by hand in each kettle input meta for: readData(Node node).
   * We handle all primitive types, complex user types, arrays, lists and any number of nested object levels, via
   * recursion of this method.
   *
   * <p>Reading is deliberately best-effort: a failure on one field is printed and the remaining fields are still
   * processed (see the Throwable catches in the helpers below).</p>
   *
   * @param object
   *          The object to be hydrated from XML
   * @param node
   *          The node holding this object's persisted state
   */
  public static void read( Object object, Node node ) {
    // all declared fields: public, private, protected, package -- including inherited
    // fields from super classes (getAllDeclaredFields walks the class hierarchy)
    Field[] declaredFields = getAllDeclaredFields( object.getClass() );
    for ( Field field : declaredFields ) {
      // ignore fields which are final, static or transient
      if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() )
          || Modifier.isTransient( field.getModifiers() ) ) {
        continue;
      }
      // if the field is not accessible (private), we'll open it up so we can operate on it
      boolean accessible = field.isAccessible();
      if ( !accessible ) {
        field.setAccessible( true );
      }
      try {
        if ( field.getType().isArray() ) {
          readArrayField( object, node, field );
        } else if ( Collection.class.isAssignableFrom( field.getType() ) ) {
          readCollectionField( object, node, field );
        } else {
          readSimpleField( object, node, field );
        }
      } finally {
        // restore the original accessibility flag
        if ( !accessible ) {
          field.setAccessible( false );
        }
      }
    }
  }

  /**
   * Reads an array-typed field: recreates the array from the child nodes of the field's
   * {@code <fieldName class="...">} element and sets it on {@code object}.
   */
  private static void readArrayField( Object object, Node node, Field field ) {
    try {
      // get the node (if available) for the field
      Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
      if ( fieldNode == null ) {
        // doesn't exist (this is possible if fields were empty/null when persisted)
        return;
      }
      // get the Java classname for the array elements
      String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
      Class<?> clazz = componentClassFor( fieldClassName );
      // count the real element nodes; TEXT_NODEs are whitespace artifacts and must not inflate the size
      NodeList childrenNodes = fieldNode.getChildNodes();
      int arrayLength = 0;
      for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
        if ( childrenNodes.item( i ).getNodeType() != Node.TEXT_NODE ) {
          arrayLength++;
        }
      }
      // create a new, appropriately sized array and set it on the field right away
      Object array = Array.newInstance( clazz, arrayLength );
      field.set( object, array );
      int arrayIndex = 0;
      for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
        Node child = childrenNodes.item( i );
        if ( child.getNodeType() == Node.TEXT_NODE ) {
          continue;
        }
        // roll through all of our array elements setting them as encountered
        if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz ) ) {
          Constructor<?> constructor = clazz.getConstructor( String.class );
          Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) );
          Array.set( array, arrayIndex++, instance );
        } else if ( Boolean.class.isAssignableFrom( clazz ) || boolean.class.isAssignableFrom( clazz ) ) {
          Array.set( array, arrayIndex++, Boolean.valueOf( XMLHandler.getTagAttribute( child, "value" ) ) );
        } else if ( Integer.class.isAssignableFrom( clazz ) || int.class.isAssignableFrom( clazz ) ) {
          Array.set( array, arrayIndex++, Integer.valueOf( XMLHandler.getTagAttribute( child, "value" ) ) );
        } else if ( Float.class.isAssignableFrom( clazz ) || float.class.isAssignableFrom( clazz ) ) {
          Array.set( array, arrayIndex++, Float.valueOf( XMLHandler.getTagAttribute( child, "value" ) ) );
        } else if ( Double.class.isAssignableFrom( clazz ) || double.class.isAssignableFrom( clazz ) ) {
          Array.set( array, arrayIndex++, Double.valueOf( XMLHandler.getTagAttribute( child, "value" ) ) );
        } else if ( Long.class.isAssignableFrom( clazz ) || long.class.isAssignableFrom( clazz ) ) {
          Array.set( array, arrayIndex++, Long.valueOf( XMLHandler.getTagAttribute( child, "value" ) ) );
        } else {
          // complex element type: instantiate it, store it, then recurse the same way as the parent
          Object instance = clazz.newInstance();
          Array.set( array, arrayIndex++, instance );
          read( instance, child );
        }
      }
    } catch ( Throwable t ) {
      // intentionally best-effort: one bad field must not abort the rest of the read
      t.printStackTrace();
      // TODO: log this
    }
  }

  /**
   * Resolves an element class name, mapping the primitive names write() emits
   * ("boolean", "int", "float", "double", "long") to their Class objects.
   */
  private static Class<?> componentClassFor( String fieldClassName ) throws ClassNotFoundException {
    if ( fieldClassName.equals( "boolean" ) ) {
      return boolean.class;
    } else if ( fieldClassName.equals( "int" ) ) {
      return int.class;
    } else if ( fieldClassName.equals( "float" ) ) {
      return float.class;
    } else if ( fieldClassName.equals( "double" ) ) {
      return double.class;
    } else if ( fieldClassName.equals( "long" ) ) {
      return long.class;
    } else {
      // normal, non primitive array class
      return Class.forName( fieldClassName );
    }
  }

  /**
   * Reads a Collection-typed field: instantiates the field's concrete collection type and
   * fills it element by element from the child nodes of {@code <fieldName class="...">}.
   */
  private static void readCollectionField( Object object, Node node, Field field ) {
    try {
      // get the node (if available) for the field
      Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
      if ( fieldNode == null ) {
        // doesn't exist (this is possible if fields were empty/null when persisted)
        return;
      }
      // get the Java classname for the collection elements
      String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
      Class<?> clazz = Class.forName( fieldClassName );
      // we already know the field is a collection; instantiate its concrete type
      @SuppressWarnings( "unchecked" )
      Collection<Object> collection = (Collection<Object>) field.getType().newInstance();
      field.set( object, collection );
      // iterate over all of the element nodes and add them one by one as encountered
      NodeList childrenNodes = fieldNode.getChildNodes();
      for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
        Node child = childrenNodes.item( i );
        if ( child.getNodeType() == Node.TEXT_NODE ) {
          continue;
        }
        if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz )
            || Boolean.class.isAssignableFrom( clazz ) ) {
          // simple element types are rebuilt through their String constructor
          Constructor<?> constructor = clazz.getConstructor( String.class );
          collection.add( constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) ) );
        } else {
          // complex element type: instantiate it, add it, then recurse the same way as the parent
          Object instance = clazz.newInstance();
          collection.add( instance );
          read( instance, child );
        }
      }
    } catch ( Throwable t ) {
      // intentionally best-effort: one bad field must not abort the rest of the read
      t.printStackTrace();
      // TODO: log this
    }
  }

  /**
   * Reads a scalar field: a primitive, a String/Number/Boolean, or a nested complex object.
   */
  private static void readSimpleField( Object object, Node node, Field field ) {
    try {
      String value = XMLHandler.getTagValue( node, field.getName() );
      if ( value == null ) {
        return;
      }
      if ( field.isAnnotationPresent( Password.class ) ) {
        value = Encr.decryptPasswordOptionallyEncrypted( value );
      }
      if ( field.getType().isPrimitive() && "".equals( value ) ) {
        // skip setting of primitives if we see an empty value
      } else if ( "".equals( value ) ) {
        field.set( object, value );
      } else if ( field.getType().isPrimitive() ) {
        // special primitive handling
        if ( double.class.isAssignableFrom( field.getType() ) ) {
          field.set( object, Double.parseDouble( value ) );
        } else if ( float.class.isAssignableFrom( field.getType() ) ) {
          field.set( object, Float.parseFloat( value ) );
        } else if ( long.class.isAssignableFrom( field.getType() ) ) {
          field.set( object, Long.parseLong( value ) );
        } else if ( int.class.isAssignableFrom( field.getType() ) ) {
          field.set( object, Integer.parseInt( value ) );
        } else if ( byte.class.isAssignableFrom( field.getType() ) ) {
          // FIX: this used to call field.set( object, value.getBytes() ), which assigns
          // a byte[] to a primitive byte field and always throws IllegalArgumentException
          // (silently swallowed below, leaving the field at its default). write() emits
          // the byte via toString(), so parse that single value back.
          field.set( object, Byte.parseByte( value ) );
        } else if ( boolean.class.isAssignableFrom( field.getType() ) ) {
          field.set( object, "true".equalsIgnoreCase( value ) );
        }
      } else if ( String.class.isAssignableFrom( field.getType() )
          || Number.class.isAssignableFrom( field.getType() )
          || Boolean.class.isAssignableFrom( field.getType() ) ) {
        // simple wrapper types are rebuilt through their String constructor
        Constructor<?> constructor = field.getType().getConstructor( String.class );
        field.set( object, constructor.newInstance( value ) );
      } else {
        // we don't know what we're handling, but we'll give it a shot
        Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
        if ( fieldNode == null ) {
          // doesn't exist (this is possible if fields were empty/null when persisted)
          return;
        }
        // complex type: instantiate the persisted class and recurse, the same way as the parent
        String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
        Class<?> clazz = Class.forName( fieldClassName );
        Object instance = clazz.newInstance();
        field.set( object, instance );
        read( instance, fieldNode );
      }
    } catch ( Throwable t ) {
      // TODO: log this
      t.printStackTrace();
    }
  }

  /**
   * This method will perform the work that used to be done by hand in each kettle input meta for: getXML(). We handle
   * all primitive types, complex user types, arrays, lists and any number of nested object levels, via recursion of
   * this method.
   *
   * @param object
   *          The object whose non-final/static/transient fields are serialized (no-op when {@code null})
   * @param indentLevel
   *          Current nesting depth, used only for pretty-printing
   * @param buffer
   *          The buffer the XML is appended to
   */
  public static void write( Object object, int indentLevel, StringBuffer buffer ) {
    // don't even attempt to persist
    if ( object == null ) {
      return;
    }
    // all declared fields, including inherited fields from super classes
    Field[] declaredFields = getAllDeclaredFields( object.getClass() );
    for ( Field field : declaredFields ) {
      // ignore fields which are final, static or transient
      if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() )
          || Modifier.isTransient( field.getModifiers() ) ) {
        continue;
      }
      // if the field is not accessible (private), we'll open it up so we can operate on it
      boolean accessible = field.isAccessible();
      if ( !accessible ) {
        field.setAccessible( true );
      }
      try {
        Object fieldValue = field.get( object );
        // no value? null? skip it!
        if ( fieldValue == null || "".equals( fieldValue ) ) {
          continue;
        }
        if ( field.isAnnotationPresent( Password.class ) && String.class.isAssignableFrom( field.getType() ) ) {
          fieldValue = Encr.encryptPasswordIfNotUsingVariables( String.class.cast( fieldValue ) );
        }
        if ( field.getType().isPrimitive() || String.class.isAssignableFrom( field.getType() )
            || Number.class.isAssignableFrom( field.getType() ) || Boolean.class.isAssignableFrom( field.getType() ) ) {
          // simple value: a single <fieldName>value</fieldName> tag
          indent( buffer, indentLevel );
          buffer.append( XMLHandler.addTagValue( field.getName(), fieldValue.toString() ) );
        } else if ( field.getType().isArray() ) {
          // write array values
          int length = Array.getLength( fieldValue );
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append(
            "<" + field.getName() + " class=\"" + fieldValue.getClass().getComponentType().getName() + "\">" )
            .append( Const.CR );
          for ( int i = 0; i < length; i++ ) {
            Object childObject = Array.get( fieldValue, i );
            // handle all strings/numbers
            if ( String.class.isAssignableFrom( childObject.getClass() )
                || Number.class.isAssignableFrom( childObject.getClass() ) ) {
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else if ( Boolean.class.isAssignableFrom( childObject.getClass() )
                || boolean.class.isAssignableFrom( childObject.getClass() ) ) {
              // handle booleans (special case)
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else {
              // array element is a user defined/complex type, recurse into it
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR );
              write( childObject, indentLevel + 1, buffer );
              indent( buffer, indentLevel + 1 );
              buffer.append( "</" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR );
            }
          }
          // close node
          // NOTE(review): unlike the collection branch below, this close tag is prefixed with a
          // literal space instead of indent(buffer, indentLevel). Harmless to the XML parser, but
          // left byte-identical because this string is persisted to the repository -- confirm
          // before normalizing.
          buffer.append( " </" + field.getName() + ">" ).append( Const.CR );
        } else if ( Collection.class.isAssignableFrom( field.getType() ) ) {
          // write collection values
          Collection<?> collection = (Collection<?>) fieldValue;
          if ( collection.size() == 0 ) {
            continue;
          }
          // element class is taken from the first element; mixed-type collections are not supported
          Class<?> listClass = collection.iterator().next().getClass();
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + listClass.getName() + "\">" ).append( Const.CR );
          for ( Object childObject : collection ) {
            // handle all strings/numbers
            if ( String.class.isAssignableFrom( childObject.getClass() )
                || Number.class.isAssignableFrom( childObject.getClass() ) ) {
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( listClass.getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else if ( Boolean.class.isAssignableFrom( childObject.getClass() )
                || boolean.class.isAssignableFrom( childObject.getClass() ) ) {
              // handle booleans (special case)
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( listClass.getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else {
              // collection element is a user defined/complex type, recurse into it
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" + listClass.getSimpleName() + ">" ).append( Const.CR );
              write( childObject, indentLevel + 1, buffer );
              indent( buffer, indentLevel + 1 );
              buffer.append( "</" + listClass.getSimpleName() + ">" ).append( Const.CR );
            }
          }
          // close node
          indent( buffer, indentLevel );
          buffer.append( "</" + field.getName() + ">" ).append( Const.CR );
        } else {
          // if we don't know what it is, let's treat it like a first class citizen and try to write it out
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getName() + "\">" ).append(
            Const.CR );
          write( fieldValue, indentLevel + 1, buffer );
          // close node
          indent( buffer, indentLevel );
          buffer.append( "</" + field.getName() + ">" ).append( Const.CR );
        }
      } catch ( Throwable t ) {
        // intentionally best-effort: one bad field must not abort the rest of the write
        t.printStackTrace();
        // TODO: log this
      } finally {
        // restore the original accessibility flag
        if ( !accessible ) {
          field.setAccessible( false );
        }
      }
    }
  }

  /**
   * Get all declared fields of the provided class including any inherited class fields.
   *
   * @param aClass
   *          Class to look up fields for
   * @return All declared fields for the class provided
   */
  private static Field[] getAllDeclaredFields( Class<?> aClass ) {
    List<Field> fields = new ArrayList<Field>();
    while ( aClass != null ) {
      fields.addAll( Arrays.asList( aClass.getDeclaredFields() ) );
      aClass = aClass.getSuperclass();
    }
    return fields.toArray( new Field[0] );
  }

  /**
   * Handle saving of the input (object) to the kettle repository using the most simple method available, by calling
   * write and then saving the xml as an attribute.
   *
   * @param object
   *          The object to serialize and store
   * @param rep
   *          The repository to store the attribute in
   * @param id_job
   *          The owning job's id
   * @param id_jobentry
   *          The owning job entry's id
   * @throws KettleException
   *           if the repository rejects the attribute
   */
  public static void saveRep( Object object, Repository rep, ObjectId id_job, ObjectId id_jobentry )
    throws KettleException {
    StringBuffer sb = new StringBuffer( 1024 );
    sb.append( "<job-xml>" );
    write( object, 0, sb );
    sb.append( "</job-xml>" );
    rep.saveJobEntryAttribute( id_job, id_jobentry, "job-xml", sb.toString() );
  }

  /**
   * Handle reading of the input (object) from the kettle repository by getting the xml from the repository attribute
   * string and then re-hydrate the object with our already existing read method.
   *
   * @param object
   *          The object to hydrate in place
   * @param rep
   *          The repository to read the attribute from
   * @param id_job
   *          The owning job's id
   * @param databases
   *          Unused; kept for interface compatibility
   * @param slaveServers
   *          Unused; kept for interface compatibility
   * @throws KettleException
   *           if the stored XML cannot be parsed
   */
  public static void loadRep( Object object, Repository rep, ObjectId id_job, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    try {
      String xml = rep.getJobEntryAttributeString( id_job, "job-xml" );
      ByteArrayInputStream bais = new ByteArrayInputStream( xml.getBytes() );
      Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse( bais );
      read( object, doc.getDocumentElement() );
    } catch ( ParserConfigurationException ex ) {
      throw new KettleException( ex.getMessage(), ex );
    } catch ( SAXException ex ) {
      throw new KettleException( ex.getMessage(), ex );
    } catch ( IOException ex ) {
      throw new KettleException( ex.getMessage(), ex );
    }
  }

  /** Appends {@code indentLevel} copies of {@link #INDENT_STRING} to {@code sb}. */
  private static void indent( StringBuffer sb, int indentLevel ) {
    for ( int i = 0; i < indentLevel; i++ ) {
      sb.append( INDENT_STRING );
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ConfigurationException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerDiagnosticsUpdateEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperation;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationExecutor;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandler;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandlerException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandlerModule;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.DefaultLinuxContainerRuntime;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.DelegatingLinuxContainerRuntime;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.DockerLinuxContainerRuntime;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntime;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerExecutionException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerLivenessContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerPrepareContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerReacquisitionContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerSignalContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerStartContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.DeletionAsUserContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.LocalizerStartContext;
import org.apache.hadoop.yarn.server.nodemanager.util.CgroupsLCEResourcesHandler;
import org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler;
import org.apache.hadoop.yarn.server.nodemanager.util.LCEResourcesHandler;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
/**
* <p>This class provides {@link Container} execution using a native
* {@code container-executor} binary. By using a helper written in native code,
* this class is able to do several things that the
* {@link DefaultContainerExecutor} cannot, such as execution of applications
* as the applications' owners, provide localization that takes advantage of
* mapping the application owner to a UID on the execution host, resource
* management through Linux CGROUPS, and Docker support.</p>
*
* <p>If {@code hadoop.security.authentication} is set to {@code simple},
* then the
* {@code yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users}
* property will determine whether the {@code LinuxContainerExecutor} runs
* processes as the application owner or as the default user, as set in the
* {@code yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user}
* property.</p>
*
* <p>The {@code LinuxContainerExecutor} will manage applications through an
* appropriate {@link LinuxContainerRuntime} instance. This class uses a
* {@link DelegatingLinuxContainerRuntime} instance, which will delegate calls
* to either a {@link DefaultLinuxContainerRuntime} instance or a
* {@link DockerLinuxContainerRuntime} instance, depending on the job's
* configuration.</p>
*
* @see LinuxContainerRuntime
* @see DelegatingLinuxContainerRuntime
* @see DefaultLinuxContainerRuntime
* @see DockerLinuxContainerRuntime
* @see DockerLinuxContainerRuntime#isDockerContainerRequested
*/
public class LinuxContainerExecutor extends ContainerExecutor {
  private static final Logger LOG =
      LoggerFactory.getLogger(LinuxContainerExecutor.class);

  // User that containers run as in non-secure mode when impersonation is
  // disallowed (see getRunAsUser()).
  private String nonsecureLocalUser;
  // Pattern that submitted user names must match in non-secure mode.
  private Pattern nonsecureLocalUserPattern;
  // Legacy resources handler; cgroups variants are replaced by the resource
  // handler chain in getResourcesHandler().
  private LCEResourcesHandler resourcesHandler;
  // Whether a "nice" adjustment was explicitly configured, and its value.
  private boolean containerSchedPriorityIsSet = false;
  private int containerSchedPriorityAdjustment = 0;
  // When false, non-secure mode runs containers as the submitting user.
  private boolean containerLimitUsers;
  // Chain of cgroup/traffic-control handlers, bootstrapped in init().
  private ResourceHandler resourceHandlerChain;
  // Runtime that actually launches/signals containers; defaults to a
  // DelegatingLinuxContainerRuntime created in init().
  private LinuxContainerRuntime linuxContainerRuntime;
  /**
   * The container exit code.
   */
  public enum ExitCode {
    // NOTE(review): these values presumably mirror the exit codes defined in
    // the native container-executor binary — confirm against its sources.
    SUCCESS(0),
    INVALID_ARGUMENT_NUMBER(1),
    INVALID_COMMAND_PROVIDED(3),
    INVALID_NM_ROOT_DIRS(5),
    SETUID_OPER_FAILED(6),
    UNABLE_TO_EXECUTE_CONTAINER_SCRIPT(7),
    UNABLE_TO_SIGNAL_CONTAINER(8),
    INVALID_CONTAINER_PID(9),
    OUT_OF_MEMORY(18),
    INITIALIZE_USER_FAILED(20),
    PATH_TO_DELETE_IS_NULL(21),
    INVALID_CONTAINER_EXEC_PERMISSIONS(22),
    INVALID_CONFIG_FILE(24),
    SETSID_OPER_FAILED(25),
    WRITE_PIDFILE_FAILED(26),
    WRITE_CGROUP_FAILED(27),
    TRAFFIC_CONTROL_EXECUTION_FAILED(28),
    DOCKER_RUN_FAILED(29),
    ERROR_OPENING_DOCKER_FILE(30),
    ERROR_READING_DOCKER_FILE(31),
    FEATURE_DISABLED(32),
    COULD_NOT_CREATE_SCRIPT_COPY(33),
    COULD_NOT_CREATE_CREDENTIALS_FILE(34),
    COULD_NOT_CREATE_WORK_DIRECTORIES(35),
    COULD_NOT_CREATE_APP_LOG_DIRECTORIES(36),
    COULD_NOT_CREATE_TMP_DIRECTORIES(37),
    ERROR_CREATE_CONTAINER_DIRECTORIES_ARGUMENTS(38);

    // The numeric process exit code for this constant.
    private final int code;

    ExitCode(int exitCode) {
      this.code = exitCode;
    }

    /**
     * Get the exit code as an int.
     * @return the exit code as an int
     */
    public int getExitCode() {
      return code;
    }

    @Override
    public String toString() {
      return String.valueOf(code);
    }
  }

  /**
   * Default constructor to allow for creation through reflection.
   */
  public LinuxContainerExecutor() {
  }

  /**
   * Create a LinuxContainerExecutor with a provided
   * {@link LinuxContainerRuntime}. Used primarily for testing.
   *
   * @param linuxContainerRuntime the runtime to use
   */
  public LinuxContainerExecutor(LinuxContainerRuntime linuxContainerRuntime) {
    this.linuxContainerRuntime = linuxContainerRuntime;
  }
  /**
   * Read executor settings from the configuration: the resources handler,
   * the optional container scheduling (nice) priority, and the non-secure
   * mode user-mapping options.
   *
   * @param conf the configuration to read settings from
   */
  @Override
  public void setConf(Configuration conf) {
    super.setConf(conf);

    resourcesHandler = getResourcesHandler(conf);

    // Only apply a nice adjustment when the key is explicitly present.
    containerSchedPriorityIsSet = false;
    if (conf.get(YarnConfiguration.NM_CONTAINER_EXECUTOR_SCHED_PRIORITY)
        != null) {
      containerSchedPriorityIsSet = true;
      containerSchedPriorityAdjustment = conf
          .getInt(YarnConfiguration.NM_CONTAINER_EXECUTOR_SCHED_PRIORITY,
          YarnConfiguration.DEFAULT_NM_CONTAINER_EXECUTOR_SCHED_PRIORITY);
    }
    nonsecureLocalUser = conf.get(
        YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY,
        YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LOCAL_USER);
    nonsecureLocalUserPattern = Pattern.compile(
        conf.get(YarnConfiguration.NM_NONSECURE_MODE_USER_PATTERN_KEY,
            YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_USER_PATTERN));
    containerLimitUsers = conf.getBoolean(
        YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS,
        YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LIMIT_USERS);
    // Running containers as the submitting user without authentication is a
    // security-relevant choice, so call it out loudly.
    if (!containerLimitUsers) {
      LOG.warn(YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS +
          ": impersonation without authentication enabled");
    }
  }
private LCEResourcesHandler getResourcesHandler(Configuration conf) {
LCEResourcesHandler handler = ReflectionUtils.newInstance(
conf.getClass(YarnConfiguration.NM_LINUX_CONTAINER_RESOURCES_HANDLER,
DefaultLCEResourcesHandler.class, LCEResourcesHandler.class), conf);
// Stop using CgroupsLCEResourcesHandler
// use the resource handler chain instead
// ResourceHandlerModule will create the cgroup cpu module if
// CgroupsLCEResourcesHandler is set
if (handler instanceof CgroupsLCEResourcesHandler) {
handler =
ReflectionUtils.newInstance(DefaultLCEResourcesHandler.class, conf);
}
handler.setConf(conf);
return handler;
}
void verifyUsernamePattern(String user) {
if (!UserGroupInformation.isSecurityEnabled() &&
!nonsecureLocalUserPattern.matcher(user).matches()) {
throw new IllegalArgumentException("Invalid user name '" + user + "'," +
" it must match '" + nonsecureLocalUserPattern.pattern() + "'");
}
}
String getRunAsUser(String user) {
if (UserGroupInformation.isSecurityEnabled() ||
!containerLimitUsers) {
return user;
} else {
return nonsecureLocalUser;
}
}
/**
* Get the path to the {@code container-executor} binary. The path will
* be absolute.
*
* @param conf the {@link Configuration}
* @return the path to the {@code container-executor} binary
*/
protected String getContainerExecutorExecutablePath(Configuration conf) {
String yarnHomeEnvVar =
System.getenv(ApplicationConstants.Environment.HADOOP_YARN_HOME.key());
File hadoopBin = new File(yarnHomeEnvVar, "bin");
String defaultPath =
new File(hadoopBin, "container-executor").getAbsolutePath();
return null == conf
? defaultPath
: conf.get(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH,
defaultPath);
}
  /**
   * Add a niceness level to the process that will be executed. Adds
   * {@code -n <nice>} to the given command. The niceness level will be
   * taken from the
   * {@code yarn.nodemanager.container-executor.os.sched.priority} property.
   *
   * @param command the command to which to add the niceness setting.
   */
  protected void addSchedPriorityCommand(List<String> command) {
    // No-op unless the priority key was explicitly configured (see setConf).
    if (containerSchedPriorityIsSet) {
      command.addAll(Arrays.asList("nice", "-n",
          Integer.toString(containerSchedPriorityAdjustment)));
    }
  }

  /**
   * Get the executor used to run privileged operations via the native
   * container-executor binary. Overridable for testing.
   *
   * @return the {@link PrivilegedOperationExecutor} for this configuration
   */
  protected PrivilegedOperationExecutor getPrivilegedOperationExecutor() {
    return PrivilegedOperationExecutor.getInstance(getConf());
  }
  /**
   * Verify the native executor's setup, bootstrap the resource handler
   * chain, create the container runtime (if one was not injected), and
   * initialize the legacy resources handler.
   *
   * @throws IOException if any of the above steps fails
   */
  @Override
  public void init() throws IOException {
    Configuration conf = super.getConf();

    // Send command to executor which will just start up,
    // verify configuration/permissions and exit
    try {
      PrivilegedOperation checkSetupOp = new PrivilegedOperation(
          PrivilegedOperation.OperationType.CHECK_SETUP);
      PrivilegedOperationExecutor privilegedOperationExecutor =
          getPrivilegedOperationExecutor();

      privilegedOperationExecutor.executePrivilegedOperation(checkSetupOp,
          false);
    } catch (PrivilegedOperationException e) {
      int exitCode = e.getExitCode();
      LOG.warn("Exit code from container executor initialization is : "
          + exitCode, e);

      throw new IOException("Linux container executor not configured properly"
          + " (error=" + exitCode + ")", e);
    }

    // The chain may legitimately be null when no resource handlers are
    // configured; callers of resourceHandlerChain check for null.
    try {
      resourceHandlerChain = ResourceHandlerModule
          .getConfiguredResourceHandlerChain(conf);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Resource handler chain enabled = " + (resourceHandlerChain
            != null));
      }
      if (resourceHandlerChain != null) {
        LOG.debug("Bootstrapping resource handler chain");
        resourceHandlerChain.bootstrap(conf);
      }
    } catch (ResourceHandlerException e) {
      LOG.error("Failed to bootstrap configured resource subsystems! ", e);
      throw new IOException(
          "Failed to bootstrap configured resource subsystems!");
    }

    // Respect a runtime injected via the testing constructor.
    try {
      if (linuxContainerRuntime == null) {
        LinuxContainerRuntime runtime = new DelegatingLinuxContainerRuntime();

        runtime.initialize(conf);
        this.linuxContainerRuntime = runtime;
      }
    } catch (ContainerExecutionException e) {
      LOG.error("Failed to initialize linux container runtime(s)!", e);
      throw new IOException("Failed to initialize linux container runtime(s)!");
    }

    resourcesHandler.init(this);
  }
@Override
public void startLocalizer(LocalizerStartContext ctx)
throws IOException, InterruptedException {
Path nmPrivateContainerTokensPath = ctx.getNmPrivateContainerTokens();
InetSocketAddress nmAddr = ctx.getNmAddr();
String user = ctx.getUser();
String appId = ctx.getAppId();
String locId = ctx.getLocId();
LocalDirsHandlerService dirsHandler = ctx.getDirsHandler();
List<String> localDirs = dirsHandler.getLocalDirs();
List<String> logDirs = dirsHandler.getLogDirs();
verifyUsernamePattern(user);
String runAsUser = getRunAsUser(user);
PrivilegedOperation initializeContainerOp = new PrivilegedOperation(
PrivilegedOperation.OperationType.INITIALIZE_CONTAINER);
List<String> prefixCommands = new ArrayList<>();
addSchedPriorityCommand(prefixCommands);
initializeContainerOp.appendArgs(
runAsUser,
user,
Integer.toString(
PrivilegedOperation.RunAsUserCommand.INITIALIZE_CONTAINER
.getValue()),
appId,
nmPrivateContainerTokensPath.toUri().getPath().toString(),
StringUtils.join(PrivilegedOperation.LINUX_FILE_PATH_SEPARATOR,
localDirs),
StringUtils.join(PrivilegedOperation.LINUX_FILE_PATH_SEPARATOR,
logDirs));
File jvm = // use same jvm as parent
new File(new File(System.getProperty("java.home"), "bin"), "java");
initializeContainerOp.appendArgs(jvm.toString());
initializeContainerOp.appendArgs("-classpath");
initializeContainerOp.appendArgs(System.getProperty("java.class.path"));
String javaLibPath = System.getProperty("java.library.path");
if (javaLibPath != null) {
initializeContainerOp.appendArgs("-Djava.library.path=" + javaLibPath);
}
initializeContainerOp.appendArgs(ContainerLocalizer.getJavaOpts(getConf()));
List<String> localizerArgs = new ArrayList<>();
buildMainArgs(localizerArgs, user, appId, locId, nmAddr, localDirs);
initializeContainerOp.appendArgs(localizerArgs);
try {
Configuration conf = super.getConf();
PrivilegedOperationExecutor privilegedOperationExecutor =
getPrivilegedOperationExecutor();
privilegedOperationExecutor.executePrivilegedOperation(prefixCommands,
initializeContainerOp, null, null, false, true);
} catch (PrivilegedOperationException e) {
int exitCode = e.getExitCode();
LOG.warn("Exit code from container " + locId + " startLocalizer is : "
+ exitCode, e);
throw new IOException("Application " + appId + " initialization failed" +
" (exitCode=" + exitCode + ") with output: " + e.getOutput(), e);
}
}
  /**
   * Set up the {@link ContainerLocalizer}.
   *
   * @param command the current ShellCommandExecutor command line
   * @param user localization user
   * @param appId localized app id
   * @param locId localizer id
   * @param nmAddr nodemanager address
   * @param localDirs list of local dirs
   * @see ContainerLocalizer#buildMainArgs
   */
  @VisibleForTesting
  public void buildMainArgs(List<String> command, String user, String appId,
      String locId, InetSocketAddress nmAddr, List<String> localDirs) {
    // Thin, overridable delegate so tests can substitute the localizer
    // command line.
    ContainerLocalizer.buildMainArgs(command, user, appId, locId, nmAddr,
        localDirs);
  }
  /**
   * Give the container runtime a chance to prepare (e.g. pull images or set
   * up mounts) before launch, passing along the localization results and
   * launch commands as execution attributes.
   *
   * @param ctx the container preparation context
   * @throws IOException if the runtime fails to prepare the container
   */
  @Override
  public void prepareContainer(ContainerPrepareContext ctx) throws IOException {

    ContainerRuntimeContext.Builder builder =
        new ContainerRuntimeContext.Builder(ctx.getContainer());

    builder.setExecutionAttribute(LOCALIZED_RESOURCES,
        ctx.getLocalizedResources())
        .setExecutionAttribute(USER, ctx.getUser())
        .setExecutionAttribute(CONTAINER_LOCAL_DIRS,
            ctx.getContainerLocalDirs())
        .setExecutionAttribute(CONTAINER_RUN_CMDS, ctx.getCommands())
        .setExecutionAttribute(CONTAINER_ID_STR,
            ctx.getContainer().getContainerId().toString());

    try {
      linuxContainerRuntime.prepareContainer(builder.build());
    } catch (ContainerExecutionException e) {
      throw new IOException("Unable to prepare container: ", e);
    }
  }
  /**
   * Launch a container: run resource-handler pre-start hooks (cgroups,
   * traffic control), then hand off to the configured
   * {@link LinuxContainerRuntime}. Post-execution cleanup runs in the
   * finally block regardless of outcome.
   *
   * @param ctx the container start context
   * @return the container exit code (0 on a clean launch)
   * @throws IOException if the resource handler chain fails before launch
   * @throws ConfigurationException on unrecoverable executor
   *         misconfiguration (see the exit-code checks below)
   */
  @Override
  public int launchContainer(ContainerStartContext ctx)
      throws IOException, ConfigurationException {
    Container container = ctx.getContainer();
    String user = ctx.getUser();

    verifyUsernamePattern(user);

    ContainerId containerId = container.getContainerId();

    resourcesHandler.preExecute(containerId,
        container.getResource());
    String resourcesOptions = resourcesHandler.getResourcesOption(containerId);
    String tcCommandFile = null;

    try {
      if (resourceHandlerChain != null) {
        List<PrivilegedOperation> ops = resourceHandlerChain
            .preStart(container);

        if (ops != null) {
          List<PrivilegedOperation> resourceOps = new ArrayList<>();

          // Seed with the legacy handler's cgroup options, then merge in
          // whatever the chain produced.
          resourceOps.add(new PrivilegedOperation(
              PrivilegedOperation.OperationType.ADD_PID_TO_CGROUP,
              resourcesOptions));

          for (PrivilegedOperation op : ops) {
            switch (op.getOperationType()) {
            case ADD_PID_TO_CGROUP:
              resourceOps.add(op);
              break;
            case TC_MODIFY_STATE:
              tcCommandFile = op.getArguments().get(0);
              break;
            default:
              LOG.warn("PrivilegedOperation type unsupported in launch: "
                  + op.getOperationType());
            }
          }

          if (resourceOps.size() > 1) {
            //squash resource operations into a single cgroup operation
            try {
              PrivilegedOperation operation = PrivilegedOperationExecutor
                  .squashCGroupOperations(resourceOps);
              resourcesOptions = operation.getArguments().get(0);
            } catch (PrivilegedOperationException e) {
              LOG.error("Failed to squash cgroup operations!", e);
              throw new ResourceHandlerException(
                  "Failed to squash cgroup operations!");
            }
          }
        }
      }
    } catch (ResourceHandlerException e) {
      LOG.error("ResourceHandlerChain.preStart() failed!", e);
      throw new IOException("ResourceHandlerChain.preStart() failed!", e);
    }

    try {
      Path pidFilePath = getPidFilePath(containerId);

      if (pidFilePath != null) {
        ContainerRuntimeContext runtimeContext = buildContainerRuntimeContext(
            ctx, pidFilePath, resourcesOptions, tcCommandFile);

        linuxContainerRuntime.launchContainer(runtimeContext);
      } else {
        // A null pid file path means the container was deactivated before
        // launch; report it as terminated.
        LOG.info(
            "Container was marked as inactive. Returning terminated error");
        return ContainerExecutor.ExitCode.TERMINATED.getExitCode();
      }
    } catch (ContainerExecutionException e) {
      int exitCode = e.getExitCode();
      LOG.warn("Exit code from container " + containerId + " is : " + exitCode);
      // 143 (SIGTERM) and 137 (SIGKILL) exit codes means the container was
      // terminated/killed forcefully. In all other cases, log the
      // output
      if (exitCode != ContainerExecutor.ExitCode.FORCE_KILLED.getExitCode()
          && exitCode != ContainerExecutor.ExitCode.TERMINATED.getExitCode()) {
        LOG.warn("Exception from container-launch with container ID: "
            + containerId + " and exit code: " + exitCode, e);

        StringBuilder builder = new StringBuilder();
        builder.append("Exception from container-launch.\n");
        builder.append("Container id: " + containerId + "\n");
        builder.append("Exit code: " + exitCode + "\n");
        if (!Optional.fromNullable(e.getErrorOutput()).or("").isEmpty()) {
          builder.append("Exception message: " + e.getErrorOutput() + "\n");
        }
        builder.append("Stack trace: "
            + StringUtils.stringifyException(e) + "\n");
        String output = e.getOutput();
        if (output != null && !e.getOutput().isEmpty()) {
          builder.append("Shell output: " + output + "\n");
        }
        String diagnostics = builder.toString();
        logOutput(diagnostics);
        container.handle(new ContainerDiagnosticsUpdateEvent(containerId,
            diagnostics));
        // These native exit codes indicate a broken executor setup that no
        // retry can fix; escalate as a ConfigurationException.
        if (exitCode ==
                ExitCode.INVALID_CONTAINER_EXEC_PERMISSIONS.getExitCode() ||
            exitCode ==
                ExitCode.INVALID_CONFIG_FILE.getExitCode() ||
            exitCode ==
                ExitCode.COULD_NOT_CREATE_SCRIPT_COPY.getExitCode() ||
            exitCode ==
                ExitCode.COULD_NOT_CREATE_CREDENTIALS_FILE.getExitCode() ||
            exitCode ==
                ExitCode.COULD_NOT_CREATE_WORK_DIRECTORIES.getExitCode() ||
            exitCode ==
                ExitCode.COULD_NOT_CREATE_APP_LOG_DIRECTORIES.getExitCode() ||
            exitCode ==
                ExitCode.COULD_NOT_CREATE_TMP_DIRECTORIES.getExitCode()) {
          throw new ConfigurationException(
              "Linux Container Executor reached unrecoverable exception", e);
        }
      } else {
        container.handle(new ContainerDiagnosticsUpdateEvent(containerId,
            "Container killed on request. Exit code is " + exitCode));
      }
      return exitCode;
    } finally {
      resourcesHandler.postExecute(containerId);

      try {
        if (resourceHandlerChain != null) {
          resourceHandlerChain.postComplete(containerId);
        }
      } catch (ResourceHandlerException e) {
        LOG.warn("ResourceHandlerChain.postComplete failed for " +
            "containerId: " + containerId + ". Exception: " + e);
      }
    }

    // Clean launch: the runtime returned without raising.
    return 0;
  }
  /**
   * Assemble the {@link ContainerRuntimeContext} handed to the runtime for a
   * launch: run-as user, directories, pid file, cgroup resource options and
   * (optionally) the traffic-control command file.
   *
   * @param ctx the container start context
   * @param pidFilePath where the runtime should write the container pid
   * @param resourcesOptions squashed cgroup options string
   * @param tcCommandFile traffic-control command file, or null if none
   * @return the populated runtime context
   */
  private ContainerRuntimeContext buildContainerRuntimeContext(
      ContainerStartContext ctx, Path pidFilePath,
      String resourcesOptions, String tcCommandFile) {

    List<String> prefixCommands = new ArrayList<>();
    addSchedPriorityCommand(prefixCommands);

    Container container = ctx.getContainer();

    ContainerRuntimeContext.Builder builder = new ContainerRuntimeContext
        .Builder(container);
    // Only attach the prefix (e.g. "nice -n N") when one was configured.
    if (prefixCommands.size() > 0) {
      builder.setExecutionAttribute(CONTAINER_LAUNCH_PREFIX_COMMANDS,
          prefixCommands);
    }

    builder.setExecutionAttribute(LOCALIZED_RESOURCES,
        ctx.getLocalizedResources())
        .setExecutionAttribute(RUN_AS_USER, getRunAsUser(ctx.getUser()))
        .setExecutionAttribute(USER, ctx.getUser())
        .setExecutionAttribute(APPID, ctx.getAppId())
        .setExecutionAttribute(CONTAINER_ID_STR,
            container.getContainerId().toString())
        .setExecutionAttribute(CONTAINER_WORK_DIR, ctx.getContainerWorkDir())
        .setExecutionAttribute(NM_PRIVATE_CONTAINER_SCRIPT_PATH,
            ctx.getNmPrivateContainerScriptPath())
        .setExecutionAttribute(NM_PRIVATE_TOKENS_PATH,
            ctx.getNmPrivateTokensPath())
        .setExecutionAttribute(PID_FILE_PATH, pidFilePath)
        .setExecutionAttribute(LOCAL_DIRS, ctx.getLocalDirs())
        .setExecutionAttribute(LOG_DIRS, ctx.getLogDirs())
        .setExecutionAttribute(FILECACHE_DIRS, ctx.getFilecacheDirs())
        .setExecutionAttribute(USER_LOCAL_DIRS, ctx.getUserLocalDirs())
        .setExecutionAttribute(CONTAINER_LOCAL_DIRS, ctx.getContainerLocalDirs())
        .setExecutionAttribute(CONTAINER_LOG_DIRS, ctx.getContainerLogDirs())
        .setExecutionAttribute(RESOURCES_OPTIONS, resourcesOptions);

    if (tcCommandFile != null) {
      builder.setExecutionAttribute(TC_COMMAND_FILE, tcCommandFile);
    }

    return builder.build();
  }
  /**
   * Delegate IP/host resolution for a container to the runtime.
   *
   * @param container the container to inspect
   * @return the runtime-reported IP(s) and host
   */
  @Override
  public String[] getIpAndHost(Container container) {
    return linuxContainerRuntime.getIpAndHost(container);
  }

  /**
   * Reacquire a container after a NodeManager restart, letting the resource
   * handler chain re-register its state first. Resource cleanup runs in the
   * finally block even when reacquisition fails.
   *
   * @param ctx the reacquisition context
   * @return the container's exit code once it completes
   * @throws IOException on error reading the container state
   * @throws InterruptedException if interrupted while waiting
   */
  @Override
  public int reacquireContainer(ContainerReacquisitionContext ctx)
      throws IOException, InterruptedException {
    ContainerId containerId = ctx.getContainerId();

    try {
      //Resource handler chain needs to reacquire container state
      //as well
      if (resourceHandlerChain != null) {
        try {
          resourceHandlerChain.reacquireContainer(containerId);
        } catch (ResourceHandlerException e) {
          // Best-effort: failure to reacquire resource state is logged, not
          // fatal to the container itself.
          LOG.warn("ResourceHandlerChain.reacquireContainer failed for " +
              "containerId: " + containerId + " Exception: " + e);
        }
      }

      return super.reacquireContainer(ctx);
    } finally {
      resourcesHandler.postExecute(containerId);

      if (resourceHandlerChain != null) {
        try {
          resourceHandlerChain.postComplete(containerId);
        } catch (ResourceHandlerException e) {
          LOG.warn("ResourceHandlerChain.postComplete failed for " +
              "containerId: " + containerId + " Exception: " + e);
        }
      }
    }
  }
  /**
   * Deliver a signal to a container process via the runtime.
   *
   * @param ctx the signal context (container, user, pid, signal)
   * @return false if the pid no longer exists; true if the signal was sent
   * @throws IOException if signalling fails for any other reason
   */
  @Override
  public boolean signalContainer(ContainerSignalContext ctx)
      throws IOException {
    Container container = ctx.getContainer();
    String user = ctx.getUser();
    String pid = ctx.getPid();
    Signal signal = ctx.getSignal();

    verifyUsernamePattern(user);

    String runAsUser = getRunAsUser(user);
    ContainerRuntimeContext runtimeContext = new ContainerRuntimeContext
        .Builder(container)
        .setExecutionAttribute(RUN_AS_USER, runAsUser)
        .setExecutionAttribute(USER, user)
        .setExecutionAttribute(PID, pid)
        .setExecutionAttribute(SIGNAL, signal)
        .build();

    try {
      linuxContainerRuntime.signalContainer(runtimeContext);
    } catch (ContainerExecutionException e) {
      int retCode = e.getExitCode();
      // A missing pid is an expected race (process already exited), not an
      // error: report "not alive" instead of throwing.
      if (retCode == PrivilegedOperation.ResultCode.INVALID_CONTAINER_PID
          .getValue()) {
        return false;
      }
      LOG.warn("Error in signalling container " + pid + " with " + signal
          + "; exit = " + retCode, e);
      logOutput(e.getOutput());
      throw new IOException("Problem signalling container " + pid + " with "
          + signal + "; output: " + e.getOutput() + " and exitCode: "
          + retCode, e);
    }
    return true;
  }
@Override
public void deleteAsUser(DeletionAsUserContext ctx) {
String user = ctx.getUser();
Path dir = ctx.getSubDir();
List<Path> baseDirs = ctx.getBasedirs();
verifyUsernamePattern(user);
String runAsUser = getRunAsUser(user);
String dirString = dir == null ? "" : dir.toUri().getPath();
PrivilegedOperation deleteAsUserOp = new PrivilegedOperation(
PrivilegedOperation.OperationType.DELETE_AS_USER, (String) null);
deleteAsUserOp.appendArgs(
runAsUser,
user,
Integer.toString(PrivilegedOperation.
RunAsUserCommand.DELETE_AS_USER.getValue()),
dirString);
List<String> pathsToDelete = new ArrayList<String>();
if (baseDirs == null || baseDirs.size() == 0) {
LOG.info("Deleting absolute path : " + dir);
pathsToDelete.add(dirString);
} else {
for (Path baseDir : baseDirs) {
Path del = dir == null ? baseDir : new Path(baseDir, dir);
LOG.info("Deleting path : " + del);
pathsToDelete.add(del.toString());
deleteAsUserOp.appendArgs(baseDir.toUri().getPath());
}
}
try {
Configuration conf = super.getConf();
PrivilegedOperationExecutor privilegedOperationExecutor =
getPrivilegedOperationExecutor();
privilegedOperationExecutor.executePrivilegedOperation(deleteAsUserOp,
false);
} catch (PrivilegedOperationException e) {
int exitCode = e.getExitCode();
LOG.error("DeleteAsUser for " + StringUtils.join(" ", pathsToDelete)
+ " returned with exit code: " + exitCode, e);
}
}
@Override
protected File[] readDirAsUser(String user, Path dir) {
List<File> files = new ArrayList<>();
PrivilegedOperation listAsUserOp = new PrivilegedOperation(
PrivilegedOperation.OperationType.LIST_AS_USER, (String)null);
String runAsUser = getRunAsUser(user);
String dirString = "";
if (dir != null) {
dirString = dir.toUri().getPath();
}
listAsUserOp.appendArgs(runAsUser, user,
Integer.toString(
PrivilegedOperation.RunAsUserCommand.LIST_AS_USER.getValue()),
dirString);
try {
PrivilegedOperationExecutor privOpExecutor =
getPrivilegedOperationExecutor();
String results =
privOpExecutor.executePrivilegedOperation(listAsUserOp, true);
for (String file: results.split("\n")) {
// The container-executor always dumps its log output to stdout, which
// includes 3 lines that start with "main : "
if (!file.startsWith("main :")) {
files.add(new File(new File(dirString), file));
}
}
} catch (PrivilegedOperationException e) {
LOG.error("ListAsUser for " + dir + " returned with exit code: "
+ e.getExitCode(), e);
}
return files.toArray(new File[files.size()]);
}
  /**
   * Intentionally a no-op for this executor: nothing in this class creates
   * symlinks. (NOTE(review): presumably links are handled elsewhere, e.g. by
   * the launch script — confirm before relying on this method.)
   */
  @Override
  public void symLink(String target, String symlink) {
  }
@Override
public boolean isContainerAlive(ContainerLivenessContext ctx)
throws IOException {
String user = ctx.getUser();
String pid = ctx.getPid();
Container container = ctx.getContainer();
// Send a test signal to the process as the user to see if it's alive
return signalContainer(new ContainerSignalContext.Builder()
.setContainer(container)
.setUser(user)
.setPid(pid)
.setSignal(Signal.NULL)
.build());
}
/**
* Mount a CGROUPS controller at the requested mount point and create
* a hierarchy for the NodeManager to manage.
*
* @param cgroupKVs a key-value pair of the form
* {@code controller=mount-path}
* @param hierarchy the top directory of the hierarchy for the NodeManager
* @throws IOException if there is a problem mounting the CGROUPS
*/
public void mountCgroups(List<String> cgroupKVs, String hierarchy)
throws IOException {
try {
PrivilegedOperation mountCGroupsOp = new PrivilegedOperation(
PrivilegedOperation.OperationType.MOUNT_CGROUPS, hierarchy);
Configuration conf = super.getConf();
mountCGroupsOp.appendArgs(cgroupKVs);
PrivilegedOperationExecutor privilegedOperationExecutor =
getPrivilegedOperationExecutor();
privilegedOperationExecutor.executePrivilegedOperation(mountCGroupsOp,
false);
} catch (PrivilegedOperationException e) {
int exitCode = e.getExitCode();
LOG.warn("Exception in LinuxContainerExecutor mountCgroups ", e);
throw new IOException("Problem mounting cgroups " + cgroupKVs +
"; exit code = " + exitCode + " and output: " + e.getOutput(),
e);
}
}
}
| |
package signature;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* A directed acyclic graph that is the core data structure of a signature. It
* is the DAG that is canonized by sorting its layers of nodes.
*
* @author maclean
*
*/
public class DAG implements Iterable<List<DAG.Node>> {
/**
* The direction up and down the DAG. UP is from leaves to root.
*
*/
public enum Direction { UP, DOWN };
/**
* A node of the directed acyclic graph
*
*/
public class Node implements VisitableDAG {
/**
* The index of the vertex in the graph. Note that for signatures that
* cover only part of the graph (with a height less than the diameter)
* this index may have to be mapped to the original index
*/
public final int vertexIndex;
/**
* The parent nodes in the DAG
*/
public final List<Node> parents;
/**
* The child nodes in the DAG
*/
public final List<Node> children;
/**
* What layer this node is in
*/
public final int layer;
/**
* Labels for the edges between this node and the parent nodes
*/
public final Map<Integer, Integer> edgeColors;
/**
* The final computed invariant, used for sorting children when printing
*/
public int invariant;
/**
* Make a Node that refers to a vertex, in a layer, and with a label.
*
* @param vertexIndex the graph vertex index
* @param layer the layer of this Node
*/
public Node(int vertexIndex, int layer) {
this.vertexIndex = vertexIndex;
this.layer = layer;
this.parents = new ArrayList<Node>();
this.children = new ArrayList<Node>();
this.edgeColors = new HashMap<Integer, Integer>();
}
public void addParent(Node node) {
this.parents.add(node);
}
public void addChild(Node node) {
this.children.add(node);
}
public void addEdgeColor(int partnerIndex, int edgeColor) {
this.edgeColors.put(partnerIndex, edgeColor);
}
public void accept(DAGVisitor visitor) {
visitor.visit(this);
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
public String toString() {
StringBuffer parentString = new StringBuffer();
parentString.append('[');
for (Node parent : this.parents) {
parentString.append(parent.vertexIndex).append(',');
}
if (parentString.length() > 1) {
parentString.setCharAt(parentString.length() - 1, ']');
} else {
parentString.append(']');
}
StringBuffer childString = new StringBuffer();
childString.append('[');
for (Node child : this.children) {
childString.append(child.vertexIndex).append(',');
}
if (childString.length() > 1) {
childString.setCharAt(childString.length() - 1, ']');
} else {
childString.append(']');
}
return vertexIndex + " "
+ " (" + parentString + ", " + childString + ")";
}
}
/**
* An arc of the directed acyclic graph.
*
*/
public class Arc {
public final int a;
public final int b;
public Arc(int a, int b) {
this.a = a;
this.b = b;
}
public boolean equals(Object other) {
if (other instanceof Arc) {
Arc o = (Arc) other;
return (this.a == o.a && this.b == o.b)
|| (this.a == o.b && this.b == o.a);
} else {
return false;
}
}
}
/**
* Comparator for nodes based on String labels.
*
*/
public class NodeStringLabelComparator implements Comparator<Node> {
/**
* The labels for vertices.
*/
public String[] vertexLabels;
public NodeStringLabelComparator(String[] vertexLabels) {
this.vertexLabels = vertexLabels;
}
public int compare(Node o1, Node o2) {
String o1s = this.vertexLabels[o1.vertexIndex];
String o2s = this.vertexLabels[o2.vertexIndex];
int c = o1s.compareTo(o2s);
if (c == 0) {
if (o1.invariant < o2.invariant) {
return -1;
} else if (o1.invariant > o2.invariant) {
return 1;
} else {
return 0;
}
} else {
return c;
}
}
}
/**
* Comparator for nodes based on Integer labels.
*
*/
public class NodeIntegerLabelComparator implements Comparator<Node> {
/**
* The labels for vertices.
*/
public int[] vertexLabels;
public NodeIntegerLabelComparator(int[] vertexLabels) {
this.vertexLabels = vertexLabels;
}
public int compare(Node o1, Node o2) {
int o1n = this.vertexLabels[o1.vertexIndex];
int o2n = this.vertexLabels[o2.vertexIndex];
int c = (o1n == o2n)? 0 :(o1n < o2n? -1 : 1);
if (c == 0) {
if (o1.invariant < o2.invariant) {
return -1;
} else if (o1.invariant > o2.invariant) {
return 1;
} else {
return 0;
}
} else {
return c;
}
}
}
    /**
     * Used to sort nodes, it is public so that the AbstractVertexSignature
     * can use it
     */
    public Comparator<Node> nodeComparator;

    /**
     * The layers of the DAG
     */
    private List<List<Node>> layers;

    /**
     * The counts of parents for vertices
     */
    private int[] parentCounts;

    /**
     * The counts of children for vertices
     */
    private int[] childCounts;

    // Vertex/node invariants used during canonization; created by the
    // initializeWith*Labels methods and updated via setInvariants().
    private Invariants invariants;

    /**
     * Convenience reference to the nodes of the DAG
     */
    private List<DAG.Node> nodes;

    /**
     * A convenience record of the number of vertices
     */
    private int vertexCount;
    /**
     * Create a DAG from a graph, starting at the root vertex.
     *
     * @param rootVertexIndex the vertex to start from
     * @param graphVertexCount the number of vertices in the original graph
     */
    public DAG(int rootVertexIndex, int graphVertexCount) {
        this.layers = new ArrayList<List<Node>>();
        this.nodes = new ArrayList<Node>();
        // Layer 0 always holds exactly the root node (see getRoot()).
        List<Node> rootLayer = new ArrayList<Node>();
        Node rootNode = new Node(rootVertexIndex, 0);
        rootLayer.add(rootNode);
        this.layers.add(rootLayer);
        this.nodes.add(rootNode);
        this.vertexCount = 1;
        // Degree counters are indexed by original-graph vertex index.
        this.parentCounts = new int[graphVertexCount];
        this.childCounts = new int[graphVertexCount];
    }
    /** Iterate over the layers, from the root layer downwards. */
    public Iterator<List<Node>> iterator() {
        return layers.iterator();
    }

    /** @return the first (root) layer of the DAG */
    public List<DAG.Node> getRootLayer() {
        return this.layers.get(0);
    }

    /** @return the single node in the root layer */
    public DAG.Node getRoot() {
        return this.layers.get(0).get(0);
    }

    /** @return a copy of the current invariants */
    public Invariants copyInvariants() {
        return (Invariants) this.invariants.clone();
    }
/**
* Initialize the invariants, assuming that the vertex count for the
* signature is the same as the length of the label array.
*/
public void initializeWithStringLabels(String[] vertexLabels) {
vertexCount = vertexLabels.length;
this.invariants = new Invariants(vertexCount, nodes.size());
List<InvariantIntStringPair> pairs =
new ArrayList<InvariantIntStringPair>();
for (int i = 0; i < vertexCount; i++) {
String l = vertexLabels[i];
int p = parentCounts[i];
pairs.add(new InvariantIntStringPair(l, p, i));
}
Collections.sort(pairs);
if (pairs.size() == 0) return;
nodeComparator = new NodeStringLabelComparator(vertexLabels);
int order = 1;
InvariantIntStringPair first = pairs.get(0);
invariants.setVertexInvariant(first.getOriginalIndex(), order);
for (int i = 1; i < pairs.size(); i++) {
InvariantIntStringPair a = pairs.get(i - 1);
InvariantIntStringPair b = pairs.get(i);
if (!a.equals(b)) {
order++;
}
invariants.setVertexInvariant(b.getOriginalIndex(), order);
}
}
public void initializeWithIntLabels(int[] vertexLabels) {
vertexCount = vertexLabels.length;
this.invariants = new Invariants(vertexCount, nodes.size());
List<InvariantIntIntPair> pairs = new ArrayList<InvariantIntIntPair>();
for (int i = 0; i < vertexCount; i++) {
int l = vertexLabels[i];
int p = parentCounts[i];
pairs.add(new InvariantIntIntPair(l, p, i));
}
Collections.sort(pairs);
if (pairs.size() == 0) return;
nodeComparator = new NodeIntegerLabelComparator(vertexLabels);
int order = 1;
InvariantIntIntPair first = pairs.get(0);
invariants.setVertexInvariant(first.getOriginalIndex(), order);
for (int i = 1; i < pairs.size(); i++) {
InvariantIntIntPair a = pairs.get(i - 1);
InvariantIntIntPair b = pairs.get(i);
if (!a.equals(b)) {
order++;
}
invariants.setVertexInvariant(b.getOriginalIndex(), order);
}
}
    /** Record a color for the given vertex in the invariants. */
    public void setColor(int vertexIndex, int color) {
        this.invariants.setColor(vertexIndex, color);
    }

    /**
     * Count how many DAG nodes refer to the given graph vertex (a vertex can
     * appear in several layers).
     *
     * @param vertexIndex the original-graph vertex index
     * @return the number of DAG nodes for that vertex
     */
    public int occurences(int vertexIndex) {
        int count = 0;
        for (Node node : nodes) {
            if (node.vertexIndex == vertexIndex) {
                count++;
            }
        }
        return count;
    }
public void setInvariants(Invariants invariants) {
// this.invariants = invariants;
this.invariants.colors = invariants.colors.clone();
this.invariants.nodeInvariants = invariants.nodeInvariants.clone();
this.invariants.vertexInvariants = invariants.vertexInvariants.clone();
}
    /**
     * Create and return a DAG.Node, while setting some internal references to
     * the same data. Does not add the node to a layer.
     *
     * @param vertexIndex the index of the vertex in the original graph
     * @param layer the index of the layer
     * @return the new node
     */
    public DAG.Node makeNode(int vertexIndex, int layer) {
        DAG.Node node = new DAG.Node(vertexIndex, layer);
        // Track every created node for whole-DAG operations (occurences etc.).
        this.nodes.add(node);
        return node;
    }
/**
* Create and return a DAG.Node, while setting some internal references to
* the same data. Note: also adds the node to a layer, creating it if
* necessary.
*
* @param vertexIndex the index of the vertex in the original graph
* @param layer the index of the layer
* @return the new node
*/
public DAG.Node makeNodeInLayer(int vertexIndex, int layer) {
DAG.Node node = this.makeNode(vertexIndex, layer);
if (layers.size() <= layer) {
this.layers.add(new ArrayList<DAG.Node>());
}
this.layers.get(layer).add(node);
return node;
}
public void addRelation(DAG.Node childNode, DAG.Node parentNode) {
childNode.parents.add(parentNode);
parentCounts[childNode.vertexIndex]++;
childCounts[parentNode.vertexIndex]++;
parentNode.children.add(childNode);
}
    /**
     * Count, per vertex, the parent references that survive into the final
     * signature string (duplicate DAG arcs are traversed only once).
     *
     * @return an array of counts indexed by vertex
     */
    public int[] getParentsInFinalString() {
        int[] counts = new int[vertexCount];
        getParentsInFinalString(
                counts, getRoot(), null, new ArrayList<DAG.Arc>());
        return counts;
    }
/**
 * Recursive helper: walks the DAG depth-first from {@code node}, following
 * each distinct arc only once, and increments the count for every non-root
 * visit (a visit via a parent means one more parent in the final string).
 */
private void getParentsInFinalString(int[] counts, DAG.Node node,
        DAG.Node parent, List<DAG.Arc> arcs) {
    if (parent != null) {
        counts[node.vertexIndex]++;
    }
    // Children are visited in canonical order so counts are deterministic.
    Collections.sort(node.children, nodeComparator);
    for (DAG.Node child : node.children) {
        DAG.Arc arc = new Arc(node.vertexIndex, child.vertexIndex);
        if (!arcs.contains(arc)) {
            arcs.add(arc);
            getParentsInFinalString(counts, child, node, arcs);
        }
    }
}
/**
 * Count the occurrences of each vertex index in the final signature string.
 * Since duplicate DAG edges are removed, this count will not be the same as
 * the simple count of occurrences in the DAG before printing.
 *
 * @return per-vertex occurrence counts, indexed by original vertex index
 */
public int[] getOccurrences() {
    int[] counts = new int[vertexCount];
    getOccurences(counts, getRoot(), null, new ArrayList<DAG.Arc>());
    return counts;
}
/**
 * Recursive helper: depth-first traversal that bumps the occurrence count of
 * every node reached, following each distinct arc only once.
 * (The {@code parent} parameter is unused here but kept for signature
 * symmetry with getParentsInFinalString.)
 */
private void getOccurences(int[] occurences, DAG.Node node,
        DAG.Node parent, List<DAG.Arc> arcs) {
    occurences[node.vertexIndex]++;
    // Canonical child order keeps the traversal deterministic.
    Collections.sort(node.children, nodeComparator);
    for (DAG.Node child : node.children) {
        DAG.Arc arc = new Arc(node.vertexIndex, child.vertexIndex);
        if (!arcs.contains(arc)) {
            arcs.add(arc);
            getOccurences(occurences, child, node, arcs);
        }
    }
}
/**
 * Collects (vertex invariant, vertex index) pairs for every uncolored vertex
 * (color == -1) that has at least two parents in the final string, returned
 * in sorted order.
 *
 * @param parents per-vertex parent counts in the final string
 */
public List<InvariantInt> getInvariantPairs(int[] parents) {
    List<InvariantInt> result = new ArrayList<InvariantInt>();
    for (int v = 0; v < this.vertexCount; v++) {
        boolean uncolored = invariants.getColor(v) == -1;
        if (uncolored && parents[v] >= 2) {
            result.add(new InvariantInt(invariants.getVertexInvariant(v), v));
        }
    }
    Collections.sort(result);
    return result;
}
/** Returns the color currently assigned to the given vertex. */
public int colorFor(int vertexIndex) {
    return invariants.getColor(vertexIndex);
}
/** Dispatches the visitor to the root node, which drives the DAG walk. */
public void accept(DAGVisitor visitor) {
    getRoot().accept(visitor);
}
/** Appends an already-built layer of nodes to this DAG. */
public void addLayer(List<Node> layer) {
    layers.add(layer);
}
/**
 * Groups vertices that share a vertex invariant (and have at least two
 * parents in the final string) into orbits, then returns the largest orbit.
 * Invariant keys are scanned in ascending order and replacement requires a
 * strictly larger orbit, so ties go to the smallest invariant value.
 *
 * @param parents per-vertex parent counts in the final string
 * @return the largest orbit, or an empty list when no orbit exists
 */
public List<Integer> createOrbit(int[] parents) {
    // Bucket qualifying vertices by their vertex invariant.
    Map<Integer, List<Integer>> orbits =
            new HashMap<Integer, List<Integer>>();
    for (int v = 0; v < vertexCount; v++) {
        if (parents[v] < 2) {
            continue;
        }
        int invariant = invariants.getVertexInvariant(v);
        List<Integer> bucket = orbits.get(invariant);
        if (bucket == null) {
            bucket = new ArrayList<Integer>();
            orbits.put(invariant, bucket);
        }
        bucket.add(v);
    }
    if (orbits.isEmpty()) {
        return new ArrayList<Integer>();
    }
    // Scan invariants in ascending order; keep the first strictly-largest orbit.
    List<Integer> sortedInvariants = new ArrayList<Integer>(orbits.keySet());
    Collections.sort(sortedInvariants);
    List<Integer> best = null;
    for (int invariant : sortedInvariants) {
        List<Integer> candidate = orbits.get(invariant);
        if (best == null || candidate.size() > best.size()) {
            best = candidate;
        }
    }
    return best;
}
/**
 * Derives a fresh vertex invariant for every vertex from the node invariants
 * of that vertex's appearances across all layers. Vertices whose per-layer
 * invariant vectors compare equal receive the same rank; ranks start at 1
 * and increase by one for each distinct vector in sorted order.
 */
public void computeVertexInvariants() {
    // For each original vertex, gather its node invariant in every layer.
    Map<Integer, int[]> layerInvariants = new HashMap<Integer, int[]>();
    for (int i = 0; i < this.nodes.size(); i++) {
        DAG.Node node = this.nodes.get(i);
        int j = node.vertexIndex;
        int[] layerInvariantsJ;
        if (layerInvariants.containsKey(j)) {
            layerInvariantsJ = layerInvariants.get(j);
        } else {
            layerInvariantsJ = new int[this.layers.size()];
            layerInvariants.put(j, layerInvariantsJ);
        }
        // Node invariants are looked up by position in the global node list.
        layerInvariantsJ[node.layer] = invariants.getNodeInvariant(i);
    }
    // Wrap each vector with its vertex index so sorting keeps the mapping.
    List<InvariantArray> invariantLists = new ArrayList<InvariantArray>();
    for (int i : layerInvariants.keySet()) {
        InvariantArray invArr = new InvariantArray(layerInvariants.get(i), i);
        invariantLists.add(invArr);
    }
    Collections.sort(invariantLists);
    // Rank the sorted vectors: equal adjacent vectors share a rank, each
    // distinct vector bumps the rank by one. The first entry gets rank 1.
    int order = 1;
    int first = invariantLists.get(0).originalIndex;
    invariants.setVertexInvariant(first, 1);
    for (int i = 1; i < invariantLists.size(); i++) {
        InvariantArray a = invariantLists.get(i - 1);
        InvariantArray b = invariantLists.get(i);
        if (!a.equals(b)) {
            order++;
        }
        invariants.setVertexInvariant(b.originalIndex, order);
    }
}
/**
 * Refines node and vertex invariants repeatedly until the vertex invariants
 * reach a fixed point, then caches each node's invariant on the node itself
 * for cheap sorting later.
 */
public void updateVertexInvariants() {
    // Loop until a full refinement pass leaves the vertex invariants
    // unchanged. (The original code allocated an unused int[vertexCount]
    // that was immediately overwritten, and named this flag 'invariantSame'
    // even though it is true while the invariants still CHANGE.)
    boolean changed = true;
    while (changed) {
        int[] previous = invariants.getVertexInvariantCopy();
        updateNodeInvariants(Direction.UP); // From the leaves to the root
        // This is needed here otherwise there will be cases where a node
        // invariant is reset when the tree is traversed down.
        // This is not mentioned in Faulon's paper.
        computeVertexInvariants();
        updateNodeInvariants(Direction.DOWN); // From the root to the leaves
        computeVertexInvariants();
        changed = checkInvariantChange(
                previous, invariants.getVertexInvariants());
    }
    // Finally, copy the node invariants into the nodes, for easy sorting.
    for (int i = 0; i < this.nodes.size(); i++) {
        this.nodes.get(i).invariant = invariants.getNodeInvariant(i);
    }
}
/**
 * Returns true when the two invariant arrays differ anywhere in their first
 * {@code vertexCount} entries, i.e. the refinement has not yet converged.
 */
public boolean checkInvariantChange(int[] a, int[] b) {
    int i = 0;
    while (i < vertexCount) {
        if (a[i] != b[i]) {
            return true;
        }
        i++;
    }
    return false;
}
/**
 * Refreshes node invariants layer by layer, sweeping either from the deepest
 * layer toward the root (UP) or from the root toward the leaves (DOWN).
 */
public void updateNodeInvariants(DAG.Direction direction) {
    if (direction == Direction.UP) {
        // Sweep from the last layer back to layer 0. The root layer always
        // holds a single node, so its relative order cannot change.
        for (int i = this.layers.size() - 1; i >= 0; i--) {
            updateLayer(this.layers.get(i), direction);
        }
    } else {
        for (int i = 0; i < this.layers.size(); i++) {
            updateLayer(this.layers.get(i), direction);
        }
    }
}
/**
 * Recomputes the node invariants for one layer. Each node's key is built
 * from its color, its vertex invariant, and the sorted invariants/edge
 * colors of its relatives (children for an UP sweep, parents for DOWN);
 * the keys are sorted and dense ranks (starting at 1) written back.
 */
public void updateLayer(List<DAG.Node> layer, DAG.Direction direction) {
    List<InvariantList> nodeInvariantList =
            new ArrayList<InvariantList>();
    for (int i = 0; i < layer.size(); i++) {
        DAG.Node layerNode = layer.get(i);
        int x = layerNode.vertexIndex;
        // The invariant key carries the node's position in the global node
        // list so ranks can be written back after sorting.
        InvariantList nodeInvariant =
                new InvariantList(nodes.indexOf(layerNode));
        nodeInvariant.add(this.invariants.getColor(x));
        nodeInvariant.add(this.invariants.getVertexInvariant(x));
        List<Integer> relativeInvariants = new ArrayList<Integer>();
        // If we go up we should check the children.
        List<DAG.Node> relatives = (direction == Direction.UP) ?
                layerNode.children : layerNode.parents;
        for (Node relative : relatives) {
            int j = this.nodes.indexOf(relative);
            int inv = this.invariants.getNodeInvariant(j);
            // The arc's edge color is stored on whichever endpoint is
            // "above" in the sweep direction.
            int edgeColor;
            if (direction == Direction.UP) {
                edgeColor = relative.edgeColors.get(layerNode.vertexIndex);
            } else {
                edgeColor = layerNode.edgeColors.get(relative.vertexIndex);
            }
            // Edge colors are offset past vertexCount so they cannot
            // collide with node-invariant values in the key.
            relativeInvariants.add(inv);
            relativeInvariants.add(vertexCount + 1 + edgeColor);
        }
        // Sorting makes the key independent of relative iteration order.
        Collections.sort(relativeInvariants);
        nodeInvariant.addAll(relativeInvariants);
        nodeInvariantList.add(nodeInvariant);
    }
    Collections.sort(nodeInvariantList);
    // Assign dense ranks: equal adjacent keys share a rank, each distinct
    // key bumps the rank by one.
    int order = 1;
    int first = nodeInvariantList.get(0).originalIndex;
    this.invariants.setNodeInvariant(first, order);
    for (int i = 1; i < nodeInvariantList.size(); i++) {
        InvariantList a = nodeInvariantList.get(i - 1);
        InvariantList b = nodeInvariantList.get(i);
        if (!a.equals(b)) {
            order++;
        }
        this.invariants.setNodeInvariant(b.originalIndex, order);
    }
}
/** Renders the DAG as one line per layer, in layer order. */
@Override
public String toString() {
    // StringBuilder: same API as the original StringBuffer without its
    // per-call synchronization (this builds a purely local string).
    StringBuilder buffer = new StringBuilder();
    for (List<Node> layer : this) {
        buffer.append(layer);
        buffer.append('\n');
    }
    return buffer.toString();
}
}
| |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.design.widget;
import android.view.animation.Interpolator;
import androidx.annotation.NonNull;
/**
* This class offers a very small subset of {@code ValueAnimator}'s API, but works pre-v11 too.
* <p>
* You shouldn't instantiate this directly. Instead use {@code ViewUtils.createAnimator()}.
*/
class ValueAnimatorCompat {
    interface AnimatorUpdateListener {
        /**
         * <p>Notifies the occurrence of another frame of the animation.</p>
         *
         * @param animator The animation whose frame was just updated.
         */
        void onAnimationUpdate(ValueAnimatorCompat animator);
    }
    /**
     * An animation listener receives notifications from an animation.
     * Notifications indicate animation related events, such as the end or the
     * repetition of the animation.
     */
    interface AnimatorListener {
        /**
         * <p>Notifies the start of the animation.</p>
         *
         * @param animator The started animation.
         */
        void onAnimationStart(ValueAnimatorCompat animator);
        /**
         * <p>Notifies the end of the animation. This callback is not invoked
         * for animations with repeat count set to INFINITE.</p>
         *
         * @param animator The animation which reached its end.
         */
        void onAnimationEnd(ValueAnimatorCompat animator);
        /**
         * <p>Notifies the cancellation of the animation. This callback is not invoked
         * for animations with repeat count set to INFINITE.</p>
         *
         * @param animator The animation which was canceled.
         */
        void onAnimationCancel(ValueAnimatorCompat animator);
    }
    /** No-op AnimatorListener; subclass and override only the callbacks you need. */
    static class AnimatorListenerAdapter implements AnimatorListener {
        @Override
        public void onAnimationStart(ValueAnimatorCompat animator) {
        }
        @Override
        public void onAnimationEnd(ValueAnimatorCompat animator) {
        }
        @Override
        public void onAnimationCancel(ValueAnimatorCompat animator) {
        }
    }
    /** Factory for animator instances, supplied by the platform-selection code. */
    interface Creator {
        @NonNull
        ValueAnimatorCompat createAnimator();
    }
    /**
     * Platform-specific backing implementation. The proxy interfaces mirror
     * the public listener interfaces minus the ValueAnimatorCompat argument,
     * which the outer class re-adds when forwarding callbacks.
     */
    static abstract class Impl {
        interface AnimatorUpdateListenerProxy {
            void onAnimationUpdate();
        }
        interface AnimatorListenerProxy {
            void onAnimationStart();
            void onAnimationEnd();
            void onAnimationCancel();
        }
        abstract void start();
        abstract boolean isRunning();
        abstract void setInterpolator(Interpolator interpolator);
        abstract void addListener(AnimatorListenerProxy listener);
        abstract void addUpdateListener(AnimatorUpdateListenerProxy updateListener);
        abstract void setIntValues(int from, int to);
        abstract int getAnimatedIntValue();
        abstract void setFloatValues(float from, float to);
        abstract float getAnimatedFloatValue();
        abstract void setDuration(long duration);
        abstract void cancel();
        abstract float getAnimatedFraction();
        abstract void end();
        abstract long getDuration();
    }
    // Every public method below delegates directly to this platform impl.
    private final Impl mImpl;
    ValueAnimatorCompat(Impl impl) {
        mImpl = impl;
    }
    public void start() {
        mImpl.start();
    }
    public boolean isRunning() {
        return mImpl.isRunning();
    }
    public void setInterpolator(Interpolator interpolator) {
        mImpl.setInterpolator(interpolator);
    }
    public void addUpdateListener(final AnimatorUpdateListener updateListener) {
        if (updateListener != null) {
            // Wrap the public listener in a proxy that re-attaches 'this'.
            mImpl.addUpdateListener(new Impl.AnimatorUpdateListenerProxy() {
                @Override
                public void onAnimationUpdate() {
                    updateListener.onAnimationUpdate(ValueAnimatorCompat.this);
                }
            });
        } else {
            mImpl.addUpdateListener(null);
        }
    }
    public void addListener(final AnimatorListener listener) {
        if (listener != null) {
            // Wrap the public listener in a proxy that re-attaches 'this'.
            mImpl.addListener(new Impl.AnimatorListenerProxy() {
                @Override
                public void onAnimationStart() {
                    listener.onAnimationStart(ValueAnimatorCompat.this);
                }
                @Override
                public void onAnimationEnd() {
                    listener.onAnimationEnd(ValueAnimatorCompat.this);
                }
                @Override
                public void onAnimationCancel() {
                    listener.onAnimationCancel(ValueAnimatorCompat.this);
                }
            });
        } else {
            mImpl.addListener(null);
        }
    }
    public void setIntValues(int from, int to) {
        mImpl.setIntValues(from, to);
    }
    public int getAnimatedIntValue() {
        return mImpl.getAnimatedIntValue();
    }
    public void setFloatValues(float from, float to) {
        mImpl.setFloatValues(from, to);
    }
    public float getAnimatedFloatValue() {
        return mImpl.getAnimatedFloatValue();
    }
    public void setDuration(long duration) {
        mImpl.setDuration(duration);
    }
    public void cancel() {
        mImpl.cancel();
    }
    public float getAnimatedFraction() {
        return mImpl.getAnimatedFraction();
    }
    public void end() {
        mImpl.end();
    }
    public long getDuration() {
        return mImpl.getDuration();
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.net;
import static org.chromium.base.CollectionUtil.newHashSet;
import android.content.Context;
import android.content.ContextWrapper;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Looper;
import android.test.suitebuilder.annotation.SmallTest;
import org.chromium.base.PathUtils;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.test.util.Feature;
import org.chromium.net.CronetEngine.UrlRequestInfo;
import org.chromium.net.TestUrlRequestCallback.ResponseStep;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.concurrent.Executor;
/**
* Test CronetEngine.
*/
@JNINamespace("cronet")
public class CronetUrlRequestContextTest extends CronetTestBase {
// URLs used for tests.
private static final String TEST_URL = "http://127.0.0.1:8000";
private static final String URL_404 = "http://127.0.0.1:8000/notfound404";
// NOTE(review): the mock.* URLs appear to be intercepted in-process rather
// than resolved over the network — confirm against the test framework.
private static final String MOCK_CRONET_TEST_FAILED_URL =
        "http://mock.failed.request/-2";
private static final String MOCK_CRONET_TEST_SUCCESS_URL =
        "http://mock.http/success.txt";
// Framework under test; (re)initialized by the startCronetTestFramework* helpers.
CronetTestFramework mTestFramework;
/**
 * Worker thread that waits for a ConditionVariable to open, then creates a
 * CronetEngine, issues a request for the given URL, and blocks until the
 * request completes. The callback is exposed so tests can inspect results.
 */
static class RequestThread extends Thread {
    public TestUrlRequestCallback mCallback;
    final CronetTestFramework mTestFramework;
    final String mUrl;
    final ConditionVariable mRunBlocker;

    public RequestThread(
            CronetTestFramework testFramework, String url, ConditionVariable runBlocker) {
        mTestFramework = testFramework;
        mUrl = url;
        mRunBlocker = runBlocker;
    }

    @Override
    public void run() {
        // Do nothing until the test releases us.
        mRunBlocker.block();
        CronetEngine engine = mTestFramework.initCronetEngine();
        mCallback = new TestUrlRequestCallback();
        UrlRequest.Builder builder =
                new UrlRequest.Builder(mUrl, mCallback, mCallback.getExecutor(), engine);
        builder.build().start();
        mCallback.blockForDone();
    }
}
/**
 * Callback that shuts down the request context as soon as the request
 * completes, whether it succeeded or failed. Used to verify shutdown
 * behavior from within request callbacks.
 */
class ShutdownTestUrlRequestCallback extends TestUrlRequestCallback {
    @Override
    public void onSucceeded(UrlRequest request, UrlResponseInfo info) {
        super.onSucceeded(request, info);
        mTestFramework.mCronetEngine.shutdown();
    }
    @Override
    public void onFailed(UrlRequest request, UrlResponseInfo info, UrlRequestException error) {
        super.onFailed(request, info, error);
        mTestFramework.mCronetEngine.shutdown();
    }
}
static class TestExecutor implements Executor {
private final LinkedList<Runnable> mTaskQueue = new LinkedList<Runnable>();
@Override
public void execute(Runnable task) {
mTaskQueue.add(task);
}
public void runAllTasks() {
try {
while (mTaskQueue.size() > 0) {
mTaskQueue.remove().run();
}
} catch (NoSuchElementException e) {
}
}
}
/**
 * Listener that simply counts RTT and throughput observations so tests can
 * assert whether the network quality estimator delivered any.
 */
static class TestNetworkQualityListener
        implements NetworkQualityRttListener, NetworkQualityThroughputListener {
    int mRttObservationCount;
    int mThroughputObservationCount;

    @Override
    public void onRttObservation(int rttMs, long when, int source) {
        ++mRttObservationCount;
    }

    @Override
    public void onThroughputObservation(int throughputKbps, long when, int source) {
        ++mThroughputObservationCount;
    }

    public int rttObservationCount() {
        return mRttObservationCount;
    }

    public int throughputObservationCount() {
        return mThroughputObservationCount;
    }
}
@SmallTest
@Feature({"Cronet"})
public void testConfigUserAgent() throws Exception {
    // Verifies that a User-Agent set on the engine builder is actually sent
    // on requests, by echoing the header back from the native test server.
    String userAgentName = "User-Agent";
    String userAgentValue = "User-Agent-Value";
    CronetEngine.Builder cronetEngineBuilder = new CronetEngine.Builder(getContext());
    if (testingJavaImpl()) {
        cronetEngineBuilder.enableLegacyMode(true);
    }
    cronetEngineBuilder.setUserAgent(userAgentValue);
    cronetEngineBuilder.setLibraryName("cronet_tests");
    mTestFramework = startCronetTestFrameworkWithUrlAndCronetEngineBuilder(
            TEST_URL, cronetEngineBuilder);
    NativeTestServer.shutdownNativeTestServer(); // startNativeTestServer returns false if it's
                                                 // already running
    assertTrue(NativeTestServer.startNativeTestServer(getContext()));
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder =
            new UrlRequest.Builder(NativeTestServer.getEchoHeaderURL(userAgentName), callback,
                    callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    assertEquals(userAgentValue, callback.mResponseAsString);
}
@SmallTest
@Feature({"Cronet"})
@OnlyRunNativeCronet
// TODO(xunjieli): Remove annotation after crbug.com/539519 is fixed.
@SuppressWarnings("deprecation")
public void testDataReductionProxyEnabled() throws Exception {
    // Verifies that enabling the Data Reduction Proxy routes traffic
    // through the configured proxy (the native test server here).
    mTestFramework = startCronetTestFrameworkAndSkipLibraryInit();
    // Ensure native code is loaded before trying to start test server.
    new CronetEngine.Builder(getContext()).setLibraryName("cronet_tests").build().shutdown();
    assertTrue(NativeTestServer.startNativeTestServer(getContext()));
    if (!NativeTestServer.isDataReductionProxySupported()) {
        return;
    }
    String serverHostPort = NativeTestServer.getHostPort();
    // Enable the Data Reduction Proxy and configure it to use the test
    // server as its primary proxy, and to check successfully that this
    // proxy is OK to use.
    CronetEngine.Builder cronetEngineBuilder = new CronetEngine.Builder(getContext());
    cronetEngineBuilder.enableDataReductionProxy("test-key");
    cronetEngineBuilder.setDataReductionProxyOptions(serverHostPort, "unused.net:9999",
            NativeTestServer.getFileURL("/secureproxychecksuccess.txt"));
    cronetEngineBuilder.setLibraryName("cronet_tests");
    mTestFramework.mCronetEngine = cronetEngineBuilder.build();
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    // Construct and start a request that can only be returned by the test
    // server. This request will fail if the configuration logic for the
    // Data Reduction Proxy is not used.
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            "http://DomainThatDoesnt.Resolve/datareductionproxysuccess.txt", callback,
            callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    // Verify that the request is successful and that the Data Reduction
    // Proxy logic configured to use the test server as its proxy.
    assertEquals(200, callback.mResponseInfo.getHttpStatusCode());
    assertEquals(serverHostPort, callback.mResponseInfo.getProxyServer());
    assertEquals("http://DomainThatDoesnt.Resolve/datareductionproxysuccess.txt",
            callback.mResponseInfo.getUrl());
}
@SmallTest
@Feature({"Cronet"})
// TODO(xunjieli): Remove annotation after crbug.com/539519 is fixed.
@SuppressWarnings("deprecation")
public void testRealTimeNetworkQualityObservationsNotEnabled() throws Exception {
    // With the network quality estimator NOT enabled, listener registration
    // must throw and no observations may be delivered.
    mTestFramework = startCronetTestFramework();
    TestNetworkQualityListener networkQualityListener = new TestNetworkQualityListener();
    try {
        mTestFramework.mCronetEngine.addRttListener(networkQualityListener);
        fail("Should throw an exception.");
    } catch (IllegalStateException e) {
        // Expected: estimator not enabled.
    }
    try {
        mTestFramework.mCronetEngine.addThroughputListener(networkQualityListener);
        fail("Should throw an exception.");
    } catch (IllegalStateException e) {
        // Expected: estimator not enabled.
    }
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest urlRequest = mTestFramework.mCronetEngine.createRequest(
            TEST_URL, callback, callback.getExecutor());
    urlRequest.start();
    callback.blockForDone();
    assertEquals(0, networkQualityListener.rttObservationCount());
    assertEquals(0, networkQualityListener.throughputObservationCount());
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
// TODO(xunjieli): Remove annotation after crbug.com/539519 is fixed.
@SuppressWarnings("deprecation")
public void testRealTimeNetworkQualityObservationsListenerRemoved() throws Exception {
    // A listener added and then removed before the request must receive no
    // observations even though the estimator is enabled.
    mTestFramework = startCronetTestFramework();
    TestExecutor testExecutor = new TestExecutor();
    TestNetworkQualityListener networkQualityListener = new TestNetworkQualityListener();
    mTestFramework.mCronetEngine.enableNetworkQualityEstimatorForTesting(
            true, true, testExecutor);
    mTestFramework.mCronetEngine.addRttListener(networkQualityListener);
    mTestFramework.mCronetEngine.addThroughputListener(networkQualityListener);
    mTestFramework.mCronetEngine.removeRttListener(networkQualityListener);
    mTestFramework.mCronetEngine.removeThroughputListener(networkQualityListener);
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest urlRequest = mTestFramework.mCronetEngine.createRequest(
            TEST_URL, callback, callback.getExecutor());
    urlRequest.start();
    callback.blockForDone();
    // Observations are posted to testExecutor; drain it before asserting.
    testExecutor.runAllTasks();
    assertEquals(0, networkQualityListener.rttObservationCount());
    assertEquals(0, networkQualityListener.throughputObservationCount());
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
// TODO(xunjieli): Remove annotation after crbug.com/539519 is fixed.
@SuppressWarnings("deprecation")
public void testRealTimeNetworkQualityObservations() throws Exception {
    // With the estimator enabled and listeners registered, a request must
    // produce at least one RTT and one throughput observation.
    mTestFramework = startCronetTestFramework();
    TestExecutor testExecutor = new TestExecutor();
    TestNetworkQualityListener networkQualityListener = new TestNetworkQualityListener();
    mTestFramework.mCronetEngine.enableNetworkQualityEstimatorForTesting(
            true, true, testExecutor);
    mTestFramework.mCronetEngine.addRttListener(networkQualityListener);
    mTestFramework.mCronetEngine.addThroughputListener(networkQualityListener);
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest urlRequest = mTestFramework.mCronetEngine.createRequest(
            TEST_URL, callback, callback.getExecutor());
    urlRequest.start();
    callback.blockForDone();
    // Observations are posted to testExecutor; drain it before asserting.
    testExecutor.runAllTasks();
    assertTrue(networkQualityListener.rttObservationCount() > 0);
    assertTrue(networkQualityListener.throughputObservationCount() > 0);
    mTestFramework.mCronetEngine.shutdown();
}
/** Captures the single expected onRequestFinished callback for later assertions. */
private static class TestRequestFinishedListener
        implements CronetEngine.RequestFinishedListener {
    // Info from the one callback this listener expects; null until then.
    private UrlRequestInfo mRequestInfo = null;
    @Override
    public void onRequestFinished(UrlRequestInfo requestInfo) {
        // Each listener instance must be called exactly once.
        assertNull("onRequestFinished called repeatedly", mRequestInfo);
        assertNotNull(requestInfo);
        mRequestInfo = requestInfo;
    }
}
@SmallTest
@Feature({"Cronet"})
@SuppressWarnings("deprecation")
public void testRequestFinishedListener() throws Exception {
    // Verifies that a successful request delivers URL, response info,
    // annotations, and plausible timing/byte metrics to the listener.
    mTestFramework = startCronetTestFramework();
    TestExecutor testExecutor = new TestExecutor();
    TestRequestFinishedListener requestFinishedListener = new TestRequestFinishedListener();
    mTestFramework.mCronetEngine.enableNetworkQualityEstimator(testExecutor);
    mTestFramework.mCronetEngine.addRequestFinishedListener(requestFinishedListener);
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.addRequestAnnotation("request annotation")
            .addRequestAnnotation(this)
            .build()
            .start();
    callback.blockForDone();
    // The listener callback is posted to testExecutor; drain it first.
    testExecutor.runAllTasks();
    CronetEngine.UrlRequestInfo requestInfo = requestFinishedListener.mRequestInfo;
    assertNotNull("RequestFinishedListener must be called", requestInfo);
    assertEquals(TEST_URL, requestInfo.getUrl());
    assertNotNull(requestInfo.getResponseInfo());
    assertEquals(newHashSet("request annotation", this), // Use sets for unordered comparison.
            new HashSet<Object>(requestInfo.getAnnotations()));
    CronetEngine.UrlRequestMetrics metrics = requestInfo.getMetrics();
    assertNotNull("UrlRequestInfo.getMetrics() must not be null", metrics);
    assertTrue(metrics.getTotalTimeMs() > 0);
    assertTrue(metrics.getTotalTimeMs() >= metrics.getTtfbMs());
    assertTrue(metrics.getReceivedBytesCount() > 0);
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
@SuppressWarnings("deprecation")
public void testRequestFinishedListenerFailedRequest() throws Exception {
    // Verifies the listener still fires for a failed (connection refused)
    // request, with total time but no time-to-first-byte or bytes received.
    String connectionRefusedUrl = "http://127.0.0.1:3";
    mTestFramework = startCronetTestFramework();
    TestExecutor testExecutor = new TestExecutor();
    TestRequestFinishedListener requestFinishedListener = new TestRequestFinishedListener();
    mTestFramework.mCronetEngine.enableNetworkQualityEstimator(testExecutor);
    mTestFramework.mCronetEngine.addRequestFinishedListener(requestFinishedListener);
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(connectionRefusedUrl,
            callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    assertTrue(callback.mOnErrorCalled);
    // The listener callback is posted to testExecutor; drain it first.
    testExecutor.runAllTasks();
    CronetEngine.UrlRequestInfo requestInfo = requestFinishedListener.mRequestInfo;
    assertNotNull("RequestFinishedListener must be called", requestInfo);
    assertEquals(connectionRefusedUrl, requestInfo.getUrl());
    assertTrue(requestInfo.getAnnotations().isEmpty());
    CronetEngine.UrlRequestMetrics metrics = requestInfo.getMetrics();
    assertNotNull("UrlRequestInfo.getMetrics() must not be null", metrics);
    assertTrue(metrics.getTotalTimeMs() > 0);
    assertNull(metrics.getTtfbMs());
    assertTrue(metrics.getReceivedBytesCount() == null || metrics.getReceivedBytesCount() == 0);
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
@SuppressWarnings("deprecation")
public void testRequestFinishedListenerRemoved() throws Exception {
    // A listener removed before the request starts must never be invoked.
    mTestFramework = startCronetTestFramework();
    TestExecutor testExecutor = new TestExecutor();
    TestRequestFinishedListener requestFinishedListener = new TestRequestFinishedListener();
    mTestFramework.mCronetEngine.enableNetworkQualityEstimator(testExecutor);
    mTestFramework.mCronetEngine.addRequestFinishedListener(requestFinishedListener);
    mTestFramework.mCronetEngine.removeRequestFinishedListener(requestFinishedListener);
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    // Drain any posted callbacks before asserting the listener stayed idle.
    testExecutor.runAllTasks();
    assertNull(
            "RequestFinishedListener must not be called", requestFinishedListener.mRequestInfo);
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
@SuppressWarnings("deprecation")
public void testRequestFinishedListenerDisabled() throws Exception {
    // Without enableNetworkQualityEstimator(), registering a request
    // finished listener must throw, and the listener must never fire.
    mTestFramework = startCronetTestFramework();
    TestExecutor testExecutor = new TestExecutor();
    TestRequestFinishedListener requestFinishedListener = new TestRequestFinishedListener();
    try {
        mTestFramework.mCronetEngine.addRequestFinishedListener(requestFinishedListener);
        fail("addRequestFinishedListener unexpectedly succeeded "
                + "without a call to enableNetworkQualityEstimator()");
    } catch (RuntimeException e) {
        // Expected.
    }
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    // Drain any posted callbacks before asserting the listener stayed idle.
    testExecutor.runAllTasks();
    assertNull(
            "RequestFinishedListener must not be called", requestFinishedListener.mRequestInfo);
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
public void testShutdown() throws Exception {
    // Verifies shutdown() is rejected at every stage while a request is in
    // flight, and succeeds (via the callback) once the request completes.
    mTestFramework = startCronetTestFramework();
    TestUrlRequestCallback callback = new ShutdownTestUrlRequestCallback();
    // Block callback when response starts to verify that shutdown fails
    // if there are active requests.
    callback.setAutoAdvance(false);
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    UrlRequest urlRequest = urlRequestBuilder.build();
    urlRequest.start();
    try {
        mTestFramework.mCronetEngine.shutdown();
        fail("Should throw an exception");
    } catch (Exception e) {
        assertEquals("Cannot shutdown with active requests.",
                e.getMessage());
    }
    callback.waitForNextStep();
    assertEquals(ResponseStep.ON_RESPONSE_STARTED, callback.mResponseStep);
    // Still active after response headers arrive: shutdown must fail.
    try {
        mTestFramework.mCronetEngine.shutdown();
        fail("Should throw an exception");
    } catch (Exception e) {
        assertEquals("Cannot shutdown with active requests.",
                e.getMessage());
    }
    callback.startNextRead(urlRequest);
    callback.waitForNextStep();
    assertEquals(ResponseStep.ON_READ_COMPLETED, callback.mResponseStep);
    // Still active mid-read: shutdown must fail.
    try {
        mTestFramework.mCronetEngine.shutdown();
        fail("Should throw an exception");
    } catch (Exception e) {
        assertEquals("Cannot shutdown with active requests.",
                e.getMessage());
    }
    // May not have read all the data, in theory. Just enable auto-advance
    // and finish the request (ShutdownTestUrlRequestCallback then shuts
    // the engine down from its completion callback).
    callback.setAutoAdvance(true);
    callback.startNextRead(urlRequest);
    callback.blockForDone();
}
@SmallTest
@Feature({"Cronet"})
public void testShutdownDuringInit() throws Exception {
    // Verifies shutdown() called while the engine is still initializing on
    // the main thread waits for init and then cleanly shuts down.
    final CronetTestFramework testFramework = startCronetTestFrameworkAndSkipLibraryInit();
    final ConditionVariable block = new ConditionVariable(false);
    // Post a task to main thread to block until shutdown is called to test
    // scenario when shutdown is called right after construction before
    // context is fully initialized on the main thread.
    Runnable blockingTask = new Runnable() {
        @Override
        public void run() {
            try {
                block.block();
            } catch (Exception e) {
                fail("Caught " + e.getMessage());
            }
        }
    };
    // Ensure that test is not running on the main thread.
    assertTrue(Looper.getMainLooper() != Looper.myLooper());
    new Handler(Looper.getMainLooper()).post(blockingTask);
    // Create new request context, but its initialization on the main thread
    // will be stuck behind blockingTask.
    final CronetEngine cronetEngine = testFramework.initCronetEngine();
    // Unblock the main thread, so context gets initialized and shutdown on
    // it.
    block.open();
    // Shutdown will wait for init to complete on main thread.
    cronetEngine.shutdown();
    // Verify that context is shutdown.
    try {
        cronetEngine.stopNetLog();
        fail("Should throw an exception.");
    } catch (Exception e) {
        assertEquals("Engine is shut down.", e.getMessage());
    }
}
@SmallTest
@Feature({"Cronet"})
public void testInitAndShutdownOnMainThread() throws Exception {
    // Verifies that creating and immediately shutting down an engine works
    // when both happen on the main (UI) thread.
    final CronetTestFramework testFramework = startCronetTestFrameworkAndSkipLibraryInit();
    final ConditionVariable block = new ConditionVariable(false);
    // Post a task to main thread to init and shutdown on the main thread.
    Runnable blockingTask = new Runnable() {
        @Override
        public void run() {
            // Create new request context, loading the library.
            final CronetEngine cronetEngine = testFramework.initCronetEngine();
            // Shutdown right after init.
            cronetEngine.shutdown();
            // Verify that context is shutdown.
            try {
                cronetEngine.stopNetLog();
                fail("Should throw an exception.");
            } catch (Exception e) {
                assertEquals("Engine is shut down.", e.getMessage());
            }
            block.open();
        }
    };
    new Handler(Looper.getMainLooper()).post(blockingTask);
    // Wait for shutdown to complete on main thread.
    block.block();
}
@SmallTest
@Feature({"Cronet"})
public void testMultipleShutdown() throws Exception {
    // The first shutdown() must succeed; a second one must throw because
    // the engine is already shut down.
    mTestFramework = startCronetTestFramework();
    try {
        mTestFramework.mCronetEngine.shutdown();
        mTestFramework.mCronetEngine.shutdown();
        fail("Should throw an exception");
    } catch (Exception e) {
        assertEquals("Engine is shut down.", e.getMessage());
    }
}
@SmallTest
@Feature({"Cronet"})
public void testShutdownAfterError() throws Exception {
    // Verifies the engine can be shut down from within onFailed() (the
    // ShutdownTestUrlRequestCallback does the shutdown).
    mTestFramework = startCronetTestFramework();
    TestUrlRequestCallback callback = new ShutdownTestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(MOCK_CRONET_TEST_FAILED_URL,
            callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    assertTrue(callback.mOnErrorCalled);
}
@SmallTest
@Feature({"Cronet"})
public void testShutdownAfterCancel() throws Exception {
    // Verifies shutdown() fails while a request is active but succeeds
    // once that request has been cancelled.
    mTestFramework = startCronetTestFramework();
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    // Block callback when response starts to verify that shutdown fails
    // if there are active requests.
    callback.setAutoAdvance(false);
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    UrlRequest urlRequest = urlRequestBuilder.build();
    urlRequest.start();
    try {
        mTestFramework.mCronetEngine.shutdown();
        fail("Should throw an exception");
    } catch (Exception e) {
        assertEquals("Cannot shutdown with active requests.",
                e.getMessage());
    }
    callback.waitForNextStep();
    assertEquals(ResponseStep.ON_RESPONSE_STARTED, callback.mResponseStep);
    urlRequest.cancel();
    mTestFramework.mCronetEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
@OnlyRunNativeCronet // No netlogs for pure java impl
// Starts a NetLog on a brand-new engine, runs one request, and verifies a
// non-empty log with no payload bytes (logAll == false) was written.
public void testNetLog() throws Exception {
    Context context = getContext();
    File directory = new File(PathUtils.getDataDirectory(context));
    File file = File.createTempFile("cronet", "json", directory);
    CronetEngine cronetEngine = new CronetUrlRequestContext(
            new CronetEngine.Builder(context).setLibraryName("cronet_tests"));
    // Start NetLog immediately after the request context is created to make
    // sure that the call won't crash the app even when the native request
    // context is not fully initialized. See crbug.com/470196.
    cronetEngine.startNetLogToFile(file.getPath(), false);
    // Start a request.
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder =
            new UrlRequest.Builder(TEST_URL, callback, callback.getExecutor(), cronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    cronetEngine.stopNetLog();
    // FIX: this engine is created locally (not owned by mTestFramework), so it
    // must be shut down here or its native context leaks into later tests.
    cronetEngine.shutdown();
    assertTrue(file.exists());
    assertTrue(file.length() != 0);
    // logAll was false, so no hex-encoded payload bytes may appear in the log.
    assertFalse(hasBytesInNetLog(file));
    assertTrue(file.delete());
    assertFalse(file.exists());
}
@SmallTest
@Feature({"Cronet"})
// Starting a NetLog on a shut-down engine must throw and must not write a log.
public void testNetLogAfterShutdown() throws Exception {
    mTestFramework = startCronetTestFramework();
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    mTestFramework.mCronetEngine.shutdown();

    File directory = new File(PathUtils.getDataDirectory(getContext()));
    File file = File.createTempFile("cronet", "json", directory);
    try {
        // Engine is dead: this must be rejected.
        mTestFramework.mCronetEngine.startNetLogToFile(file.getPath(), false);
        fail("Should throw an exception.");
    } catch (Exception e) {
        assertEquals("Engine is shut down.", e.getMessage());
    }
    // Nothing may have been logged to the (still empty) temp file.
    assertFalse(hasBytesInNetLog(file));
    assertTrue(file.delete());
    assertTrue(!file.exists());
}
@SmallTest
@Feature({"Cronet"})
// startNetLogToFile() must be idempotent: repeated starts keep one active log.
public void testNetLogStartMultipleTimes() throws Exception {
    mTestFramework = startCronetTestFramework();
    File logDirectory = new File(PathUtils.getDataDirectory(getContext()));
    File logFile = File.createTempFile("cronet", "json", logDirectory);
    // Issue four consecutive start calls; only the first should take effect.
    for (int attempt = 0; attempt < 4; attempt++) {
        mTestFramework.mCronetEngine.startNetLogToFile(logFile.getPath(), false);
    }
    // Run one request so the log has content.
    TestUrlRequestCallback requestCallback = new TestUrlRequestCallback();
    UrlRequest.Builder requestBuilder = new UrlRequest.Builder(
            TEST_URL, requestCallback, requestCallback.getExecutor(), mTestFramework.mCronetEngine);
    requestBuilder.build().start();
    requestCallback.blockForDone();
    mTestFramework.mCronetEngine.stopNetLog();
    // A non-empty log without payload bytes must have been produced.
    assertTrue(logFile.exists());
    assertTrue(logFile.length() != 0);
    assertFalse(hasBytesInNetLog(logFile));
    assertTrue(logFile.delete());
    assertTrue(!logFile.exists());
}
@SmallTest
@Feature({"Cronet"})
// Calling stopNetLog() repeatedly must be harmless after the first stop.
public void testNetLogStopMultipleTimes() throws Exception {
    mTestFramework = startCronetTestFramework();
    File logDirectory = new File(PathUtils.getDataDirectory(getContext()));
    File logFile = File.createTempFile("cronet", "json", logDirectory);
    mTestFramework.mCronetEngine.startNetLogToFile(logFile.getPath(), false);
    // Run one request so the log has content.
    TestUrlRequestCallback requestCallback = new TestUrlRequestCallback();
    UrlRequest.Builder requestBuilder = new UrlRequest.Builder(
            TEST_URL, requestCallback, requestCallback.getExecutor(), mTestFramework.mCronetEngine);
    requestBuilder.build().start();
    requestCallback.blockForDone();
    // Stop NetLog five times; only the first stop should have any effect.
    for (int attempt = 0; attempt < 5; attempt++) {
        mTestFramework.mCronetEngine.stopNetLog();
    }
    assertTrue(logFile.exists());
    assertTrue(logFile.length() != 0);
    assertFalse(hasBytesInNetLog(logFile));
    assertTrue(logFile.delete());
    assertTrue(!logFile.exists());
}
@SmallTest
@Feature({"Cronet"})
@OnlyRunNativeCronet
// With logAll == true the NetLog must contain hex-encoded payload bytes.
public void testNetLogWithBytes() throws Exception {
    Context context = getContext();
    File directory = new File(PathUtils.getDataDirectory(context));
    File file = File.createTempFile("cronet", "json", directory);
    CronetEngine cronetEngine = new CronetUrlRequestContext(
            new CronetEngine.Builder(context).setLibraryName("cronet_tests"));
    // Start NetLog with logAll as true.
    cronetEngine.startNetLogToFile(file.getPath(), true);
    // Start a request.
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder =
            new UrlRequest.Builder(TEST_URL, callback, callback.getExecutor(), cronetEngine);
    urlRequestBuilder.build().start();
    callback.blockForDone();
    cronetEngine.stopNetLog();
    // FIX: this engine is created locally (not owned by mTestFramework), so it
    // must be shut down here or its native context leaks into later tests.
    cronetEngine.shutdown();
    assertTrue(file.exists());
    assertTrue(file.length() != 0);
    // logAll was true, so payload bytes must be present in the log.
    assertTrue(hasBytesInNetLog(file));
    assertTrue(file.delete());
    assertFalse(file.exists());
}
// Scans a NetLog JSON file for the "hex_encoded_bytes" key, which only
// appears when byte-level logging (logAll == true) was enabled.
// Returns true as soon as the key is found on any line.
private boolean hasBytesInNetLog(File logFile) throws Exception {
    // try-with-resources replaces the manual try/finally close and guarantees
    // the reader is released on every exit path, including early return.
    try (BufferedReader logReader = new BufferedReader(new FileReader(logFile))) {
        String logLine;
        while ((logLine = logReader.readLine()) != null) {
            if (logLine.contains("\"hex_encoded_bytes\"")) {
                return true;
            }
        }
        return false;
    }
}
// Maps a CronetEngine.Builder HTTP_CACHE_* constant onto the test framework's
// command-line cache flag, starts the framework with it, and brings up the
// native test server that serves the cacheable fixtures.
private void enableCache(int cacheType) throws Exception {
    String cacheTypeString = "";
    if (cacheType == CronetEngine.Builder.HTTP_CACHE_DISK) {
        cacheTypeString = CronetTestFramework.CACHE_DISK;
    } else if (cacheType == CronetEngine.Builder.HTTP_CACHE_DISK_NO_HTTP) {
        cacheTypeString = CronetTestFramework.CACHE_DISK_NO_HTTP;
    } else if (cacheType == CronetEngine.Builder.HTTP_CACHE_IN_MEMORY) {
        cacheTypeString = CronetTestFramework.CACHE_IN_MEMORY;
    }
    // HTTP_CACHE_DISABLED (or any unknown value) falls through with an empty flag.
    String[] commandLineArgs = {CronetTestFramework.CACHE_KEY, cacheTypeString};
    mTestFramework = startCronetTestFrameworkWithUrlAndCommandLineArgs(null, commandLineArgs);
    assertTrue(NativeTestServer.startNativeTestServer(getContext()));
}
// Convenience overload: fetch with the per-request cache left enabled.
private void checkRequestCaching(String url, boolean expectCached) {
    checkRequestCaching(url, expectCached, false);
}
// Fetches url synchronously and asserts whether the response was served from
// the cache. When disableCache is true the request is built with caching off.
private void checkRequestCaching(String url, boolean expectCached,
        boolean disableCache) {
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            url, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    if (disableCache) {
        urlRequestBuilder.disableCache();
    }
    urlRequestBuilder.build().start();
    callback.blockForDone();
    assertEquals(expectCached, callback.mResponseInfo.wasCached());
}
@SmallTest
@Feature({"Cronet"})
@OnlyRunNativeCronet
// With the cache disabled outright, no request may ever be served from cache.
public void testEnableHttpCacheDisabled() throws Exception {
    enableCache(CronetEngine.Builder.HTTP_CACHE_DISABLED);
    String cacheableUrl = NativeTestServer.getFileURL("/cacheable.txt");
    // Three identical fetches: every one of them must miss.
    for (int attempt = 0; attempt < 3; attempt++) {
        checkRequestCaching(cacheableUrl, false);
    }
}
@SmallTest
@Feature({"Cronet"})
// In-memory cache: first fetch misses, second hits, and the cached copy is
// still served after the origin server goes away.
public void testEnableHttpCacheInMemory() throws Exception {
    enableCache(CronetEngine.Builder.HTTP_CACHE_IN_MEMORY);
    String url = NativeTestServer.getFileURL("/cacheable.txt");
    checkRequestCaching(url, false);
    checkRequestCaching(url, true);
    // Server gone: a hit here proves the response really came from the cache.
    NativeTestServer.shutdownNativeTestServer();
    checkRequestCaching(url, true);
}
@SmallTest
@Feature({"Cronet"})
// Disk cache: first fetch misses, second hits, and the cached copy is still
// served after the origin server goes away.
public void testEnableHttpCacheDisk() throws Exception {
    enableCache(CronetEngine.Builder.HTTP_CACHE_DISK);
    String url = NativeTestServer.getFileURL("/cacheable.txt");
    checkRequestCaching(url, false);
    checkRequestCaching(url, true);
    // Server gone: a hit here proves the response really came from the cache.
    NativeTestServer.shutdownNativeTestServer();
    checkRequestCaching(url, true);
}
@SmallTest
@Feature({"Cronet"})
@OnlyRunNativeCronet
// Exercises HTTP_CACHE_DISK_NO_HTTP: a disk cache that intentionally does not
// store HTTP responses, so repeated fetches must never be served from cache.
public void testEnableHttpCacheDiskNoHttp() throws Exception {
    // BUG FIX: this test previously enabled HTTP_CACHE_DISABLED, so the
    // DISK_NO_HTTP mode it is named after was never actually exercised.
    enableCache(CronetEngine.Builder.HTTP_CACHE_DISK_NO_HTTP);
    String url = NativeTestServer.getFileURL("/cacheable.txt");
    // HTTP responses are not cached in this mode: every request misses.
    checkRequestCaching(url, false);
    checkRequestCaching(url, false);
    checkRequestCaching(url, false);
}
@SmallTest
@Feature({"Cronet"})
// Per-request disableCache() must bypass an otherwise-enabled disk cache,
// both for writing (populating) and for reading (serving).
public void testDisableCache() throws Exception {
    enableCache(CronetEngine.Builder.HTTP_CACHE_DISK);
    String url = NativeTestServer.getFileURL("/cacheable.txt");

    // When cache is disabled, making a request does not write to the cache.
    checkRequestCaching(url, false, true /** disable cache */);
    checkRequestCaching(url, false);

    // When cache is enabled, the second request is cached.
    checkRequestCaching(url, false, true /** disable cache */);
    checkRequestCaching(url, true);

    // Shut down the server, next request should have a cached response.
    NativeTestServer.shutdownNativeTestServer();
    checkRequestCaching(url, true);

    // Cache is disabled after server is shut down, request should fail
    // (the cached copy may not be used when the request disables the cache).
    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder urlRequestBuilder = new UrlRequest.Builder(
            url, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    urlRequestBuilder.disableCache();
    urlRequestBuilder.build().start();
    callback.blockForDone();
    assertNotNull(callback.mError);
    assertEquals("Exception in CronetUrlRequest: net::ERR_CONNECTION_REFUSED",
            callback.mError.getMessage());
}
@SmallTest
@Feature({"Cronet"})
// The on-disk cache must survive an engine restart: a new engine built with
// the same storage path serves the previously cached response.
public void testEnableHttpCacheDiskNewEngine() throws Exception {
    enableCache(CronetEngine.Builder.HTTP_CACHE_DISK);
    String url = NativeTestServer.getFileURL("/cacheable.txt");
    checkRequestCaching(url, false);
    checkRequestCaching(url, true);
    NativeTestServer.shutdownNativeTestServer();
    checkRequestCaching(url, true);

    // Shutdown original context and create another that uses the same cache.
    mTestFramework.mCronetEngine.shutdown();
    mTestFramework.mCronetEngine = mTestFramework.getCronetEngineBuilder().build();
    checkRequestCaching(url, true);
}
@SmallTest
@Feature({"Cronet"})
// A request issued immediately after engine initialization must succeed.
public void testInitEngineAndStartRequest() {
    CronetTestFramework testFramework = startCronetTestFrameworkAndSkipLibraryInit();
    // Immediately make a request after initializing the engine.
    CronetEngine engine = testFramework.initCronetEngine();
    TestUrlRequestCallback requestCallback = new TestUrlRequestCallback();
    new UrlRequest.Builder(TEST_URL, requestCallback, requestCallback.getExecutor(), engine)
            .build()
            .start();
    requestCallback.blockForDone();
    assertEquals(200, requestCallback.mResponseInfo.getHttpStatusCode());
}
@SmallTest
@Feature({"Cronet"})
// Two back-to-back requests on a freshly initialized engine: one success (200)
// followed by a 404, each verified as it completes.
public void testInitEngineStartTwoRequests() throws Exception {
    CronetTestFramework testFramework = startCronetTestFrameworkAndSkipLibraryInit();
    // Make two requests after initializing the context.
    CronetEngine cronetEngine = testFramework.initCronetEngine();
    String[] urls = {TEST_URL, URL_404};
    int[] expectedCodes = {200, 404};
    for (int i = 0; i < urls.length; i++) {
        TestUrlRequestCallback callback = new TestUrlRequestCallback();
        new UrlRequest.Builder(urls[i], callback, callback.getExecutor(), cronetEngine)
                .build()
                .start();
        callback.blockForDone();
        assertEquals(expectedCodes[i], callback.mResponseInfo.getHttpStatusCode());
    }
}
@SmallTest
@Feature({"Cronet"})
// Two threads initialize engines and issue requests at the same time; both
// must complete with their expected status codes.
public void testInitTwoEnginesSimultaneously() throws Exception {
    final CronetTestFramework testFramework = startCronetTestFrameworkAndSkipLibraryInit();

    // Threads will block on runBlocker to ensure simultaneous execution.
    ConditionVariable runBlocker = new ConditionVariable(false);
    RequestThread thread1 = new RequestThread(testFramework, TEST_URL, runBlocker);
    RequestThread thread2 = new RequestThread(testFramework, URL_404, runBlocker);

    thread1.start();
    thread2.start();
    // Release both threads at once.
    runBlocker.open();
    thread1.join();
    thread2.join();
    assertEquals(200, thread1.mCallback.mResponseInfo.getHttpStatusCode());
    assertEquals(404, thread2.mCallback.mResponseInfo.getHttpStatusCode());
}
@SmallTest
@Feature({"Cronet"})
// Same as the simultaneous variant, but the threads run strictly one after
// the other (runBlocker starts open, and each thread is joined before the next).
public void testInitTwoEnginesInSequence() throws Exception {
    final CronetTestFramework testFramework = startCronetTestFrameworkAndSkipLibraryInit();

    ConditionVariable runBlocker = new ConditionVariable(true);
    RequestThread thread1 = new RequestThread(testFramework, TEST_URL, runBlocker);
    RequestThread thread2 = new RequestThread(testFramework, URL_404, runBlocker);

    thread1.start();
    thread1.join();
    thread2.start();
    thread2.join();
    assertEquals(200, thread1.mCallback.mResponseInfo.getHttpStatusCode());
    assertEquals(404, thread2.mCallback.mResponseInfo.getHttpStatusCode());
}
@SmallTest
@Feature({"Cronet"})
public void testInitDifferentEngines() throws Exception {
    // Test that concurrently instantiating Cronet context's upon various
    // different versions of the same Android Context does not cause crashes
    // like crbug.com/453845
    mTestFramework = startCronetTestFramework();
    // Plain activity context.
    CronetEngine firstEngine =
            new CronetUrlRequestContext(mTestFramework.createCronetEngineBuilder(getContext()));
    // Application context.
    CronetEngine secondEngine = new CronetUrlRequestContext(
            mTestFramework.createCronetEngineBuilder(getContext().getApplicationContext()));
    // Wrapped context.
    CronetEngine thirdEngine = new CronetUrlRequestContext(
            mTestFramework.createCronetEngineBuilder(new ContextWrapper(getContext())));
    firstEngine.shutdown();
    secondEngine.shutdown();
    thirdEngine.shutdown();
}
@SmallTest
@Feature({"Cronet"})
// Global metric deltas must be non-empty and must change after traffic flows.
public void testGetGlobalMetricsDeltas() throws Exception {
    mTestFramework = startCronetTestFramework();

    // Baseline snapshot before any request.
    byte delta1[] = mTestFramework.mCronetEngine.getGlobalMetricsDeltas();

    TestUrlRequestCallback callback = new TestUrlRequestCallback();
    UrlRequest.Builder builder = new UrlRequest.Builder(
            TEST_URL, callback, callback.getExecutor(), mTestFramework.mCronetEngine);
    builder.build().start();
    callback.blockForDone();
    // Second snapshot after one request: must differ from the baseline.
    byte delta2[] = mTestFramework.mCronetEngine.getGlobalMetricsDeltas();
    assertTrue(delta2.length != 0);
    assertFalse(Arrays.equals(delta1, delta2));
}
@SmallTest
@Feature({"Cronet"})
// Builds a CronetEngine.Builder with one of every supported option set and
// hands the resulting native config to native code for field-by-field checks.
public void testCronetEngineBuilderConfig() throws Exception {
    // This is to prompt load of native library.
    startCronetTestFramework();
    // Verify CronetEngine.Builder config is passed down accurately to native code.
    CronetEngine.Builder builder = new CronetEngine.Builder(getContext());
    builder.enableHTTP2(false);
    builder.enableQUIC(true);
    builder.enableSDCH(true);
    builder.addQuicHint("example.com", 12, 34);
    builder.enableHttpCache(CronetEngine.Builder.HTTP_CACHE_IN_MEMORY, 54321);
    builder.enableDataReductionProxy("abcd");
    builder.setUserAgent("efgh");
    builder.setExperimentalOptions("ijkl");
    builder.setDataReductionProxyOptions("mnop", "qrst", "uvwx");
    builder.setStoragePath(CronetTestFramework.getTestStorage(getContext()));
    // The native side asserts every value above round-tripped correctly.
    nativeVerifyUrlRequestContextConfig(
            CronetUrlRequestContext.createNativeUrlRequestContextConfig(builder),
            CronetTestFramework.getTestStorage(getContext()));
}
// Verifies that CronetEngine.Builder config from testCronetEngineBuilderConfig() is properly
// translated to a native UrlRequestContextConfig. Implemented via JNI; takes the
// native config pointer and the expected storage path.
private static native void nativeVerifyUrlRequestContextConfig(long config, String storagePath);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.atomspace.camel.component.tinkerforge.device;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.atomspace.camel.component.tinkerforge.TinkerforgeConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tinkerforge.BrickMaster;
import com.tinkerforge.BrickMaster.StackCurrentListener;
import com.tinkerforge.BrickMaster.StackVoltageListener;
import com.tinkerforge.BrickMaster.USBVoltageListener;
import com.tinkerforge.BrickMaster.StackCurrentReachedListener;
import com.tinkerforge.BrickMaster.StackVoltageReachedListener;
import com.tinkerforge.BrickMaster.USBVoltageReachedListener;;
/**
 * Camel consumer for the Tinkerforge BrickMaster device. Registers itself for
 * the device's stack-current/voltage and USB-voltage callbacks (plus their
 * "reached" threshold variants) and converts each callback into a Camel
 * exchange carrying the firing callback id and the measured value as headers.
 */
public class MasterConsumer extends TinkerforgeConsumer<MasterEndpoint, BrickMaster> implements StackCurrentListener, StackVoltageListener, USBVoltageListener, StackCurrentReachedListener, StackVoltageReachedListener, USBVoltageReachedListener {

    private static final Logger LOG = LoggerFactory.getLogger(MasterConsumer.class);

    /**
     * Creates the consumer, connects a BrickMaster device and registers callback
     * listeners. With no "callback" option on the endpoint every supported
     * listener is registered; otherwise only the listeners named in the
     * comma-separated option value are registered.
     */
    public MasterConsumer(MasterEndpoint endpoint, Processor processor) throws Exception {
        super(endpoint, processor);
        device = new BrickMaster(endpoint.getUid(), endpoint.getSharedConnection().getConnection());
        endpoint.init(device);

        if (endpoint.getCallback() == null || endpoint.getCallback().equals("")) {
            // No explicit selection: listen for every callback this consumer supports.
            device.addStackCurrentListener(this);
            device.addStackVoltageListener(this);
            device.addUSBVoltageListener(this);
            device.addStackCurrentReachedListener(this);
            device.addStackVoltageReachedListener(this);
            device.addUSBVoltageReachedListener(this);
        } else {
            String[] callbacks = endpoint.getCallback().split(",");
            for (String callback : callbacks) {
                if (callback.equals("StackCurrentListener")) device.addStackCurrentListener(this);
                if (callback.equals("StackVoltageListener")) device.addStackVoltageListener(this);
                if (callback.equals("USBVoltageListener")) device.addUSBVoltageListener(this);
                if (callback.equals("StackCurrentReachedListener")) device.addStackCurrentReachedListener(this);
                if (callback.equals("StackVoltageReachedListener")) device.addStackVoltageReachedListener(this);
                if (callback.equals("USBVoltageReachedListener")) device.addUSBVoltageReachedListener(this);
            }
        }
    }

    /**
     * Shared exchange plumbing for all six callbacks (previously copy-pasted
     * per method): builds an exchange carrying the firing callback id and the
     * measured value as headers plus a fixed body string, then hands it to the
     * processor. Processing errors are routed to the exception handler exactly
     * as the per-method code did.
     *
     * @param callbackId  the BrickMaster.CALLBACK_* constant that fired
     * @param valueHeader header name under which the value is published
     * @param value       the raw value reported by the device
     * @param body        fixed exchange body identifying the callback
     */
    private void fireCallbackExchange(Object callbackId, String valueHeader, int value, String body) {
        Exchange exchange = null;
        try {
            exchange = createExchange();

            // ADD HEADER
            exchange.getIn().setHeader("fireBy", callbackId);
            exchange.getIn().setHeader(valueHeader, value);

            // ADD BODY
            exchange.getIn().setBody(body);

            getProcessor().process(exchange);
        } catch (Exception e) {
            getExceptionHandler().handleException("Error processing exchange", exchange, e);
        } finally {
            // An exception set on the exchange by the processor is also reported.
            if (exchange != null && exchange.getException() != null) {
                getExceptionHandler().handleException("Error processing exchange", exchange, exchange.getException());
            }
        }
    }

    @Override
    public void stackCurrent(int current) {
        LOG.trace("stackCurrent()");
        fireCallbackExchange(BrickMaster.CALLBACK_STACK_CURRENT, "current", current, "stack_current");
    }

    @Override
    public void stackVoltage(int voltage) {
        LOG.trace("stackVoltage()");
        fireCallbackExchange(BrickMaster.CALLBACK_STACK_VOLTAGE, "voltage", voltage, "stack_voltage");
    }

    @Override
    public void usbVoltage(int voltage) {
        LOG.trace("usbVoltage()");
        fireCallbackExchange(BrickMaster.CALLBACK_USB_VOLTAGE, "voltage", voltage, "usb_voltage");
    }

    @Override
    public void stackCurrentReached(int current) {
        LOG.trace("stackCurrentReached()");
        fireCallbackExchange(BrickMaster.CALLBACK_STACK_CURRENT_REACHED, "current", current, "stack_current_reached");
    }

    @Override
    public void stackVoltageReached(int voltage) {
        LOG.trace("stackVoltageReached()");
        fireCallbackExchange(BrickMaster.CALLBACK_STACK_VOLTAGE_REACHED, "voltage", voltage, "stack_voltage_reached");
    }

    @Override
    public void usbVoltageReached(int voltage) {
        LOG.trace("usbVoltageReached()");
        fireCallbackExchange(BrickMaster.CALLBACK_USB_VOLTAGE_REACHED, "voltage", voltage, "usb_voltage_reached");
    }
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.shipment.shipment;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.util.EntityUtilProperties;
import org.apache.ofbiz.service.DispatchContext;
import org.apache.ofbiz.service.GenericServiceException;
import org.apache.ofbiz.service.LocalDispatcher;
import org.apache.ofbiz.service.ModelService;
/**
* ShipmentWorker - Worker methods for Shipment and related entities
*/
public final class ShipmentWorker {
public static final String module = ShipmentWorker.class.getName();
private static final MathContext generalRounding = new MathContext(10);
private ShipmentWorker() {}
/*
* Returns the value of a given ShipmentPackageContent record. Calculated by working out the total value (from the OrderItems) of all ItemIssuances
* for the ShipmentItem then dividing that by the total quantity issued for the same to get an average item value then multiplying that by the package
* content quantity.
* Note: No rounding of the calculation is performed so you will need to round it to the accuracy that you require
*/
public static BigDecimal getShipmentPackageContentValue(GenericValue shipmentPackageContent) {
BigDecimal quantity = shipmentPackageContent.getBigDecimal("quantity");
BigDecimal value;
// lookup the issuance to find the order
List<GenericValue> issuances = null;
try {
GenericValue shipmentItem = shipmentPackageContent.getRelatedOne("ShipmentItem", false);
issuances = shipmentItem.getRelated("ItemIssuance", null, null, false);
} catch (GenericEntityException e) {
Debug.logError(e, module);
}
BigDecimal totalIssued = BigDecimal.ZERO;
BigDecimal totalValue = BigDecimal.ZERO;
if (UtilValidate.isNotEmpty(issuances)) {
for (GenericValue issuance : issuances) {
// we only need one
BigDecimal issuanceQuantity = issuance.getBigDecimal("quantity");
BigDecimal issuanceCancelQuantity = issuance.getBigDecimal("cancelQuantity");
if (issuanceCancelQuantity != null) {
issuanceQuantity = issuanceQuantity.subtract(issuanceCancelQuantity);
}
// get the order item
GenericValue orderItem = null;
try {
orderItem = issuance.getRelatedOne("OrderItem", false);
} catch (GenericEntityException e) {
Debug.logError(e, module);
}
if (orderItem != null) {
// get the value per unit - (base price * amount)
BigDecimal selectedAmount = orderItem.getBigDecimal("selectedAmount");
if (selectedAmount == null || selectedAmount.compareTo(BigDecimal.ZERO) <= 0) {
selectedAmount = BigDecimal.ONE;
}
BigDecimal unitPrice = orderItem.getBigDecimal("unitPrice");
BigDecimal itemValue = unitPrice.multiply(selectedAmount);
// total value for package (per unit * quantity)
totalIssued = totalIssued.add(issuanceQuantity);
totalValue = totalValue.add(itemValue.multiply(issuanceQuantity));
}
}
}
// take the average value of the issuances and multiply it by the shipment package content quantity
value = totalValue.divide(totalIssued, 10, RoundingMode.HALF_EVEN).multiply(quantity);
return value;
}
public static List<Map<String, BigDecimal>> getPackageSplit(DispatchContext dctx, List<Map<String, Object>> shippableItemInfo, BigDecimal maxWeight) {
// create the package list w/ the first package
List<Map<String, BigDecimal>> packages = new LinkedList<Map<String,BigDecimal>>();
if (UtilValidate.isNotEmpty(shippableItemInfo)) {
for (Map<String, Object> itemInfo: shippableItemInfo) {
long pieces = ((Long) itemInfo.get("piecesIncluded")).longValue();
BigDecimal totalQuantity = (BigDecimal) itemInfo.get("quantity");
BigDecimal totalWeight = (BigDecimal) itemInfo.get("weight");
String productId = (String) itemInfo.get("productId");
// sanity check
if (pieces < 1) {
pieces = 1; // can NEVER be less than one
}
BigDecimal weight = totalWeight.divide(BigDecimal.valueOf(pieces), generalRounding);
for (int z = 1; z <= totalQuantity.intValue(); z++) {
BigDecimal partialQty = pieces > 1 ? BigDecimal.ONE.divide(BigDecimal.valueOf(pieces), generalRounding) : BigDecimal.ONE;
for (long x = 0; x < pieces; x++) {
if (weight.compareTo(maxWeight) >= 0) {
Map<String, BigDecimal> newPackage = new HashMap<String, BigDecimal>();
newPackage.put(productId, partialQty);
packages.add(newPackage);
} else if (totalWeight.compareTo(BigDecimal.ZERO) > 0) {
// create the first package
if (packages.size() == 0) {
packages.add(new HashMap<String, BigDecimal>());
}
// package loop
boolean addedToPackage = false;
for (Map<String, BigDecimal> packageMap: packages) {
if (!addedToPackage) {
BigDecimal packageWeight = calcPackageWeight(dctx, packageMap, shippableItemInfo, weight);
if (packageWeight.compareTo(maxWeight) <= 0) {
BigDecimal qty = packageMap.get(productId);
qty = UtilValidate.isEmpty(qty) ? BigDecimal.ZERO : qty;
packageMap.put(productId, qty.add(partialQty));
addedToPackage = true;
}
}
}
if (!addedToPackage) {
Map<String, BigDecimal> packageMap = new HashMap<String, BigDecimal>();
packageMap.put(productId, partialQty);
packages.add(packageMap);
}
}
}
}
}
}
return packages;
}
public static BigDecimal calcPackageWeight(DispatchContext dctx, Map<String, BigDecimal> packageMap, List<Map<String, Object>> shippableItemInfo, BigDecimal additionalWeight) {
LocalDispatcher dispatcher = dctx.getDispatcher();
Delegator delegator = dctx.getDelegator();
BigDecimal totalWeight = BigDecimal.ZERO;
String defaultWeightUomId = EntityUtilProperties.getPropertyValue("shipment", "shipment.default.weight.uom", delegator);
for (Map.Entry<String, BigDecimal> entry: packageMap.entrySet()) {
String productId = entry.getKey();
Map<String, Object> productInfo = getProductItemInfo(shippableItemInfo, productId);
BigDecimal productWeight = (BigDecimal) productInfo.get("productWeight");
BigDecimal quantity = packageMap.get(productId);
String weightUomId = (String) productInfo.get("weightUomId");
Debug.logInfo("Product Id : " + productId + " Product Weight : " + String.valueOf(productWeight) + " Product UomId : " + weightUomId + " assuming " + defaultWeightUomId + " if null. Quantity : " + String.valueOf(quantity), module);
if (UtilValidate.isEmpty(weightUomId)) {
weightUomId = defaultWeightUomId;
}
if (!"WT_lb".equals(weightUomId)) {
// attempt a conversion to pounds
Map<String, Object> result = new HashMap<String, Object>();
try {
result = dispatcher.runSync("convertUom", UtilMisc.<String, Object>toMap("uomId", weightUomId, "uomIdTo", "WT_lb", "originalValue", productWeight));
} catch (GenericServiceException ex) {
Debug.logError(ex, module);
}
if (result.get(ModelService.RESPONSE_MESSAGE).equals(ModelService.RESPOND_SUCCESS) && UtilValidate.isNotEmpty(result.get("convertedValue"))) {
productWeight = (BigDecimal) result.get("convertedValue");
} else {
Debug.logError("Unsupported weightUom [" + weightUomId + "] for calcPackageWeight running productId " + productId + ", could not find a conversion factor to WT_lb",module);
}
}
totalWeight = totalWeight.add(productWeight.multiply(quantity));
}
Debug.logInfo("Package Weight : " + String.valueOf(totalWeight) + " lbs.", module);
return totalWeight.add(additionalWeight);
}
public static Map<String, Object> getProductItemInfo(List<Map<String, Object>> shippableItemInfo, String productId) {
if (UtilValidate.isNotEmpty(shippableItemInfo)) {
for (Map<String, Object> itemInfoMap: shippableItemInfo) {
String compareProductId = (String) itemInfoMap.get("productId");
if (productId.equals(compareProductId)) {
return itemInfoMap;
}
}
}
return null;
}
}
| |
package cn.orgid.message.client;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.ResponseHandler;
import cn.orgid.common.http.client.HttpClientComponent;
import cn.orgid.common.http.client.HttpClientComponent.RequestParameters;
import cn.orgid.common.http.client.JsonResponseHandler;
public class WebSocketConf {
    // --- Well-known parameter/header names used when talking to the WS service ---
    public final static String WS_APP_ID="appId";
    public final static String WS_APP_SECRET="appSecret";
    public static final String WS_CLIENT_NAME = "name";
    public static final String WS_CLIENT_TAG = "tag";
    public static final String WS_APP_TOKEN = "appToken";
    public static final String WS_CLIENT_ID = "clientId";
    public static final String WS_TALK_SESSION_ID = "sessionId";

    // --- HTTP service endpoints (base URL plus per-operation URIs/paths) ---
    private String wsHttpServiceBaseUrl;
    private String wsCreateClientUri;
    private String wsClientTokenUrl;
    private String wsCreateTalkSessionUri="/service/create_p2p_talk_session.htm";
    private String wsAddPeerConnectionToTalkSessionUri="/service/add_connection_to_talk_session.htm";

    // --- WebSocket connection endpoints ---
    private String wsHost;
    private String wsUrl;
    private MsgServerList msgServerList;
    // used for random selection among available servers
    private Random r=new Random();

    // --- Application credentials ---
    private String wsAppId;
    private String wsSecret;

    private String uploadFileUri="/service/upload_file.htm";
    private String serverListUrl="/service/get_server_list.htm";

    // --- Keep-alive tuning (milliseconds) ---
    private int checkPongTimeInterval = 1000;
    private int pingTimeInterval = 1000;
    private int pongTimeOut = 15*1000;

    private boolean debug;
    // local scratch directory; see getTempFilePath()
    private String tempFilePath="./tmp";
public String getTempFilePath() {
return tempFilePath;
}
public void setTempFilePath(String tempFilePath) {
this.tempFilePath = tempFilePath;
}
public String getWsHost() {
return wsHost;
}
public void setWsHost(String wsHost) {
this.wsHost = wsHost;
}
public String getWsHttpServiceBaseUrl() {
return wsHttpServiceBaseUrl;
}
public void setWsHttpServiceBaseUrl(String wsHttpServiceBaseUrl) {
this.wsHttpServiceBaseUrl = wsHttpServiceBaseUrl;
}
public String getWsCreateClientUri() {
return wsCreateClientUri;
}
public void setWsCreateClientUri(String wsCreateClientUri) {
this.wsCreateClientUri = wsCreateClientUri;
}
public String getWsClientTokenUrl() {
return wsClientTokenUrl;
}
public void setWsClientTokenUrl(String wsClientTokenUrl) {
this.wsClientTokenUrl = wsClientTokenUrl;
}
public String getWsCreateTalkSessionUri() {
return wsCreateTalkSessionUri;
}
public void setWsCreateTalkSessionUri(String wsCreateTalkSessionUri) {
this.wsCreateTalkSessionUri = wsCreateTalkSessionUri;
}
public String getWsAddPeerConnectionToTalkSessionUri() {
return wsAddPeerConnectionToTalkSessionUri;
}
public void setWsAddPeerConnectionToTalkSessionUri(String wsAddPeerConnectionToTalkSessionUri) {
this.wsAddPeerConnectionToTalkSessionUri = wsAddPeerConnectionToTalkSessionUri;
}
public String getWsUrl() {
if(StringUtils.isNotBlank(serverListUrl)){
String url=getWsHttpServiceBaseUrl()+serverListUrl;
RequestParameters parameters = new RequestParameters();
MsgServerList list =HttpClientComponent.executePost(parameters , url, JsonResponseHandler.createResponseHandler(MsgServerList.class));
this.msgServerList=list;
}
if(msgServerList!=null){
MsgServer s=msgServerList.randomServer();
if(s!=null){
return s.serverUrl;
}
}
return wsUrl;
}
public void setWsUrl(String wsUrl) {
this.wsUrl = wsUrl;
}
public String getWsAppId() {
return wsAppId;
}
public void setWsAppId(String wsAppId) {
this.wsAppId = wsAppId;
}
public String getWsSecret() {
return wsSecret;
}
public void setWsSecret(String wsSecret) {
this.wsSecret = wsSecret;
}
public int getCheckPongTimeInterval() {
return checkPongTimeInterval;
}
public void setCheckPongTimeInterval(int checkPongTimeInterval) {
this.checkPongTimeInterval = checkPongTimeInterval;
}
public int getPingTimeInterval() {
return pingTimeInterval;
}
public String getUploadFileUri() {
return uploadFileUri;
}
public void setUploadFileUri(String uploadFileUri) {
this.uploadFileUri = uploadFileUri;
}
public void setPingTimeInterval(int pingTimeInterval) {
this.pingTimeInterval = pingTimeInterval;
}
public int getPongTimeOut() {
return pongTimeOut;
}
public void setPongTimeOut(int pongTimeOut) {
this.pongTimeOut = pongTimeOut;
}
public boolean isDebug() {
return debug;
}
public void setDebug(boolean debug) {
this.debug = debug;
}
public String getServerListUrl() {
return serverListUrl;
}
public void setServerListUrl(String serverListUrl) {
this.serverListUrl = serverListUrl;
}
public void init(){
}
public static class MsgServerList{
private List<MsgServer> servers;
private final static Random r = new Random();
public List<MsgServer> getServers() {
return servers;
}
public void setValue(List<MsgServer> servers) {
this.servers = servers;
}
public MsgServer randomServer(){
if(servers!=null&&servers.size()>0){
int i =r.nextInt(servers.size());
return servers.get(i);
}
return null;
}
}
public static class MsgServer{
private String serverId;
private String serverUrl;
public String getServerId() {
return serverId;
}
public void setServerId(String serverId) {
this.serverId = serverId;
}
public String getServerUrl() {
return serverUrl;
}
public void setServerUrl(String serverUrl) {
this.serverUrl = serverUrl;
}
}
}
| |
package com.pandanomic.flashnotifier;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockListFragment;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Tab listing every installed app that holds the ACCESS_FLASHNOTIFIER permission,
 * and letting the user whitelist ("Allow"), blacklist ("Deny") or clear each app's
 * access via a dialog. Whitelist/blacklist state is stored in SharedPreferences
 * keyed by the app's display name.
 */
public class ListFragmentAccessTab extends SherlockListFragment {

    private final String PERMISSION = "android.permission.ACCESS_FLASHNOTIFIER";

    private SharedPreferences mWhitelistPrefs;
    private SharedPreferences mBlacklistPrefs;
    private AppsArrayAdapter mNewAdapter;
    // NOTE(review): static backing list is shared by all instances of this fragment
    // and mutated in place by the adapter; two live instances would interfere — confirm intended.
    private static List<Map<String, String>> mAppListItems = new ArrayList<Map<String, String>>();

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mWhitelistPrefs = getActivity().getSharedPreferences(
                "com.leepapesweers.flashnotifier.whitelistprefs", Context.MODE_PRIVATE);
        mBlacklistPrefs = getActivity().getSharedPreferences(
                "com.leepapesweers.flashnotifier.blacklistprefs", Context.MODE_PRIVATE);
        // Only build the adapter and scan on first creation; the retained instance
        // (see setRetainInstance in onCreateView) keeps the adapter across config changes.
        if (savedInstanceState == null) {
            mNewAdapter = new AppsArrayAdapter(getActivity(), R.layout.api_access_listitem, mAppListItems);
            updateACList();
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_access_tab, container, false);
        setRetainInstance(true);
        setListAdapter(mNewAdapter);
        return view;
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // FIX: Menu.getItem(int) takes a positional index; passing the resource id
        // R.id.refresh would throw IndexOutOfBoundsException at runtime.
        // findItem(int) is the id-based lookup intended here.
        final MenuItem refresh = menu.findItem(R.id.refresh);
        refresh.setVisible(true);
        refresh.setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem menuItem) {
                // NOTE(review): setIcon is given a layout resource id here, and the
                // rescan below runs synchronously on the UI thread, so the "progress"
                // icon is unlikely to ever be visible — confirm the intended resource.
                refresh.setIcon(R.layout.progressbar);
                updateACList();
                refresh.setIcon(R.drawable.ic_action_reload);
                return true;
            }
        });
        super.onCreateOptionsMenu(menu, inflater);
    }

    /**
     * Method for sniffing apps that use a certain permission.
     * Based on here: http://stackoverflow.com/a/13028631/3034339
     */
    private void updateACList() {
        mAppListItems.clear();
        mNewAdapter.notifyDataSetChanged();
        PackageManager packageManager = getActivity().getPackageManager();
        List<PackageInfo> applist = packageManager.getInstalledPackages(0);
        for (PackageInfo pk : applist) {
            if (PackageManager.PERMISSION_GRANTED == packageManager.checkPermission(PERMISSION, pk.packageName)) {
                HashMap<String, String> data = new HashMap<String, String>();
                data.put("package_name", pk.packageName);
                data.put("app_name", (String) pk.applicationInfo.loadLabel(packageManager));
                mNewAdapter.add(data);
            }
        }
        // Sort the apps alphabetically by display name.
        Collections.sort(mAppListItems, new Comparator<Map<String, String>>() {
            @Override
            public int compare(Map<String, String> lhs, Map<String, String> rhs) {
                return lhs.get("app_name").compareTo(rhs.get("app_name"));
            }
        });
        mNewAdapter.notifyDataSetChanged();
    }

    /**
     * Custom ArrayAdapter class used for maintaining the list of apps.
     * Each row shows the app icon, its name colored by access state
     * (green = whitelisted, red = blacklisted, black = unset), and an
     * info button opening the Allow/Deny/Clear dialog.
     */
    public class AppsArrayAdapter extends ArrayAdapter<Map<String, String>> {
        private Context mContext;

        public AppsArrayAdapter(Context context, int resourceId, List<Map<String, String>> objects) {
            super(context, resourceId, objects);
            mContext = context;
        }

        public View getView(final int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = LayoutInflater.from(getContext()).inflate(R.layout.api_access_listitem, null, false);
            }
            // Initialize local vars for view control
            final Map<String, String> data = getItem(position);
            final String appNameString = (data.get("app_name") != null) ? data.get("app_name") : "(unknown)";
            String packageString = data.get("package_name");
            // Set the image; fall back to a generic icon if the package vanished.
            ImageView appImage = (ImageView) convertView.findViewById(R.id.ic_appicon);
            try {
                appImage.setImageDrawable(mContext.getPackageManager().getApplicationIcon(packageString));
            } catch (PackageManager.NameNotFoundException e) {
                appImage.setImageDrawable(getResources().getDrawable(R.drawable.ic_generic));
            }
            // Set the app name text
            final TextView appNameTextView = (TextView) convertView.findViewById(R.id.tv_appname);
            appNameTextView.setText(appNameString);
            // Set the name color based on user's AC pref
            if (mWhitelistPrefs.contains(appNameString)) {
                appNameTextView.setTextColor(getResources().getColor(android.R.color.holo_green_dark));
            } else if (mBlacklistPrefs.contains(appNameString)) {
                appNameTextView.setTextColor(getResources().getColor(android.R.color.holo_red_light));
            } else {
                appNameTextView.setTextColor(getResources().getColor(android.R.color.black));
            }
            // Set onClickListener for when user presses the "info" button
            ImageView infoImg = (ImageView) convertView.findViewById(R.id.ic_info);
            infoImg.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Dialog moving the app between whitelist and blacklist; an app
                    // is kept in at most one of the two preference files at a time.
                    new AlertDialog.Builder(mContext)
                            .setTitle(appNameTextView.getText() + " access")
                            .setPositiveButton("Allow", new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    mWhitelistPrefs.edit().putBoolean(appNameString, true).commit();
                                    if (mBlacklistPrefs.contains(appNameString))
                                        mBlacklistPrefs.edit().remove(appNameString).commit();
                                    appNameTextView.setTextColor(getResources().getColor(android.R.color.holo_green_dark));
                                }
                            })
                            .setNegativeButton("Deny", new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    mBlacklistPrefs.edit().putBoolean(appNameString, true).commit();
                                    if (mWhitelistPrefs.contains(appNameString))
                                        mWhitelistPrefs.edit().remove(appNameString).commit();
                                    appNameTextView.setTextColor(getResources().getColor(android.R.color.holo_red_light));
                                }
                            })
                            .setNeutralButton("Clear Setting", new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    if (mWhitelistPrefs.contains(appNameString))
                                        mWhitelistPrefs.edit().remove(appNameString).commit();
                                    if (mBlacklistPrefs.contains(appNameString))
                                        mBlacklistPrefs.edit().remove(appNameString).commit();
                                    appNameTextView.setTextColor(getResources().getColor(android.R.color.black));
                                }
                            })
                            .show();
                }
            });
            return convertView;
        }
    }
}
| |
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
/** A Query that matches documents matching boolean combinations of other
* queries, e.g. {@link TermQuery}s, {@link PhraseQuery}s or other
* BooleanQuerys.
*/
public class BooleanQuery extends Query implements Iterable<BooleanClause> {
// JVM-wide cap shared by every BooleanQuery instance; enforced in add(BooleanClause).
private static int maxClauseCount = 1024;
/** Thrown when an attempt is made to add more than {@link
* #getMaxClauseCount()} clauses. This typically happens if
* a PrefixQuery, FuzzyQuery, WildcardQuery, or TermRangeQuery
* is expanded to many terms during search.
*/
public static class TooManyClauses extends RuntimeException {
public TooManyClauses() {
super("maxClauseCount is set to " + maxClauseCount);
}
}
/** Return the maximum number of clauses permitted, 1024 by default.
* Attempts to add more than the permitted number of clauses cause {@link
* TooManyClauses} to be thrown.
* @see #setMaxClauseCount(int)
*/
public static int getMaxClauseCount() { return maxClauseCount; }
/**
* Set the maximum number of clauses permitted per BooleanQuery.
* Default value is 1024.
*/
public static void setMaxClauseCount(int maxClauseCount) {
if (maxClauseCount < 1)
throw new IllegalArgumentException("maxClauseCount must be >= 1");
BooleanQuery.maxClauseCount = maxClauseCount;
}
// Clause list is mutable; clone() makes a shallow copy of it (clauses themselves are shared).
private ArrayList<BooleanClause> clauses = new ArrayList<BooleanClause>();
private final boolean disableCoord;
/** Constructs an empty boolean query. */
public BooleanQuery() {
disableCoord = false;
}
/** Constructs an empty boolean query.
*
* {@link Similarity#coord(int,int)} may be disabled in scoring, as
* appropriate. For example, this score factor does not make sense for most
* automatically generated queries, like {@link WildcardQuery} and {@link
* FuzzyQuery}.
*
* @param disableCoord disables {@link Similarity#coord(int,int)} in scoring.
*/
public BooleanQuery(boolean disableCoord) {
this.disableCoord = disableCoord;
}
/** Returns true iff {@link Similarity#coord(int,int)} is disabled in
* scoring for this query instance.
* @see #BooleanQuery(boolean)
*/
public boolean isCoordDisabled() { return disableCoord; }
/**
* Specifies a minimum number of the optional BooleanClauses
* which must be satisfied.
*
* <p>
* By default no optional clauses are necessary for a match
* (unless there are no required clauses). If this method is used,
* then the specified number of clauses is required.
* </p>
* <p>
* Use of this method is totally independent of specifying that
* any specific clauses are required (or prohibited). This number will
* only be compared against the number of matching optional clauses.
* </p>
*
* @param min the number of optional clauses that must match
*/
public void setMinimumNumberShouldMatch(int min) {
this.minNrShouldMatch = min;
}
// Minimum number of optional (SHOULD) clauses that must match; 0 = no constraint.
protected int minNrShouldMatch = 0;
/**
* Gets the minimum number of the optional BooleanClauses
* which must be satisfied.
*/
public int getMinimumNumberShouldMatch() {
return minNrShouldMatch;
}
/** Adds a clause to a boolean query.
*
* @throws TooManyClauses if the new number of clauses exceeds the maximum clause number
* @see #getMaxClauseCount()
*/
public void add(Query query, BooleanClause.Occur occur) {
add(new BooleanClause(query, occur));
}
/** Adds a clause to a boolean query.
* @throws TooManyClauses if the new number of clauses exceeds the maximum clause number
* @see #getMaxClauseCount()
*/
public void add(BooleanClause clause) {
if (clauses.size() >= maxClauseCount)
throw new TooManyClauses();
clauses.add(clause);
}
/** Returns the set of clauses in this query. */
public BooleanClause[] getClauses() {
return clauses.toArray(new BooleanClause[clauses.size()]);
}
/** Returns the list of clauses in this query. */
public List<BooleanClause> clauses() { return clauses; }
/** Returns an iterator on the clauses in this query. It implements the {@link Iterable} interface to
* make it possible to do:
* <pre class="prettyprint">for (BooleanClause clause : booleanQuery) {}</pre>
*/
@Override
public final Iterator<BooleanClause> iterator() { return clauses().iterator(); }
/**
* Expert: the Weight for BooleanQuery, used to
* normalize, score and explain these queries.
*
* <p>NOTE: this API and implementation is subject to
* change suddenly in the next release.</p>
*/
protected class BooleanWeight extends Weight {
/** The Similarity implementation. */
protected Similarity similarity;
protected ArrayList<Weight> weights;
protected int maxCoord; // num optional + num required
private final boolean disableCoord;
public BooleanWeight(IndexSearcher searcher, boolean disableCoord)
throws IOException {
this.similarity = searcher.getSimilarity();
this.disableCoord = disableCoord;
// Build one sub-weight per clause, in clause order (explain/scorer below rely on this pairing).
weights = new ArrayList<Weight>(clauses.size());
for (int i = 0 ; i < clauses.size(); i++) {
BooleanClause c = clauses.get(i);
Weight w = c.getQuery().createWeight(searcher);
weights.add(w);
if (!c.isProhibited()) maxCoord++;
}
}
@Override
public Query getQuery() { return BooleanQuery.this; }
@Override
public float getValueForNormalization() throws IOException {
float sum = 0.0f;
for (int i = 0 ; i < weights.size(); i++) {
// call sumOfSquaredWeights for all clauses in case of side effects
float s = weights.get(i).getValueForNormalization(); // sum sub weights
if (!clauses.get(i).isProhibited())
// only add to sum for non-prohibited clauses
sum += s;
}
sum *= getBoost() * getBoost(); // boost each sub-weight
return sum ;
}
public float coord(int overlap, int maxOverlap) {
// LUCENE-4300: in most cases of maxOverlap=1, BQ rewrites itself away,
// so coord() is not applied. But when BQ cannot optimize itself away
// for a single clause (minNrShouldMatch, prohibited clauses, etc), its
// important not to apply coord(1,1) for consistency, it might not be 1.0F
return maxOverlap == 1 ? 1F : similarity.coord(overlap, maxOverlap);
}
@Override
public void normalize(float norm, float topLevelBoost) {
topLevelBoost *= getBoost(); // incorporate boost
for (Weight w : weights) {
// normalize all clauses, (even if prohibited in case of side affects)
w.normalize(norm, topLevelBoost);
}
}
@Override
public Explanation explain(AtomicReaderContext context, int doc)
throws IOException {
final int minShouldMatch =
BooleanQuery.this.getMinimumNumberShouldMatch();
ComplexExplanation sumExpl = new ComplexExplanation();
sumExpl.setDescription("sum of:");
int coord = 0;
float sum = 0.0f;
boolean fail = false;
int shouldMatchCount = 0;
// Walk weights and clauses in lock-step (they were built in the same order).
Iterator<BooleanClause> cIter = clauses.iterator();
for (Iterator<Weight> wIter = weights.iterator(); wIter.hasNext();) {
Weight w = wIter.next();
BooleanClause c = cIter.next();
if (w.scorer(context, true, true, context.reader().getLiveDocs()) == null) {
if (c.isRequired()) {
fail = true;
Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")");
sumExpl.addDetail(r);
}
continue;
}
Explanation e = w.explain(context, doc);
if (e.isMatch()) {
if (!c.isProhibited()) {
sumExpl.addDetail(e);
sum += e.getValue();
coord++;
} else {
Explanation r =
new Explanation(0.0f, "match on prohibited clause (" + c.getQuery().toString() + ")");
r.addDetail(e);
sumExpl.addDetail(r);
fail = true;
}
if (c.getOccur() == Occur.SHOULD)
shouldMatchCount++;
} else if (c.isRequired()) {
Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")");
r.addDetail(e);
sumExpl.addDetail(r);
fail = true;
}
}
if (fail) {
sumExpl.setMatch(Boolean.FALSE);
sumExpl.setValue(0.0f);
sumExpl.setDescription
("Failure to meet condition(s) of required/prohibited clause(s)");
return sumExpl;
} else if (shouldMatchCount < minShouldMatch) {
sumExpl.setMatch(Boolean.FALSE);
sumExpl.setValue(0.0f);
sumExpl.setDescription("Failure to match minimum number "+
"of optional clauses: " + minShouldMatch);
return sumExpl;
}
sumExpl.setMatch(0 < coord ? Boolean.TRUE : Boolean.FALSE);
sumExpl.setValue(sum);
final float coordFactor = disableCoord ? 1.0f : coord(coord, maxCoord);
if (coordFactor == 1.0f) {
return sumExpl; // eliminate wrapper
} else {
ComplexExplanation result = new ComplexExplanation(sumExpl.isMatch(),
sum*coordFactor,
"product of:");
result.addDetail(sumExpl);
result.addDetail(new Explanation(coordFactor,
"coord("+coord+"/"+maxCoord+")"));
return result;
}
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
boolean topScorer, Bits acceptDocs)
throws IOException {
// Partition sub-scorers by clause occurrence; a missing required scorer means no match at all.
List<Scorer> required = new ArrayList<Scorer>();
List<Scorer> prohibited = new ArrayList<Scorer>();
List<Scorer> optional = new ArrayList<Scorer>();
Iterator<BooleanClause> cIter = clauses.iterator();
for (Weight w : weights) {
BooleanClause c = cIter.next();
Scorer subScorer = w.scorer(context, true, false, acceptDocs);
if (subScorer == null) {
if (c.isRequired()) {
return null;
}
} else if (c.isRequired()) {
required.add(subScorer);
} else if (c.isProhibited()) {
prohibited.add(subScorer);
} else {
optional.add(subScorer);
}
}
// NOTE: we could also use BooleanScorer, if we knew
// this BooleanQuery was embedded in another
// BooleanQuery that was also using BooleanScorer (ie,
// BooleanScorer can nest). But this is hard to
// detect and we never do so today... (ie, we only
// return BooleanScorer for topScorer):
// Check if we can and should return a BooleanScorer
// TODO: (LUCENE-4872) in some cases BooleanScorer may be faster for minNrShouldMatch
// but the same is even true of pure conjunctions...
if (!scoreDocsInOrder && topScorer && required.size() == 0 && minNrShouldMatch <= 1) {
return new BooleanScorer(this, disableCoord, minNrShouldMatch, optional, prohibited, maxCoord);
}
if (required.size() == 0 && optional.size() == 0) {
// no required and optional clauses.
return null;
} else if (optional.size() < minNrShouldMatch) {
// either >1 req scorer, or there are 0 req scorers and at least 1
// optional scorer. Therefore if there are not enough optional scorers
// no documents will be matched by the query
return null;
}
// simple conjunction
if (optional.size() == 0 && prohibited.size() == 0) {
float coord = disableCoord ? 1.0f : coord(required.size(), maxCoord);
return new ConjunctionScorer(this, required.toArray(new Scorer[required.size()]), coord);
}
// simple disjunction
if (required.size() == 0 && prohibited.size() == 0 && minNrShouldMatch <= 1 && optional.size() > 1) {
float coord[] = new float[optional.size()+1];
for (int i = 0; i < coord.length; i++) {
coord[i] = disableCoord ? 1.0f : coord(i, maxCoord);
}
return new DisjunctionSumScorer(this, optional.toArray(new Scorer[optional.size()]), coord);
}
// Return a BooleanScorer2
return new BooleanScorer2(this, disableCoord, minNrShouldMatch, required, prohibited, optional, maxCoord);
}
@Override
public boolean scoresDocsOutOfOrder() {
for (BooleanClause c : clauses) {
if (c.isRequired()) {
return false; // BS2 (in-order) will be used by scorer()
}
}
// scorer() will return an out-of-order scorer if requested.
return true;
}
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BooleanWeight(searcher, disableCoord);
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (minNrShouldMatch == 0 && clauses.size() == 1) { // optimize 1-clause queries
BooleanClause c = clauses.get(0);
if (!c.isProhibited()) { // just return clause
Query query = c.getQuery().rewrite(reader); // rewrite first
if (getBoost() != 1.0f) { // incorporate boost
if (query == c.getQuery()) { // if rewrite was no-op
query = query.clone(); // then clone before boost
}
// Since the BooleanQuery only has 1 clause, the BooleanQuery will be
// written out. Therefore the rewritten Query's boost must incorporate both
// the clause's boost, and the boost of the BooleanQuery itself
query.setBoost(getBoost() * query.getBoost());
}
return query;
}
}
BooleanQuery clone = null; // recursively rewrite
for (int i = 0 ; i < clauses.size(); i++) {
BooleanClause c = clauses.get(i);
Query query = c.getQuery().rewrite(reader);
if (query != c.getQuery()) { // clause rewrote: must clone
if (clone == null) {
// The BooleanQuery clone is lazily initialized so only initialize
// it if a rewritten clause differs from the original clause (and hasn't been
// initialized already). If nothing differs, the clone isn't needlessly created
clone = this.clone();
}
clone.clauses.set(i, new BooleanClause(query, c.getOccur()));
}
}
if (clone != null) {
return clone; // some clauses rewrote
} else
return this; // no clauses rewrote
}
// inherit javadoc
@Override
public void extractTerms(Set<Term> terms) {
for (BooleanClause clause : clauses) {
if (clause.getOccur() != Occur.MUST_NOT) {
clause.getQuery().extractTerms(terms);
}
}
}
@Override @SuppressWarnings("unchecked")
public BooleanQuery clone() {
BooleanQuery clone = (BooleanQuery)super.clone();
clone.clauses = (ArrayList<BooleanClause>) this.clauses.clone();
return clone;
}
/** Prints a user-readable version of this query. */
@Override
public String toString(String field) {
StringBuilder buffer = new StringBuilder();
boolean needParens=(getBoost() != 1.0) || (getMinimumNumberShouldMatch()>0) ;
if (needParens) {
buffer.append("(");
}
for (int i = 0 ; i < clauses.size(); i++) {
BooleanClause c = clauses.get(i);
if (c.isProhibited())
buffer.append("-");
else if (c.isRequired())
buffer.append("+");
Query subQuery = c.getQuery();
if (subQuery != null) {
if (subQuery instanceof BooleanQuery) { // wrap sub-bools in parens
buffer.append("(");
buffer.append(subQuery.toString(field));
buffer.append(")");
} else {
buffer.append(subQuery.toString(field));
}
} else {
buffer.append("null");
}
if (i != clauses.size()-1)
buffer.append(" ");
}
if (needParens) {
buffer.append(")");
}
if (getMinimumNumberShouldMatch()>0) {
buffer.append('~');
buffer.append(getMinimumNumberShouldMatch());
}
if (getBoost() != 1.0f)
{
buffer.append(ToStringUtils.boost(getBoost()));
}
return buffer.toString();
}
/** Returns true iff <code>o</code> is equal to this. */
@Override
public boolean equals(Object o) {
if (!(o instanceof BooleanQuery))
return false;
BooleanQuery other = (BooleanQuery)o;
return (this.getBoost() == other.getBoost())
&& this.clauses.equals(other.clauses)
&& this.getMinimumNumberShouldMatch() == other.getMinimumNumberShouldMatch()
&& this.disableCoord == other.disableCoord;
}
/** Returns a hash code value for this object.*/
@Override
public int hashCode() {
// Note: '+' binds tighter than '^', so this is boostBits ^ (clausesHash + minShouldMatch + coordBit);
// kept as-is so hash values stay consistent with equals() and with serialized expectations.
return Float.floatToIntBits(getBoost()) ^ clauses.hashCode()
+ getMinimumNumberShouldMatch() + (disableCoord ? 17:0);
}
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server.domain;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.domain.PipelineTimelineEntry;
import com.thoughtworks.go.listener.TimelineUpdateListener;
import com.thoughtworks.go.server.persistence.PipelineRepository;
import com.thoughtworks.go.helper.PipelineMaterialModificationMother;
import com.thoughtworks.go.server.transaction.TransactionSynchronizationManager;
import com.thoughtworks.go.server.transaction.TransactionTemplate;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionSynchronization;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.matchers.JUnitMatchers.hasItems;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
public class PipelineTimelineTest {
private DateTime now;
private List<String> materials;
private PipelineTimelineEntry first;
private PipelineTimelineEntry second;
private PipelineTimelineEntry third;
private PipelineTimelineEntry fourth;
private PipelineRepository pipelineRepository;
private String pipelineName;
private TransactionTemplate transactionTemplate;
private TransactionSynchronizationManager transactionSynchronizationManager;
private TransactionSynchronization transactionSynchronization;
private PipelineTimelineEntry[] repositoryEntries;
private int txnStatus;
@Before public void setUp() throws Exception {
// Shared fixture: four timeline entries (ids 1..4) for pipeline "pipeline", each built
// from the same four materials but with different per-material modification times, so
// their natural (material-time) order differs from their insertion order in the tests.
now = new DateTime();
pipelineRepository = mock(PipelineRepository.class);
materials = Arrays.asList("first", "second", "third", "fourth");
first = PipelineMaterialModificationMother.modification(1, materials, Arrays.asList(now, now.plusMinutes(1), now.plusMinutes(2), now.plusMinutes(3)), 1, "111", "pipeline");
second = PipelineMaterialModificationMother.modification(2, materials, Arrays.asList(now, now.plusMinutes(2), now.plusMinutes(1), now.plusMinutes(2)), 2, "222", "pipeline");
third = PipelineMaterialModificationMother.modification(3, materials, Arrays.asList(now, now.plusMinutes(2), now.plusMinutes(1), now.plusMinutes(3)), 3, "333", "pipeline");
fourth = PipelineMaterialModificationMother.modification(4, materials, Arrays.asList(now, now.plusMinutes(2), now.plusMinutes(3), now.plusMinutes(2)), 4, "444", "pipeline");
pipelineName = "pipeline";
// Transaction collaborators are mocked; individual tests stub behaviour as needed.
transactionTemplate = mock(TransactionTemplate.class);
transactionSynchronizationManager = mock(TransactionSynchronizationManager.class);
}
@Test
public void shouldReturnTheNextAndPreviousOfAGivenPipeline() throws Exception {
    // Insert entries out of natural order, then verify each entry's run-before/run-after neighbours.
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    for (PipelineTimelineEntry entry : new PipelineTimelineEntry[] {first, third, second, fourth}) {
        timeline.add(entry);
    }
    assertBeforeAfter(timeline, first, null, null);
    assertBeforeAfter(timeline, third, first, null);
    assertBeforeAfter(timeline, second, first, third);
    assertBeforeAfter(timeline, fourth, third, null);
}
// Asserts that the timeline reports the expected run-before/run-after neighbours for 'actual'.
private void assertBeforeAfter(PipelineTimeline mods, PipelineTimelineEntry actual, PipelineTimelineEntry before, PipelineTimelineEntry after) {
    CaseInsensitiveString name = new CaseInsensitiveString(pipelineName);
    PipelineTimelineEntry foundBefore = mods.runBefore(actual.getId(), name);
    PipelineTimelineEntry foundAfter = mods.runAfter(actual.getId(), name);
    assertEquals("Expected " + before + " to be before " + actual + ". Got " + foundBefore, foundBefore, before);
    assertEquals("Expected " + after + " to be after " + actual + ". Got " + foundAfter, foundAfter, after);
}
@Test public void shouldPopulateTheBeforeAndAfterNodesForAGivenPMMDuringAddition() throws Exception {
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    for (PipelineTimelineEntry entry : new PipelineTimelineEntry[] {first, fourth, third, second}) {
        timeline.add(entry);
    }
    // Each entry records, at insertion time, the neighbours it was spliced between.
    assertThat(third.insertedBefore(), is(fourth));
    assertThat(third.insertedAfter(), is(first));
    assertThat(second.insertedBefore(), is(third));
    assertThat(second.insertedAfter(), is(first));
}
/**
 * pipelineBefore must walk the ordered timeline by id, returning -1 for the
 * earliest entry.
 */
@Test public void shouldReturnThePipelineBeforeAGivenPipelineId() throws Exception {
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    timeline.add(first);
    timeline.add(fourth);
    timeline.add(third);
    timeline.add(second);
    assertThat(timeline.pipelineBefore(first.getId()), is(-1L));
    assertThat(timeline.pipelineBefore(second.getId()), is(first.getId()));
    assertThat(timeline.pipelineBefore(third.getId()), is(second.getId()));
    assertThat(timeline.pipelineBefore(fourth.getId()), is(third.getId()));
}
/**
 * pipelineAfter is the mirror of pipelineBefore: -1 marks the latest entry.
 */
@Test public void shouldReturnThePipelineAfterAGivenPipelineId() throws Exception {
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    timeline.add(first);
    timeline.add(fourth);
    timeline.add(third);
    timeline.add(second);
    assertThat(timeline.pipelineAfter(first.getId()), is(second.getId()));
    assertThat(timeline.pipelineAfter(second.getId()), is(third.getId()));
    assertThat(timeline.pipelineAfter(third.getId()), is(fourth.getId()));
    assertThat(timeline.pipelineAfter(fourth.getId()), is(-1L));
}
/**
 * naturalOrderBefore must stay correct as entries are bisected into the
 * existing natural ordering one at a time.
 */
@Test public void shouldBeAbleToFindThePreviousPipelineForAGivenPipeline() throws Exception {
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    timeline.add(first);
    timeline.add(fourth);
    assertThat(timeline.naturalOrderBefore(fourth), is(first));
    // Bisect between first and fourth.
    timeline.add(third);
    assertThat(timeline.naturalOrderBefore(fourth), is(third));
    assertThat(timeline.naturalOrderBefore(third), is(first));
    // Bisect again between first and third.
    timeline.add(second);
    assertThat(timeline.naturalOrderBefore(fourth), is(third));
    assertThat(timeline.naturalOrderBefore(third), is(second));
    assertThat(timeline.naturalOrderBefore(second), is(first));
}
/**
 * Runs of two different pipelines may interleave in the timeline; the
 * inserted-before/after links and run-before/after lookups must only consider
 * entries belonging to the same pipeline.
 */
@Test public void shouldPopuplateTheBeforeAndAfterNodesForAGivenPipelineDuringAddition() throws Exception {
    PipelineTimelineEntry otherRun1 = PipelineMaterialModificationMother.modification("another", 4, materials, Arrays.asList(now, now.plusMinutes(1), now.plusMinutes(2), now.plusMinutes(3)), 1, "123");
    PipelineTimelineEntry otherRun2 = PipelineMaterialModificationMother.modification("another", 5, materials, Arrays.asList(now, now.plusMinutes(2), now.plusMinutes(1), now.plusMinutes(3)), 2, "123");
    PipelineTimelineEntry otherRun3 = PipelineMaterialModificationMother.modification("another", 6, materials, Arrays.asList(now, now.plusMinutes(2), now.plusMinutes(3), now.plusMinutes(2)), 3, "123");
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    timeline.add(first);
    timeline.add(fourth);
    timeline.add(otherRun1);
    timeline.add(third);
    timeline.add(otherRun3);
    timeline.add(second);
    timeline.add(otherRun2);
    assertThat(third.insertedBefore(), is(fourth));
    assertThat(third.insertedAfter(), is(first));
    assertThat(second.insertedBefore(), is(third));
    assertThat(second.insertedAfter(), is(first));
    assertThat(otherRun2.insertedBefore(), is(otherRun3));
    assertThat(otherRun2.insertedAfter(), is(otherRun1));
    assertThat(timeline.runAfter(otherRun2.getId(), new CaseInsensitiveString("another")), is(otherRun3));
    assertThat(timeline.runBefore(otherRun2.getId(), new CaseInsensitiveString("another")), is(otherRun1));
    assertThat(timeline.runAfter(first.getId(), new CaseInsensitiveString(first.getPipelineName())), is(nullValue()));
    assertThat(timeline.runAfter(second.getId(), new CaseInsensitiveString(second.getPipelineName())), is(third));
}
/**
 * On update, a registered listener must be notified with each newly added
 * entry and a timeline snapshot that already contains both that entry and
 * every previously notified one.
 */
@Test public void updateShouldNotifyListenersOnAddition() throws Exception {
    stubTransactionSynchronization();
    setupTransactionTemplateStub(TransactionSynchronization.STATUS_COMMITTED, true);
    final List<PipelineTimelineEntry>[] entries = new List[1];
    entries[0] = new ArrayList<PipelineTimelineEntry>();
    final PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager, new TimelineUpdateListener() {
        public void added(PipelineTimelineEntry newlyAddedEntry, TreeSet<PipelineTimelineEntry> timeline) {
            assertThat(timeline.contains(newlyAddedEntry), is(true));
            assertThat(timeline.containsAll(entries[0]), is(true));
            entries[0].add(newlyAddedEntry);
        }
    });
    stubPipelineRepository(timeline, true, new PipelineTimelineEntry[]{first, second});
    timeline.update();
    assertThat(entries[0].size(), is(1));
    assertThat(entries[0].contains(first), is(true));
}
/**
 * A listener that throws from {@code added} must not abort the update, and
 * listeners registered after it must still be notified.
 */
@Test public void updateShouldIgnoreExceptionThrownByListenersDuringNotifications() throws Exception {
stubTransactionSynchronization();
setupTransactionTemplateStub(TransactionSynchronization.STATUS_COMMITTED, true);
TimelineUpdateListener anotherListener = mock(TimelineUpdateListener.class);
// The first listener always fails; the timeline is expected to swallow this.
final PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager, new TimelineUpdateListener() {
public void added(PipelineTimelineEntry newlyAddedEntry, TreeSet<PipelineTimelineEntry> timeline) {
throw new RuntimeException();
}
}, anotherListener);
stubPipelineRepository(timeline, true, new PipelineTimelineEntry[]{first, second});
try {
timeline.update();
} catch (Exception e) {
fail("should not have failed because of exception thrown by listener");
}
// The second listener must still have been called despite the first one's failure.
verify(anotherListener).added(eq(first), any(TreeSet.class));
}
/**
 * The initial timeline load must hit the repository directly, without going
 * through the transaction template or registering synchronizations.
 */
@Test public void updateOnInitShouldBeDoneOutsideTransaction() throws Exception {
    PipelineTimeline pipelineTimeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    stubPipelineRepository(pipelineTimeline, true, new PipelineTimelineEntry[]{first, second});
    pipelineTimeline.updateTimelineOnInit();
    verify(pipelineRepository).updatePipelineTimeline(pipelineTimeline);
    verifyNoMoreInteractions(transactionSynchronizationManager);
    verifyNoMoreInteractions(transactionTemplate);
    assertThat(pipelineTimeline.maximumId(), is(2L));
    assertThat(pipelineTimeline.pipelineAfter(1L), is(2L));
}
/**
 * A committed update must pull new entries from the repository and make them
 * visible through the timeline's id-based navigation.
 */
@Test public void updateShouldLoadNewInstancesFromTheDatabase() throws Exception {
    stubTransactionSynchronization();
    setupTransactionTemplateStub(TransactionSynchronization.STATUS_COMMITTED, true);
    final PipelineTimeline pipelineTimeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    stubPipelineRepository(pipelineTimeline, true, new PipelineTimelineEntry[]{first, second});
    pipelineTimeline.update();
    verify(pipelineRepository).updatePipelineTimeline(pipelineTimeline);
    assertThat(pipelineTimeline.maximumId(), is(2L));
    assertThat(pipelineTimeline.pipelineAfter(1L), is(2L));
}
/**
 * When the surrounding transaction rolls back, entries loaded during the
 * update must be discarded, leaving the timeline empty (maximumId == -1).
 */
@Test public void updateShouldRemoveTheTimelinesReturnedOnRollback() throws Exception {
    stubTransactionSynchronization();
    setupTransactionTemplateStub(TransactionSynchronization.STATUS_ROLLED_BACK, true);
    final PipelineTimeline pipelineTimeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    stubPipelineRepository(pipelineTimeline, true, new PipelineTimelineEntry[]{first, second});
    pipelineTimeline.update();
    verify(pipelineRepository).updatePipelineTimeline(pipelineTimeline);
    assertThat(pipelineTimeline.maximumId(), is(-1L));
}
/**
 * After a successful update followed by a rolled-back one, the entries from
 * the rolled-back update must be removed from every internal collection
 * (ordered entries, per-pipeline lists and instance lookups).
 */
@Test
public void shouldRemove_NewlyAddedTimelineEntries_fromAllCollections_UponRollback() throws Exception {
stubTransactionSynchronization();
setupTransactionTemplateStub(TransactionSynchronization.STATUS_COMMITTED, true);
final PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
stubPipelineRepository(timeline, true, first, second);
// First update commits: first and second become part of the timeline.
timeline.update();
setupTransactionTemplateStub(TransactionSynchronization.STATUS_ROLLED_BACK, false);
stubPipelineRepository(timeline, false, third, fourth);
// Second update rolls back: third and fourth must be purged again.
timeline.update();
assertThat(timeline.maximumId(), is(2L));
assertThat(timeline.getEntriesFor("pipeline").size(), is(2));
assertThat(timeline.getEntriesFor("pipeline"), hasItems(first, second));
assertThat(timeline.instanceCount(new CaseInsensitiveString("pipeline")), is(2));
assertThat(timeline.instanceFor(new CaseInsensitiveString("pipeline"), 0), is(first));
assertThat(timeline.instanceFor(new CaseInsensitiveString("pipeline"), 1), is(second));
}
/**
 * Stubs {@code pipelineRepository.updatePipelineTimeline} so it feeds
 * {@code entries} into the given timeline and returns them. The entries are
 * kept in a field so a later call with {@code restub == false} can swap them
 * without re-stubbing the mock.
 */
private void stubPipelineRepository(final PipelineTimeline timeline, boolean restub, final PipelineTimelineEntry... entries) {
repositoryEntries = entries;
if (restub) {
when(pipelineRepository.updatePipelineTimeline(timeline)).thenAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
// Simulate the repository loading new instances: push each current
// entry into the timeline, then report what was added.
for (PipelineTimelineEntry entry : repositoryEntries) {
timeline.add(entry);
}
return Arrays.asList(repositoryEntries);
}
});
}
}
/**
 * Captures the {@code TransactionSynchronization} that the timeline registers,
 * so the test can fire its afterCommit/afterCompletion callbacks manually
 * (see {@code setupTransactionTemplateStub}).
 */
private void stubTransactionSynchronization() {
doAnswer(new Answer() {
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
transactionSynchronization = (TransactionSynchronization) invocationOnMock.getArguments()[0];
return null;
}
}).when(transactionSynchronizationManager).registerSynchronization(any(TransactionSynchronization.class));
}
/**
 * Stubs the transaction template to execute the callback immediately and then
 * fire the captured synchronization's callbacks as if the transaction had
 * completed with {@code status}.
 *
 * @param status one of the {@code TransactionSynchronization.STATUS_*} constants
 * @param restub whether to (re)install the Mockito answer; pass false to only
 *               change the completion status of an already-installed stub
 */
private void setupTransactionTemplateStub(final int status, final boolean restub) throws Exception {
this.txnStatus = status;
if (restub) {
when(transactionTemplate.execute(Mockito.any(TransactionCallback.class))).thenAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
TransactionCallback callback = (TransactionCallback) invocationOnMock.getArguments()[0];
callback.doInTransaction(null);
// Mirror Spring's behaviour: afterCommit only on commit,
// afterCompletion in every case.
if (txnStatus == TransactionSynchronization.STATUS_COMMITTED) {
transactionSynchronization.afterCommit();
}
transactionSynchronization.afterCompletion(txnStatus);
return null;
}
});
}
}
/**
 * Lookups for a pipeline name that was never added must return null instead
 * of failing.
 */
@Test public void shouldReturnNullForPipelineBeforeAndAfterIfPipelineDoesNotExist() throws Exception {
    PipelineTimeline pipelineTimeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    pipelineTimeline.add(first);
    assertThat(pipelineTimeline.runBefore(2, new CaseInsensitiveString("not-present")), is(nullValue()));
    assertThat(pipelineTimeline.runAfter(2, new CaseInsensitiveString("not-present")), is(nullValue()));
}
/**
 * Each bisected entry receives a natural order exactly half way between its
 * two neighbours.
 */
@Test
public void shouldCreateANaturalOrderingHalfWayBetweenEachPipeline() throws Exception {
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    timeline.add(first);
    assertThat(first.naturalOrder(), is(1.0));
    timeline.add(fourth);
    assertThat(fourth.naturalOrder(), is(2.0));
    double expectedThirdOrder = (2.0 + 1.0) / 2.0;
    timeline.add(third);
    assertThat(third.naturalOrder(), is(expectedThirdOrder));
    timeline.add(second);
    assertThat(second.naturalOrder(), is((expectedThirdOrder + 1.0) / 2.0));
}
/**
 * Inserting before the current minimum halves its order (0.5), and later
 * bisections keep landing half way between neighbours.
 */
@Test
public void shouldCreateANaturalOrderingHalfWayBetweenEachPipelineWhenInsertedInReverseOrder() throws Exception {
    PipelineTimeline timeline = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    timeline.add(fourth);
    assertThat(fourth.naturalOrder(), is(1.0));
    timeline.add(first);
    assertThat(first.naturalOrder(), is(0.5));
    double expectedThirdOrder = (1.0 + 0.5) / 2.0;
    timeline.add(third);
    assertThat(third.naturalOrder(), is(expectedThirdOrder));
    timeline.add(second);
    assertThat(second.naturalOrder(), is((expectedThirdOrder + 0.5) / 2.0));
}
/**
 * Re-adding an entry must not recompute its natural order; the timeline is
 * expected to reject the attempt. The original test silently passed when no
 * exception was thrown — {@code fail()} after the re-add closes that hole.
 */
@Test
public void shouldNotAllowResetingOfNaturalOrder() {
    PipelineTimeline mods = new PipelineTimeline(pipelineRepository, transactionTemplate, transactionSynchronizationManager);
    mods.add(fourth);
    mods.add(first);
    try {
        mods.add(fourth);
        fail("should not have allowed the natural order of an existing entry to be recalculated");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Calculated natural ordering 1.5 is not the same as the existing naturalOrder 1.0, for pipeline pipeline, with id 4"));
    }
}
}
| |
package germ.model.nodes;
import germ.app.Application;
import germ.command.SetPropertiesCommand;
import germ.configuration.ConfigurationManager;
import germ.gui.windows.DecisionProperties;
import germ.gui.windows.PropertyWindow;
import germ.i18n.Messages;
import germ.model.Node;
import germ.view.painters.DecisionPainter;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GradientPaint;
import java.awt.Paint;
import java.awt.Stroke;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
/**
 * Represents a Decision node in the model.
 */
public class Decision extends Node {

    /**
     * Author of the decision.
     */
    private String author;

    /**
     * Indicates whether the discussion about the decision has been concluded.
     */
    private boolean finalized;

    /**
     * State of the decision.
     *
     * {@link germ.model.nodes.DecisionValue}
     */
    private DecisionValue value;

    /**
     * Property window instance shared by all Decision nodes.
     */
    private static PropertyWindow nodePropertyWindow = new DecisionProperties();

    /**
     * Used internally by {@link #createDefault(Point2D, int)} and
     * {@link #clone()}, which populate the state themselves.
     */
    private Decision() {
    }

    public Decision(Point2D position, Dimension size, Stroke stroke, Paint paint) {
        super(position, size, stroke, paint);
        graphElementPainter = new DecisionPainter(this);
    }

    /**
     * Creates a decision node with its default, configuration-driven
     * appearance, centered on the given position.
     *
     * @param pos    the point on which the new node is centered
     * @param elemNo unused; kept to match the factory signature of other node types
     * @return the newly created node
     */
    public static Node createDefault(Point2D pos, int elemNo) {
        Point2D position = (Point2D) pos.clone();
        Decision r = new Decision();
        r.setSize(ConfigurationManager.getInstance().getDimension(
                "decisionSize")); //$NON-NLS-1$
        r.setStrokeThickness(ConfigurationManager.getInstance().getFloat(
                "decisionStrokeThickness")); //$NON-NLS-1$
        r.setStrokeColor(ConfigurationManager.getInstance().getColor(
                "decisionStrokeColor")); //$NON-NLS-1$
        r.setPrimColor(ConfigurationManager.getInstance().getColor(
                "decisionFillPrimColor")); //$NON-NLS-1$
        r.setSecColor(ConfigurationManager.getInstance().getColor(
                "decisionFillSecColor")); //$NON-NLS-1$
        // Shift so the node ends up centered on the requested point.
        position.setLocation(position.getX() - r.getSize().width / 2, position
                .getY()
                - r.getSize().height / 2);
        r.setPosition(position);
        r.setStroke(new BasicStroke(r.getStrokeThickness(),
                BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
        r.setPaint(new GradientPaint(0, 0, r.getPrimColor(), r.size.width,
                r.size.height, r.getSecColor()));
        r.setStrokePaint(r.getStrokeColor());
        r.setCreationDate(Calendar.getInstance());
        r.graphElementPainter = new DecisionPainter(r);
        r.setName(Messages.getString("Decision.5") + Application.getInstance().getModel().getCounter("decision")); //$NON-NLS-1$ //$NON-NLS-2$
        r.setValue(DecisionValue.INTERNAL_DECISION);
        return r;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public Calendar getCreationDate() {
        return creationDate;
    }

    public void setCreationDate(Calendar creationDate) {
        this.creationDate = creationDate;
    }

    public boolean isFinalized() {
        return finalized;
    }

    public void setFinalized(boolean finalized) {
        this.finalized = finalized;
    }

    public DecisionValue getValue() {
        return value;
    }

    /**
     * Sets the decision state and repaints the node: the state is reflected in
     * the node's visual representation.
     */
    public void setValue(DecisionValue value) {
        this.value = value;
        ((DecisionPainter) graphElementPainter).repaint(this);
    }

    @Override
    public Decision clone() {
        Decision d = new Decision();
        super.clone(d);
        d.graphElementPainter = new DecisionPainter(d);
        d.setAuthor(getAuthor());
        d.setFinalized(isFinalized());
        d.setValue(getValue());
        return d;
    }

    /**
     * Fills the shared property window with this node's current state and
     * returns it.
     */
    @Override
    public PropertyWindow getPropertyWindow() {
        DecisionProperties properties = (DecisionProperties) nodePropertyWindow;
        properties.setName(this.getName());
        properties.setDescription(this.getDescription());
        properties.setAuthor(this.getAuthor());
        properties.setValue(this.getValue());
        properties.setFinalized(this.isFinalized());
        properties.setFillPrimColor(this.getPrimColor());
        properties.setFillSecColor(this.getSecColor());
        properties.setStrokeThickness(this.getStrokeThickness());
        properties.setCbStrokeThickness(properties.getStrokeThickness());
        properties.setStrokeColor(this.getStrokeColor());
        // A gradient is shown only when the two fill colors differ.
        properties.setGradient(!this.getPrimColor().equals(this.getSecColor()));
        properties.setDateCreated(this.getCreationDate().getTime().toString());
        if (this.getLastChangeDate() != null) {
            properties.setDateChanged(this.getLastChangeDate().getTime()
                    .toString());
        } else
            properties.setDateChanged("###"); //$NON-NLS-1$
        return nodePropertyWindow;
    }

    /**
     * Applies the values entered in the property window to this node through
     * an undoable {@link SetPropertiesCommand}.
     *
     * @param result true when the user confirmed the dialog; false is a no-op
     */
    @Override
    public void setProperties(boolean result) {
        if (result) {
            HashMap<String, Object> oldProperties = getProperties();
            DecisionProperties properties = (DecisionProperties) nodePropertyWindow;
            // The new property values entered by the user.
            HashMap<String, Object> newProperties = new HashMap<String, Object>();
            newProperties.put("name", properties.getName()); //$NON-NLS-1$
            newProperties.put("description", properties.getDescription()); //$NON-NLS-1$
            newProperties.put("author", properties.getAuthor()); //$NON-NLS-1$
            newProperties.put("value", properties.getValue()); //$NON-NLS-1$
            newProperties.put("primColor", properties.getFillPrimColor()); //$NON-NLS-1$
            if (properties.isGradient()) {
                newProperties.put("secColor", properties.getFillSecColor()); //$NON-NLS-1$
            }
            else {
                // No gradient: reuse the primary color as the secondary one.
                newProperties.put("secColor", properties.getFillPrimColor()); //$NON-NLS-1$
            }
            newProperties.put("paint", new GradientPaint(0, 0, (Color)newProperties.get("primColor"), //$NON-NLS-1$ //$NON-NLS-2$
                    size.width, size.height, (Color)newProperties.get("secColor"))); //$NON-NLS-1$
            newProperties.put("strokeColor", properties.getStrokeColor()); //$NON-NLS-1$
            newProperties.put("strokePaint", properties.getStrokeColor()); //$NON-NLS-1$
            newProperties.put("strokeThickness", properties.getStrokeThickness()); //$NON-NLS-1$
            newProperties.put("stroke", new BasicStroke(properties.getStrokeThickness(), //$NON-NLS-1$
                    BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
            newProperties.put("lastChangedDate", Calendar.getInstance()); //$NON-NLS-1$
            newProperties.put("finalized", properties.getFinalized()); //$NON-NLS-1$
            ArrayList<Node> nodes = new ArrayList<Node>();
            nodes.add(this);
            SetPropertiesCommand propertiesCommand = new SetPropertiesCommand(nodes, oldProperties, newProperties);
            Application.getInstance().getCommandManager().doCommand(propertiesCommand);
        }
    }

    @Override
    public HashMap<String, Object> getProperties() {
        HashMap<String, Object> rez = super.getNodeProperties();
        rez.put("author", getAuthor()); //$NON-NLS-1$
        rez.put("value", getValue()); //$NON-NLS-1$
        rez.put("finalized", isFinalized()); //$NON-NLS-1$
        return rez;
    }

    @Override
    public void setProperties(HashMap<String, Object> properties) {
        super.setNodeProperties(properties);
        setAuthor((String)properties.get("author")); //$NON-NLS-1$
        setValue((DecisionValue)properties.get("value")); //$NON-NLS-1$
        setFinalized((Boolean)properties.get("finalized")); //$NON-NLS-1$
    }
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.contactlist.contactsource;
import java.awt.*;
import java.util.*;
import java.util.List;
import javax.swing.*;
import org.jitsi.service.resources.*;
import org.jitsi.util.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.impl.gui.main.contactlist.*;
import net.java.sip.communicator.impl.gui.utils.*;
import net.java.sip.communicator.plugin.desktoputil.*;
import net.java.sip.communicator.service.contactsource.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.globalstatus.*;
import net.java.sip.communicator.util.*;
/**
* The <tt>SourceUIContact</tt> is the implementation of the UIContact for the
* <tt>ExternalContactSource</tt>.
*
* @author Yana Stamcheva
* @author Hristo Terezov
*/
public class SourceUIContact
extends UIContactImpl
{
/**
 * The corresponding <tt>SourceContact</tt>, on which this abstraction is
 * based.
 */
private final SourceContact sourceContact;
/**
 * The corresponding <tt>ContactNode</tt> in the contact list component;
 * null until the contact is actually shown in the list.
 */
private ContactNode contactNode;
/**
 * The parent <tt>UIGroup</tt>.
 */
private ExternalContactSource.SourceUIGroup uiGroup;
/**
 * The search strings for this <tt>UIContact</tt>: every non-null contact
 * detail value plus the display name (populated in the constructor).
 */
private final List<String> searchStrings = new LinkedList<String>();
/**
 * Configuration property: whether call details should be filtered down to
 * entries that look like phone numbers.
 */
private static final String FILTER_CALL_DETAILS_TO_NUMBERS_PROP
= "net.java.sip.communicator.impl.gui.main.contactlist.contactsource" +
".FILTER_CALL_DETAILS_TO_NUMBERS";
/**
* Creates an instance of <tt>SourceUIContact</tt> by specifying the
* <tt>SourceContact</tt>, on which this abstraction is based and the
* parent <tt>UIGroup</tt>.
*
* @param contact the <tt>SourceContact</tt>, on which this abstraction
* is based
* @param parentGroup the parent <tt>UIGroup</tt>
*/
public SourceUIContact( SourceContact contact,
ExternalContactSource.SourceUIGroup parentGroup)
{
this.sourceContact = contact;
this.uiGroup = parentGroup;
if(contact.getContactDetails() != null)
for(ContactDetail detail : contact.getContactDetails())
{
if(detail.getDetail() != null)
searchStrings.add(detail.getDetail());
}
searchStrings.add(contact.getDisplayName());
}
/**
 * Returns the display name of the underlying <tt>SourceContact</tt>.
 * @return the display name reported by the wrapped source contact
 */
@Override
public String getDisplayName()
{
return sourceContact.getDisplayName();
}
/**
 * Returns the parent <tt>UIGroup</tt> this contact was created under.
 * @return the parent <tt>UIGroup</tt>
 */
@Override
public UIGroup getParentGroup()
{
return uiGroup;
}
/**
 * The parent group of source contacts could not be changed, so this is a
 * deliberate no-op.
 *
 * @param parentGroup the parent group to set (ignored)
 */
@Override
public void setParentGroup(UIGroup parentGroup) {}
/**
 * Returns the index of this contact within its source: the contact's own
 * index, offset by the parent group's source index when the latter is known.
 *
 * @return the combined index, or -1 when the contact's index is unknown
 */
@Override
public int getSourceIndex()
{
    int contactIndex = sourceContact.getIndex();
    int groupIndex = getParentGroup().getSourceIndex();
    if (contactIndex == -1)
        return -1;
    return (groupIndex == -1) ? contactIndex : groupIndex + contactIndex;
}
/**
 * Returns the icon for the underlying <tt>SourceContact</tt>'s presence
 * status, falling back to the generic offline icon when the contact reports
 * no presence status.
 *
 * @return the status icon, never null
 */
@Override
public ImageIcon getStatusIcon()
{
    PresenceStatus status = sourceContact.getPresenceStatus();
    return (status == null)
        ? new ImageIcon(GlobalStatusEnum.OFFLINE.getStatusIcon())
        : new ImageIcon(Constants.getStatusIcon(status));
}
/**
 * Gets the avatar of a specific <tt>UIContact</tt> in the form of an
 * <tt>ImageIcon</tt> value.
 *
 * @return a byte array representing the avatar of this <tt>UIContact</tt>;
 * may be null or empty when the source contact has no image
 */
@Override
public byte[] getAvatar()
{
return sourceContact.getImage();
}
/**
 * Returns the avatar of the underlying <tt>SourceContact</tt>, scaled down
 * (with rounded corners) only when it exceeds the requested bounds.
 *
 * @param isSelected indicates if the contact is currently selected in the
 * contact list component (unused here)
 * @param width the desired image width
 * @param height the desired image height
 * @return the (possibly scaled) avatar icon, or null when there is no image
 */
@Override
public ImageIcon getScaledAvatar(boolean isSelected, int width, int height)
{
    byte[] image = sourceContact.getImage();
    if (image == null || image.length == 0)
        return null;
    ImageIcon icon = new ImageIcon(image);
    if (icon.getIconWidth() > width || icon.getIconHeight() > height)
        icon = ImageUtils.getScaledRoundedIcon(icon.getImage(), width, height);
    return icon;
}
/**
 * Returns the default <tt>ContactDetail</tt> to use for any operations
 * depending to the given <tt>OperationSet</tt> class — simply the first
 * matching detail, if any.
 *
 * @param opSetClass the <tt>OperationSet</tt> class we're interested in
 * @return the first matching detail, or null when there is none
 */
@Override
public UIContactDetail getDefaultContactDetail(
    Class<? extends OperationSet> opSetClass)
{
    List<UIContactDetail> details
        = getContactDetailsForOperationSet(opSetClass);
    return (details == null || details.isEmpty()) ? null : details.get(0);
}
/**
 * Returns the underlying <tt>SourceContact</tt> this abstraction is about.
 * @return the underlying <tt>SourceContact</tt>
 */
@Override
public Object getDescriptor()
{
return sourceContact;
}
/**
 * Returns the display details for the underlying <tt>SourceContact</tt>.
 * @return the display details for the underlying <tt>SourceContact</tt>;
 * may be null
 */
@Override
public String getDisplayDetails()
{
return sourceContact.getDisplayDetails();
}
/**
 * Returns a list of all contained <tt>UIContactDetail</tt>s, wrapping each
 * <tt>ContactDetail</tt> of the source contact together with its localized
 * category and sub-category labels.
 *
 * @return a list of all contained <tt>UIContactDetail</tt>s
 */
@Override
public List<UIContactDetail> getContactDetails()
{
    List<UIContactDetail> result = new LinkedList<UIContactDetail>();
    for (ContactDetail detail : sourceContact.getContactDetails())
    {
        result.add(new SourceContactDetail(
            detail,
            getInternationalizedLabel(detail.getCategory()),
            getInternationalizedLabels(
                detail.getSubCategories().iterator()),
            null, sourceContact));
    }
    return result;
}
/**
 * Returns a list of <tt>UIContactDetail</tt>s supporting the given
 * <tt>OperationSet</tt> class. When the filter-to-numbers property is set,
 * telephony details whose value is not a phone number are skipped.
 *
 * @param opSetClass the <tt>OperationSet</tt> class we're interested in
 * @return a list of <tt>UIContactDetail</tt>s supporting the given
 * <tt>OperationSet</tt> class
 */
@Override
public List<UIContactDetail> getContactDetailsForOperationSet(
    Class<? extends OperationSet> opSetClass)
{
    List<UIContactDetail> result = new LinkedList<UIContactDetail>();
    List<ContactDetail> allDetails = sourceContact.getContactDetails();
    PhoneNumberI18nService phoneNumberService
        = GuiActivator.getPhoneNumberI18nService();
    boolean filterToNumbers
        = GuiActivator.getConfigurationService().getBoolean(
            FILTER_CALL_DETAILS_TO_NUMBERS_PROP, false);
    for (ContactDetail detail : allDetails)
    {
        List<Class<? extends OperationSet>> supportedOperationSets
            = detail.getSupportedOperationSets();
        if (supportedOperationSets == null
            || !supportedOperationSets.contains(opSetClass))
            continue;
        // Optionally drop telephony details that are not phone numbers.
        if (filterToNumbers
            && opSetClass.equals(OperationSetBasicTelephony.class)
            && !phoneNumberService.isPhoneNumber(detail.getDetail()))
            continue;
        result.add(new SourceContactDetail(
            detail,
            getInternationalizedLabel(detail.getCategory()),
            getInternationalizedLabels(
                detail.getSubCategories().iterator()),
            opSetClass,
            sourceContact));
    }
    return result;
}
/**
 * Returns an <tt>Iterator</tt> over a list of strings, which can be used
 * to find this contact (detail values plus display name, collected in the
 * constructor).
 * @return an <tt>Iterator</tt> over a list of search strings
 */
@Override
public Iterator<String> getSearchStrings()
{
return searchStrings.iterator();
}
/**
 * Returns the corresponding <tt>ContactNode</tt> from the contact list
 * component.
 * @return the corresponding <tt>ContactNode</tt>, or null when the contact
 * is not currently shown
 */
@Override
public ContactNode getContactNode()
{
return contactNode;
}
/**
 * Sets the corresponding <tt>ContactNode</tt>. Clearing the node (passing
 * null) also removes this contact from its parent UI source.
 * @param contactNode the corresponding <tt>ContactNode</tt>
 */
@Override
public void setContactNode(ContactNode contactNode)
{
this.contactNode = contactNode;
// A null node means the contact left the contact list component; drop it
// from the external contact source as well.
if (contactNode == null)
uiGroup.getParentUISource().removeUIContact(sourceContact);
}
/**
 * The implementation of the <tt>UIContactDetail</tt> interface for the
 * external source <tt>ContactDetail</tt>s.
 */
protected static class SourceContactDetail
extends UIContactDetailImpl
{
/**
 * Creates an instance of <tt>SourceContactDetail</tt> by specifying
 * the underlying <tt>detail</tt> and the <tt>OperationSet</tt> class
 * for it. The detail's raw value doubles as both address and display
 * name; a phone-number prefix is picked up from the contact source
 * when it provides one.
 * @param detail the underlying <tt>ContactDetail</tt>
 * @param category detail category string
 * @param subCategories the detail list of sub-categories
 * @param opSetClass the <tt>OperationSet</tt> class for the
 * preferred protocol provider
 * @param sourceContact the source contact
 */
public SourceContactDetail( ContactDetail detail,
String category,
Collection<String> subCategories,
Class<? extends OperationSet> opSetClass,
SourceContact sourceContact)
{
super( detail.getDetail(),
detail.getDetail(),
category,
subCategories,
null,
null,
null,
detail);
ContactSourceService contactSource
= sourceContact.getContactSource();
// Prefixed sources (e.g. gateway dial prefixes) decorate the detail.
if (contactSource instanceof PrefixedContactSourceService)
{
String prefix = ((PrefixedContactSourceService) contactSource)
.getPhoneNumberPrefix();
if (prefix != null)
setPrefix(prefix);
}
addPreferredProtocolProvider(opSetClass,
detail.getPreferredProtocolProvider(opSetClass));
addPreferredProtocol(opSetClass,
detail.getPreferredProtocol(opSetClass));
}
/**
 * Creates an instance of <tt>SourceContactDetail</tt> carrying only a
 * display name, with no category, sub-categories or operation set.
 *
 * @param displayName the display name
 * @param sourceContact the source contact
 */
public SourceContactDetail(String displayName,
SourceContact sourceContact)
{
super( displayName,
displayName,
null,
null,
null,
null,
null,
sourceContact);
}
/**
 * Returns null to indicate that this detail doesn't support presence.
 * @return null
 */
@Override
public PresenceStatus getPresenceStatus()
{
return null;
}
}
/**
 * Returns the <tt>JPopupMenu</tt> opened on a right button click over this
 * <tt>SourceUIContact</tt>. A fresh menu is built on every call.
 * @return the <tt>JPopupMenu</tt> opened on a right button click over this
 * <tt>SourceUIContact</tt>
 */
@Override
public JPopupMenu getRightButtonMenu()
{
return new SourceContactRightButtonMenu(this);
}
/**
 * Builds the tooltip opened on mouse over: avatar, display name, display
 * details and the contact's phone / e-mail / IM details grouped by category.
 * When the source doesn't support categories (e.g. history records), all
 * details are listed under a single "call with" section instead.
 *
 * @return the tool tip opened on mouse over
 */
@Override
public ExtendedTooltip getToolTip()
{
    ExtendedTooltip tip = new ExtendedTooltip(true);
    byte[] avatarImage = sourceContact.getImage();
    if (avatarImage != null && avatarImage.length > 0)
        tip.setImage(new ImageIcon(avatarImage));
    tip.setTitle(sourceContact.getDisplayName());
    String displayDetails = getDisplayDetails();
    // Reuse the local instead of calling getDisplayDetails() a second time
    // (the original computed the value and then ignored it).
    if (displayDetails != null)
        tip.addLine(new JLabel[]{new JLabel(displayDetails)});
    try
    {
        List<ContactDetail> details = sourceContact.getContactDetails(
            ContactDetail.Category.Phone);
        if (details != null && details.size() > 0)
            addDetailsToToolTip(
                details,
                GuiActivator.getResources()
                    .getI18NString("service.gui.PHONES"),
                tip);
        details = sourceContact.getContactDetails(
            ContactDetail.Category.Email);
        if (details != null && details.size() > 0)
            addDetailsToToolTip(
                details,
                GuiActivator.getResources()
                    .getI18NString("service.gui.EMAILS"),
                tip);
        details = sourceContact.getContactDetails(
            ContactDetail.Category.InstantMessaging);
        if (details != null && details.size() > 0)
            addDetailsToToolTip(
                details,
                GuiActivator.getResources()
                    .getI18NString("service.gui.INSTANT_MESSAGINGS"),
                tip);
    }
    catch (OperationNotSupportedException e)
    {
        List<ContactDetail> telDetails
            = sourceContact.getContactDetails(
                OperationSetBasicTelephony.class);
        // if there is no telephony there is nothing more to show
        if(telDetails == null || telDetails.isEmpty())
            return tip;
        // Categories aren't supported. This is the case for history
        // records.
        List<ContactDetail> allDetails = sourceContact.getContactDetails();
        addDetailsToToolTip(allDetails,
            GuiActivator.getResources()
                .getI18NString("service.gui.CALL_WITH"), tip);
    }
    return tip;
}
/**
 * Appends a titled section to the tooltip: a bold category header followed by
 * one line per detail, each line prefixed with its bold sub-category labels.
 *
 * @param details the details to render
 * @param category the (already localized) section title
 * @param toolTip the tooltip to append to
 */
private void addDetailsToToolTip( List<ContactDetail> details,
                                  String category,
                                  ExtendedTooltip toolTip)
{
    JLabel categoryLabel = new JLabel(category, null, JLabel.LEFT);
    categoryLabel.setFont(categoryLabel.getFont().deriveFont(Font.BOLD));
    categoryLabel.setForeground(Color.DARK_GRAY);
    toolTip.addLine(null, " ");
    toolTip.addLine(new JLabel[]{categoryLabel});
    for (ContactDetail contactDetail : details)
    {
        Collection<ContactDetail.SubCategory> subCategories
            = contactDetail.getSubCategories();
        // Guard against null BEFORE sizing the label array: the original code
        // dereferenced subCategories for the array size and only then
        // null-checked it, so the check could never take effect.
        int subCategoryCount
            = (subCategories == null) ? 0 : subCategories.size();
        JLabel[] jLabels = new JLabel[subCategoryCount + 1];
        int i = 0;
        for (int n = 0; n < subCategoryCount; n++)
        {
            // Iterate explicitly so i stays in sync with the array slots.
        }
        if (subCategoryCount > 0)
        {
            Iterator<ContactDetail.SubCategory> labelsIter
                = subCategories.iterator();
            while (labelsIter.hasNext())
            {
                JLabel label = new JLabel(
                    getInternationalizedLabel(labelsIter.next()));
                label.setFont(label.getFont().deriveFont(Font.BOLD));
                label.setForeground(Color.GRAY);
                jLabels[i] = label;
                i++;
            }
        }
        String labelText;
        if (ConfigurationUtils.isHideAddressInCallHistoryTooltipEnabled())
        {
            // Prefer the display name; fall back to the raw detail value.
            labelText = contactDetail.getDisplayName();
            if (StringUtils.isNullOrEmpty(labelText))
                labelText = contactDetail.getDetail();
        }
        else
        {
            labelText = contactDetail.getDetail();
        }
        jLabels[i] = new JLabel(filterAddressDisplay(labelText));
        toolTip.addLine(jLabels);
    }
}
/**
 * Returns the internationalized label corresponding to the given
 * <tt>ContactDetail.Category</tt>.
 *
 * @param category the <tt>ContactDetail.Category</tt>, for which we would
 * like to obtain an internationalized label
 * @return the internationalized label, or null when the category is null or
 * has no translation
 */
protected String getInternationalizedLabel(ContactDetail.Category category)
{
    if (category == null)
        return null;
    ResourceManagementService resources = GuiActivator.getResources();
    switch (category)
    {
    case Address:
        return resources.getI18NString("service.gui.ADDRESS");
    case Email:
        return resources.getI18NString("service.gui.EMAIL");
    case Personal:
        return resources.getI18NString("service.gui.PERSONAL");
    case Organization:
        return resources.getI18NString("service.gui.ORGANIZATION");
    case Phone:
        return resources.getI18NString("service.gui.PHONE");
    case InstantMessaging:
        return resources.getI18NString("service.gui.IM");
    default:
        return null;
    }
}
/**
 * Returns a collection of internationalized strings corresponding to the
 * given sub-categories, in iteration order.
 *
 * @param subCategories an Iterator over a list of
 * <tt>ContactDetail.SubCategory</tt>s
 * @return a collection of internationalized strings corresponding to the
 * given sub-categories
 */
protected Collection<String> getInternationalizedLabels(
    Iterator<ContactDetail.SubCategory> subCategories)
{
    Collection<String> result = new LinkedList<String>();

    while (subCategories.hasNext())
        result.add(getInternationalizedLabel(subCategories.next()));

    return result;
}
/**
 * Returns the internationalized label corresponding to the given category.
 *
 * @param subCategory the <tt>ContactDetail.SubCategory</tt>, for which we
 * would like to obtain an internationalized label
 * @return the internationalized label, the protocol's own display value
 * for instant-messaging sub-categories, or <tt>null</tt> if the
 * sub-category is <tt>null</tt> or unknown
 */
protected String getInternationalizedLabel(
    ContactDetail.SubCategory subCategory)
{
    if (subCategory == null)
        return null;

    ResourceManagementService resources = GuiActivator.getResources();

    // Translate the sub-category into a resource key and resolve it once;
    // IM protocols and unknown values are handled by early returns.
    String resourceKey;
    switch (subCategory)
    {
    case City:
        resourceKey = "service.gui.CITY";
        break;
    case Country:
        resourceKey = "service.gui.COUNTRY";
        break;
    case Fax:
        resourceKey = "service.gui.FAX";
        break;
    case Home:
        resourceKey = "service.gui.HOME";
        break;
    case HomePage:
        resourceKey = "service.gui.HOME_PAGE";
        break;
    case JobTitle:
        resourceKey = "service.gui.JOB_TITLE";
        break;
    case LastName:
        resourceKey = "service.gui.LAST_NAME";
        break;
    case Mobile:
        resourceKey = "service.gui.MOBILE_PHONE";
        break;
    case Name:
        resourceKey = "service.gui.NAME";
        break;
    case Nickname:
        resourceKey = "service.gui.NICKNAME";
        break;
    case Other:
        resourceKey = "service.gui.OTHER";
        break;
    case PostalCode:
        resourceKey = "service.gui.POSTAL_CODE";
        break;
    case Street:
        resourceKey = "service.gui.STREET";
        break;
    case Work:
        resourceKey = "service.gui.WORK_PHONE";
        break;
    case AIM:
    case ICQ:
    case Jabber:
    case MSN:
    case Yahoo:
    case Skype:
    case GoogleTalk:
    case Facebook:
        // IM sub-categories display their own protocol name directly.
        return subCategory.value();
    default:
        return null;
    }

    return resources.getI18NString(resourceKey);
}
/**
 * Returns all custom action buttons for this notification contact.
 *
 * @return a list of all custom action buttons for this notification
 * contact, or <tt>null</tt> when there is no underlying source contact
 */
@Override
public Collection<SIPCommButton> getContactCustomActionButtons()
{
    if (sourceContact == null)
        return null;

    return uiGroup.getParentUISource()
            .getContactCustomActionButtons(sourceContact);
}
/**
 * Returns all custom action menu items for this contact.
 *
 * @param initActions if <tt>true</tt> the actions will be reloaded.
 * @return a list of all custom action menu items for this contact, or
 * <tt>null</tt> when there is no underlying source contact
 */
@Override
public Collection<JMenuItem> getContactCustomActionMenuItems(
    boolean initActions)
{
    if (sourceContact == null)
        return null;

    return uiGroup.getParentUISource()
            .getContactCustomActionMenuItems(sourceContact, initActions);
}
}
| |
package dbdoc.impl;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.util.List;
import org.junit.Test;
import dbdoc.TestCase;
import dbdoc.reflect.Column;
import dbdoc.reflect.Constraint;
import dbdoc.reflect.Database;
import dbdoc.reflect.ForeignKey;
import dbdoc.reflect.Index;
import dbdoc.reflect.PrimaryKey;
import dbdoc.reflect.Procedure;
import dbdoc.reflect.Schema;
import dbdoc.reflect.Table;
import dbdoc.reflect.Trigger;
/**
 * Unit tests for {@code DBRefIndex}: verifies that schemas, tables and
 * columns can be registered and then looked up by their qualified names,
 * and that lookups of unregistered objects return {@code null}.
 *
 * <p>The nested stub classes implement just enough of the reflect
 * interfaces for the index to read names and parent links; all other
 * methods return neutral defaults.
 *
 * @author jk
 */
public class TestDBRefIndex extends TestCase {

    /** The index must be constructible without arguments. */
    @Test
    public void testConstruct() {
        new DBRefIndex();
    }

    /** A registered schema is found only under its own coordinates. */
    @Test
    public void testAddSchema() {
        DBRefIndex index = new DBRefIndex();
        Schema schema = new SchemaStub(null, new DatabaseStub("testdb"));
        index.add(schema);
        assertNotNull(index.getSchema("testdb", null));
        // Unregistered objects must yield null rather than fail.
        assertNull(index.getSchema("testdb", "foo"));
        assertNull(index.getTable("testdb", null, "footable"));
        assertNull(index.getColumn("testdb", null, "footable", "foocolumn"));
    }

    /** A registered table is found only under its own coordinates. */
    @Test
    public void testAddTable() {
        DBRefIndex index = new DBRefIndex();
        Table table = new TableStub("testtab", new SchemaStub(null,
                new DatabaseStub("testdb")));
        index.add(table);
        assertNotNull(index.getTable("testdb", null, "testtab"));
        assertNull(index.getSchema("testdb", "foo"));
        assertNull(index.getTable("testdb", null, "footable"));
        assertNull(index.getColumn("testdb", null, "footable", "foocolumn"));
    }

    /** A registered column is found only under its own coordinates. */
    @Test
    public void testAddColumn() {
        DBRefIndex index = new DBRefIndex();
        Column col = new ColumnStub("testcol", new TableStub("testtab",
                new SchemaStub(null, new DatabaseStub("testdb"))));
        index.add(col);
        assertNotNull(index.getColumn("testdb", null, "testtab", "testcol"));
        assertNull(index.getSchema("testdb", "foo"));
        assertNull(index.getTable("testdb", null, "footable"));
        assertNull(index.getColumn("testdb", null, "footable", "foocolumn"));
    }

    //
    // Stub implementations of the reflect interfaces.
    //

    /** Minimal {@link Database} that only carries a name. */
    public static class DatabaseStub implements Database {

        private final String name;

        public DatabaseStub(String name) {
            this.name = name;
        }

        @Override
        public String getComment() {
            return null;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public List<Schema> getSchemas() {
            return null;
        }

        @Override
        public boolean isSupportingSchemas() {
            return false;
        }
    }

    /** Minimal {@link Schema} carrying a name and its parent database. */
    public static class SchemaStub implements Schema {

        private final String name;
        private final Database database;

        public SchemaStub(String name, Database database) {
            this.name = name;
            this.database = database;
        }

        @Override
        public String getComment() {
            return null;
        }

        @Override
        public Database getDatabase() {
            return database;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public List<Procedure> getProcedures() {
            return null;
        }

        @Override
        public List<Table> getTables() {
            return null;
        }
    }

    /** Minimal {@link Table} carrying a name and its parent schema. */
    public static class TableStub implements Table {

        private final String name;
        private final Schema schema;

        public TableStub(String name, Schema schema) {
            this.name = name;
            this.schema = schema;
        }

        @Override
        public List<Column> getColumns() {
            return null;
        }

        @Override
        public String getComment() {
            return null;
        }

        @Override
        public List<Constraint> getConstraints() {
            return null;
        }

        @Override
        public List<Index> getIndexes() {
            return null;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public Schema getSchema() {
            return schema;
        }

        @Override
        public List<Trigger> getTriggers() {
            return null;
        }

        @Override
        public PrimaryKey getPrimaryKey() {
            return null;
        }

        @Override
        public List<ForeignKey> getForeignKeysIn() {
            return null;
        }

        @Override
        public List<ForeignKey> getForeignKeysOut() {
            return null;
        }
    }

    /** Minimal {@link Column} carrying a name and its parent table. */
    public static class ColumnStub implements Column {

        private final String name;
        private final Table table;

        public ColumnStub(String name, Table table) {
            this.name = name;
            this.table = table;
        }

        @Override
        public String getComment() {
            return null;
        }

        @Override
        public List<Constraint> getConstraints() {
            return null;
        }

        @Override
        public int getDataLength() {
            return 0;
        }

        @Override
        public String getDataType() {
            return null;
        }

        @Override
        public String getDefaultValue() {
            return null;
        }

        @Override
        public List<Index> getIndexes() {
            return null;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public int getOrdinalNumber() {
            return 0;
        }

        @Override
        public PrimaryKey getPrimaryKey() {
            return null;
        }

        @Override
        public List<Column> getReferencedColumns() {
            return null;
        }

        @Override
        public List<Column> getReferencingColumns() {
            return null;
        }

        @Override
        public Table getTable() {
            return table;
        }

        @Override
        public boolean isConstrainedColumn() {
            return false;
        }

        @Override
        public boolean isIndexColumn() {
            return false;
        }

        @Override
        public boolean isNullable() {
            return false;
        }

        @Override
        public boolean isPrimaryKeyColumn() {
            return false;
        }

        @Override
        public boolean isReferenced() {
            return false;
        }

        @Override
        public boolean isReferencing() {
            return false;
        }

        @Override
        public boolean isSequence() {
            return false;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.accumulo.serde;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.accumulo.core.data.ColumnUpdate;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import com.google.common.base.Joiner;
/**
 * Unit tests for AccumuloRowSerializer: output-buffer reuse, binary (#b)
 * and string column encodings, visibility labels, and map (cf:*)
 * serialization of Hive rows into Accumulo {@link Mutation}s.
 */
public class TestAccumuloRowSerializer {

    /**
     * getSerializedValue must reset the shared output buffer before writing,
     * so stale bytes from a previous row cannot leak into the value.
     */
    @Test
    public void testBufferResetBeforeUse() throws IOException {
        ByteStream.Output output = new ByteStream.Output();
        PrimitiveObjectInspector fieldObjectInspector = Mockito.mock(StringObjectInspector.class);
        ColumnMapping mapping = Mockito.mock(ColumnMapping.class);
        // Write some garbage to the buffer that should be erased
        output.write("foobar".getBytes());
        // Stub out the serializer
        AccumuloRowSerializer serializer = Mockito.mock(AccumuloRowSerializer.class);
        String object = "hello";
        // Only getSerializedValue runs for real; everything it touches on the
        // inspector/mapping is stubbed below.
        Mockito.when(
            serializer.getSerializedValue(Mockito.any(ObjectInspector.class), Mockito.any(),
                Mockito.any(ByteStream.Output.class), Mockito.any(ColumnMapping.class)))
            .thenCallRealMethod();
        Mockito.when(fieldObjectInspector.getCategory()).thenReturn(ObjectInspector.Category.PRIMITIVE);
        Mockito.when(fieldObjectInspector.getPrimitiveCategory()).thenReturn(PrimitiveCategory.STRING);
        Mockito.when(fieldObjectInspector.getPrimitiveWritableObject(Mockito.any(Object.class)))
            .thenReturn(new Text(object));
        Mockito.when(mapping.getEncoding()).thenReturn(ColumnEncoding.STRING);
        // Invoke the method
        serializer.getSerializedValue(fieldObjectInspector, object, output, mapping);
        // Verify the buffer was reset (real output doesn't happen because it was mocked)
        Assert.assertEquals(0, output.size());
    }

    /**
     * Columns mapped with the #b suffix (cq1, cq2) must be written as raw
     * big-endian ints, while unsuffixed columns (cq3) stay string-encoded.
     */
    @Test
    public void testBinarySerialization() throws IOException, SerDeException {
        List<String> columns = Arrays.asList("row", "cq1", "cq2", "cq3");
        List<TypeInfo> types = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo,
            TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
        List<String> typeNames = new ArrayList<String>(types.size());
        for (TypeInfo type : types) {
            typeNames.add(type.getTypeName());
        }
        // Table maps the Hive columns onto :rowid plus three cf:* qualifiers,
        // two of them binary-encoded (#b).
        Properties tableProperties = new Properties();
        tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,
            ":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
        tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
        tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
        tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
        AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
            tableProperties, AccumuloSerDe.class.getSimpleName());
        SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
        LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
            .createLazyStructInspector(columns, types, serDeParams.getSeparators(),
                serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
                serDeParams.isEscaped(), serDeParams.getEscapeChar());
        AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
            accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(),
            accumuloSerDeParams.getRowIdFactory());
        // Create the LazyStruct from the LazyStruct...Inspector
        LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(oi);
        ByteArrayRef byteRef = new ByteArrayRef();
        // Space-delimited row: "row1 10 20 value"
        byteRef.setData(new byte[] {'r', 'o', 'w', '1', ' ', '1', '0', ' ', '2', '0', ' ', 'v', 'a',
            'l', 'u', 'e'});
        obj.init(byteRef, 0, byteRef.getData().length);
        Mutation m = (Mutation) serializer.serialize(obj, oi);
        Assert.assertArrayEquals("row1".getBytes(), m.getRow());
        List<ColumnUpdate> updates = m.getUpdates();
        Assert.assertEquals(3, updates.size());
        // baos/out produce the expected binary int encoding for comparison.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baos);
        ColumnUpdate update = updates.get(0);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
        out.writeInt(10);
        Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
        update = updates.get(1);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
        // Reset before writing the next expected value, or bytes accumulate.
        baos.reset();
        out.writeInt(20);
        Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
        update = updates.get(2);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
        Assert.assertEquals("value", new String(update.getValue()));
    }

    /**
     * Same scenario as testBinarySerialization, but constructed with a
     * ColumnVisibility("foo"): every ColumnUpdate must carry that label.
     */
    @Test
    public void testVisibilityLabel() throws IOException, SerDeException {
        List<String> columns = Arrays.asList("row", "cq1", "cq2", "cq3");
        List<TypeInfo> types = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo,
            TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
        List<String> typeNames = new ArrayList<String>(types.size());
        for (TypeInfo type : types) {
            typeNames.add(type.getTypeName());
        }
        Properties tableProperties = new Properties();
        tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,
            ":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
        tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
        tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
        tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
        AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
            tableProperties, AccumuloSerDe.class.getSimpleName());
        SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
        LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
            .createLazyStructInspector(columns, types, serDeParams.getSeparators(),
                serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
                serDeParams.isEscaped(), serDeParams.getEscapeChar());
        // The "foo" visibility label should be applied to every column update.
        AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
            accumuloSerDeParams.getColumnMappings(), new ColumnVisibility("foo"),
            accumuloSerDeParams.getRowIdFactory());
        // Create the LazyStruct from the LazyStruct...Inspector
        LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(oi);
        ByteArrayRef byteRef = new ByteArrayRef();
        byteRef.setData(new byte[] {'r', 'o', 'w', '1', ' ', '1', '0', ' ', '2', '0', ' ', 'v', 'a',
            'l', 'u', 'e'});
        obj.init(byteRef, 0, byteRef.getData().length);
        Mutation m = (Mutation) serializer.serialize(obj, oi);
        Assert.assertArrayEquals("row1".getBytes(), m.getRow());
        List<ColumnUpdate> updates = m.getUpdates();
        Assert.assertEquals(3, updates.size());
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baos);
        ColumnUpdate update = updates.get(0);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
        Assert.assertEquals("foo", new String(update.getColumnVisibility()));
        out.writeInt(10);
        Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
        update = updates.get(1);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
        Assert.assertEquals("foo", new String(update.getColumnVisibility()));
        baos.reset();
        out.writeInt(20);
        Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
        update = updates.get(2);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
        Assert.assertEquals("foo", new String(update.getColumnVisibility()));
        Assert.assertEquals("value", new String(update.getValue()));
    }

    /**
     * A cf:* mapping serializes a Hive map column into one ColumnUpdate per
     * map entry, with the map key as qualifier and the map value as value.
     */
    @Test
    public void testMapSerialization() throws IOException, SerDeException {
        List<String> columns = Arrays.asList("row", "col");
        List<TypeInfo> types = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory
            .getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo));
        List<String> typeNames = new ArrayList<String>(types.size());
        for (TypeInfo type : types) {
            typeNames.add(type.getTypeName());
        }
        Properties tableProperties = new Properties();
        tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:*");
        tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
        tableProperties.setProperty(serdeConstants.COLLECTION_DELIM, ",");
        tableProperties.setProperty(serdeConstants.MAPKEY_DELIM, ":");
        tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
        tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
        AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
            tableProperties, AccumuloSerDe.class.getSimpleName());
        SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
        TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
        // Build the struct<string, map<string,string>> inspector by hand,
        // using ',' and ':' as the collection and map-key delimiters.
        LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory
            .createLazyObjectInspector(stringTypeInfo, new byte[] {0}, 0,
                serDeParams.getNullSequence(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
        LazyMapObjectInspector mapOI = LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(
            stringOI, stringOI, (byte) ',', (byte) ':', serDeParams.getNullSequence(),
            serDeParams.isEscaped(), serDeParams.getEscapeChar());
        LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyObjectInspectorFactory
            .getLazySimpleStructObjectInspector(columns, Arrays.asList(stringOI, mapOI), (byte) ' ',
                serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
                serDeParams.isEscaped(), serDeParams.getEscapeChar());
        AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
            accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(),
            accumuloSerDeParams.getRowIdFactory());
        // Create the LazyStruct from the LazyStruct...Inspector
        LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(structOI);
        ByteArrayRef byteRef = new ByteArrayRef();
        byteRef.setData("row1 cq1:10,cq2:20,cq3:value".getBytes());
        obj.init(byteRef, 0, byteRef.getData().length);
        Mutation m = (Mutation) serializer.serialize(obj, structOI);
        Assert.assertArrayEquals("row1".getBytes(), m.getRow());
        List<ColumnUpdate> updates = m.getUpdates();
        Assert.assertEquals(3, updates.size());
        ColumnUpdate update = updates.get(0);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
        Assert.assertEquals("10", new String(update.getValue()));
        update = updates.get(1);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
        Assert.assertEquals("20", new String(update.getValue()));
        update = updates.get(2);
        Assert.assertEquals("cf", new String(update.getColumnFamily()));
        Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
        Assert.assertEquals("value", new String(update.getValue()));
    }

    /** A negative row-id offset must be rejected at construction time. */
    @Test(expected = IllegalArgumentException.class)
    public void testInvalidRowIdOffset() throws SerDeException {
        ArrayList<ColumnMapping> mappings = new ArrayList<ColumnMapping>();
        // Should fail because of the -1
        new AccumuloRowSerializer(-1, null, mappings, new ColumnVisibility(), null);
    }
}
| |
/**********************************************************************************
*
* $Id: StandaloneSectionsDataLoader.java 105079 2012-02-24 23:08:11Z ottenhoff@longsight.com $
*
***********************************************************************************
*
* Copyright (c) 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.gradebook.test;
import java.util.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.section.api.coursemanagement.Course;
import org.sakaiproject.section.api.coursemanagement.CourseSection;
import org.sakaiproject.section.api.coursemanagement.User;
import org.sakaiproject.section.api.coursemanagement.EnrollmentRecord;
import org.sakaiproject.section.api.facade.Role;
/**
 * Loads a fixed set of QA sites, users, enrollments and sections into a
 * standalone sections/gradebook environment. Runs as a "test" but is a data
 * loader: rollback is disabled so the data persists after the run.
 */
public class StandaloneSectionsDataLoader extends GradebookTestBase {
    private static Log log = LogFactory.getLog(StandaloneSectionsDataLoader.class);

    // Eight QA sites; the parallel arrays below configure each by index.
    public static final String[] SITE_UIDS = {
        "QA_1",
        "QA_2",
        "QA_3",
        "QA_4",
        "QA_5",
        "QA_6",
        "QA_7",
        "QA_8",
    };
    public static final String[] SITE_NAMES = {
        "QA Site #1 [no students, 1 instructor, no self reg, no self switch, external]",
        "QA Site #2 [10 students, 1 instructor, no self reg, no self switch, internal]",
        "QA Site #3 [10 students, 2 instructors, no self reg, no self switch, internal]",
        "QA Site #4 [10 students, 1 instructor, no self reg, no self switch, internal]",
        "QA Site #5 [50 students, 1 instructor, no self reg, no self switch, internal]",
        "QA Site #6 [50 students, 1 instructor, no self reg, no self switch, internal]",
        "QA Site #7 [150 students, 1 instructor, no self reg, no self switch, internal]",
        "QA Site #8 [400 students, 1 instructor, no self reg, no self switch, internal]",
    };
    // Per-site flags, indexed like SITE_UIDS.
    public static boolean[] SITE_SELF_REG = {
        false, false, false, false,
        false, false, false, false,
    };
    public static boolean[] SITE_SELF_SWITCH = {
        false, false, false, false,
        false, false, false, false,
    };
    // Only site 1 (index 0) is externally managed.
    public static boolean[] SITE_EXTERNALLY_MANAGED = {
        true, false, false, false,
        false, false, false, false,
    };

    /** Special users */
    public final static String AUTHID_TEACHER_ALL = "authid_teacher";
    public final static String AUTHID_TEACHER_AND_STUDENT = "authid_teacher_student";
    public final static String AUTHID_STUDENT_ALL = "stu_0";
    public final static String AUTHID_NO_SITE = "authid_nowhere";
    public final static String AUTHID_STUDENT_PREFIX = "stu_";
    // Students 16 and 17 ("Lazy Etudient") are left without grades.
    public static final String AUTHID_WITHOUT_GRADES_1 = "stu_16";
    public static final String AUTHID_WITHOUT_GRADES_2 = "stu_17";
    public static final String AUTHID_TA = "authid_ta";

    /** Special sites (indexes into SITE_UIDS) */
    public static int SITE_AMBIGUOUS_TEACHER = 2;
    public static int SITE_AMBIGUOUS_STUDENT = 5;
    public static int SITE_LOADED_UP = 5;

    public StandaloneSectionsDataLoader() {
        // Don't roll these tests back, since they are intended to load data
        setDefaultRollback(false);
    }

    /**
     * Creates the courses, users, enrollments, sections and section
     * memberships in a single pass.
     */
    public void testLoadData() {
        // Load courses. (No sections yet!)
        List sites = new ArrayList(SITE_UIDS.length);
        for (int i = 0; i < SITE_UIDS.length; i++) {
            Course courseSite = integrationSupport.createCourse(SITE_UIDS[i], SITE_NAMES[i], SITE_EXTERNALLY_MANAGED[i], SITE_SELF_REG[i], SITE_SELF_SWITCH[i]);
            sites.add(courseSite);
        }

        // Load users.
        User teacherAll = userManager.createUser(AUTHID_TEACHER_ALL, "Bizzy Teacher", "Teacher, Bizzy", "uTeacher");
        User teacherStudent = userManager.createUser(AUTHID_TEACHER_AND_STUDENT, "Teaching Student", "Student, Teaching", "uTeSt");
        List students = new ArrayList(400);
        // The first ~18 students get hand-picked names that exercise edge
        // cases (long names, hyphens, suffixes, non-ASCII); the rest get
        // generated names. Note there is no case 2 — that student falls
        // through to the default generated name.
        for(int i=0; i < 400; i++) {
            String firstName;
            String lastName;
            switch(i) {
                case 0:
                    firstName = "Abby Lynn";
                    lastName = "Astudent";
                    break;
                case 1:
                    firstName = "Mary";
                    lastName = "LongLastNameThatExceedsTheMaximumInTheGradebook";
                    break;
                case 3:
                    firstName = "Susan";
                    lastName = "Smith-Morris";
                    break;
                case 4:
                    firstName = "Nathan Q., Jr.";
                    lastName = "Brewster";
                    break;
                case 5:
                    firstName = "Carol Lee";
                    lastName = "Williams";
                    break;
                case 6:
                    firstName = "Kim";
                    lastName = "Jones Parker";
                    break;
                case 7:
                    firstName = "Joe";
                    lastName = "Brown";
                    break;
                case 8:
                    firstName = "Joe";
                    lastName = "Brown";
                    break;
                case 9:
                    firstName = "Sarah Jane";
                    lastName = "Miller";
                    break;
                case 10:
                    firstName = "Rachel";
                    lastName = "Wilson";
                    break;
                case 11:
                    firstName = "Ali";
                    lastName = "Moore";
                    break;
                case 12:
                    firstName = "Chen-Wai";
                    lastName = "Taylor";
                    break;
                case 13:
                    firstName = "Samuel Taylor Coleridge";
                    lastName = "Ascot";
                    break;
                case 14:
                    firstName = "Jane Quincy";
                    lastName = "Brandenburg";
                    break;
                case 15:
                    firstName = "Thor";
                    lastName = "Mj\u00F8lner";
                    break;
                case 16:
                    firstName = "Lazy";
                    lastName = "Etudient1";
                    break;
                case 17:
                    firstName = "Lazy";
                    lastName = "Etudient2";
                    break;
                default:
                    firstName = "First Middle";
                    lastName = "LastName" + i;
            }
            // Student 3 gets a mixed-case uid prefix — presumably to
            // exercise case-insensitive uid handling; confirm before relying
            // on it.
            String uidPrefix = (i != 3) ? "uid_" : "uID_";
            students.add(userManager.createUser(AUTHID_STUDENT_PREFIX + i, firstName + " " + lastName, lastName + ", " + firstName, uidPrefix + i));
        }
        userManager.createUser(AUTHID_NO_SITE, "Johnny Nobody", "Nobody, Johnny", AUTHID_NO_SITE);

        // Load enrollments into the courses. Membership is tiered: all 400
        // in site 8; first 150 also in site 7; first 50 also in site 5;
        // first 49 also in site 6; first 10 also in sites 2, 3 and 4.
        for (int i = 0; i < students.size(); i++) {
            // Everyone is added to Site 8.
            integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[7], Role.STUDENT);
            // The first 150 students are added to Site 7.
            if (i < 150) {
                integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[6], Role.STUDENT);
                // The first 50 students are added to Site 5 and 6, but 6 contains a special student....
                if (i < 50) {
                    integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[4], Role.STUDENT);
                    if (i < 49) {
                        integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[5], Role.STUDENT);
                        // The first 10 students are added to Site 2, 3, and 4.
                        if (i < 10) {
                            integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[3], Role.STUDENT);
                            integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[2], Role.STUDENT);
                            integrationSupport.addSiteMembership(((User)students.get(i)).getUserUid(), SITE_UIDS[1], Role.STUDENT);
                        }
                    }
                }
            }
        }

        // Load instructors into the courses.
        for (int i = 0; i < SITE_UIDS.length; i++) {
            integrationSupport.addSiteMembership(teacherAll.getUserUid(), SITE_UIDS[i], Role.INSTRUCTOR);
        }
        // Load the ambiguous one.
        integrationSupport.addSiteMembership(teacherStudent.getUserUid(), SITE_UIDS[SITE_AMBIGUOUS_TEACHER], Role.INSTRUCTOR);
        integrationSupport.addSiteMembership(teacherStudent.getUserUid(), SITE_UIDS[SITE_AMBIGUOUS_STUDENT], Role.STUDENT);

        // Define and load sections for the assignment-loaded site.
        String loadedSiteUid = ((Course)sites.get(SITE_LOADED_UP)).getUuid();
        List sectionCategories = sectionAwareness.getSectionCategories(loadedSiteUid);

        // We'd better have more than one category for this to work....
        // Category A: four sections; the third gets a distinctive name.
        String catId = (String)sectionCategories.get(1);
        String catName = sectionAwareness.getCategoryName(catId, Locale.US);
        List catASectionUuids = new ArrayList();
        for (int i = 0; i < 4; i++) {
            String sectionName;
            if (i != 2) {
                sectionName = catName + " " + (i + 1);
            } else {
                sectionName = "Abe's " + catName;
            }
            CourseSection section = integrationSupport.createSection(loadedSiteUid, sectionName, catId, new Integer(40), "Room 2" + i, null, null, true, false, true, false, false, false, false);
            catASectionUuids.add(section.getUuid());
        }
        // Category B: two sections.
        catId = (String)sectionCategories.get(0);
        catName = sectionAwareness.getCategoryName(catId, Locale.US);
        List catBSectionUuids = new ArrayList();
        for (int i = 0; i < 2; i++) {
            String sectionName = catName + " " + (i + 1);
            CourseSection section = integrationSupport.createSection(loadedSiteUid, sectionName, catId, new Integer(40), "Room 3" + i, null, null, true, false, true, false, false, false, false);
            catBSectionUuids.add(section.getUuid());
        }

        // Populate the sections. Not all students will end up in a Category A section.
        List enrollments = sectionAwareness.getSiteMembersInRole(SITE_UIDS[SITE_LOADED_UP], Role.STUDENT);
        for (int i = 0; i < enrollments.size(); i++) {
            String userUid = ((EnrollmentRecord)enrollments.get(i)).getUser().getUserUid();
            // Everyone is round-robined into a Category B section; all but
            // the last 5 also get a Category A section.
            String sectionUuid = (String)catBSectionUuids.get(i % catBSectionUuids.size());
            integrationSupport.addSectionMembership(userUid, sectionUuid, Role.STUDENT);
            if (i < (enrollments.size() - 5)) {
                sectionUuid = (String)catASectionUuids.get(i % catASectionUuids.size());
                integrationSupport.addSectionMembership(userUid, sectionUuid, Role.STUDENT);
            }
        }

        // Add a TA to the site and two sections.
        userManager.createUser(AUTHID_TA, "Teech N. Assist", "Assist, Teech N.", "uTA");
        integrationSupport.addSiteMembership(AUTHID_TA, SITE_UIDS[SITE_LOADED_UP], Role.TA);
        integrationSupport.addSectionMembership(AUTHID_TA, (String)catASectionUuids.get(2), Role.TA);
        integrationSupport.addSectionMembership(AUTHID_TA, (String)catBSectionUuids.get(1), Role.TA);
    }
}
/**********************************************************************************
* $Id: StandaloneSectionsDataLoader.java 105079 2012-02-24 23:08:11Z ottenhoff@longsight.com $
*********************************************************************************/
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide;
import com.intellij.Patches;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.TimeoutUtil;
import com.intellij.util.concurrency.FutureResult;
import com.sun.jna.IntegerType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import sun.awt.datatransfer.DataTransferer;
import java.awt.*;
import java.awt.datatransfer.*;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* <p>This class is used to workaround the problem with getting clipboard contents (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4818143).
* Although this bug is marked as fixed actually Sun just set 10 seconds timeout for {@link java.awt.datatransfer.Clipboard#getContents(Object)}
* method which may cause unacceptably long UI freezes. So we worked around this as follows:
* <ul>
* <li>for Macs we perform synchronization with system clipboard on a separate thread and schedule it when IDEA frame is activated
* or Copy/Cut action in Swing component is invoked, and use native method calls to access system clipboard lock-free (?);</li>
* <li>for X Window we temporary set short timeout and check for available formats (which should be fast if a clipboard owner is alive).</li>
* </ul>
* </p>
*
* @author nik
*/
public class ClipboardSynchronizer implements ApplicationComponent {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.ClipboardSynchronizer");
  // Platform-specific strategy chosen once in the constructor; every public method delegates to it.
  private final ClipboardHandler myClipboardHandler;
  public static ClipboardSynchronizer getInstance() {
    return ApplicationManager.getApplication().getComponent(ClipboardSynchronizer.class);
  }
  public ClipboardSynchronizer() {
    // Choose the handler matching the current environment:
    //  - headless unit-test mode keeps clipboard contents purely in memory;
    //  - Mac and X Window get dedicated workarounds for the slow-clipboard JDK bug
    //    (guarded by Patches.SLOW_GETTING_CLIPBOARD_CONTENTS; see the class javadoc);
    //  - otherwise the plain AWT system clipboard is used directly.
    if (ApplicationManager.getApplication().isHeadlessEnvironment() && ApplicationManager.getApplication().isUnitTestMode()) {
      myClipboardHandler = new HeadlessClipboardHandler();
    }
    else if (Patches.SLOW_GETTING_CLIPBOARD_CONTENTS && SystemInfo.isMac) {
      myClipboardHandler = new MacClipboardHandler();
    }
    else if (Patches.SLOW_GETTING_CLIPBOARD_CONTENTS && SystemInfo.isXWindow) {
      myClipboardHandler = new XWinClipboardHandler();
    }
    else {
      myClipboardHandler = new ClipboardHandler();
    }
  }
  @Override
  public void initComponent() {
    myClipboardHandler.init();
  }
  @Override
  public void disposeComponent() {
    myClipboardHandler.dispose();
  }
  @NotNull
  @Override
  public String getComponentName() {
    return "ClipboardSynchronizer";
  }
  /**
   * Checks whether the clipboard currently offers at least one of the given flavors.
   *
   * @return {@code true} if any of the flavors is available; {@code false} if none is, or if the
   *         system clipboard is currently unavailable (the {@link IllegalStateException} is logged
   *         and swallowed)
   */
  public boolean areDataFlavorsAvailable(@NotNull DataFlavor... flavors) {
    try {
      return myClipboardHandler.areDataFlavorsAvailable(flavors);
    }
    catch (IllegalStateException e) {
      LOG.info(e);
      return false;
    }
  }
  /**
   * Returns the current clipboard contents, or {@code null} if there are none or the system
   * clipboard is currently unavailable (the {@link IllegalStateException} is logged and swallowed).
   */
  @Nullable
  public Transferable getContents() {
    try {
      return myClipboardHandler.getContents();
    }
    catch (IllegalStateException e) {
      LOG.info(e);
      return null;
    }
  }
  public void setContent(@NotNull final Transferable content, @NotNull final ClipboardOwner owner) {
    myClipboardHandler.setContent(content, owner);
  }
  /** Drops any clipboard state cached by the handler (the system clipboard itself is not cleared). */
  public void resetContent() {
    myClipboardHandler.resetContent();
  }
  /**
   * Default implementation: talks directly to the AWT system clipboard, retrying a few times
   * because the clipboard may be temporarily locked by another application
   * (AWT signals that with {@link IllegalStateException}).
   */
  private static class ClipboardHandler {
    public void init() { }
    public void dispose() { }
    public boolean areDataFlavorsAvailable(@NotNull DataFlavor... flavors) {
      Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
      for (DataFlavor flavor : flavors) {
        if (clipboard.isDataFlavorAvailable(flavor)) {
          return true;
        }
      }
      return false;
    }
    @Nullable
    public Transferable getContents() throws IllegalStateException {
      // Retry up to 3 times with a short pause; if the clipboard never becomes available,
      // rethrow the last failure (`last` is always non-null here because the loop only
      // falls through after at least one catch).
      IllegalStateException last = null;
      for (int i = 0; i < 3; i++) {
        try {
          return Toolkit.getDefaultToolkit().getSystemClipboard().getContents(this);
        }
        catch (IllegalStateException e) {
          TimeoutUtil.sleep(50);
          last = e;
        }
      }
      throw last;
    }
    public void setContent(@NotNull final Transferable content, @NotNull final ClipboardOwner owner) {
      // Same retry strategy as getContents(), but the operation is silently abandoned
      // after 3 failed attempts.
      for (int i = 0; i < 3; i++) {
        try {
          Toolkit.getDefaultToolkit().getSystemClipboard().setContents(content, owner);
        }
        catch (IllegalStateException e) {
          TimeoutUtil.sleep(50);
          continue;
        }
        break;
      }
    }
    public void resetContent() {
    }
  }
  /**
   * Mac handler. When the {@code ide.mac.useNativeClipboard} registry flag is on, it reads the
   * clipboard via native NSPasteboard calls on the Mac main thread (with a tiny timeout) instead
   * of the potentially slow AWT path, and it stores only the plain-text representation in the
   * system clipboard while caching the full transferable locally so rich content set by the IDE
   * can still be returned intact.
   */
  private static class MacClipboardHandler extends ClipboardHandler {
    // Pair of (plain-text representation, original rich transferable) for the last content
    // set by the IDE via setContent(); null when the cache is invalid.
    private Pair<String,Transferable> myFullTransferable;
    @Nullable
    private Transferable doGetContents() throws IllegalStateException {
      if (Registry.is("ide.mac.useNativeClipboard")) {
        final Transferable safe = getContentsSafe();
        if (safe != null) {
          return safe;
        }
      }
      // Native read disabled or timed out — fall back to the regular AWT clipboard.
      return super.getContents();
    }
    @Override
    public boolean areDataFlavorsAvailable(@NotNull DataFlavor... flavors) {
      Transferable contents = getContents();
      return contents != null && ClipboardSynchronizer.areDataFlavorsAvailable(contents, flavors);
    }
    @Override
    public Transferable getContents() {
      Transferable transferable = doGetContents();
      // If the clipboard still holds exactly the plain text we put there in setContent(),
      // hand back the cached rich transferable instead of the degraded string version.
      if (transferable != null && myFullTransferable != null && transferable.isDataFlavorSupported(DataFlavor.stringFlavor)) {
        try {
          String stringData = (String) transferable.getTransferData(DataFlavor.stringFlavor);
          if (stringData != null && stringData.equals(myFullTransferable.getFirst())) {
            return myFullTransferable.getSecond();
          }
        }
        catch (UnsupportedFlavorException e) {
          LOG.info(e);
        }
        catch (IOException e) {
          LOG.info(e);
        }
      }
      // Clipboard content no longer matches the cached pair — drop the stale cache.
      myFullTransferable = null;
      return transferable;
    }
    @Override
    public void resetContent() {
      // NOTE(review): clearing the cache here was deliberately disabled at some point —
      // presumably so that a reset does not lose the rich transferable while its plain-text
      // counterpart is still on the pasteboard; confirm before re-enabling the line below.
      //myFullTransferable = null;
      super.resetContent();
    }
    @Override
    public void setContent(@NotNull final Transferable content, @NotNull final ClipboardOwner owner) {
      if (Registry.is("ide.mac.useNativeClipboard") && content.isDataFlavorSupported(DataFlavor.stringFlavor)) {
        try {
          // Put only the plain-text representation into the system clipboard and remember the
          // full transferable locally; getContents() restores it for as long as the text matches.
          String stringData = (String) content.getTransferData(DataFlavor.stringFlavor);
          myFullTransferable = Pair.create(stringData, content);
          super.setContent(new StringSelection(stringData), owner);
        }
        catch (UnsupportedFlavorException e) {
          LOG.info(e);
        }
        catch (IOException e) {
          LOG.info(e);
        }
      } else {
        myFullTransferable = null;
        super.setContent(content, owner);
      }
    }
    /**
     * Reads the clipboard natively on the Mac main thread, waiting at most 10 ms for the result.
     *
     * @return the clipboard content, or {@code null} on timeout or any failure so that the
     *         caller can fall back to the AWT path
     */
    @Nullable
    private static Transferable getContentsSafe() {
      final FutureResult<Transferable> result = new FutureResult<Transferable>();
      Foundation.executeOnMainThread(new Runnable() {
        @Override
        public void run() {
          Transferable transferable = getClipboardContentNatively();
          if (transferable != null) {
            result.set(transferable);
          }
        }
      }, true, false);
      try {
        return result.get(10, TimeUnit.MILLISECONDS);
      }
      catch (Exception ignored) {
        // Timeout or native failure — treated as "could not read", caller falls back.
        return null;
      }
    }
    /**
     * Fetches the clipboard value directly from NSPasteboard, bypassing AWT.
     *
     * @return a {@link StringSelection} with the pasteboard's UTF-8 plain text, or {@code null}
     *         when no such type is present or its value could not be read
     */
    @Nullable
    private static Transferable getClipboardContentNatively() {
      String plainText = "public.utf8-plain-text";
      ID pasteboard = Foundation.invoke("NSPasteboard", "generalPasteboard");
      ID types = Foundation.invoke(pasteboard, "types");
      IntegerType count = Foundation.invoke(types, "count");
      // Scan the pasteboard's type list for the UTF-8 plain-text UTI.
      ID plainTextType = null;
      for (int i = 0; i < count.intValue(); i++) {
        ID each = Foundation.invoke(types, "objectAtIndex:", i);
        String eachType = Foundation.toStringViaUTF8(each);
        if (plainText.equals(eachType)) {
          plainTextType = each;
          break;
        }
      }
      // A string value is returned even when no richer java object was found: java caches the
      // clipboard value internally and resets that cache only when a jvm object is put into the
      // clipboard (see the setContent() optimization above, which avoids putting jvm objects
      // into the clipboard).
      Transferable result = null;
      if (plainTextType != null) {
        ID text = Foundation.invoke(pasteboard, "stringForType:", plainTextType);
        String value = Foundation.toStringViaUTF8(text);
        if (value == null) {
          LOG.info(String.format("[Clipboard] Strange string value (null?) for type: %s", plainTextType));
        }
        else {
          result = new StringSelection(value);
        }
      }
      return result;
    }
  }
  /**
   * X Window handler. Before reading, it checks the available clipboard formats under a short
   * timeout, so a dead or unresponsive clipboard owner cannot freeze the UI for the full default
   * timeout (see the class javadoc).
   */
  private static class XWinClipboardHandler extends ClipboardHandler {
    private static final String DATA_TRANSFER_TIMEOUT_PROPERTY = "sun.awt.datatransfer.timeout";
    private static final String LONG_TIMEOUT = "2000";
    private static final String SHORT_TIMEOUT = "100";
    private static final FlavorTable FLAVOR_MAP = (FlavorTable)SystemFlavorMap.getDefaultFlavorMap();
    // Last transferable put by the IDE itself; lets us answer queries without touching X11 at all.
    private volatile Transferable myCurrentContent = null;
    @Override
    public void init() {
      // Shorten the JDK's clipboard timeout unless the user configured it explicitly.
      if (System.getProperty(DATA_TRANSFER_TIMEOUT_PROPERTY) == null) {
        System.setProperty(DATA_TRANSFER_TIMEOUT_PROPERTY, LONG_TIMEOUT);
      }
    }
    @Override
    public void dispose() {
      resetContent();
    }
    @Override
    public boolean areDataFlavorsAvailable(@NotNull DataFlavor... flavors) {
      // Fast path: content set by the IDE itself needs no X11 round-trip.
      Transferable currentContent = myCurrentContent;
      if (currentContent != null) {
        return ClipboardSynchronizer.areDataFlavorsAvailable(currentContent, flavors);
      }
      try {
        Collection<DataFlavor> contents = checkContentsQuick();
        if (contents != null) {
          return ClipboardSynchronizer.areDataFlavorsAvailable(contents, flavors);
        }
        // Quick check unavailable (non-X11 clipboard implementation) — use the generic path.
        return super.areDataFlavorsAvailable(flavors);
      }
      catch (NullPointerException e) {
        LOG.warn("Java bug #6322854", e);
        return false;
      }
      catch (IllegalArgumentException e) {
        LOG.warn("Java bug #7173464", e);
        return false;
      }
    }
    @Override
    public Transferable getContents() throws IllegalStateException {
      // Fast path: content set by the IDE itself needs no X11 round-trip.
      final Transferable currentContent = myCurrentContent;
      if (currentContent != null) {
        return currentContent;
      }
      try {
        final Collection<DataFlavor> contents = checkContentsQuick();
        if (contents != null && contents.isEmpty()) {
          // Clipboard owner did not respond in time — treat the clipboard as empty.
          return null;
        }
        return super.getContents();
      }
      catch (NullPointerException e) {
        LOG.warn("Java bug #6322854", e);
        return null;
      }
      catch (IllegalArgumentException e) {
        LOG.warn("Java bug #7173464", e);
        return null;
      }
    }
    @Override
    public void setContent(@NotNull final Transferable content, @NotNull final ClipboardOwner owner) {
      myCurrentContent = content;
      super.setContent(content, owner);
    }
    @Override
    public void resetContent() {
      myCurrentContent = null;
    }
    /**
     * Quickly checks availability of data in the X11 clipboard selection, by reflectively calling
     * {@code sun.awt.X11.XClipboard.getClipboardFormats()} with a temporarily shortened
     * data-transfer timeout (restored in the {@code finally} block).
     *
     * @return {@code null} if unable to check (e.g. the clipboard is not an XClipboard);
     *         an empty collection if the clipboard owner doesn't respond timely;
     *         the collection of available data flavors otherwise
     */
    @Nullable
    private static Collection<DataFlavor> checkContentsQuick() {
      final Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
      final Class<? extends Clipboard> aClass = clipboard.getClass();
      if (!"sun.awt.X11.XClipboard".equals(aClass.getName())) return null;
      Method getClipboardFormats = ReflectionUtil.getDeclaredMethod(aClass, "getClipboardFormats");
      final String timeout = System.getProperty(DATA_TRANSFER_TIMEOUT_PROPERTY);
      System.setProperty(DATA_TRANSFER_TIMEOUT_PROPERTY, SHORT_TIMEOUT);
      try {
        final long[] formats = (long[])getClipboardFormats.invoke(clipboard);
        if (formats == null || formats.length == 0) {
          return Collections.emptySet();
        }
        @SuppressWarnings({"unchecked"}) final Set<DataFlavor> set = DataTransferer.getInstance().getFlavorsForFormats(formats, FLAVOR_MAP).keySet();
        return set;
      }
      catch (IllegalAccessException ignore) { }
      catch (IllegalArgumentException ignore) { }
      catch (InvocationTargetException e) {
        // An IllegalStateException from the clipboard must propagate to the caller unchanged.
        final Throwable cause = e.getCause();
        if (cause instanceof IllegalStateException) {
          throw (IllegalStateException)cause;
        }
      }
      finally {
        System.setProperty(DATA_TRANSFER_TIMEOUT_PROPERTY, timeout);
      }
      return null;
    }
  }
  /** Purely in-memory clipboard used in headless/unit-test mode; never touches the system clipboard. */
  private static class HeadlessClipboardHandler extends ClipboardHandler {
    private volatile Transferable myContent = null;
    @Override
    public boolean areDataFlavorsAvailable(@NotNull DataFlavor... flavors) {
      Transferable content = myContent;
      return content != null && ClipboardSynchronizer.areDataFlavorsAvailable(content, flavors);
    }
    @Override
    public Transferable getContents() throws IllegalStateException {
      return myContent;
    }
    @Override
    public void setContent(@NotNull Transferable content, @NotNull ClipboardOwner owner) {
      myContent = content;
    }
    @Override
    public void resetContent() {
      myContent = null;
    }
  }
  /** Returns true if the transferable supports at least one of the given flavors. */
  private static boolean areDataFlavorsAvailable(Transferable contents, DataFlavor... flavors) {
    for (DataFlavor flavor : flavors) {
      if (contents.isDataFlavorSupported(flavor)) {
        return true;
      }
    }
    return false;
  }
  /** Returns true if the flavor collection contains at least one of the given flavors. */
  private static boolean areDataFlavorsAvailable(Collection<DataFlavor> contents, DataFlavor... flavors) {
    for (DataFlavor flavor : flavors) {
      if (contents.contains(flavor)) {
        return true;
      }
    }
    return false;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.NavigableMap;
import java.util.OptionalLong;
import java.util.TreeMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
@Category({ ReplicationTests.class, LargeTests.class })
public class TestWALEntryStream {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestWALEntryStream.class);
private static HBaseTestingUtility TEST_UTIL;
private static Configuration CONF;
private static FileSystem fs;
private static MiniDFSCluster cluster;
private static final TableName tableName = TableName.valueOf("tablename");
private static final byte[] family = Bytes.toBytes("column");
private static final byte[] qualifier = Bytes.toBytes("qualifier");
private static final RegionInfo info = RegionInfoBuilder.newBuilder(tableName)
.setStartKey(HConstants.EMPTY_START_ROW).setEndKey(HConstants.LAST_ROW).build();
private static final NavigableMap<byte[], Integer> scopes = getScopes();
private static NavigableMap<byte[], Integer> getScopes() {
NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
scopes.put(family, 1);
return scopes;
}
private WAL log;
PriorityBlockingQueue<Path> walQueue;
private PathWatcher pathWatcher;
@Rule
public TestName tn = new TestName();
private final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
@BeforeClass
public static void setUpBeforeClass() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
CONF = TEST_UTIL.getConfiguration();
TEST_UTIL.startMiniDFSCluster(3);
cluster = TEST_UTIL.getDFSCluster();
fs = cluster.getFileSystem();
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@Before
public void setUp() throws Exception {
walQueue = new PriorityBlockingQueue<>();
pathWatcher = new PathWatcher();
final WALFactory wals = new WALFactory(CONF, tn.getMethodName());
wals.getWALProvider().addWALActionsListener(pathWatcher);
log = wals.getWAL(info);
}
@After
public void tearDown() throws Exception {
log.close();
}
// Try out different combinations of row count and KeyValue count
@Test
public void testDifferentCounts() throws Exception {
int[] NB_ROWS = { 1500, 60000 };
int[] NB_KVS = { 1, 100 };
// whether compression is used
Boolean[] BOOL_VALS = { false, true };
// long lastPosition = 0;
for (int nbRows : NB_ROWS) {
for (int walEditKVs : NB_KVS) {
for (boolean isCompressionEnabled : BOOL_VALS) {
TEST_UTIL.getConfiguration().setBoolean(HConstants.ENABLE_WAL_COMPRESSION,
isCompressionEnabled);
mvcc.advanceTo(1);
for (int i = 0; i < nbRows; i++) {
appendToLogAndSync(walEditKVs);
}
log.rollWriter();
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
int i = 0;
while (entryStream.hasNext()) {
assertNotNull(entryStream.next());
i++;
}
assertEquals(nbRows, i);
// should've read all entries
assertFalse(entryStream.hasNext());
}
// reset everything for next loop
log.close();
setUp();
}
}
}
}
/**
* Tests basic reading of log appends
*/
@Test
public void testAppendsWithRolls() throws Exception {
appendToLogAndSync();
long oldPos;
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
// There's one edit in the log, read it. Reading past it needs to throw exception
assertTrue(entryStream.hasNext());
WAL.Entry entry = entryStream.peek();
assertSame(entry, entryStream.next());
assertNotNull(entry);
assertFalse(entryStream.hasNext());
assertNull(entryStream.peek());
assertNull(entryStream.next());
oldPos = entryStream.getPosition();
}
appendToLogAndSync();
try (WALEntryStream entryStream = new WALEntryStream(walQueue, CONF, oldPos,
log, null, new MetricsSource("1"))) {
// Read the newly added entry, make sure we made progress
WAL.Entry entry = entryStream.next();
assertNotEquals(oldPos, entryStream.getPosition());
assertNotNull(entry);
oldPos = entryStream.getPosition();
}
// We rolled but we still should see the end of the first log and get that item
appendToLogAndSync();
log.rollWriter();
appendToLogAndSync();
try (WALEntryStream entryStream = new WALEntryStream(walQueue, CONF, oldPos,
log, null, new MetricsSource("1"))) {
WAL.Entry entry = entryStream.next();
assertNotEquals(oldPos, entryStream.getPosition());
assertNotNull(entry);
// next item should come from the new log
entry = entryStream.next();
assertNotEquals(oldPos, entryStream.getPosition());
assertNotNull(entry);
// no more entries to read
assertFalse(entryStream.hasNext());
oldPos = entryStream.getPosition();
}
}
/**
* Tests that if after a stream is opened, more entries come in and then the log is rolled, we
* don't mistakenly dequeue the current log thinking we're done with it
*/
@Test
public void testLogrollWhileStreaming() throws Exception {
appendToLog("1");
appendToLog("2");// 2
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
assertEquals("1", getRow(entryStream.next()));
appendToLog("3"); // 3 - comes in after reader opened
log.rollWriter(); // log roll happening while we're reading
appendToLog("4"); // 4 - this append is in the rolled log
assertEquals("2", getRow(entryStream.next()));
assertEquals(2, walQueue.size()); // we should not have dequeued yet since there's still an
// entry in first log
assertEquals("3", getRow(entryStream.next())); // if implemented improperly, this would be 4
// and 3 would be skipped
assertEquals("4", getRow(entryStream.next())); // 4
assertEquals(1, walQueue.size()); // now we've dequeued and moved on to next log properly
assertFalse(entryStream.hasNext());
}
}
/**
* Tests that if writes come in while we have a stream open, we shouldn't miss them
*/
@Test
public void testNewEntriesWhileStreaming() throws Exception {
appendToLog("1");
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
entryStream.next(); // we've hit the end of the stream at this point
// some new entries come in while we're streaming
appendToLog("2");
appendToLog("3");
// don't see them
assertFalse(entryStream.hasNext());
// But we do if we reset
entryStream.reset();
assertEquals("2", getRow(entryStream.next()));
assertEquals("3", getRow(entryStream.next()));
assertFalse(entryStream.hasNext());
}
}
@Test
public void testResumeStreamingFromPosition() throws Exception {
long lastPosition = 0;
appendToLog("1");
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
entryStream.next(); // we've hit the end of the stream at this point
appendToLog("2");
appendToLog("3");
lastPosition = entryStream.getPosition();
}
// next stream should picks up where we left off
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, lastPosition, log, null, new MetricsSource("1"))) {
assertEquals("2", getRow(entryStream.next()));
assertEquals("3", getRow(entryStream.next()));
assertFalse(entryStream.hasNext()); // done
assertEquals(1, walQueue.size());
}
}
/**
* Tests that if we stop before hitting the end of a stream, we can continue where we left off
* using the last position
*/
@Test
public void testPosition() throws Exception {
long lastPosition = 0;
appendEntriesToLogAndSync(3);
// read only one element
try (WALEntryStream entryStream = new WALEntryStream(walQueue, CONF, lastPosition,
log, null, new MetricsSource("1"))) {
entryStream.next();
lastPosition = entryStream.getPosition();
}
// there should still be two more entries from where we left off
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, lastPosition, log, null, new MetricsSource("1"))) {
assertNotNull(entryStream.next());
assertNotNull(entryStream.next());
assertFalse(entryStream.hasNext());
}
}
@Test
public void testEmptyStream() throws Exception {
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
assertFalse(entryStream.hasNext());
}
}
private ReplicationSource mockReplicationSource(boolean recovered, Configuration conf) {
ReplicationSourceManager mockSourceManager = Mockito.mock(ReplicationSourceManager.class);
when(mockSourceManager.getTotalBufferUsed()).thenReturn(new AtomicLong(0));
Server mockServer = Mockito.mock(Server.class);
ReplicationSource source = Mockito.mock(ReplicationSource.class);
when(source.getSourceManager()).thenReturn(mockSourceManager);
when(source.getSourceMetrics()).thenReturn(new MetricsSource("1"));
when(source.getWALFileLengthProvider()).thenReturn(log);
when(source.getServer()).thenReturn(mockServer);
when(source.isRecovered()).thenReturn(recovered);
return source;
}
private ReplicationSourceWALReader createReader(boolean recovered, Configuration conf) {
ReplicationSource source = mockReplicationSource(recovered, conf);
when(source.isPeerEnabled()).thenReturn(true);
ReplicationSourceWALReader reader =
new ReplicationSourceWALReader(fs, conf, walQueue, 0, getDummyFilter(), source);
reader.start();
return reader;
}
@Test
public void testReplicationSourceWALReader() throws Exception {
appendEntriesToLogAndSync(3);
// get ending position
long position;
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
entryStream.next();
entryStream.next();
entryStream.next();
position = entryStream.getPosition();
}
// start up a reader
Path walPath = walQueue.peek();
ReplicationSourceWALReader reader = createReader(false, CONF);
WALEntryBatch entryBatch = reader.take();
// should've batched up our entries
assertNotNull(entryBatch);
assertEquals(3, entryBatch.getWalEntries().size());
assertEquals(position, entryBatch.getLastWalPosition());
assertEquals(walPath, entryBatch.getLastWalPath());
assertEquals(3, entryBatch.getNbRowKeys());
appendToLog("foo");
entryBatch = reader.take();
assertEquals(1, entryBatch.getNbEntries());
assertEquals("foo", getRow(entryBatch.getWalEntries().get(0)));
}
@Test
public void testReplicationSourceWALReaderRecovered() throws Exception {
appendEntriesToLogAndSync(10);
Path walPath = walQueue.peek();
log.rollWriter();
appendEntriesToLogAndSync(5);
log.shutdown();
Configuration conf = new Configuration(CONF);
conf.setInt("replication.source.nb.capacity", 10);
ReplicationSourceWALReader reader = createReader(true, conf);
WALEntryBatch batch = reader.take();
assertEquals(walPath, batch.getLastWalPath());
assertEquals(10, batch.getNbEntries());
assertFalse(batch.isEndOfFile());
batch = reader.take();
assertEquals(walPath, batch.getLastWalPath());
assertEquals(0, batch.getNbEntries());
assertTrue(batch.isEndOfFile());
walPath = walQueue.peek();
batch = reader.take();
assertEquals(walPath, batch.getLastWalPath());
assertEquals(5, batch.getNbEntries());
assertTrue(batch.isEndOfFile());
assertSame(WALEntryBatch.NO_MORE_DATA, reader.take());
}
// Testcase for HBASE-20206
@Test
public void testReplicationSourceWALReaderWrongPosition() throws Exception {
appendEntriesToLogAndSync(1);
Path walPath = walQueue.peek();
log.rollWriter();
appendEntriesToLogAndSync(20);
TEST_UTIL.waitFor(5000, new ExplainingPredicate<Exception>() {
@Override
public boolean evaluate() throws Exception {
return fs.getFileStatus(walPath).getLen() > 0;
}
@Override
public String explainFailure() throws Exception {
return walPath + " has not been closed yet";
}
});
long walLength = fs.getFileStatus(walPath).getLen();
ReplicationSourceWALReader reader = createReader(false, CONF);
WALEntryBatch entryBatch = reader.take();
assertEquals(walPath, entryBatch.getLastWalPath());
assertTrue("Position " + entryBatch.getLastWalPosition() + " is out of range, file length is " +
walLength, entryBatch.getLastWalPosition() <= walLength);
assertEquals(1, entryBatch.getNbEntries());
assertTrue(entryBatch.isEndOfFile());
Path walPath2 = walQueue.peek();
entryBatch = reader.take();
assertEquals(walPath2, entryBatch.getLastWalPath());
assertEquals(20, entryBatch.getNbEntries());
assertFalse(entryBatch.isEndOfFile());
log.rollWriter();
appendEntriesToLogAndSync(10);
entryBatch = reader.take();
assertEquals(walPath2, entryBatch.getLastWalPath());
assertEquals(0, entryBatch.getNbEntries());
assertTrue(entryBatch.isEndOfFile());
Path walPath3 = walQueue.peek();
entryBatch = reader.take();
assertEquals(walPath3, entryBatch.getLastWalPath());
assertEquals(10, entryBatch.getNbEntries());
assertFalse(entryBatch.isEndOfFile());
}
@Test
public void testReplicationSourceWALReaderDisabled()
throws IOException, InterruptedException, ExecutionException {
appendEntriesToLogAndSync(3);
// get ending position
long position;
try (WALEntryStream entryStream =
new WALEntryStream(walQueue, CONF, 0, log, null, new MetricsSource("1"))) {
entryStream.next();
entryStream.next();
entryStream.next();
position = entryStream.getPosition();
}
// start up a reader
Path walPath = walQueue.peek();
ReplicationSource source = mockReplicationSource(false, CONF);
AtomicInteger invokeCount = new AtomicInteger(0);
AtomicBoolean enabled = new AtomicBoolean(false);
when(source.isPeerEnabled()).then(i -> {
invokeCount.incrementAndGet();
return enabled.get();
});
ReplicationSourceWALReader reader =
new ReplicationSourceWALReader(fs, CONF, walQueue, 0, getDummyFilter(), source);
reader.start();
Future<WALEntryBatch> future = ForkJoinPool.commonPool().submit(() -> {
return reader.take();
});
// make sure that the isPeerEnabled has been called several times
TEST_UTIL.waitFor(30000, () -> invokeCount.get() >= 5);
// confirm that we can read nothing if the peer is disabled
assertFalse(future.isDone());
// then enable the peer, we should get the batch
enabled.set(true);
WALEntryBatch entryBatch = future.get();
// should've batched up our entries
assertNotNull(entryBatch);
assertEquals(3, entryBatch.getWalEntries().size());
assertEquals(position, entryBatch.getLastWalPosition());
assertEquals(walPath, entryBatch.getLastWalPath());
assertEquals(3, entryBatch.getNbRowKeys());
}
private String getRow(WAL.Entry entry) {
Cell cell = entry.getEdit().getCells().get(0);
return Bytes.toString(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
}
private void appendToLog(String key) throws IOException {
final long txid = log.append(info,
new WALKeyImpl(info.getEncodedNameAsBytes(), tableName, System.currentTimeMillis(),
mvcc, scopes), getWALEdit(key), true);
log.sync(txid);
}
private void appendEntriesToLogAndSync(int count) throws IOException {
long txid = -1L;
for (int i = 0; i < count; i++) {
txid = appendToLog(1);
}
log.sync(txid);
}
private void appendToLogAndSync() throws IOException {
appendToLogAndSync(1);
}
private void appendToLogAndSync(int count) throws IOException {
long txid = appendToLog(count);
log.sync(txid);
}
private long appendToLog(int count) throws IOException {
return log.append(info, new WALKeyImpl(info.getEncodedNameAsBytes(), tableName,
System.currentTimeMillis(), mvcc, scopes), getWALEdits(count), true);
}
private WALEdit getWALEdits(int count) {
WALEdit edit = new WALEdit();
for (int i = 0; i < count; i++) {
edit.add(new KeyValue(Bytes.toBytes(System.currentTimeMillis()), family, qualifier,
System.currentTimeMillis(), qualifier));
}
return edit;
}
private WALEdit getWALEdit(String row) {
WALEdit edit = new WALEdit();
edit.add(
new KeyValue(Bytes.toBytes(row), family, qualifier, System.currentTimeMillis(), qualifier));
return edit;
}
// Returns a pass-through filter that accepts every WAL entry unchanged.
private WALEntryFilter getDummyFilter() {
  return new WALEntryFilter() {
    @Override
    public Entry filter(Entry entry) {
      return entry;
    }
  };
}
/**
 * WAL listener that records every rolled WAL file: each new path is pushed
 * onto {@code walQueue} (consumed by the streams under test) and remembered
 * in {@code currentPath} for direct inspection by tests.
 */
class PathWatcher implements WALActionsListener {
  Path currentPath;

  @Override
  public void preLogRoll(Path oldPath, Path newPath) throws IOException {
    walQueue.add(newPath);
    currentPath = newPath;
  }
}
/**
 * Verifies that WALEntryStream never reads past the committed length reported
 * by the supplied length provider: with the length capped one byte short of
 * the real file size only the first entry is visible (even after reset), and
 * once the cap is lifted the second entry becomes readable.
 */
@Test
public void testReadBeyondCommittedLength() throws IOException, InterruptedException {
  appendToLog("1");
  appendToLog("2");
  long size = log.getLogFileSizeIfBeingWritten(walQueue.peek()).getAsLong();
  // Report a committed length one byte short of the real size, hiding entry 2.
  AtomicLong fileLength = new AtomicLong(size - 1);
  try (WALEntryStream entryStream = new WALEntryStream(walQueue, CONF, 0,
    p -> OptionalLong.of(fileLength.get()), null, new MetricsSource("1"))) {
    assertTrue(entryStream.hasNext());
    assertNotNull(entryStream.next());
    // can not get log 2
    assertFalse(entryStream.hasNext());
    Thread.sleep(1000);
    entryStream.reset();
    // still can not get log 2 (reset alone must not bypass the length cap)
    assertFalse(entryStream.hasNext());
    // can get log 2 now
    fileLength.set(size);
    entryStream.reset();
    assertTrue(entryStream.hasNext());
    assertNotNull(entryStream.next());
    assertFalse(entryStream.hasNext());
  }
}
}
| |
package org.apache.helix.integration;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.PropertyKey.Builder;
import org.apache.helix.TestHelper;
import org.apache.helix.ZNRecord;
import org.apache.helix.api.State;
import org.apache.helix.manager.zk.MockParticipant;
import org.apache.helix.manager.zk.MockController;
import org.apache.helix.manager.zk.ZKHelixDataAccessor;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.apache.helix.manager.zk.ZkClient;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.IdealState.RebalanceMode;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.LiveInstance;
import org.apache.helix.model.StateModelDefinition;
import org.apache.helix.tools.ClusterSetup;
import org.apache.helix.tools.ClusterStateVerifier;
import org.apache.helix.tools.ClusterStateVerifier.ZkVerifier;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Integration test for Helix FULL_AUTO rebalancing: verifies that partition
 * assignments stay balanced across live instances as nodes are killed or
 * added, and that dropped resources are fully cleaned up.
 */
public class TestAutoRebalance extends ZkStandAloneCMTestBase {
  // Second test resource, rebalanced only across instances carrying _tag.
  String db2 = TEST_DB + "2";
  // Instance group tag applied to a subset of the nodes in beforeClass().
  String _tag = "SSDSSD";

  /**
   * Creates the cluster and two FULL_AUTO resources (one tag-constrained),
   * starts participants and a controller, and waits for a balanced external
   * view before any test runs.
   */
  @Override
  @BeforeClass
  public void beforeClass() throws Exception {
    // Logger.getRootLogger().setLevel(Level.INFO);
    System.out.println("START " + CLASS_NAME + " at " + new Date(System.currentTimeMillis()));
    String namespace = "/" + CLUSTER_NAME;
    if (_zkclient.exists(namespace)) {
      // Wipe any leftover ZK state from a previous run.
      _zkclient.deleteRecursive(namespace);
    }
    _setupTool = new ClusterSetup(_zkclient);
    // setup storage cluster
    _setupTool.addCluster(CLUSTER_NAME, true);
    _setupTool.addResourceToCluster(CLUSTER_NAME, TEST_DB, _PARTITIONS, STATE_MODEL,
        RebalanceMode.FULL_AUTO + "");
    _setupTool.addResourceToCluster(CLUSTER_NAME, db2, _PARTITIONS, "OnlineOffline",
        RebalanceMode.FULL_AUTO + "");
    for (int i = 0; i < NODE_NR; i++) {
      String storageNodeName = "localhost_" + (START_PORT + i);
      _setupTool.addInstanceToCluster(CLUSTER_NAME, storageNodeName);
    }
    _setupTool.rebalanceStorageCluster(CLUSTER_NAME, TEST_DB, _replica);
    // Tag only the first three instances; db2 is constrained to them.
    for (int i = 0; i < 3; i++) {
      String storageNodeName = "localhost_" + (START_PORT + i);
      _setupTool.getClusterManagementTool().addInstanceTag(CLUSTER_NAME, storageNodeName, _tag);
    }
    // NOTE(review): "ucpx" appears to be an arbitrary key prefix — confirm intent.
    _setupTool.rebalanceCluster(CLUSTER_NAME, db2, 1, "ucpx", _tag);
    // start dummy participants
    for (int i = 0; i < NODE_NR; i++) {
      String instanceName = "localhost_" + (START_PORT + i);
      MockParticipant participant =
          new MockParticipant(_zkaddr, CLUSTER_NAME, instanceName);
      participant.syncStart();
      _participants[i] = participant;
    }
    // start controller
    String controllerName = "controller_0";
    _controller = new MockController(_zkaddr, CLUSTER_NAME, controllerName);
    _controller.syncStart();
    // Block until TEST_DB's external view is balanced across the nodes.
    boolean result =
        ClusterStateVerifier.verifyByZkCallback(new ExternalViewBalancedVerifier(_zkclient,
            CLUSTER_NAME, TEST_DB));
    Assert.assertTrue(result);
  }

  /**
   * Adds resources, verifies they balance, then drops them via the command
   * line and verifies all current-state and external-view entries disappear.
   */
  @Test()
  public void testDropResourceAutoRebalance() throws Exception {
    // add a resource to be dropped
    _setupTool.addResourceToCluster(CLUSTER_NAME, "MyDB", _PARTITIONS, "OnlineOffline",
        RebalanceMode.FULL_AUTO + "");
    _setupTool.rebalanceStorageCluster(CLUSTER_NAME, "MyDB", 1);
    boolean result =
        ClusterStateVerifier.verifyByZkCallback(new ExternalViewBalancedVerifier(_zkclient,
            CLUSTER_NAME, "MyDB"));
    Assert.assertTrue(result);
    String command = "-zkSvr " + _zkaddr + " -dropResource " + CLUSTER_NAME + " " + "MyDB";
    ClusterSetup.processCommandLineArgs(command.split(" "));
    // Wait (up to 30s) for the drop to purge state on all five instances.
    TestHelper.verifyWithTimeout("verifyEmptyCurStateAndExtView", 30 * 1000, CLUSTER_NAME, "MyDB",
        TestHelper.<String> setOf("localhost_12918", "localhost_12919", "localhost_12920",
            "localhost_12921", "localhost_12922"), _zkaddr);
    // add a resource to be dropped
    _setupTool.addResourceToCluster(CLUSTER_NAME, "MyDB2", _PARTITIONS, "MasterSlave",
        RebalanceMode.FULL_AUTO + "");
    _setupTool.rebalanceStorageCluster(CLUSTER_NAME, "MyDB2", 3);
    result =
        ClusterStateVerifier.verifyByZkCallback(new ExternalViewBalancedVerifier(_zkclient,
            CLUSTER_NAME, "MyDB2"));
    Assert.assertTrue(result);
    command = "-zkSvr " + _zkaddr + " -dropResource " + CLUSTER_NAME + " " + "MyDB2";
    ClusterSetup.processCommandLineArgs(command.split(" "));
    TestHelper.verifyWithTimeout("verifyEmptyCurStateAndExtView", 30 * 1000, CLUSTER_NAME, "MyDB2",
        TestHelper.<String> setOf("localhost_12918", "localhost_12919", "localhost_12920",
            "localhost_12921", "localhost_12922"), _zkaddr);
  }

  /**
   * Kills one participant and adds two new ones, then verifies both resources
   * re-balance; also checks db2 stays confined to the live tagged instances.
   */
  @Test()
  public void testAutoRebalance() throws Exception {
    // kill 1 node
    _participants[0].syncStop();
    boolean result =
        ClusterStateVerifier.verifyByZkCallback(new ExternalViewBalancedVerifier(_zkclient,
            CLUSTER_NAME, TEST_DB));
    Assert.assertTrue(result);
    // add 2 nodes
    for (int i = 0; i < 2; i++) {
      String storageNodeName = "localhost_" + (1000 + i);
      _setupTool.addInstanceToCluster(CLUSTER_NAME, storageNodeName);
      MockParticipant participant =
          new MockParticipant(_zkaddr, CLUSTER_NAME, storageNodeName.replace(':', '_'));
      participant.syncStart();
    }
    // Give the controller time to react to the topology change.
    Thread.sleep(5000);
    result =
        ClusterStateVerifier.verifyByZkCallback(new ExternalViewBalancedVerifier(_zkclient,
            CLUSTER_NAME, TEST_DB));
    Assert.assertTrue(result);
    result =
        ClusterStateVerifier.verifyByZkCallback(new ExternalViewBalancedVerifier(_zkclient,
            CLUSTER_NAME, db2));
    Assert.assertTrue(result);
    HelixDataAccessor accessor =
        new ZKHelixDataAccessor(CLUSTER_NAME, new ZkBaseDataAccessor<ZNRecord>(_zkclient));
    Builder keyBuilder = accessor.keyBuilder();
    ExternalView ev = accessor.getProperty(keyBuilder.externalView(db2));
    // Collect the distinct instances hosting db2 partitions.
    Set<String> instancesSet = new HashSet<String>();
    for (String partitionName : ev.getRecord().getMapFields().keySet()) {
      Map<String, String> assignmentMap = ev.getRecord().getMapField(partitionName);
      for (String instance : assignmentMap.keySet()) {
        instancesSet.add(instance);
      }
    }
    // Three instances were tagged in beforeClass(), but participant 0 (a tagged
    // one) was stopped above, so db2 should live on exactly the 2 remaining.
    Assert.assertEquals(instancesSet.size(), 2);
  }

  /**
   * Returns true iff every instance's master-partition count is within +/-1 of
   * partitionCount / instances and the per-instance counts sum to
   * partitionCount. Returns false for a null external view.
   */
  // NOTE(review): the `replica` parameter is currently unused in this check.
  static boolean verifyBalanceExternalView(ZNRecord externalView, int partitionCount,
      String masterState, int replica, int instances) {
    if (externalView == null) {
      return false;
    }
    // Count how many partitions each instance hosts in the master state.
    Map<String, Integer> masterPartitionsCountMap = new HashMap<String, Integer>();
    for (String partitionName : externalView.getMapFields().keySet()) {
      Map<String, String> assignmentMap = externalView.getMapField(partitionName);
      // Assert.assertTrue(assignmentMap.size() >= replica);
      for (String instance : assignmentMap.keySet()) {
        if (assignmentMap.get(instance).equals(masterState)) {
          if (!masterPartitionsCountMap.containsKey(instance)) {
            masterPartitionsCountMap.put(instance, 0);
          }
          masterPartitionsCountMap.put(instance, masterPartitionsCountMap.get(instance) + 1);
        }
      }
    }
    int perInstancePartition = partitionCount / instances;
    int totalCount = 0;
    for (String instanceName : masterPartitionsCountMap.keySet()) {
      int instancePartitionCount = masterPartitionsCountMap.get(instanceName);
      totalCount += instancePartitionCount;
      if (Math.abs(instancePartitionCount - perInstancePartition) > 1) {
        // System.out.println("instanceName: " + instanceName + ", instancePartitionCnt: "
        //     + instancePartitionCount + ", perInstancePartition: " + perInstancePartition);
        return false;
      }
    }
    if (partitionCount != totalCount) {
      // System.out.println("partitionCnt: " + partitionCount + ", totalCnt: " + totalCount);
      return false;
    }
    return true;
  }

  /**
   * ZkVerifier that derives the expected partition count, master state and
   * (tag-aware) instance count from the ideal state, then delegates to
   * verifyBalanceExternalView on the resource's external view.
   */
  public static class ExternalViewBalancedVerifier extends ZkVerifier {
    String _resourceName;

    public ExternalViewBalancedVerifier(ZkClient client, String clusterName, String resourceName) {
      super(clusterName, client);
      _resourceName = resourceName;
    }

    @Override
    public boolean verify() {
      HelixDataAccessor accessor =
          new ZKHelixDataAccessor(getClusterName(), _baseAccessor);
      Builder keyBuilder = accessor.keyBuilder();
      IdealState idealState = accessor.getProperty(keyBuilder.idealStates(_resourceName));
      if (idealState == null) {
        return false;
      }
      int numberOfPartitions = idealState.getRecord().getListFields().size();
      String stateModelDefName = idealState.getStateModelDefId().stringify();
      StateModelDefinition stateModelDef =
          accessor.getProperty(keyBuilder.stateModelDef(stateModelDefName));
      // The highest-priority state of the model is treated as "master".
      State masterValue = stateModelDef.getTypedStatesPriorityList().get(0);
      int replicas = Integer.parseInt(idealState.getReplicas());
      String instanceGroupTag = idealState.getInstanceGroupTag();
      // Count live instances carrying the resource's tag; fall back to all
      // live instances when none are tagged.
      int instances = 0;
      Map<String, LiveInstance> liveInstanceMap =
          accessor.getChildValuesMap(keyBuilder.liveInstances());
      Map<String, InstanceConfig> instanceConfigMap =
          accessor.getChildValuesMap(keyBuilder.instanceConfigs());
      for (String liveInstanceName : liveInstanceMap.keySet()) {
        if (instanceConfigMap.get(liveInstanceName).containsTag(instanceGroupTag)) {
          instances++;
        }
      }
      if (instances == 0) {
        instances = liveInstanceMap.size();
      }
      ExternalView ev = accessor.getProperty(keyBuilder.externalView(_resourceName));
      if (ev == null) {
        return false;
      }
      return verifyBalanceExternalView(ev.getRecord(), numberOfPartitions, masterValue.toString(),
          replicas, instances);
    }
  }
}
| |
/*<license>
Copyright 2007, PeopleWare n.v.
NO RIGHTS ARE GRANTED FOR THE USE OF THIS SOFTWARE, EXCEPT, IN WRITING,
TO SELECTED PARTIES.
</license>*/
package org.ppwcode.util.smallfries;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;
import static org.ppwcode.util.smallfries_I.MathUtil.arithmeticMean;
import static org.ppwcode.util.smallfries_I.MathUtil.equalPrimitiveValue;
import org.apache.commons.math.stat.descriptive.moment.Mean;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.ppwcode.util.smallfries_I.StandardError;
@Copyright("2007 - $Date$, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision$",
date = "$Date$")
/**
 * Unit tests for {@link StandardError}, in both its bias-corrected ("sample")
 * and uncorrected ("population") variants. Results are compared against
 * values computed by hand with {@link #standardErrorByHand(boolean, double...)}.
 */
public class TestStandardError {

  @Before
  public void setUp() throws Exception {
    // NOP
  }

  @After
  public void tearDown() throws Exception {
    // NOP
  }

  // Accumulator instances exercised via incrementAll()/getResult().
  private StandardError $sampleStandardError = new StandardError(true);
  private StandardError $populationStandardError = new StandardError(false);
  // Reference instances used to cross-check the evaluate() overloads.
  private StandardError $sampleStandardError2 = new StandardError(true);
  private StandardError $populationStandardError2 = new StandardError(false);

  // The no-arg constructor yields an empty, bias-corrected statistic.
  @Test
  public void constructor1() {
    StandardError standardError = new StandardError();
    assertTrue(standardError.isBiasCorrected());
    assertEquals(0L, standardError.getN());
    assertEquals(Double.NaN, standardError.getResult());
  }

  // The boolean constructor controls bias correction; both variants start empty.
  @Test
  public void constructor2() {
    StandardError standardError1 = new StandardError(false);
    assertFalse(standardError1.isBiasCorrected());
    assertEquals(0L, standardError1.getN());
    assertEquals(Double.NaN, standardError1.getResult());
    StandardError standardError2 = new StandardError(true);
    assertTrue(standardError2.isBiasCorrected());
    assertEquals(0L, standardError2.getN());
    assertEquals(Double.NaN, standardError2.getResult());
  }

  // getResult() over representative inputs, including empty, negative, zero,
  // infinite and NaN values.
  @Test
  public void getResult() {
    double[] values;
    // []
    values = new double[0];
    checkGetResult(values);
    // [1.1]
    values = new double[] {1.1};
    checkGetResult(values);
    // [1.1, 2.2]
    values = new double[] {1.1, 2.2};
    checkGetResult(values);
    // [1.1, 2.2, 3.3]
    values = new double[] {1.1, 2.2, 3.3};
    checkGetResult(values);
    // [1.1, 2.2, 3.3, -5.0]
    values = new double[] {1.1, 2.2, 3.3, -5.0};
    checkGetResult(values);
    // [1.1, 2.2, 3.3, 0.0]
    values = new double[] {1.1, 2.2, 3.3, 0.0};
    checkGetResult(values);
    // [1.1, 2.2, 3.3, Double.POSITIVE_INFINITY]
    values = new double[] {1.1, 2.2, 3.3, Double.POSITIVE_INFINITY};
    checkGetResult(values);
    // [1.1, 2.2, 3.3, Double.NEGATIVE_INFINITY]
    values = new double[] {1.1, 2.2, 3.3, Double.NEGATIVE_INFINITY};
    checkGetResult(values);
    // [1.1, 2.2, 3.3, Double.NaN]
    values = new double[] {1.1, 2.2, 3.3, Double.NaN};
    checkGetResult(values);
  }

  // Feeds values into both variants and compares getResult() with the
  // hand-computed standard error.
  private void checkGetResult(double[] values) {
    $sampleStandardError.clear();
    $populationStandardError.clear();
    $sampleStandardError.incrementAll(values);
    $populationStandardError.incrementAll(values);
    assertEquals((long)values.length, $sampleStandardError.getN());
    assertEquals((long)values.length, $populationStandardError.getN());
    assertTrue(equalPrimitiveValue(sampleStandardErrorByHand(values), $sampleStandardError.getResult()));
    assertTrue(equalPrimitiveValue(populationStandardErrorByHand(values), $populationStandardError.getResult()));
  }

  /**
   * Hand-computed standard error:
   * sqrt(sum((x_i - mean)^2) / ((n - x) * n)), with x = 1 when {@code b}
   * (bias-corrected / sample) and x = 0 otherwise (population).
   * Empty input yields NaN, a single value yields 0, any NaN yields NaN,
   * and any infinity yields +Infinity.
   *
   * @pre doubles != null;
   */
  private double standardErrorByHand(boolean b, double... doubles) {
    assert doubles != null;
    if (doubles.length == 0) {
      return Double.NaN;
    }
    else if (doubles.length == 1) {
      return 0.0;
    }
    else if (Util.containsNaN(doubles)) {
      return Double.NaN;
    }
    else if (Util.containsInfinity(doubles)) {
      return Double.POSITIVE_INFINITY;
    }
    else {
      double sum = 0.0;
      double mean = arithmeticMean(doubles);
      for (int i = 0; i < doubles.length; i++) {
        sum += Math.pow(doubles[i]-mean, 2);
      }
      double x = b ? 1 : 0; // sample: n - 1, population: n
      double error = Math.sqrt(sum / ((doubles.length - x)*doubles.length));
      return error;
    }
  }

  /**
   * Bias-corrected (sample) variant of {@link #standardErrorByHand(boolean, double...)}.
   *
   * @pre doubles != null;
   */
  private double sampleStandardErrorByHand(double... doubles) {
    return standardErrorByHand(true, doubles);
  }

  /**
   * Uncorrected (population) variant of {@link #standardErrorByHand(boolean, double...)}.
   *
   * @pre doubles != null;
   */
  private double populationStandardErrorByHand(double... doubles) {
    return standardErrorByHand(false, doubles);
  }

  // evaluate() over the same inputs; values2 embeds values at offset `begin`
  // so the windowed overloads can be checked too.
  @Test
  public void evaluate() {
    double[] values;
    double[] values2;
    // []
    values = new double[0];
    values2 = new double[] {5.5, 6.6, 7.7, 8.8};
    checkEvaluate(values, values2, 2);
    // [1.1]
    values = new double[] {1.1};
    values2 = new double[] {6.6, 7.7, 1.1, 8.8, 9.9};
    checkEvaluate(values, values2, 2);
    // [1.1, 2.2]
    values = new double[] {1.1, 2.2};
    values2 = new double[] {6.6, 7.7, 1.1, 2.2, 8.8, 9.9};
    checkEvaluate(values, values2, 2);
    // [1.1, 2.2, 3.3]
    values = new double[] {1.1, 2.2, 3.3};
    values2 = new double[] {6.6, 7.7, 1.1, 2.2, 3.3, 8.8, 9.9};
    checkEvaluate(values, values2, 2);
    // [1.1, 2.2, 3.3, -5.0]
    values = new double[] {1.1, 2.2, 3.3, -5.0};
    values2 = new double[] {6.6, 7.7, 1.1, 2.2, 3.3, -5.0, 8.8, 9.9};
    checkEvaluate(values, values2, 2);
    // [1.1, 2.2, 3.3, 0.0]
    values = new double[] {1.1, 2.2, 3.3, 0.0};
    values2 = new double[] {6.6, 7.7, 1.1, 2.2, 3.3, 0.0, 8.8, 9.9};
    checkEvaluate(values, values2, 2);
    // [1.1, 2.2, 3.3, Double.POSITIVE_INFINITY]
    // @remark In Variance, there is a difference between getResult() and evaluate for values
    //         containing Double.POSITIVE_INFINITY and Double.NEGATIVE_INFINITY.
    //         getResult() --> Infinity
    //         evaluate() --> NaN
    // values = new double[] {1.1, 2.2, 3.3, Double.POSITIVE_INFINITY};
    // values2 = new double[] {6.6, 7.7, 1.1, 2.2, 3.3, Double.POSITIVE_INFINITY, 8.8, 9.9};
    // checkEvaluate(values, values2, 2);
    // [1.1, 2.2, 3.3, Double.NEGATIVE_INFINITY]
    // values = new double[] {1.1, 2.2, 3.3, Double.NEGATIVE_INFINITY};
    // values2 = new double[] {6.6, 7.7, 1.1, 2.2, 3.3, Double.NEGATIVE_INFINITY, 8.8, 9.9};
    // checkEvaluate(values, values2, 2);
    // [1.1, 2.2, 3.3, Double.NaN]
    values = new double[] {1.1, 2.2, 3.3, Double.NaN};
    values2 = new double[] {6.6, 7.7, 1.1, 2.2, 3.3, Double.NaN, 8.8, 9.9};
    checkEvaluate(values, values2, 2);
  }

  // Checks every evaluate() overload against the result accumulated via
  // incrementAll() on the reference instances.
  private void checkEvaluate(double[] values, double[] values2, int begin) {
    Mean m = new Mean();
    double mean = m.evaluate(values);
    $sampleStandardError2.clear();
    $sampleStandardError2.incrementAll(values);
    assertTrue(equalPrimitiveValue($sampleStandardError2.getResult(), $sampleStandardError.evaluate(values)));
    assertTrue(equalPrimitiveValue($sampleStandardError2.getResult(), $sampleStandardError.evaluate(values, 0, values.length)));
    assertTrue(equalPrimitiveValue($sampleStandardError2.getResult(), $sampleStandardError.evaluate(values2, begin, values.length)));
    assertTrue(equalPrimitiveValue($sampleStandardError2.getResult(), $sampleStandardError.evaluate(values, mean)));
    assertTrue(equalPrimitiveValue($sampleStandardError2.getResult(), $sampleStandardError.evaluate(values2, mean, begin, values.length)));
    $populationStandardError2.clear();
    $populationStandardError2.incrementAll(values);
    assertTrue(equalPrimitiveValue($populationStandardError2.getResult(), $populationStandardError.evaluate(values)));
    assertTrue(equalPrimitiveValue($populationStandardError2.getResult(), $populationStandardError.evaluate(values, 0, values.length)));
    assertTrue(equalPrimitiveValue($populationStandardError2.getResult(), $populationStandardError.evaluate(values2, begin, values.length)));
    assertTrue(equalPrimitiveValue($populationStandardError2.getResult(), $populationStandardError.evaluate(values, mean)));
    assertTrue(equalPrimitiveValue($populationStandardError2.getResult(), $populationStandardError.evaluate(values2, mean, begin, values.length)));
  }
}
| |
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.extras.codecs.guava;
import com.datastax.driver.core.*;
import com.datastax.driver.core.CCMBridge.PerClassSingleNodeCluster;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.utils.CassandraVersion;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static com.datastax.driver.core.TypeCodec.*;
import static com.datastax.driver.core.querybuilder.QueryBuilder.*;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests for {@link OptionalCodec}: validates the round trip between
 * {@link Optional} values and column values (unset / null / empty / present)
 * for collection, primitive and nullable CQL types.
 */
public class OptionalCodecTest extends PerClassSingleNodeCluster {

  // Codec under test for the list<text> column c3.
  private final OptionalCodec<List<String>> optionalCodec = new OptionalCodec<List<String>>(list(varchar()));
  private final CodecRegistry registry = new CodecRegistry().register(optionalCodec);

  // Rebuilt before each test by createBuiltStatements().
  private BuiltStatement insertStmt;
  private BuiltStatement selectStmt;

  @Override
  protected Collection<String> getTableDefinitions() {
    return Collections.singletonList("CREATE TABLE foo (c1 text, c2 text, c3 list<text>, c4 bigint, c5 decimal, PRIMARY KEY (c1, c2))");
  }

  // Register the optional codec with the cluster used by the tests.
  @Override
  protected Cluster.Builder configure(Cluster.Builder builder) {
    return builder.withCodecRegistry(registry);
  }

  @BeforeMethod(groups = "short")
  public void createBuiltStatements() throws Exception {
    insertStmt = insertInto("foo").value("c1", bindMarker()).value("c2", bindMarker()).value("c3", bindMarker());
    selectStmt = select("c2", "c3").from("foo").where(eq("c1", bindMarker()));
  }

  /**
   * <p>
   * Validates that if a column is unset, retrieving the value using {@link OptionalCodec} should return
   * an {@link Optional#absent()} value. Since CQL Lists can't differentiate between null and empty lists, the
   * OptionalCodec should be smart enough to map an empty list to absent.
   * </p>
   *
   * @test_category data_types:serialization
   * @expected_result an absent value.
   * @jira_ticket JAVA-846
   * @since 2.2.0
   */
  @Test(groups = "short")
  @CassandraVersion(major = 2.2)
  public void should_map_unset_value_to_absent() {
    PreparedStatement insertPrep = session.prepare(this.insertStmt);
    PreparedStatement selectPrep = session.prepare(this.selectStmt);
    // c3 is deliberately left unset on the bound statement.
    BoundStatement bs = insertPrep.bind();
    bs.setString(0, "should_map_unset_value_to_absent");
    bs.setString(1, "1");
    session.execute(bs);
    ResultSet results = session.execute(selectPrep.bind("should_map_unset_value_to_absent"));
    assertThat(results.getAvailableWithoutFetching()).isEqualTo(1);
    Row row = results.one();
    assertThat(row.getString("c2")).isEqualTo("1");
    assertThat(row.get("c3", optionalCodec.getJavaType())).isEqualTo(Optional.absent());
  }

  /**
   * <p>
   * Validates that if a column is set to {@link Optional#absent()} using {@link OptionalCodec} it should be
   * stored as null and retrieving it should return {@link Optional#absent()} using {@link OptionalCodec}.
   * </p>
   *
   * @test_category data_types:serialization
   * @expected_result an absent value.
   * @jira_ticket JAVA-846
   * @since 2.2.0
   */
  @Test(groups = "short")
  public void should_map_absent_null_value_to_absent() {
    PreparedStatement insertPrep = session.prepare(this.insertStmt);
    PreparedStatement selectPrep = session.prepare(this.selectStmt);
    BoundStatement bs = insertPrep.bind();
    bs.setString(0, "should_map_absent_null_value_to_absent");
    bs.setString(1, "1");
    // Explicitly bind an absent optional for the list column.
    bs.set(2, Optional.<List<String>>absent(), optionalCodec.getJavaType());
    session.execute(bs);
    ResultSet results = session.execute(selectPrep.bind("should_map_absent_null_value_to_absent"));
    assertThat(results.getAvailableWithoutFetching()).isEqualTo(1);
    Row row = results.one();
    assertThat(row.getString("c2")).isEqualTo("1");
    // Without the codec, a null list column reads back as an empty list.
    assertThat(row.getList("c3", String.class)).isEmpty();
    assertThat(row.get("c3", optionalCodec.getJavaType())).isEqualTo(Optional.absent());
  }

  /**
   * <p>
   * Validates that if a column is set to a present {@link Optional} value using {@link OptionalCodec} it should
   * be stored as the option's value, and that the column is readable both through {@link OptionalCodec}
   * and as its actual value when read without the codec.
   * </p>
   *
   * @test_category data_types:serialization
   * @expected_result The option's value is stored appropriately and is retrievable with and without OptionalCodec.
   * @jira_ticket JAVA-846
   * @since 2.2.0
   */
  @Test(groups = "short")
  public void should_map_some_back_to_itself() {
    PreparedStatement insertPrep = session.prepare(this.insertStmt);
    PreparedStatement selectPrep = session.prepare(this.selectStmt);
    List<String> data = Lists.newArrayList("1", "2", "3");
    BoundStatement bs = insertPrep.bind();
    bs.setString(0, "should_map_some_back_to_itself");
    bs.setString(1, "1");
    bs.set(2, Optional.of(data), optionalCodec.getJavaType());
    session.execute(bs);
    ResultSet results = session.execute(selectPrep.bind("should_map_some_back_to_itself"));
    assertThat(results.getAvailableWithoutFetching()).isEqualTo(1);
    Row row = results.one();
    assertThat(row.getString("c2")).isEqualTo("1");
    // Ensure data stored correctly.
    assertThat(row.getList("c3", String.class)).isEqualTo(data);
    // Ensure data retrievable using Option codec.
    Optional<List<String>> returnData = row.get("c3", optionalCodec.getJavaType());
    assertThat(returnData.isPresent()).isTrue();
    assertThat(returnData.get()).isEqualTo(data);
  }

  // An absent optional over a primitive CQL type (bigint): the codec reads
  // absent, getLong returns the primitive default 0, get(Long.class) is null.
  @Test(groups = "short")
  public void should_map_a_primitive_type_to_absent() {
    OptionalCodec<Long> optionalLongCodec = new OptionalCodec<Long>(bigint());
    cluster.getConfiguration().getCodecRegistry().register(optionalLongCodec);
    PreparedStatement stmt = session.prepare("insert into foo (c1, c2, c4) values (?,?,?)");
    BoundStatement bs = stmt.bind();
    bs.setString(0, "should_map_a_primitive_type_to_absent");
    bs.setString(1, "1");
    bs.set(2, Optional.<Long>absent(), optionalLongCodec.getJavaType());
    session.execute(bs);
    PreparedStatement selectBigint = session.prepare("select c1, c4 from foo where c1=?");
    ResultSet results = session.execute(selectBigint.bind("should_map_a_primitive_type_to_absent"));
    assertThat(results.getAvailableWithoutFetching()).isEqualTo(1);
    Row row = results.one();
    assertThat(row.get("c4", optionalLongCodec.getJavaType())).isEqualTo(Optional.<Long>absent());
    assertThat(row.getLong("c4")).isEqualTo(0L); // This will return a 0L since it returns the primitive value.
    assertThat(row.get("c4", Long.class)).isNull();
  }

  // An absent optional over a nullable CQL type (decimal): the codec reads
  // absent and the plain accessor yields null.
  @Test(groups = "short")
  public void should_map_a_nullable_type_to_absent() {
    OptionalCodec<BigDecimal> optionalDecimalCodec = new OptionalCodec<BigDecimal>(decimal());
    cluster.getConfiguration().getCodecRegistry().register(optionalDecimalCodec);
    PreparedStatement stmt = session.prepare("insert into foo (c1, c2, c5) values (?,?,?)");
    BoundStatement bs = stmt.bind();
    bs.setString(0, "should_map_a_nullable_type_to_absent");
    bs.setString(1, "1");
    bs.set(2, Optional.<BigDecimal>absent(), optionalDecimalCodec.getJavaType());
    session.execute(bs);
    PreparedStatement selectDecimal = session.prepare("select c1, c5 from foo where c1=?");
    ResultSet results = session.execute(selectDecimal.bind("should_map_a_nullable_type_to_absent"));
    assertThat(results.getAvailableWithoutFetching()).isEqualTo(1);
    Row row = results.one();
    assertThat(row.get("c5", optionalDecimalCodec.getJavaType())).isEqualTo(Optional.<BigDecimal>absent());
    assertThat(row.getDecimal("c5")).isNull(); // Since BigDecimal is not a primitive it is nullable so expect null.
  }
}
| |
package de.mwg.web.web.rest;
import de.mwg.web.SimpleWebrtcServerApp;
import de.mwg.web.domain.AnnotationAsPicture;
import de.mwg.web.repository.AnnotationAsPictureRepository;
import de.mwg.web.service.AnnotationAsPictureService;
import de.mwg.web.service.dto.AnnotationAsPictureDTO;
import de.mwg.web.service.mapper.AnnotationAsPictureMapper;
import de.mwg.web.web.rest.errors.ExceptionTranslator;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import java.util.List;
//import static de.mwg.web.web.rest.TestUtil.createFormattingConversionService;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the AnnotationAsPictureResource REST controller.
*
* @see AnnotationAsPictureResource
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = SimpleWebrtcServerApp.class)
public class AnnotationAsPictureResourceIntTest {
// Fixture values: entities are created with DEFAULT_* values; UPDATED_* values
// are the counterparts used by update tests.
private static final String DEFAULT_NAME = "AAAAAAAAAA";
private static final String UPDATED_NAME = "BBBBBBBBBB";
private static final String DEFAULT_FILE_NAME = "AAAAAAAAAA";
private static final String UPDATED_FILE_NAME = "BBBBBBBBBB";
private static final String DEFAULT_PATH = "AAAAAAAAAA";
private static final String UPDATED_PATH = "BBBBBBBBBB";
private static final String DEFAULT_FOLDER = "AAAAAAAAAA";
private static final String UPDATED_FOLDER = "BBBBBBBBBB";
private static final String DEFAULT_TOOL_NAME = "AAAAAAAAAA";
private static final String UPDATED_TOOL_NAME = "BBBBBBBBBB";

// Collaborators injected from the Spring context.
@Autowired
private AnnotationAsPictureRepository annotationAsPictureRepository;
@Autowired
private AnnotationAsPictureMapper annotationAsPictureMapper;
@Autowired
private AnnotationAsPictureService annotationAsPictureService;
@Autowired
private MappingJackson2HttpMessageConverter jacksonMessageConverter;
@Autowired
private PageableHandlerMethodArgumentResolver pageableArgumentResolver;
@Autowired
private ExceptionTranslator exceptionTranslator;
@Autowired
private EntityManager em;

// Standalone MockMvc targeting only AnnotationAsPictureResource; built in setup().
private MockMvc restAnnotationAsPictureMockMvc;

// Entity fixture, recreated before every test by initTest().
private AnnotationAsPicture annotationAsPicture;
/** Wires a standalone MockMvc around the resource under test. */
@Before
public void setup() {
    MockitoAnnotations.initMocks(this);
    final AnnotationAsPictureResource resource =
        new AnnotationAsPictureResource(annotationAsPictureService);
    this.restAnnotationAsPictureMockMvc = MockMvcBuilders.standaloneSetup(resource)
        .setCustomArgumentResolvers(pageableArgumentResolver)
        .setControllerAdvice(exceptionTranslator)
        // .setConversionService(createFormattingConversionService())
        .setMessageConverters(jacksonMessageConverter)
        .build();
}
/**
 * Create an entity for this test.
 *
 * This is a static method, as tests for other entities might also need it,
 * if they test an entity which requires the current entity.
 */
public static AnnotationAsPicture createEntity(EntityManager em) {
    // Build the fixture with the DEFAULT_* values; it is not persisted here.
    return new AnnotationAsPicture()
        .name(DEFAULT_NAME)
        .fileName(DEFAULT_FILE_NAME)
        .path(DEFAULT_PATH)
        .folder(DEFAULT_FOLDER)
        .toolName(DEFAULT_TOOL_NAME);
}
// Recreate a fresh (unsaved) entity fixture before each test.
@Before
public void initTest() {
    annotationAsPicture = createEntity(em);
}
/** POSTing a valid DTO creates the entity and persists all fields. */
@Test
@Transactional
public void createAnnotationAsPicture() throws Exception {
    final int sizeBefore = annotationAsPictureRepository.findAll().size();
    // Create the AnnotationAsPicture
    final AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(annotationAsPicture);
    restAnnotationAsPictureMockMvc.perform(post("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isCreated());
    // Validate the AnnotationAsPicture in the database
    final List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore + 1);
    final AnnotationAsPicture saved = all.get(all.size() - 1);
    assertThat(saved.getName()).isEqualTo(DEFAULT_NAME);
    assertThat(saved.getFileName()).isEqualTo(DEFAULT_FILE_NAME);
    assertThat(saved.getPath()).isEqualTo(DEFAULT_PATH);
    assertThat(saved.getFolder()).isEqualTo(DEFAULT_FOLDER);
    assertThat(saved.getToolName()).isEqualTo(DEFAULT_TOOL_NAME);
}
// POSTing a DTO that already carries an ID must be rejected with 400 and must
// not change the database.
// NOTE(review): currently @Ignore'd — the reason is not recorded here; confirm
// whether the resource enforces the existing-ID check before re-enabling.
@Test
@Ignore
@Transactional
public void createAnnotationAsPictureWithExistingId() throws Exception {
    int databaseSizeBeforeCreate = annotationAsPictureRepository.findAll().size();
    // Create the AnnotationAsPicture with an existing ID
    annotationAsPicture.setId(1L);
    AnnotationAsPictureDTO annotationAsPictureDTO = annotationAsPictureMapper.toDto(annotationAsPicture);
    // An entity with an existing ID cannot be created, so this API call must fail
    restAnnotationAsPictureMockMvc.perform(post("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(annotationAsPictureDTO)))
        .andExpect(status().isBadRequest());
    // Validate the AnnotationAsPicture in the database
    List<AnnotationAsPicture> annotationAsPictureList = annotationAsPictureRepository.findAll();
    assertThat(annotationAsPictureList).hasSize(databaseSizeBeforeCreate);
}
@Test
@Transactional
public void checkNameIsRequired() throws Exception {
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    // Null out the mandatory field; bean validation must reject the request.
    annotationAsPicture.setName(null);
    AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(annotationAsPicture);

    restAnnotationAsPictureMockMvc.perform(post("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isBadRequest());

    // The rejected entity must not reach the database.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore);
}
@Test
@Transactional
public void checkFileNameIsRequired() throws Exception {
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    // Null out the mandatory field; bean validation must reject the request.
    annotationAsPicture.setFileName(null);
    AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(annotationAsPicture);

    restAnnotationAsPictureMockMvc.perform(post("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isBadRequest());

    // The rejected entity must not reach the database.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore);
}
@Test
@Transactional
public void checkPathIsRequired() throws Exception {
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    // Null out the mandatory field; bean validation must reject the request.
    annotationAsPicture.setPath(null);
    AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(annotationAsPicture);

    restAnnotationAsPictureMockMvc.perform(post("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isBadRequest());

    // The rejected entity must not reach the database.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore);
}
@Test
@Transactional
public void checkFolderIsRequired() throws Exception {
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    // Null out the mandatory field; bean validation must reject the request.
    annotationAsPicture.setFolder(null);
    AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(annotationAsPicture);

    restAnnotationAsPictureMockMvc.perform(post("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isBadRequest());

    // The rejected entity must not reach the database.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore);
}
@Test
@Transactional
public void getAllAnnotationAsPictures() throws Exception {
    // Initialize the database with one persisted entity.
    annotationAsPictureRepository.saveAndFlush(annotationAsPicture);
    // The list endpoint must return JSON containing that entity with all of
    // its default field values.
    restAnnotationAsPictureMockMvc.perform(get("/api/annotation-as-pictures?sort=id,desc"))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
        .andExpect(jsonPath("$.[*].id").value(hasItem(annotationAsPicture.getId().intValue())))
        .andExpect(jsonPath("$.[*].name").value(hasItem(DEFAULT_NAME.toString())))
        .andExpect(jsonPath("$.[*].fileName").value(hasItem(DEFAULT_FILE_NAME.toString())))
        .andExpect(jsonPath("$.[*].path").value(hasItem(DEFAULT_PATH.toString())))
        .andExpect(jsonPath("$.[*].folder").value(hasItem(DEFAULT_FOLDER.toString())))
        .andExpect(jsonPath("$.[*].toolName").value(hasItem(DEFAULT_TOOL_NAME.toString())));
}
@Test
@Transactional
public void getAnnotationAsPicture() throws Exception {
    // Initialize the database with one persisted entity.
    annotationAsPictureRepository.saveAndFlush(annotationAsPicture);
    // Fetching it by id must return its JSON representation with all of the
    // default field values.
    restAnnotationAsPictureMockMvc.perform(get("/api/annotation-as-pictures/{id}", annotationAsPicture.getId()))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
        .andExpect(jsonPath("$.id").value(annotationAsPicture.getId().intValue()))
        .andExpect(jsonPath("$.name").value(DEFAULT_NAME.toString()))
        .andExpect(jsonPath("$.fileName").value(DEFAULT_FILE_NAME.toString()))
        .andExpect(jsonPath("$.path").value(DEFAULT_PATH.toString()))
        .andExpect(jsonPath("$.folder").value(DEFAULT_FOLDER.toString()))
        .andExpect(jsonPath("$.toolName").value(DEFAULT_TOOL_NAME.toString()));
}
@Test
@Transactional
public void getNonExistingAnnotationAsPicture() throws Exception {
    // An id that cannot exist in the test database must yield a 404.
    restAnnotationAsPictureMockMvc.perform(get("/api/annotation-as-pictures/{id}", Long.MAX_VALUE))
        .andExpect(status().isNotFound());
}
@Test
@Transactional
public void updateAnnotationAsPicture() throws Exception {
    // Persist the fixture first so there is something to update.
    annotationAsPictureRepository.saveAndFlush(annotationAsPicture);
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    // Re-load the managed entity and change every mutable field.
    AnnotationAsPicture updated = annotationAsPictureRepository.findOne(annotationAsPicture.getId());
    updated
        .name(UPDATED_NAME)
        .fileName(UPDATED_FILE_NAME)
        .path(UPDATED_PATH)
        .folder(UPDATED_FOLDER)
        .toolName(UPDATED_TOOL_NAME);
    AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(updated);

    restAnnotationAsPictureMockMvc.perform(put("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isOk());

    // Row count unchanged; the stored entity carries the updated values.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore);
    AnnotationAsPicture saved = all.get(all.size() - 1);
    assertThat(saved.getName()).isEqualTo(UPDATED_NAME);
    assertThat(saved.getFileName()).isEqualTo(UPDATED_FILE_NAME);
    assertThat(saved.getPath()).isEqualTo(UPDATED_PATH);
    assertThat(saved.getFolder()).isEqualTo(UPDATED_FOLDER);
    assertThat(saved.getToolName()).isEqualTo(UPDATED_TOOL_NAME);
}
@Test
@Transactional
public void updateNonExistingAnnotationAsPicture() throws Exception {
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    AnnotationAsPictureDTO dto = annotationAsPictureMapper.toDto(annotationAsPicture);

    // Without an id the PUT falls back to create semantics and answers 201.
    restAnnotationAsPictureMockMvc.perform(put("/api/annotation-as-pictures")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(dto)))
        .andExpect(status().isCreated());

    // Exactly one new row was added.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore + 1);
}
@Test
@Transactional
public void deleteAnnotationAsPicture() throws Exception {
    // Persist the fixture so there is a row to delete.
    annotationAsPictureRepository.saveAndFlush(annotationAsPicture);
    int sizeBefore = annotationAsPictureRepository.findAll().size();

    restAnnotationAsPictureMockMvc.perform(delete("/api/annotation-as-pictures/{id}", annotationAsPicture.getId())
        .accept(TestUtil.APPLICATION_JSON_UTF8))
        .andExpect(status().isOk());

    // Exactly one row must have disappeared.
    List<AnnotationAsPicture> all = annotationAsPictureRepository.findAll();
    assertThat(all).hasSize(sizeBefore - 1);
}
@Test
@Transactional
public void equalsVerifier() throws Exception {
    TestUtil.equalsVerifier(AnnotationAsPicture.class);

    AnnotationAsPicture first = new AnnotationAsPicture();
    first.setId(1L);
    AnnotationAsPicture second = new AnnotationAsPicture();
    second.setId(first.getId());
    // Same id => equal.
    assertThat(first).isEqualTo(second);
    // Different ids => not equal.
    second.setId(2L);
    assertThat(first).isNotEqualTo(second);
    // A null id never equals a non-null one.
    first.setId(null);
    assertThat(first).isNotEqualTo(second);
}
@Test
@Transactional
public void dtoEqualsVerifier() throws Exception {
    TestUtil.equalsVerifier(AnnotationAsPictureDTO.class);

    AnnotationAsPictureDTO first = new AnnotationAsPictureDTO();
    first.setId(1L);
    AnnotationAsPictureDTO second = new AnnotationAsPictureDTO();
    // Id set vs. unset => not equal.
    assertThat(first).isNotEqualTo(second);
    // Same id => equal.
    second.setId(first.getId());
    assertThat(first).isEqualTo(second);
    // Different ids => not equal.
    second.setId(2L);
    assertThat(first).isNotEqualTo(second);
    // A null id never equals a non-null one.
    first.setId(null);
    assertThat(first).isNotEqualTo(second);
}
@Test
@Transactional
public void testEntityFromId() {
    // fromId(id) must yield a reference entity carrying exactly that id,
    // and a null id must map to a null reference.
    assertThat(annotationAsPictureMapper.fromId(42L).getId()).isEqualTo(42);
    assertThat(annotationAsPictureMapper.fromId(null)).isNull();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools.util;
import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.tools.CopyListingFileStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Assert;
import org.junit.Test;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import java.util.EnumSet;
import java.util.Random;
import java.util.Stack;
import java.io.IOException;
import java.io.OutputStream;
/**
 * Tests for {@link DistCpUtils} helpers (relative paths, attribute packing,
 * attribute preservation) plus shared fixture helpers used by other DistCp
 * tests ({@code createTestSetup}, {@code delete}, {@code createFile},
 * {@code checkIfFoldersAreInSync}).
 *
 * Fix vs. previous version: every {@code Assert.assertEquals} now passes the
 * EXPECTED value first and the ACTUAL value second, per the JUnit contract,
 * so failure messages are no longer inverted.
 */
public class TestDistCpUtils {
  private static final Log LOG = LogFactory.getLog(TestDistCpUtils.class);

  private static final Configuration config = new Configuration();
  private static MiniDFSCluster cluster;

  /** Source of unique directory suffixes for createTestSetup(); not security sensitive. */
  private static final Random rand = new Random();

  /** Spins up a single-node mini DFS cluster shared by all tests in this class. */
  @BeforeClass
  public static void create() throws IOException {
    cluster = new MiniDFSCluster.Builder(config).numDataNodes(1).format(true)
        .build();
  }

  @AfterClass
  public static void destroy() {
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  @Test
  public void testGetRelativePathRoot() {
    // Child under a non-root base: the base prefix is stripped.
    Path root = new Path("/tmp/abc");
    Path child = new Path("/tmp/abc/xyz/file");
    Assert.assertEquals("/xyz/file", DistCpUtils.getRelativePath(root, child));

    // Root base: the child's own absolute path is the relative path.
    root = new Path("/");
    child = new Path("/a");
    Assert.assertEquals("/a", DistCpUtils.getRelativePath(root, child));
  }

  @Test
  public void testPackAttributes() {
    // Verify pack/unpack round-trips as attributes are added one at a time.
    EnumSet<FileAttribute> attributes = EnumSet.noneOf(FileAttribute.class);
    Assert.assertEquals("", DistCpUtils.packAttributes(attributes));

    attributes.add(FileAttribute.REPLICATION);
    Assert.assertEquals("R", DistCpUtils.packAttributes(attributes));
    Assert.assertEquals(attributes, DistCpUtils.unpackAttributes("R"));

    attributes.add(FileAttribute.BLOCKSIZE);
    Assert.assertEquals("RB", DistCpUtils.packAttributes(attributes));
    Assert.assertEquals(attributes, DistCpUtils.unpackAttributes("RB"));

    attributes.add(FileAttribute.USER);
    Assert.assertEquals("RBU", DistCpUtils.packAttributes(attributes));
    Assert.assertEquals(attributes, DistCpUtils.unpackAttributes("RBU"));

    attributes.add(FileAttribute.GROUP);
    Assert.assertEquals("RBUG", DistCpUtils.packAttributes(attributes));
    Assert.assertEquals(attributes, DistCpUtils.unpackAttributes("RBUG"));

    attributes.add(FileAttribute.PERMISSION);
    Assert.assertEquals("RBUGP", DistCpUtils.packAttributes(attributes));
    Assert.assertEquals(attributes, DistCpUtils.unpackAttributes("RBUGP"));
  }

  @Test
  public void testPreserve() {
    try {
      FileSystem fs = FileSystem.get(config);
      EnumSet<FileAttribute> attributes = EnumSet.noneOf(FileAttribute.class);

      Path path = new Path("/tmp/abc");
      Path src = new Path("/tmp/src");
      fs.mkdirs(path);
      fs.mkdirs(src);
      CopyListingFileStatus srcStatus = new CopyListingFileStatus(
          fs.getFileStatus(src));

      FsPermission noPerm = new FsPermission((short) 0);
      fs.setPermission(path, noPerm);
      fs.setOwner(path, "nobody", "nobody");

      // With no attributes requested, nothing on the target may change.
      DistCpUtils.preserve(fs, path, srcStatus, attributes);
      FileStatus target = fs.getFileStatus(path);
      Assert.assertEquals(noPerm, target.getPermission());
      Assert.assertEquals("nobody", target.getOwner());
      Assert.assertEquals("nobody", target.getGroup());

      // Requesting PERMISSION copies the permission but not ownership.
      attributes.add(FileAttribute.PERMISSION);
      DistCpUtils.preserve(fs, path, srcStatus, attributes);
      target = fs.getFileStatus(path);
      Assert.assertEquals(srcStatus.getPermission(), target.getPermission());
      Assert.assertEquals("nobody", target.getOwner());
      Assert.assertEquals("nobody", target.getGroup());

      // Adding GROUP and USER copies ownership as well.
      attributes.add(FileAttribute.GROUP);
      attributes.add(FileAttribute.USER);
      DistCpUtils.preserve(fs, path, srcStatus, attributes);
      target = fs.getFileStatus(path);
      Assert.assertEquals(srcStatus.getPermission(), target.getPermission());
      Assert.assertEquals(srcStatus.getOwner(), target.getOwner());
      Assert.assertEquals(srcStatus.getGroup(), target.getGroup());

      fs.delete(path, true);
      fs.delete(src, true);
    } catch (IOException e) {
      LOG.error("Exception encountered ", e);
      Assert.fail("Preserve test failure");
    }
  }

  /** Creates the standard fixture tree under /tmp1 with default permissions. */
  public static String createTestSetup(FileSystem fs) throws IOException {
    return createTestSetup("/tmp1", fs, FsPermission.getDefault());
  }

  /** Creates the standard fixture tree under /tmp1 with the given permissions. */
  public static String createTestSetup(FileSystem fs,
                                       FsPermission perm) throws IOException {
    return createTestSetup("/tmp1", fs, perm);
  }

  /**
   * Creates a small directory/file tree under a fresh random subdirectory of
   * {@code baseDir}, applying {@code perm} to every created directory.
   *
   * @return the randomized base path of the created tree
   */
  public static String createTestSetup(String baseDir,
                                       FileSystem fs,
                                       FsPermission perm) throws IOException {
    String base = getBase(baseDir);
    fs.mkdirs(new Path(base + "/newTest/hello/world1"));
    fs.mkdirs(new Path(base + "/newTest/hello/world2/newworld"));
    fs.mkdirs(new Path(base + "/newTest/hello/world3/oldworld"));
    fs.setPermission(new Path(base + "/newTest"), perm);
    fs.setPermission(new Path(base + "/newTest/hello"), perm);
    fs.setPermission(new Path(base + "/newTest/hello/world1"), perm);
    fs.setPermission(new Path(base + "/newTest/hello/world2"), perm);
    fs.setPermission(new Path(base + "/newTest/hello/world2/newworld"), perm);
    fs.setPermission(new Path(base + "/newTest/hello/world3"), perm);
    fs.setPermission(new Path(base + "/newTest/hello/world3/oldworld"), perm);
    createFile(fs, base + "/newTest/1");
    createFile(fs, base + "/newTest/hello/2");
    createFile(fs, base + "/newTest/hello/world3/oldworld/3");
    createFile(fs, base + "/newTest/hello/world2/4");
    return base;
  }

  /** Appends a random long to {@code base} so concurrent tests don't collide. */
  private static String getBase(String base) {
    String location = String.valueOf(rand.nextLong());
    return base + "/" + location;
  }

  /** Best-effort recursive delete; failures are logged, never thrown. */
  public static void delete(FileSystem fs, String path) {
    try {
      if (fs != null) {
        if (path != null) {
          fs.delete(new Path(path), true);
        }
      }
    } catch (IOException e) {
      LOG.warn("Exception encountered ", e);
    }
  }

  /** Creates an empty file at {@code filePath}. */
  public static void createFile(FileSystem fs, String filePath) throws IOException {
    OutputStream out = fs.create(new Path(filePath));
    IOUtils.closeStream(out);
  }

  /**
   * Walks {@code targetBase} and asserts every entry also exists at the
   * corresponding relative path under {@code sourceBase}.
   *
   * @return always {@code true}; a mismatch fails via the assertion inside
   */
  public static boolean checkIfFoldersAreInSync(FileSystem fs, String targetBase, String sourceBase)
      throws IOException {
    Path base = new Path(targetBase);

    Stack<Path> stack = new Stack<Path>();
    stack.push(base);
    while (!stack.isEmpty()) {
      Path file = stack.pop();
      if (!fs.exists(file)) continue;
      FileStatus[] fStatus = fs.listStatus(file);
      if (fStatus == null || fStatus.length == 0) continue;

      for (FileStatus status : fStatus) {
        if (status.isDirectory()) {
          stack.push(status.getPath());
        }
        Assert.assertTrue(fs.exists(new Path(sourceBase + "/" +
            DistCpUtils.getRelativePath(new Path(targetBase), status.getPath()))));
      }
    }
    return true;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.InputStream;
import java.io.IOException;
import java.util.*;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.*;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapreduce.lib.db.IntegerSplitter;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.io.NetCDFArrayWritable;
import java.util.List;
import ucar.nc2.*;
import ucar.nc2.iosp.*;
import ucar.nc2.iosp.netcdf3.*;
import ucar.unidata.io.*;
import ucar.nc2.dataset.*;
import ucar.ma2.Array;
import ucar.ma2.ArrayFloat;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.NetCDFReaderWithMeta;
/**
 * Treats keys as offsets into the file and values as lines.
 */
public class NetCDFInputFormatPrunerByFileIndexMultiFile extends FileInputFormat<Text, NetCDFArrayWritable> {
// Logger attributed to THIS class. The previous version pointed at
// NetCDFInputFormatPrunerByFileIndex (a sibling class), an apparent
// copy-paste slip that mis-attributed every log line.
private static final Log LOG
    = LogFactory.getLog(NetCDFInputFormatPrunerByFileIndexMultiFile.class.getName());

// Job-configuration key under which the raw Hive query text is passed in.
public static final String HIVE_QUERY = "hadoop.netcdf.hivequery.raw";

// Which dimension (if any) the query's WHERE clause restricts.
public enum QueryType { TIME, LAT, LON, NOLIMIT }

// Fallback split size: 128 MB.
private long blockSize = 128 * 1024 * 1024;

// mapping from a block to the nodes on which it has replicas
HashMap<NetCDFFileSplit, String[]> blockToNodes =
    new HashMap<NetCDFFileSplit, String[]>();
// mapping from a node to the list of blocks that it contains
HashMap<String, Set<NetCDFFileSplit>> nodeToBlocks =
    new HashMap<String, Set<NetCDFFileSplit>>();

// Header metadata of the NetCDF file most recently inspected by getSplits().
NetCDFInfo netInfo = null;
/**
 * Reads the NetCDF header of {@code file} and returns chunk-start and record
 * metadata used by getSplits().
 *
 * Best-effort contract preserved from the original: on any failure the
 * partially filled (possibly empty) NetCDFInfo is returned rather than an
 * exception being thrown, so a malformed file degrades gracefully.
 *
 * Fixes vs. previous version: the file is now closed in a {@code finally}
 * block instead of a swallowed one-liner after the catch, exceptions are
 * logged with their stack trace, and the duplicated System.out noise and
 * dead commented-out code are gone.
 */
private NetCDFInfo getNetCDFInfo(Path file, FileSystem fs, JobConf job)
{
  NetCDFInfo result = new NetCDFInfo();
  NetcdfFile ncFile = null;
  try {
    ncFile = NetcdfDataset.openFile(file.toString(), null);
    // "rsut" is the data variable this pruner was written for; the three
    // coordinate variables give the dimension lengths.
    Variable v = ncFile.findVariable("rsut");
    Variable time = ncFile.findVariable("time");
    Variable lat = ncFile.findVariable("lat");
    Variable lon = ncFile.findVariable("lon");

    result.fileSize = ncFile.vfileSize;
    result.recStart = ncFile.vrecStart;
    Long[] metaArray = v.reallyReadMeta().toArray(new Long[(int) (ncFile.vnumRecs)]);
    result.chunkStarts = ArrayUtils.toPrimitive(metaArray);
    result.numRecs = ncFile.vnumRecs;
    result.recSize = ncFile.vrecSize;
    result.smallRecSize = ncFile.vsmallRecSize;
    result.timeLength = (int) (time.getSize());
    result.latLength = (int) (lat.getSize());
    result.lonLength = (int) (lon.getSize());
  } catch (Exception e) {
    // Deliberately swallowed (see contract above); logged with stack trace.
    LOG.info("Failed to read NetCDF header from " + file, e);
  } finally {
    if (ncFile != null) {
      try {
        ncFile.close();
      } catch (Exception e) {
        LOG.info("Failed to close NetCDF file " + file, e);
      }
    }
  }
  return result;
}
@Override
public InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
FileStatus[] files = listStatus(job);
LOG.info("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] hive query is: " + job.get(HIVE_QUERY, "Kossher"));
System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] hive query is: " + job.get(HIVE_QUERY, "Kossher"));
/* Analyzing Query here */
String hiveQuery = job.get(HIVE_QUERY, "Kossher");
QueryType queryType = QueryType.NOLIMIT; // default mode
if(hiveQuery.contains("where") || hiveQuery.contains("WHERE")) {
if (hiveQuery.contains("time") || hiveQuery.contains("TIME")) {
queryType = QueryType.TIME;
} else if (hiveQuery.contains("lat") || hiveQuery.contains("LAT")) {
queryType = QueryType.LAT;
} else if (hiveQuery.contains("lon") || hiveQuery.contains("LON")) {
queryType = QueryType.LON;
}
}
float topLimit = -1;
float bottomLimit = -1;
if( queryType != QueryType.NOLIMIT ) {
if (hiveQuery.contains("<")) {
String[] querySplitted = hiveQuery.split(" ");
int i = Arrays.asList(querySplitted).indexOf("<");
topLimit = Float.valueOf(querySplitted[i+1]);
}
if (hiveQuery.contains(">")) {
String[] querySplitted = hiveQuery.split(" ");
int i = Arrays.asList(querySplitted).indexOf(">");
bottomLimit = Float.valueOf(querySplitted[i+1]);
}
}
//System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndex] QueryType = " + queryType.toString()
// +", topLimit = " + topLimit + ", bottomLimit = " + bottomLimit );
//LOG.info("[SAMAN][NetCDFInputFormatPrunerByFileIndex] QueryType = " + queryType.toString()
// + ", topLimit = " + topLimit + ", bottomLimit = " + bottomLimit);
/* End Analyzing Query here */
System.out.println( "[SAMANPruner] beginning of getSplits" );
LOG.info( "[SAMANPruner] beginning of getSplits" );
//System.out.println( "[SAMAN] " + files.length );
//LOG.info( "[SAMAN] " + files.length );
// Save the number of input files in the job-conf
job.setLong(NUM_INPUT_FILES, files.length);
long totalSize = 0; // compute total size
for (FileStatus file: files) { // check we have valid files
if (file.isDir()) {
throw new IOException("Not a file: " + file.getPath());
}
totalSize += file.getLen();
}
//long minSize = Math.max(job.getLong("mapred.min.split.size", 1),
// minSplitSize);
// generate splits
ArrayList<NetCDFFileSplit> splits = new ArrayList<NetCDFFileSplit>(numSplits);
ArrayList<NetCDFFileSplit> finalSplits = new ArrayList<NetCDFFileSplit>();
NetworkTopology clusterMap = new NetworkTopology();
for (FileStatus file: files) {
Path path = file.getPath();
int fileIndex = 0;
int dimIndex = 0;
if( queryType == QueryType.TIME || queryType == QueryType.NOLIMIT){
if( path.getName().contains("lat") || path.getName().contains("lon") )
continue;
}else if( queryType == QueryType.LAT ){
if( !path.getName().contains("lat") )
continue;
}else if( queryType == QueryType.LON ){
if( !path.getName().contains("lon") )
continue;
}
if( queryType == QueryType.TIME ){
String[] parts = path.getName().split("-");
fileIndex = Integer.valueOf(parts[1]);
}
else if( queryType == QueryType.LAT || queryType == QueryType.LON ){
if( path.getName().contains("_") ){
String[] parts = path.getName().split("_");
fileIndex = Integer.valueOf(parts[2]);
dimIndex = Integer.valueOf(parts[0].substring(7));
}else{
//dimIndex = Integer.valueOf(path.getName().substring(7));
String[] parts = path.getName().split("-");
dimIndex = Integer.valueOf(parts[1]);
}
}
//LOG.info("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] File name is : " + path.getName());
System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] File name is : " + path.getName());
FileSystem fs = path.getFileSystem(job);
long length = file.getLen();
BlockLocation[] blkLocations = fs.getFileBlockLocations(file, 0, length);
if ((length != 0) && isSplitable(fs, path)) {
long blockSize = file.getBlockSize();
netInfo = getNetCDFInfo(path, fs, job);
long recStart = netInfo.recStart;
long[] chunkStarts = netInfo.chunkStarts;
long smallSize = netInfo.smallRecSize;
long recSize = netInfo.recSize;
long splitSize = 0;
int chunkIndex = 0;
long bytesRemaining = chunkStarts[chunkStarts.length-1] + recSize - recStart - 2*smallSize;
long thisStart = recStart; // file position
long thisChunk = 0;
long blockNo = 1;
long numChunksPerKey = 0;
if( queryType == QueryType.LAT ){
long chunkSize = netInfo.timeLength * netInfo.lonLength * 4;
numChunksPerKey = blockSize / chunkSize;
}else if( queryType == QueryType.LON ){
long chunkSize = netInfo.timeLength * netInfo.latLength * 4;
numChunksPerKey = blockSize / chunkSize;
}
System.out.println( "[SAMAN][NetCDFInputFormat][getSplits] numChunksPerKey = " + numChunksPerKey );
//LOG.info( "[SAMAN] NetCDFInputFormatPruner.getSplits => recStart = " + recStart + ", chunkStarts = " + chunkStarts +
// ", smallSize = " + smallSize + ", recSize = " + recSize + ", bytesRemaining = " + bytesRemaining +
// ", thisStart = " + thisStart);
//System.out.println( "[SAMAN] NetCDFInputFormatPruner.getSplits => recStart = " + recStart + ", chunkStarts = " + chunkStarts +
// ", smallSize = " + smallSize + ", recSize = " + recSize + ", bytesRemaining = " + bytesRemaining +
// ", thisStart = " + thisStart);
while ( bytesRemaining > 0) {
while ( chunkIndex < chunkStarts.length && chunkStarts[chunkIndex] < blockNo * blockSize ) {
chunkIndex++;
}
long tempStart = thisStart;
long endChunk;
if (chunkIndex >= chunkStarts.length) {
splitSize = chunkStarts[chunkStarts.length-1] + recSize - thisStart - smallSize;
//bytesRemaining should be 0 after this round
}
else {
splitSize = chunkStarts[chunkIndex] - thisStart - smallSize;
thisStart = chunkStarts[chunkIndex];
}
endChunk = chunkIndex;
blockNo++;
//LOG.info( "[SAMAN] NetCDFInputFormatPruner.getSplits => splitSize="+splitSize+", thisStart="+thisStart+
// ", endChunk="+endChunk+", blockNo="+blockNo);
System.out.println( "[SAMAN] NetCDFInputFormatPruner.getSplits => splitSize="+splitSize+", thisStart="+thisStart+
", endChunk="+endChunk+", blockNo="+blockNo);
String[] splitHosts = getSplitHosts(blkLocations, tempStart, splitSize, clusterMap);
NetCDFFileSplit split = new NetCDFFileSplit(path, tempStart, splitSize, splitHosts);
if( queryType == QueryType.TIME ) {
if ((topLimit < thisChunk + (fileIndex*netInfo.timeLength)) && (topLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
if ((bottomLimit > endChunk + (fileIndex*netInfo.timeLength)) && (bottomLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
blockToNodes.put( split, splitHosts );
// Put the nodes with the specified split into the node to block set
System.out.println( "[SAMAN][NetCDFInputFormat][getSplits] Put the nodes with the specified split into the node to block set" );
for( int i = 0; i < splitHosts.length; i++ ){
Set<NetCDFFileSplit> splitList = nodeToBlocks.get(splitHosts[i]);
if( splitList == null ){
splitList = new LinkedHashSet<NetCDFFileSplit>();
nodeToBlocks.put( splitHosts[i], splitList );
}
splitList.add( split );
}
System.out.println("[SAMAN][NetCDFInputFormat][getSplits] set start and end!" );
split.getFileSplit().startChunk.add(thisChunk);
split.getFileSplit().endChunk.add(endChunk);
} else if( queryType == QueryType.LAT || queryType == QueryType.LON ){
//System.out.println( "[SAMAN][NetCDFInputFormat][getSplits] file = "
// + path.getName() + ", topLimit = " + topLimit + ", bottomLimit = " + bottomLimit + ", dimIndex = " + dimIndex );
/*
if( topLimit < dimIndex*numChunksPerKey && (topLimit != -1) ){
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
if( bottomLimit > dimIndex*numChunksPerKey && (bottomLimit != -1) ){
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}*/
if (topLimit < thisChunk && (topLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
if (bottomLimit > endChunk && (bottomLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
/*
if ((topLimit < thisChunk) && (topLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
if ((bottomLimit > endChunk) && (bottomLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
*/
//split.getNetCDFFileSplit().endChunk = (long)topLimit;
/*
split.getFileSplit().startChunk.add(thisChunk);
split.getFileSplit().endChunk.add(endChunk);
*/
// Put the block into the block to node set
blockToNodes.put( split, splitHosts );
// Put the nodes with the specified split into the node to block set
for( int i = 0; i < splitHosts.length; i++ ){
Set<NetCDFFileSplit> splitList = nodeToBlocks.get(splitHosts[i]);
if( splitList == null ){
splitList = new LinkedHashSet<NetCDFFileSplit>();
nodeToBlocks.put( splitHosts[i], splitList );
}
splitList.add( split );
}
// For the test, we would assign everything statically.
if( bottomLimit > thisChunk && (bottomLimit != -1) ){
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] startChunk = "
+ bottomLimit );
split.getFileSplit().startChunk.add((long)bottomLimit);
}else{
split.getFileSplit().startChunk.add(thisChunk);
}
if( topLimit < endChunk && (topLimit != -1) ){
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] endChunk = "
+ endChunk );
split.getFileSplit().endChunk.add((long)topLimit);
}else{
split.getFileSplit().endChunk.add(endChunk);
}
} else {
if ((topLimit < thisChunk) && (topLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
if ((bottomLimit > endChunk) && (bottomLimit != -1)) {
bytesRemaining -= splitSize;
thisChunk = endChunk;
continue;
}
blockToNodes.put( split, splitHosts );
// Put the nodes with the specified split into the node to block set
for( int i = 0; i < splitHosts.length; i++ ){
Set<NetCDFFileSplit> splitList = nodeToBlocks.get(splitHosts[i]);
if( splitList == null ){
splitList = new LinkedHashSet<NetCDFFileSplit>();
nodeToBlocks.put( splitHosts[i], splitList );
}
splitList.add( split );
}
split.getFileSplit().startChunk.add(thisChunk);
split.getFileSplit().endChunk.add(endChunk);
}
splits.add(split);
bytesRemaining -= splitSize;
thisChunk = endChunk;
//LOG.info( "[SAMAN] NetCDFInputFormatPruner.getSplits => bytesRemaining="+bytesRemaining+", thisChunk="+thisChunk );
//System.out.println( "[SAMAN] NetCDFInputFormatPruner.getSplits => bytesRemaining="+bytesRemaining+", thisChunk="+thisChunk );
}
} else if (length != 0) {
String[] splitHosts = getSplitHosts(blkLocations,0,length,clusterMap);
//splits.add(new FileSplit(path, 0, length, splitHosts));
} else {
//Create empty hosts array for zero length files
//splits.add(new FileSplit(path, 0, length, new String[0]));
}
}
// Now it's time to merge non-complete splits.
// Check if each split has enough space to include another split too
Set<String> completedNodes = new HashSet<String>();
ArrayList<NetCDFFileSplit> validBlocks = new ArrayList<NetCDFFileSplit>();
long curSplitSize = 0;
Multiset<String> splitsPerNode = HashMultiset.create();
for (Iterator<Map.Entry<String, Set<NetCDFFileSplit>>> iter = nodeToBlocks
.entrySet().iterator(); iter.hasNext();) {
Map.Entry<String, Set<NetCDFFileSplit>> one = iter.next();
String node = one.getKey();
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] node is = " + node );
// Skip the node if it has previously been marked as completed.
if (completedNodes.contains(node)) {
continue;
}
Set<NetCDFFileSplit> blocksInCurrentNode = one.getValue();
// for each block, copy it into validBlocks. Delete it from
// blockToNodes so that the same block does not appear in
// two different splits.
Iterator<NetCDFFileSplit> oneBlockIter = blocksInCurrentNode.iterator();
while (oneBlockIter.hasNext()) {
NetCDFFileSplit oneblock = oneBlockIter.next();
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] " +
"split is: " + oneblock.getFileSplit().getPath());
// Remove all blocks which may already have been assigned to other
// splits.
if(!blockToNodes.containsKey(oneblock)) {
oneBlockIter.remove();
continue;
}
validBlocks.add(oneblock);
if( queryType == QueryType.LAT ){
curSplitSize += (oneblock.getFileSplit().endChunk.get(0) - oneblock.getFileSplit().startChunk.get(0)) * 4 * netInfo.lonLength * netInfo.timeLength;
}else if( queryType == QueryType.LON ){
curSplitSize += (oneblock.getFileSplit().endChunk.get(0) - oneblock.getFileSplit().startChunk.get(0)) * 4 * netInfo.latLength * netInfo.timeLength;
}else if( queryType == QueryType.TIME ){
curSplitSize += (oneblock.getFileSplit().endChunk.get(0) - oneblock.getFileSplit().startChunk.get(0)) * 4 * netInfo.latLength * netInfo.lonLength;
}else{
curSplitSize += (oneblock.getFileSplit().endChunk.get(0) - oneblock.getFileSplit().startChunk.get(0)) * 4 * netInfo.latLength * netInfo.lonLength;
}
blockToNodes.remove(oneblock);
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] curSplitSize = " + curSplitSize );
//curSplitSize += singleSplitSize;
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] " +
"Added to valid blocks!" );
// if the accumulated split size exceeds the maximum, then
// create this split.
if (blockSize != 0 && curSplitSize >= blockSize) {
// create an input split and add it to the splits array
addCreatedSplit(finalSplits, Collections.singleton(node), validBlocks);
//totalLength -= curSplitSize;
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] " +
"addCreatedSplit called!" );
curSplitSize = 0;
splitsPerNode.add(node);
// Remove entries from blocksInNode so that we don't walk these
// again.
//blocksInCurrentNode.removeAll(validBlocks);
validBlocks.clear();
// Done creating a single split for this node. Move on to the next
// node so that splits are distributed across nodes.
//break;
}
}
if( !validBlocks.isEmpty() ){
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] validBlocks not empty!" );
addCreatedSplit(finalSplits, Collections.singleton(node), validBlocks);
curSplitSize = 0;
splitsPerNode.add(node);
blocksInCurrentNode.removeAll(validBlocks);
validBlocks.clear();
}
}
Set<NetCDFFileSplit> singleSplitsSet = blockToNodes.keySet();
Iterator itrSingle = singleSplitsSet.iterator();
while( itrSingle.hasNext() ){
NetCDFFileSplit temp = (NetCDFFileSplit)itrSingle.next();
addCreatedSingleSplit( finalSplits, temp.getLocations() , temp );
}
Iterator itr = finalSplits.iterator();
while( itr.hasNext() ){
NetCDFFileSplit temp = (NetCDFFileSplit)itr.next();
String[] locations = temp.getFileSplit().getLocations();
String locationsString = "";
for( int i = 0; i < locations.length; i++ )
locationsString += locations[i];
String pathsString = "";
List<Path> paths = temp.getFileSplit().getPaths();
for( Path path : paths )
pathsString += path.getName()+",";
String startsString = "";
List<Long> starts = temp.getFileSplit().startChunk;
for( Long start : starts )
startsString += (start+",");
String endsString = "";
List<Long> ends = temp.getFileSplit().endChunk;
for( Long end : ends )
endsString += (end+",");
System.out.println( "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] " +
"locations="+locationsString+","+
"paths="+pathsString+","+
"starts="+startsString+","+
"ends="+endsString+",");
}
return finalSplits.toArray(new NetCDFFileSplit[finalSplits.size()]);
}
/**
 * Create a single split from the list of blocks specified in validBlocks
 * and add this new split into splitList.
 *
 * Each valid block contributes its first path, its start offset, its length
 * and its first start/end chunk; the merged split carries them as parallel
 * lists in the same order the blocks were validated.
 *
 * @param splitList   accumulator the newly created split is appended to
 * @param locations   hosts the merged split should be scheduled on
 * @param validBlocks blocks to merge into one split; may be empty, in which
 *                    case an empty (but well-formed) split is still added
 */
private void addCreatedSplit(List<NetCDFFileSplit> splitList,
                             Collection<String> locations,
                             ArrayList<NetCDFFileSplit> validBlocks) {
    // Presized ArrayLists: these are append-only and sized up front, so a
    // linked list buys nothing here.
    int n = validBlocks.size();
    List<Path> fl = new ArrayList<Path>(n);
    List<Long> offset = new ArrayList<Long>(n);
    List<Long> length = new ArrayList<Long>(n);
    List<Long> startChunk = new ArrayList<Long>(n);
    List<Long> endChunk = new ArrayList<Long>(n);
    for (NetCDFFileSplit block : validBlocks) {
        // Only the first path/chunk of each block is carried over — the
        // blocks handed in here were built with singleton lists upstream.
        fl.add(block.getFileSplit().getPaths().get(0));
        offset.add(block.getFileSplit().getStart());
        length.add(block.getFileSplit().getLength());
        startChunk.add(block.getFileSplit().startChunk.get(0));
        endChunk.add(block.getFileSplit().endChunk.get(0));
    }
    // add this split to the list that is returned
    NetCDFFileSplit thissplit = new NetCDFFileSplit(fl, offset,
        length, locations.toArray(new String[0]), startChunk, endChunk);
    splitList.add(thissplit);
}
/**
 * Wraps one leftover block (a block no node-local split claimed) into its
 * own NetCDFFileSplit and appends it to splitList. The new split's path,
 * offset, length and chunk bounds are singleton lists taken from the block.
 */
private void addCreatedSingleSplit(List<NetCDFFileSplit> splitList,
                                   String[] locations,
                                   NetCDFFileSplit validBlock) {
    List<Path> paths = new LinkedList<Path>();
    List<Long> offsets = new LinkedList<Long>();
    List<Long> lengths = new LinkedList<Long>();
    List<Long> starts = new LinkedList<Long>();
    List<Long> ends = new LinkedList<Long>();
    paths.add(validBlock.getFileSplit().getPaths().get(0));
    offsets.add(validBlock.getFileSplit().getStart());
    lengths.add(validBlock.getFileSplit().getLength());
    starts.add(validBlock.getFileSplit().startChunk.get(0));
    ends.add(validBlock.getFileSplit().endChunk.get(0));
    splitList.add(new NetCDFFileSplit(paths, offsets, lengths, locations, starts, ends));
}
/**
 * Hands back the record reader for one split. The split description is first
 * surfaced through the reporter so the task status shows what is being read.
 */
@Override
public RecordReader<Text, NetCDFArrayWritable> getRecordReader(
    InputSplit genericSplit, JobConf job,
    Reporter reporter)
    throws IOException {
    reporter.setStatus(genericSplit.toString());
    NetCDFFileSplit split = (NetCDFFileSplit) genericSplit;
    return new NetCDFReaderWithMetaMultiFile(job, split);
}
}
| |
package com.swfarm.biz.chain.bo.estapi;
import org.apache.commons.lang.StringUtils;
/**
 * Parameter object for the EST-Express "create order" API call.
 *
 * Holds shipper/consignee address data, cargo and insurance attributes and
 * the declared invoices, and serializes them to the request XML via
 * {@link #buildXML(String, String)}. Only non-empty fields are emitted.
 *
 * NOTE(review): field values are written into the XML without any escaping;
 * a value containing {@code &}, {@code <} or {@code >} would corrupt the
 * request. Confirm with the EST-Express contract whether callers pre-escape
 * before hardening this here.
 */
public class CreateOrderParam {

    private String orderNo;
    private String trackingNumber;
    private String productCode;
    private String cargoCode;
    private String paymentCode;
    private String initialCountryCode;
    private String destinationCountryCode;
    private String pieces;
    private String prealertintegrationtype;
    private String insurType;
    private String insurValue;
    private String buyerId;
    private String returnSign;
    private Double customerWeight;
    private String transactionId;
    private String shipperCompanyName;
    private String shipperName;
    private String shipperStateOrProvince;
    private String shipperCity;
    private String shipperAddress;
    private String shipperTelephone;
    private String shipperFax;
    private String shipperPostCode;
    private String consigneeCompanyName;
    private String consigneeName;
    private String street;
    private String city;
    private String stateOrProvince;
    private String consigneeTelephone;
    private String consigneeFax;
    private String consigneePostCode;
    private String consigneeEmail;
    private String mctCode;
    private String note;
    private DeclareInvoice[] declareInvoices;

    public String getOrderNo() {
        return orderNo;
    }

    public void setOrderNo(String orderNo) {
        this.orderNo = orderNo;
    }

    public String getTrackingNumber() {
        return trackingNumber;
    }

    public void setTrackingNumber(String trackingNumber) {
        this.trackingNumber = trackingNumber;
    }

    public String getProductCode() {
        return productCode;
    }

    public void setProductCode(String productCode) {
        this.productCode = productCode;
    }

    public String getCargoCode() {
        return cargoCode;
    }

    public void setCargoCode(String cargoCode) {
        this.cargoCode = cargoCode;
    }

    public String getPaymentCode() {
        return paymentCode;
    }

    public void setPaymentCode(String paymentCode) {
        this.paymentCode = paymentCode;
    }

    public String getInitialCountryCode() {
        return initialCountryCode;
    }

    public void setInitialCountryCode(String initialCountryCode) {
        this.initialCountryCode = initialCountryCode;
    }

    public String getDestinationCountryCode() {
        return destinationCountryCode;
    }

    public void setDestinationCountryCode(String destinationCountryCode) {
        this.destinationCountryCode = destinationCountryCode;
    }

    public String getPieces() {
        return pieces;
    }

    public void setPieces(String pieces) {
        this.pieces = pieces;
    }

    public String getPrealertintegrationtype() {
        return prealertintegrationtype;
    }

    public void setPrealertintegrationtype(String prealertintegrationtype) {
        this.prealertintegrationtype = prealertintegrationtype;
    }

    public String getInsurType() {
        return insurType;
    }

    public void setInsurType(String insurType) {
        this.insurType = insurType;
    }

    public String getInsurValue() {
        return insurValue;
    }

    public void setInsurValue(String insurValue) {
        this.insurValue = insurValue;
    }

    public String getBuyerId() {
        return buyerId;
    }

    public void setBuyerId(String buyerId) {
        this.buyerId = buyerId;
    }

    public String getReturnSign() {
        return returnSign;
    }

    public void setReturnSign(String returnSign) {
        this.returnSign = returnSign;
    }

    public Double getCustomerWeight() {
        return customerWeight;
    }

    public void setCustomerWeight(Double customerWeight) {
        this.customerWeight = customerWeight;
    }

    public String getTransactionId() {
        return transactionId;
    }

    public void setTransactionId(String transactionId) {
        this.transactionId = transactionId;
    }

    public String getShipperCompanyName() {
        return shipperCompanyName;
    }

    public void setShipperCompanyName(String shipperCompanyName) {
        this.shipperCompanyName = shipperCompanyName;
    }

    public String getShipperName() {
        return shipperName;
    }

    public void setShipperName(String shipperName) {
        this.shipperName = shipperName;
    }

    public String getShipperStateOrProvince() {
        return shipperStateOrProvince;
    }

    public void setShipperStateOrProvince(String shipperStateOrProvince) {
        this.shipperStateOrProvince = shipperStateOrProvince;
    }

    public String getShipperCity() {
        return shipperCity;
    }

    public void setShipperCity(String shipperCity) {
        this.shipperCity = shipperCity;
    }

    public String getShipperAddress() {
        return shipperAddress;
    }

    public void setShipperAddress(String shipperAddress) {
        this.shipperAddress = shipperAddress;
    }

    public String getShipperTelephone() {
        return shipperTelephone;
    }

    public void setShipperTelephone(String shipperTelephone) {
        this.shipperTelephone = shipperTelephone;
    }

    public String getShipperFax() {
        return shipperFax;
    }

    public void setShipperFax(String shipperFax) {
        this.shipperFax = shipperFax;
    }

    public String getShipperPostCode() {
        return shipperPostCode;
    }

    public void setShipperPostCode(String shipperPostCode) {
        this.shipperPostCode = shipperPostCode;
    }

    public String getConsigneeCompanyName() {
        return consigneeCompanyName;
    }

    public void setConsigneeCompanyName(String consigneeCompanyName) {
        this.consigneeCompanyName = consigneeCompanyName;
    }

    public String getConsigneeName() {
        return consigneeName;
    }

    public void setConsigneeName(String consigneeName) {
        this.consigneeName = consigneeName;
    }

    public String getStreet() {
        return street;
    }

    public void setStreet(String street) {
        this.street = street;
    }

    public String getCity() {
        return city;
    }

    public void setCity(String city) {
        this.city = city;
    }

    public String getStateOrProvince() {
        return stateOrProvince;
    }

    public void setStateOrProvince(String stateOrProvince) {
        this.stateOrProvince = stateOrProvince;
    }

    public String getConsigneeTelephone() {
        return consigneeTelephone;
    }

    public void setConsigneeTelephone(String consigneeTelephone) {
        this.consigneeTelephone = consigneeTelephone;
    }

    public String getConsigneeFax() {
        return consigneeFax;
    }

    public void setConsigneeFax(String consigneeFax) {
        this.consigneeFax = consigneeFax;
    }

    public String getConsigneePostCode() {
        return consigneePostCode;
    }

    public void setConsigneePostCode(String consigneePostCode) {
        this.consigneePostCode = consigneePostCode;
    }

    public String getConsigneeEmail() {
        return consigneeEmail;
    }

    public void setConsigneeEmail(String consigneeEmail) {
        this.consigneeEmail = consigneeEmail;
    }

    public String getMctCode() {
        return mctCode;
    }

    public void setMctCode(String mctCode) {
        this.mctCode = mctCode;
    }

    public String getNote() {
        return note;
    }

    public void setNote(String note) {
        this.note = note;
    }

    public DeclareInvoice[] getDeclareInvoices() {
        return declareInvoices;
    }

    public void setDeclareInvoices(DeclareInvoice[] declareInvoices) {
        this.declareInvoices = declareInvoices;
    }

    /**
     * Serializes this order to the request XML.
     *
     * The common header is only written when BOTH {@code service} and
     * {@code authToken} are non-empty; the common trailer only needs
     * {@code service}. Fields that are null/empty are omitted entirely.
     *
     * @param authToken API authentication token, may be empty
     * @param service   target service name, may be empty
     * @return the assembled XML fragment
     */
    public String buildXML(String authToken, String service) {
        // StringBuilder: this is a single-threaded local buffer, so the
        // synchronized StringBuffer is unnecessary overhead.
        StringBuilder xml = new StringBuilder();
        if (StringUtils.isNotEmpty(service) && StringUtils.isNotEmpty(authToken)) {
            xml.append(EstExpressBaseInfo.getCommenHearder(service, authToken));
        }
        xml.append("<arg1>");
        appendIfNotEmpty(xml, "orderNo", orderNo);
        appendIfNotEmpty(xml, "trackingNumber", trackingNumber);
        appendIfNotEmpty(xml, "productCode", productCode);
        appendIfNotEmpty(xml, "cargoCode", cargoCode);
        appendIfNotEmpty(xml, "paymentCode", paymentCode);
        appendIfNotEmpty(xml, "initialCountryCode", initialCountryCode);
        appendIfNotEmpty(xml, "destinationCountryCode", destinationCountryCode);
        appendIfNotEmpty(xml, "pieces", pieces);
        appendIfNotEmpty(xml, "prealertintegrationtype", prealertintegrationtype);
        appendIfNotEmpty(xml, "insurType", insurType);
        appendIfNotEmpty(xml, "insurValue", insurValue);
        appendIfNotEmpty(xml, "buyerId", buyerId);
        appendIfNotEmpty(xml, "returnSign", returnSign);
        // customerWeight is a Double, so only a null check applies.
        if (customerWeight != null) {
            xml.append("<customerWeight>").append(customerWeight).append("</customerWeight>");
        }
        appendIfNotEmpty(xml, "transactionId", transactionId);
        appendIfNotEmpty(xml, "shipperCompanyName", shipperCompanyName);
        appendIfNotEmpty(xml, "shipperName", shipperName);
        appendIfNotEmpty(xml, "shipperStateOrProvince", shipperStateOrProvince);
        appendIfNotEmpty(xml, "shipperCity", shipperCity);
        appendIfNotEmpty(xml, "shipperAddress", shipperAddress);
        appendIfNotEmpty(xml, "shipperTelephone", shipperTelephone);
        appendIfNotEmpty(xml, "shipperFax", shipperFax);
        appendIfNotEmpty(xml, "shipperPostCode", shipperPostCode);
        appendIfNotEmpty(xml, "consigneeCompanyName", consigneeCompanyName);
        appendIfNotEmpty(xml, "consigneeName", consigneeName);
        appendIfNotEmpty(xml, "street", street);
        appendIfNotEmpty(xml, "city", city);
        appendIfNotEmpty(xml, "stateOrProvince", stateOrProvince);
        appendIfNotEmpty(xml, "consigneeTelephone", consigneeTelephone);
        appendIfNotEmpty(xml, "consigneeFax", consigneeFax);
        appendIfNotEmpty(xml, "consigneePostCode", consigneePostCode);
        appendIfNotEmpty(xml, "consigneeEmail", consigneeEmail);
        appendIfNotEmpty(xml, "mctCode", mctCode);
        appendIfNotEmpty(xml, "note", note);
        if (declareInvoices != null) {
            for (DeclareInvoice invoice : declareInvoices) {
                xml.append(invoice.toXml());
            }
        }
        xml.append("</arg1>");
        if (StringUtils.isNotEmpty(service)) {
            xml.append(EstExpressBaseInfo.getCommenEnder(service));
        }
        return xml.toString();
    }

    /**
     * Appends {@code <tag>value</tag>} to {@code xml} when {@code value} is
     * non-empty; no-op otherwise. The value is written verbatim (no escaping),
     * matching the original serialization.
     */
    private static void appendIfNotEmpty(StringBuilder xml, String tag, String value) {
        if (StringUtils.isNotEmpty(value)) {
            xml.append('<').append(tag).append('>').append(value)
                    .append("</").append(tag).append('>');
        }
    }
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth.endpoint.user.impl;
import org.apache.oltu.oauth2.common.utils.JSONUtils;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.testng.IObjectFactory;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.ObjectFactory;
import org.testng.annotations.Test;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import org.wso2.carbon.identity.core.persistence.JDBCPersistenceManager;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.oauth.cache.AuthorizationGrantCache;
import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration;
import org.wso2.carbon.identity.oauth.endpoint.util.ClaimUtil;
import org.wso2.carbon.identity.oauth2.RequestObjectException;
import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationResponseDTO;
import org.wso2.carbon.identity.oauth2.internal.OAuth2ServiceComponentHolder;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import org.wso2.carbon.identity.openidconnect.OpenIDConnectClaimFilterImpl;
import org.wso2.carbon.identity.openidconnect.RequestObjectService;
import org.wso2.carbon.identity.openidconnect.dao.ScopeClaimMappingDAOImpl;
import org.wso2.carbon.identity.openidconnect.internal.OpenIDConnectServiceComponentHolder;
import org.wso2.carbon.identity.openidconnect.model.RequestedClaim;
import org.wso2.carbon.registry.core.service.RegistryService;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
/**
 * This class contains tests for UserInfoJSONResponseBuilder.
 */
@PrepareForTest({OAuthServerConfiguration.class, OAuth2Util.class, IdentityTenantUtil.class, RegistryService.class,
        AuthorizationGrantCache.class, ClaimUtil.class, IdentityUtil.class, UserInfoEndpointConfig.class,
        JDBCPersistenceManager.class})
@PowerMockIgnore({"javax.management.*"})
public class UserInfoJSONResponseBuilderTest extends UserInfoResponseBaseTest {

    private UserInfoJSONResponseBuilder userInfoJSONResponseBuilder;

    // Shared H2 connection handed to the mocked DataSource in mockDataSource().
    Connection con = null;

    @Mock
    private RequestObjectService requestObjectService;

    @BeforeClass
    public void setUpTest() throws Exception {

        OAuth2ServiceComponentHolder.getInstance().setScopeClaimMappingDAO(new ScopeClaimMappingDAOImpl());
        userInfoJSONResponseBuilder = new UserInfoJSONResponseBuilder();
        TestUtils.initiateH2Base();
        con = TestUtils.getConnection();
    }

    @ObjectFactory
    public IObjectFactory getObjectFactory() {

        return new org.powermock.modules.testng.PowerMockObjectFactory();
    }

    /**
     * Stubs the RequestObjectService to report no requested claims and wires
     * it (plus the default claim filter) into the OIDC component holder.
     */
    private void setUpRequestObjectService() throws RequestObjectException {

        List<RequestedClaim> requestedClaims = Collections.emptyList();
        when(requestObjectService.getRequestedClaimsForIDToken(anyString())).
                thenReturn(requestedClaims);
        when(requestObjectService.getRequestedClaimsForUserInfo(anyString())).
                thenReturn(requestedClaims);
        OpenIDConnectServiceComponentHolder.getInstance()
                .getOpenIDConnectClaimFilters()
                .add(new OpenIDConnectClaimFilterImpl());
        OpenIDConnectServiceComponentHolder.setRequestObjectService(requestObjectService);
    }

    /**
     * Routes JDBCPersistenceManager lookups to the shared in-memory H2
     * connection.
     * NOTE(review): getInstance() is stubbed through a mock instance here —
     * presumably it is a static accessor intercepted by PowerMock; confirm
     * before refactoring to a plain static stub.
     */
    private void mockDataSource() throws SQLException {

        mockStatic(JDBCPersistenceManager.class);
        DataSource dataSource = Mockito.mock(DataSource.class);
        JDBCPersistenceManager jdbcPersistenceManager = Mockito.mock(JDBCPersistenceManager.class);
        Mockito.when(dataSource.getConnection()).thenReturn(con);
        Mockito.when(jdbcPersistenceManager.getInstance()).thenReturn(jdbcPersistenceManager);
        Mockito.when(jdbcPersistenceManager.getDataSource()).thenReturn(dataSource);
    }

    /**
     * Stubs FrameworkUtils user-id resolution and registers a fully populated
     * authorized user with OAuth2Util. Extracted because this identical setup
     * block was duplicated in four tests.
     */
    private void mockAuthorizedUser() throws Exception {

        mockStatic(FrameworkUtils.class);
        when(FrameworkUtils.resolveUserIdFromUsername(anyInt(), anyString(), anyString()))
                .thenReturn(AUTHORIZED_USER_ID);
        AuthenticatedUser authenticatedUser = new AuthenticatedUser();
        authenticatedUser.setUserName(AUTHORIZED_USER_NAME);
        authenticatedUser.setTenantDomain(TENANT_DOT_COM);
        authenticatedUser.setUserStoreDomain(JDBC_DOMAIN);
        authenticatedUser.setUserId(AUTHORIZED_USER_ID);
        authenticatedUser.setAuthenticatedSubjectIdentifier(AUTHORIZED_USER_ID);
        mockAccessTokenDOInOAuth2Util(authenticatedUser);
    }

    @DataProvider(name = "responseStringInputs")
    public Object[][] responseStringInputs() {

        return getOidcScopeFilterTestData();
    }

    @Test(dataProvider = "responseStringInputs")
    public void testGetResponseString(Map<String, Object> inputClaims,
                                      Map<String, List<String>> oidcScopeMap,
                                      boolean getClaimsFromCache,
                                      String[] requestedScopes,
                                      Map<String, Object> expectedClaims) throws Exception {

        try {
            setUpRequestObjectService();
            prepareForResponseClaimTest(inputClaims, oidcScopeMap, getClaimsFromCache);
            mockDataSource();
            mockObjectsRelatedToTokenValidation();
            mockAuthorizedUser();
            String responseString =
                    userInfoJSONResponseBuilder.getResponseString(
                            getTokenResponseDTO(AUTHORIZED_USER_FULL_QUALIFIED, requestedScopes));
            Map<String, Object> claimsInResponse = JSONUtils.parseJSON(responseString);
            assertNotNull(claimsInResponse);
            assertFalse(claimsInResponse.isEmpty());
            assertNotNull(claimsInResponse.get(sub));
            for (Map.Entry<String, Object> expectClaimEntry : expectedClaims.entrySet()) {
                assertTrue(claimsInResponse.containsKey(expectClaimEntry.getKey()));
                assertNotNull(claimsInResponse.get(expectClaimEntry.getKey()));
                assertEquals(expectClaimEntry.getValue(), claimsInResponse.get(expectClaimEntry.getKey()));
            }
        } finally {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }

    @Test
    public void testEssentialClaims() throws Exception {

        final Map<String, Object> inputClaims = new HashMap<>();
        inputClaims.put(firstName, FIRST_NAME_VALUE);
        inputClaims.put(lastName, LAST_NAME_VALUE);
        inputClaims.put(email, EMAIL_VALUE);
        final Map<String, List<String>> oidcScopeMap = new HashMap<>();
        oidcScopeMap.put(OIDC_SCOPE, Collections.singletonList(firstName));
        List<String> essentialClaims = Collections.singletonList(email);
        prepareForResponseClaimTest(inputClaims, oidcScopeMap, false);
        setUpRequestObjectService();
        // Mock for essential claims.
        when(OAuth2Util.getEssentialClaims(anyString(), anyString())).thenReturn(essentialClaims);
        when(authorizationGrantCacheEntry.getEssentialClaims()).thenReturn(ESSENTIAL_CLAIM_JSON);
        mockDataSource();
        mockObjectsRelatedToTokenValidation();
        mockAuthorizedUser();
        String responseString =
                userInfoJSONResponseBuilder.getResponseString(getTokenResponseDTO(AUTHORIZED_USER_FULL_QUALIFIED));
        Map<String, Object> claimsInResponse = JSONUtils.parseJSON(responseString);
        assertNotNull(claimsInResponse);
        assertNotNull(claimsInResponse.get(sub));
        // Assert that claims not in scope were not sent
        assertNull(claimsInResponse.get(lastName));
        // Assert claim in scope was sent
        assertNotNull(claimsInResponse.get(firstName));
        assertEquals(claimsInResponse.get(firstName), FIRST_NAME_VALUE);
        // Assert whether essential claims are available even though they were not in requested scope.
        assertNotNull(claimsInResponse.get(email));
        assertEquals(claimsInResponse.get(email), EMAIL_VALUE);
    }

    @Test
    public void testUpdateAtClaim() throws Exception {

        String updateAtValue = "1509556412";
        testLongClaimInUserInfoResponse(UPDATED_AT, updateAtValue);
    }

    @Test
    public void testEmailVerified() throws Exception {

        String emailVerifiedClaimValue = "true";
        testBooleanClaimInUserInfoResponse(EMAIL_VERIFIED, emailVerifiedClaimValue);
    }

    @Test
    public void testPhoneNumberVerified() throws Exception {

        String phoneNumberVerifiedClaimValue = "true";
        testBooleanClaimInUserInfoResponse(PHONE_NUMBER_VERIFIED, phoneNumberVerifiedClaimValue);
    }

    /**
     * Asserts that a claim configured with the given string value comes back
     * in the userinfo response as a real JSON boolean.
     */
    private void testBooleanClaimInUserInfoResponse(String claimUri, String claimValue) throws Exception {

        initSingleClaimTest(claimUri, claimValue);
        setUpRequestObjectService();
        mockDataSource();
        mockObjectsRelatedToTokenValidation();
        mockAuthorizedUser();
        String responseString =
                userInfoJSONResponseBuilder.getResponseString(getTokenResponseDTO(AUTHORIZED_USER_FULL_QUALIFIED));
        Map<String, Object> claimsInResponse = JSONUtils.parseJSON(responseString);
        assertSubjectClaimPresent(claimsInResponse);
        assertNotNull(claimsInResponse.get(claimUri));
        // Assert whether the returned claim is of Boolean type
        assertEquals(claimsInResponse.get(claimUri), Boolean.parseBoolean(claimValue));
    }

    /**
     * Asserts that a claim configured with the given string value comes back
     * in the userinfo response as a JSON number (Integer or Long).
     */
    private void testLongClaimInUserInfoResponse(String claimUri, String claimValue) throws Exception {

        initSingleClaimTest(claimUri, claimValue);
        mockDataSource();
        mockObjectsRelatedToTokenValidation();
        mockAuthorizedUser();
        String responseString =
                userInfoJSONResponseBuilder.getResponseString(getTokenResponseDTO(AUTHORIZED_USER_FULL_QUALIFIED));
        Map<String, Object> claimsInResponse = JSONUtils.parseJSON(responseString);
        assertSubjectClaimPresent(claimsInResponse);
        assertNotNull(claimsInResponse.get(claimUri));
        assertTrue(claimsInResponse.get(claimUri) instanceof Integer || claimsInResponse.get(claimUri) instanceof Long);
    }

    @DataProvider(name = "subjectClaimDataProvider")
    public Object[][] provideSubjectData() {

        return getSubjectClaimTestData();
    }

    @Test(dataProvider = "subjectClaimDataProvider")
    public void testSubjectClaim(Map<String, Object> inputClaims,
                                 Object authorizedUser,
                                 boolean appendTenantDomain,
                                 boolean appendUserStoreDomain,
                                 String expectedSubjectValue) throws Exception {

        try {
            AuthenticatedUser authzUser = (AuthenticatedUser) authorizedUser;
            prepareForSubjectClaimTest(authzUser, inputClaims, appendTenantDomain, appendUserStoreDomain);
            updateAuthenticatedSubjectIdentifier(authzUser, appendTenantDomain, appendUserStoreDomain, inputClaims);
            when(userInfoJSONResponseBuilder.retrieveUserClaims(any(OAuth2TokenValidationResponseDTO.class)))
                    .thenReturn(inputClaims);
            mockDataSource();
            mockObjectsRelatedToTokenValidation();
            String responseString =
                    userInfoJSONResponseBuilder
                            .getResponseString(getTokenResponseDTO((authzUser).toFullQualifiedUsername()));
            Map<String, Object> claimsInResponse = JSONUtils.parseJSON(responseString);
            assertSubjectClaimPresent(claimsInResponse);
            assertEquals(claimsInResponse.get(sub), expectedSubjectValue);
        } finally {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
}
| |
package org.hisp.dhis.light.utils;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Sets;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.Validate;
import org.hisp.dhis.commons.filter.FilterUtils;
import org.hisp.dhis.dataanalysis.DataAnalysisService;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.datavalue.DataExportParams;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.datavalue.DataValueService;
import org.hisp.dhis.datavalue.DeflatedDataValue;
import org.hisp.dhis.expression.ExpressionService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.CalendarPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.YearlyPeriodType;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.filter.OrganisationUnitWithDataSetsFilter;
import org.hisp.dhis.system.filter.PastAndCurrentPeriodFilter;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserCredentials;
import org.hisp.dhis.validation.ValidationResult;
import org.hisp.dhis.validation.ValidationRule;
import org.hisp.dhis.validation.ValidationService;
import org.joda.time.DateTime;
import java.util.*;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
/**
 * Default {@link FormUtils} implementation backed by the DHIS 2 service layer.
 * Provides validation results, data-value maps and period/data-set lookups for
 * the light (mobile browser) data-entry UI.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class FormUtilsImpl
    implements FormUtils
{
    /** Default upper bound on the number of periods returned for a data set. */
    public static final Integer DEFAULT_MAX_PERIODS = 10;

    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private CurrentUserService currentUserService;

    public void setCurrentUserService( CurrentUserService currentUserService )
    {
        this.currentUserService = currentUserService;
    }

    private OrganisationUnitService organisationUnitService;

    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }

    private DataValueService dataValueService;

    public void setDataValueService( DataValueService dataValueService )
    {
        this.dataValueService = dataValueService;
    }

    private DataSetService dataSetService;

    public void setDataSetService( DataSetService dataSetService )
    {
        this.dataSetService = dataSetService;
    }

    private DataAnalysisService stdDevOutlierAnalysisService;

    public void setStdDevOutlierAnalysisService( DataAnalysisService stdDevOutlierAnalysisService )
    {
        this.stdDevOutlierAnalysisService = stdDevOutlierAnalysisService;
    }

    private DataAnalysisService minMaxOutlierAnalysisService;

    public void setMinMaxOutlierAnalysisService( DataAnalysisService minMaxOutlierAnalysisService )
    {
        this.minMaxOutlierAnalysisService = minMaxOutlierAnalysisService;
    }

    private SystemSettingManager systemSettingManager;

    public void setSystemSettingManager( SystemSettingManager systemSettingManager )
    {
        this.systemSettingManager = systemSettingManager;
    }

    private ValidationService validationService;

    public void setValidationService( ValidationService validationService )
    {
        this.validationService = validationService;
    }

    private ExpressionService expressionService;

    public void setExpressionService( ExpressionService expressionService )
    {
        this.expressionService = expressionService;
    }

    // -------------------------------------------------------------------------
    // Utils
    // -------------------------------------------------------------------------

    /**
     * Runs std-dev and min-max outlier analyses for the given org unit, data
     * elements and period (looking back two years from the period start) and
     * returns the flagged values keyed by "DE{dataElementId}OC{categoryOptionComboId}".
     *
     * @param organisationUnit the organisation unit to analyse
     * @param dataElements the data elements to analyse
     * @param period the period to analyse
     * @return map of outlier data values keyed by data element / option combo
     */
    @Override
    @SuppressWarnings("unchecked") // CollectionUtils.union returns a raw Collection
    public Map<String, DeflatedDataValue> getValidationViolations( OrganisationUnit organisationUnit,
        Collection<DataElement> dataElements, Period period )
    {
        Map<String, DeflatedDataValue> validationErrorMap = new HashMap<>();

        Double factor = (Double) systemSettingManager.getSystemSetting( SettingKey.FACTOR_OF_DEVIATION );

        // Analyse the two years leading up to the period under entry.
        Date from = new DateTime( period.getStartDate() ).minusYears( 2 ).toDate();

        Collection<DeflatedDataValue> stdDevs = stdDevOutlierAnalysisService.analyse(
            Sets.newHashSet( organisationUnit ), dataElements, Sets.newHashSet( period ), factor, from );

        Collection<DeflatedDataValue> minMaxs = minMaxOutlierAnalysisService.analyse(
            Sets.newHashSet( organisationUnit ), dataElements, Sets.newHashSet( period ), null, from );

        Collection<DeflatedDataValue> deflatedDataValues = CollectionUtils.union( stdDevs, minMaxs );

        for ( DeflatedDataValue deflatedDataValue : deflatedDataValues )
        {
            String key = String.format( "DE%dOC%d", deflatedDataValue.getDataElementId(),
                deflatedDataValue.getCategoryOptionComboId() );
            validationErrorMap.put( key, deflatedDataValue );
        }

        return validationErrorMap;
    }

    /**
     * Runs interactive validation-rule analysis for the given data set, period
     * and org unit, rendering each violated rule as
     * "{left side} {operator} {right side}" using human-readable expression
     * descriptions.
     *
     * @return one description string per violated validation rule
     */
    @Override
    public List<String> getValidationRuleViolations( OrganisationUnit organisationUnit, DataSet dataSet, Period period )
    {
        List<ValidationResult> validationRuleResults = new ArrayList<>( validationService.startInteractiveValidationAnalysis(
            dataSet, period, organisationUnit, null ) );

        List<String> validationRuleViolations = new ArrayList<>( validationRuleResults.size() );

        for ( ValidationResult result : validationRuleResults )
        {
            ValidationRule rule = result.getValidationRule();

            StringBuilder sb = new StringBuilder();
            sb.append( expressionService.getExpressionDescription( rule.getLeftSide().getExpression() ) );
            sb.append( " " ).append( rule.getOperator().getMathematicalOperator() ).append( " " );
            sb.append( expressionService.getExpressionDescription( rule.getRightSide().getExpression() ) );

            validationRuleViolations.add( sb.toString() );
        }

        return validationRuleViolations;
    }

    /**
     * Returns the persisted data values for the given org unit, data set and
     * period, keyed by "DE{dataElementId}OC{categoryOptionComboId}" (same key
     * scheme as {@link #getValidationViolations}).
     */
    @Override
    public Map<String, String> getDataValueMap( OrganisationUnit organisationUnit, DataSet dataSet, Period period )
    {
        Map<String, String> dataValueMap = new HashMap<>();

        List<DataValue> values = dataValueService.getDataValues( new DataExportParams()
            .setDataElements( dataSet.getDataElements() )
            .setPeriods( Sets.newHashSet( period ) )
            .setOrganisationUnits( Sets.newHashSet( organisationUnit ) ) );

        for ( DataValue dataValue : values )
        {
            DataElement dataElement = dataValue.getDataElement();
            DataElementCategoryOptionCombo optionCombo = dataValue.getCategoryOptionCombo();

            String key = String.format( "DE%dOC%d", dataElement.getId(), optionCombo.getId() );
            String value = dataValue.getValue();

            dataValueMap.put( key, value );
        }

        return dataValueMap;
    }

    /**
     * Returns a copy of the given collection containing only organisation
     * units that have at least one data set. The input collection is not
     * modified.
     */
    @Override
    public List<OrganisationUnit> organisationUnitWithDataSetsFilter( Collection<OrganisationUnit> organisationUnits )
    {
        List<OrganisationUnit> ous = new ArrayList<>( organisationUnits );
        FilterUtils.filter( ous, new OrganisationUnitWithDataSetsFilter() );
        return ous;
    }

    /**
     * Returns the current user's organisation units, sorted by natural order
     * and filtered to those with data sets.
     *
     * @throws NullPointerException if no current user is available
     */
    @Override
    public List<OrganisationUnit> getSortedOrganisationUnitsForCurrentUser()
    {
        User user = currentUserService.getCurrentUser();
        Validate.notNull( user );

        List<OrganisationUnit> organisationUnits = new ArrayList<>( user.getOrganisationUnits() );
        Collections.sort( organisationUnits );

        return organisationUnitWithDataSetsFilter( organisationUnits );
    }

    /**
     * Returns the data sets of the given organisation unit that the current
     * user may access. Super users see all of the unit's data sets; other
     * users only those in their authorized set.
     */
    @Override
    public List<DataSet> getDataSetsForCurrentUser( Integer organisationUnitId )
    {
        Validate.notNull( organisationUnitId );

        OrganisationUnit organisationUnit = organisationUnitService.getOrganisationUnit( organisationUnitId );
        List<DataSet> dataSets = new ArrayList<>( organisationUnit.getDataSets() );

        UserCredentials userCredentials = currentUserService.getCurrentUser().getUserCredentials();

        if ( !userCredentials.isSuper() )
        {
            dataSets.retainAll( userCredentials.getAllDataSets() );
        }

        return dataSets;
    }

    /**
     * Returns up to {@link #DEFAULT_MAX_PERIODS} periods for the data set,
     * starting at offset 0.
     */
    @Override
    public List<Period> getPeriodsForDataSet( Integer dataSetId )
    {
        return getPeriodsForDataSet( dataSetId, 0, DEFAULT_MAX_PERIODS );
    }

    /**
     * Returns periods for the data set in reverse chronological order,
     * paginated by {@code first} (offset) and {@code max} (page size). Data
     * sets with open future periods currently return all generated periods
     * unpaginated (see TODO below).
     *
     * @param dataSetId the data set identifier, not null
     * @param first zero-based offset into the period list
     * @param max maximum number of periods to return
     */
    @Override
    public List<Period> getPeriodsForDataSet( Integer dataSetId, int first, int max )
    {
        Validate.notNull( dataSetId );

        DataSet dataSet = dataSetService.getDataSet( dataSetId );

        CalendarPeriodType periodType;

        // The period type on the data set may be a detached instance; for the
        // yearly case instantiate a fresh type rather than casting.
        if ( dataSet.getPeriodType().getName().equalsIgnoreCase( "Yearly" ) )
        {
            periodType = new YearlyPeriodType();
        }
        else
        {
            periodType = (CalendarPeriodType) dataSet.getPeriodType();
        }

        //TODO implement properly
        if ( dataSet.getOpenFuturePeriods() > 0 )
        {
            List<Period> periods = periodType.generatePeriods( new Date() );
            Collections.reverse( periods );
            return periods;
        }
        else
        {
            List<Period> periods = periodType.generateLast5Years( new Date() );
            FilterUtils.filter( periods, new PastAndCurrentPeriodFilter() );
            Collections.reverse( periods );

            if ( periods.size() > (first + max) )
            {
                // Take 'max' periods starting at offset 'first'. The previous
                // subList( first, max ) returned the wrong window and was
                // empty whenever first >= max.
                periods = periods.subList( first, first + max );
            }

            return periods;
        }
    }
}
| |
/**
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.nakamura.auth.trusted;
import static org.apache.sling.jcr.resource.JcrResourceConstants.AUTHENTICATION_INFO_CREDENTIALS;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.auth.core.spi.AuthenticationHandler;
import org.apache.sling.auth.core.spi.AuthenticationInfo;
import org.sakaiproject.nakamura.api.auth.trusted.TrustedTokenService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import javax.jcr.Credentials;
import javax.jcr.SimpleCredentials;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Authentication handler for trusted authentication sources. These sources will
* authenticate users externally and eventually pass through this handler to establish a
* trusted relationship continuing into the container.
*/
@Component(immediate = true)
@Service
public final class TrustedAuthenticationHandler implements AuthenticationHandler {
  /**
   * Authentication type name
   */
  public static final String TRUSTED_AUTH = TrustedAuthenticationHandler.class.getName();
  /**
   * Attribute name for storage of the TrustedAuthentication object in the requests
   */
  static final String RA_AUTHENTICATION_TRUST = "sakai-trusted-authentication-trust";
  /**
   * The attribute name for the AuthenticationInformation in the request
   */
  static final String RA_AUTHENTICATION_INFO = "sakai-trusted-authentication-authinfo";
  /**
   * Path on which this authentication should be activated. It is active on all paths, as
   * the authentication itself is performed by the TrustedAuthenticationServlet that
   * places credentials in the session. Those credentials are then used to authenticate
   * all subsequent requests.
   */
  @Property(value = "/")
  static final String PATH_PROPERTY = AuthenticationHandler.PATH_PROPERTY;
  @Property(value = "Trusted Authentication Handler")
  static final String DESCRIPTION_PROPERTY = "service.description";
  @Property(value = "The Sakai Foundation")
  static final String VENDOR_PROPERTY = "service.vendor";
  private static final Logger LOGGER = LoggerFactory.getLogger(TrustedAuthenticationHandler.class);
  @Reference
  protected TrustedTokenService trustedTokenService;

  // -------------------- AuthenticationHandler methods --------------------
  /**
   * Extracts credentials placed on the request by a trusted external source.
   * First reuses a valid TrustedAuthentication/AuthenticationInfo pair already
   * cached on the request attributes; otherwise builds a new
   * TrustedAuthentication from the trusted token service and, if valid,
   * caches it plus a fresh AuthenticationInfo on the request. Returns
   * {@code null} when no trusted credentials are present.
   *
   * @see org.apache.sling.auth.core.spi.AuthenticationHandler#extractCredentials(javax.servlet.http.HttpServletRequest,
   *      javax.servlet.http.HttpServletResponse)
   */
  public AuthenticationInfo extractCredentials(HttpServletRequest request,
      HttpServletResponse response) {
    LOGGER.debug("Calling TrustedAuthenticationHandler extractCredentials ");
    // check for existing authentication information in the request
    Object auth = request.getAttribute(RA_AUTHENTICATION_TRUST);
    if ( auth instanceof TrustedAuthentication ) {
      TrustedAuthentication trustedAuthentication = (TrustedAuthentication) auth;
      if ( trustedAuthentication.isValid() ) {
        Object authInfo = request.getAttribute(RA_AUTHENTICATION_INFO);
        if ( authInfo instanceof AuthenticationInfo ) {
          AuthenticationInfo authenticationInfo = (AuthenticationInfo) authInfo;
          Credentials credentials = (Credentials)authenticationInfo.get(AUTHENTICATION_INFO_CREDENTIALS);
          // Only SimpleCredentials are trusted here; anything else is rebuilt below.
          if ( credentials instanceof SimpleCredentials ) {
            LOGGER.debug("Got AuthInfo {} credentials {} ",authInfo, credentials);
            return authenticationInfo;
          } else {
            LOGGER.debug("Creadentials not SimpleCredentials :{} ",credentials);
          }
        } else {
          LOGGER.debug("Authentication Info not AuthenticationInfo :{} ",authInfo);
        }
      } else {
        LOGGER.debug("Authentication not trusted {} ", auth);
      }
    } else {
      LOGGER.debug("No Existing TrustedAuthentication in request attributes, found {} ", auth);
    }
    // create a new authentication in the request.
    TrustedAuthentication trustedAuthentication = new TrustedAuthentication(request, response);
    if (trustedAuthentication.isValid()) {
      request.setAttribute(RA_AUTHENTICATION_TRUST, trustedAuthentication);
      // construct the authentication info and store credentials on the request
      AuthenticationInfo authInfo = new AuthenticationInfo(TRUSTED_AUTH);
      authInfo.put(AUTHENTICATION_INFO_CREDENTIALS, trustedAuthentication.getCredentials());
      request.setAttribute(RA_AUTHENTICATION_INFO, authInfo);
      LOGGER.debug("Trusted Authentication is valid {} ",trustedAuthentication);
      return authInfo;
    } else {
      LOGGER.debug("Trusted Authentication is not valid {} ",trustedAuthentication);
      // no valid credentials found in the request.
      return null;
    }
  }

  /**
   * Drops any trusted credentials: delegates to the token service
   * implementation (when it is the expected one) and clears the cached
   * request attributes.
   *
   * @see org.apache.sling.auth.core.spi.AuthenticationHandler#dropCredentials(javax.servlet.http.HttpServletRequest,
   *      javax.servlet.http.HttpServletResponse)
   */
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="BC_VACUOUS_INSTANCEOF",justification="Could be injected from annother bundle")
  public void dropCredentials(HttpServletRequest request, HttpServletResponse response)
      throws IOException {
    if ( trustedTokenService instanceof TrustedTokenServiceImpl ) {
      ((TrustedTokenServiceImpl) trustedTokenService).dropCredentials(request,response);
    }
    request.setAttribute(RA_AUTHENTICATION_INFO, null);
    request.setAttribute(RA_AUTHENTICATION_TRUST, null);
  }

  /**
   * Always returns {@code false}: this handler never requests credentials
   * from the client; in the trusted scenario the login landing page performs
   * the redirect instead.
   *
   * @see org.apache.sling.auth.core.spi.AuthenticationHandler#requestCredentials(javax.servlet.http.HttpServletRequest,
   *      javax.servlet.http.HttpServletResponse)
   */
  public boolean requestCredentials(HttpServletRequest arg0, HttpServletResponse arg1)
      throws IOException {
    // forces credentials to be requested from the client, in the trusted scenario this
    // would redirect to the login landing page.
    return false;
  }

  /**
   * Authentication information for storage in session and/or request.<br/>
   * <br/>
   * By being an inner class with a private constructor, it is harder for an
   * external source to inject into the authentication chain. Note that this
   * class is deliberately NOT static: it reads the enclosing handler's
   * {@code trustedTokenService} reference.
   */
  final class TrustedAuthentication {
    // Credentials resolved from the trusted token service, or null when the
    // service is not the expected implementation / yields no credentials.
    private final Credentials cred;
    @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="BC_VACUOUS_INSTANCEOF",justification="Could be injected from annother bundle")
    private TrustedAuthentication(HttpServletRequest req, HttpServletResponse response) {
      // This is placed here by the TrustedAuthenticationServlet, that will be in the same
      // web container as Sling and so sharing a session.
      if ( trustedTokenService instanceof TrustedTokenServiceImpl ) {
        cred = ((TrustedTokenServiceImpl) trustedTokenService).getCredentials(req, response);
        LOGGER.debug("Got Credentials from the trusted token service as {} ", cred);
      } else {
        cred = null;
        LOGGER.error("TrustedTokenService is not the expected implementation, " +
            "there is a rogue implementation in the OSGi container, all creadentials will be null");
      }
    }
    Credentials getCredentials() {
      return cred;
    }
    // Valid exactly when the token service produced non-null credentials.
    boolean isValid() {
      return cred != null;
    }
  }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.value.DfaRelationValue.RelationType;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.MethodUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import static com.intellij.codeInspection.dataFlow.MethodContract.ValueConstraint.*;
import static com.intellij.codeInspection.dataFlow.StandardMethodContract.createConstraintArray;
/**
* @author peter
*/
public class HardcodedContracts {
  // Method-name patterns, compiled once (matched via MethodUtils.methodMatches).
  private static final Pattern FIRST_OR_LAST = Pattern.compile("first|last");
  private static final Pattern CONTAINS_KEY_VALUE = Pattern.compile("containsKey|containsValue");

  /**
   * Returns built-in contracts for well-known JDK / Guava / commons / test-framework
   * methods that have no @Contract annotation. Branches are checked in order:
   * System.exit, Guava Preconditions, Objects.requireNonNull, String index/substring,
   * Collection/Map emptiness and equality, Validate/Assert helpers, test frameworks,
   * and Optional. Returns an empty list when nothing matches or the method lives in
   * an injected fragment.
   *
   * @param method the method to look up
   * @param call   the call site, if available (needed for assertThat-style matching)
   */
  public static List<MethodContract> getHardcodedContracts(@NotNull PsiMethod method, @Nullable PsiMethodCallExpression call) {
    PsiClass owner = method.getContainingClass();
    // Injected fragments (e.g. code inside string literals) are skipped entirely.
    if (owner == null ||
        InjectedLanguageManager.getInstance(owner.getProject()).isInjectedFragment(owner.getContainingFile())) {
      return Collections.emptyList();
    }
    final int paramCount = method.getParameterList().getParametersCount();
    String className = owner.getQualifiedName();
    if (className == null) return Collections.emptyList();
    String methodName = method.getName();
    if ("java.lang.System".equals(className)) {
      // System.exit never returns normally.
      if ("exit".equals(methodName)) {
        return Collections.singletonList(new StandardMethodContract(createConstraintArray(paramCount), THROW_EXCEPTION));
      }
    }
    else if ("com.google.common.base.Preconditions".equals(className)) {
      if ("checkNotNull".equals(methodName) && paramCount > 0) {
        return failIfNull(0, paramCount);
      }
      // checkArgument/checkState throw when their first (boolean) argument is false.
      if (("checkArgument".equals(methodName) || "checkState".equals(methodName)) && paramCount > 0) {
        MethodContract.ValueConstraint[] constraints = createConstraintArray(paramCount);
        constraints[0] = FALSE_VALUE;
        return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
      }
    }
    else if ("java.util.Objects".equals(className)) {
      if ("requireNonNull".equals(methodName) && paramCount > 0) {
        return failIfNull(0, paramCount);
      }
    }
    else if (CommonClassNames.JAVA_LANG_STRING.equals(className)) {
      // charAt/codePointAt throw unless 0 <= index < length().
      if (("charAt".equals(methodName) || "codePointAt".equals(methodName)) && paramCount == 1) {
        return Arrays.asList(nonnegativeArgumentContract(0),
                             specialFieldRangeContract(0, RelationType.LT, SpecialField.STRING_LENGTH));
      }
      // substring/subSequence: both indices in [0, length], begin <= end.
      else if (("substring".equals(methodName) || "subSequence".equals(methodName)) && paramCount <= 2) {
        List<MethodContract> contracts = new ArrayList<>(5);
        contracts.add(nonnegativeArgumentContract(0));
        contracts.add(specialFieldRangeContract(0, RelationType.LE, SpecialField.STRING_LENGTH));
        if (paramCount == 2) {
          contracts.add(nonnegativeArgumentContract(1));
          contracts.add(specialFieldRangeContract(1, RelationType.LE, SpecialField.STRING_LENGTH));
          contracts.add(MethodContract
            .singleConditionContract(ContractValue.argument(0), RelationType.LE.getNegated(), ContractValue.argument(1),
                                     THROW_EXCEPTION));
        }
        return contracts;
      }
      else if ("isEmpty".equals(methodName) && paramCount == 0) {
        return SpecialField.STRING_LENGTH.getEmptyContracts();
      }
    }
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_COLLECTION, PsiType.BOOLEAN, "isEmpty")) {
      return SpecialField.COLLECTION_SIZE.getEmptyContracts();
    }
    // contains on an empty collection is always false.
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_COLLECTION, PsiType.BOOLEAN, "contains", (PsiType)null)) {
      return Collections.singletonList(MethodContract.singleConditionContract(
        ContractValue.qualifier().specialField(SpecialField.COLLECTION_SIZE), RelationType.EQ, ContractValue.zero(), FALSE_VALUE));
    }
    // Set/List equals is false when sizes differ.
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_SET, PsiType.BOOLEAN, "equals", (PsiType)null) ||
             MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_LIST, PsiType.BOOLEAN, "equals", (PsiType)null)) {
      return Collections.singletonList(MethodContract.singleConditionContract(
        ContractValue.qualifier().specialField(SpecialField.COLLECTION_SIZE), RelationType.NE,
        ContractValue.argument(0).specialField(SpecialField.COLLECTION_SIZE), FALSE_VALUE));
    }
    // List.get throws unless 0 <= index < size().
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_LIST, null, "get", PsiType.INT)) {
      return Arrays.asList(nonnegativeArgumentContract(0),
                           specialFieldRangeContract(0, RelationType.LT, SpecialField.COLLECTION_SIZE));
    }
    // SortedSet.first()/last() throw on an empty set.
    else if (MethodUtils.methodMatches(method, "java.util.SortedSet", null, FIRST_OR_LAST)) {
      return Collections.singletonList(MethodContract.singleConditionContract(
        ContractValue.qualifier().specialField(SpecialField.COLLECTION_SIZE), RelationType.EQ,
        ContractValue.zero(), THROW_EXCEPTION));
    }
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_MAP, PsiType.BOOLEAN, "isEmpty")) {
      return SpecialField.MAP_SIZE.getEmptyContracts();
    }
    // containsKey/containsValue on an empty map is always false.
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_MAP, PsiType.BOOLEAN, CONTAINS_KEY_VALUE, (PsiType)null)) {
      return Collections.singletonList(MethodContract.singleConditionContract(
        ContractValue.qualifier().specialField(SpecialField.MAP_SIZE), RelationType.EQ, ContractValue.zero(), FALSE_VALUE));
    }
    // Map.equals is false when sizes differ.
    else if (MethodUtils.methodMatches(method, CommonClassNames.JAVA_UTIL_MAP, PsiType.BOOLEAN, "equals", (PsiType)null)) {
      return Collections.singletonList(MethodContract.singleConditionContract(
        ContractValue.qualifier().specialField(SpecialField.MAP_SIZE), RelationType.NE,
        ContractValue.argument(0).specialField(SpecialField.MAP_SIZE), FALSE_VALUE));
    }
    else if ("org.apache.commons.lang.Validate".equals(className) ||
             "org.apache.commons.lang3.Validate".equals(className) ||
             "org.springframework.util.Assert".equals(className)) {
      if (("isTrue".equals(methodName) || "state".equals(methodName)) && paramCount > 0) {
        MethodContract.ValueConstraint[] constraints = createConstraintArray(paramCount);
        constraints[0] = FALSE_VALUE;
        return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
      }
      if ("notNull".equals(methodName) && paramCount > 0) {
        MethodContract.ValueConstraint[] constraints = createConstraintArray(paramCount);
        constraints[0] = NULL_VALUE;
        return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
      }
    }
    else if (isJunit(className) || isTestng(className) ||
             className.startsWith("com.google.common.truth.") ||
             className.startsWith("org.assertj.core.api.")) {
      return handleTestFrameworks(paramCount, className, methodName, call);
    }
    else if (TypeUtils.isOptional(owner)) {
      // Optional.get/orElseThrow: throws when absent, non-null otherwise.
      if (DfaOptionalSupport.isOptionalGetMethodName(methodName) || "orElseThrow".equals(methodName)) {
        return Arrays.asList(optionalAbsentContract(THROW_EXCEPTION), MethodContract.trivialContract(NOT_NULL_VALUE));
      }
      else if ("isPresent".equals(methodName)) {
        return Arrays.asList(optionalAbsentContract(FALSE_VALUE), MethodContract.trivialContract(TRUE_VALUE));
      }
    }
    return Collections.emptyList();
  }

  /**
   * Contract: when the qualifier Optional is absent, produce {@code returnValue}.
   */
  static MethodContract optionalAbsentContract(MethodContract.ValueConstraint returnValue) {
    return MethodContract
      .singleConditionContract(ContractValue.qualifier(), RelationType.IS, ContractValue.optionalValue(false), returnValue);
  }

  /**
   * Contract: throw when argument {@code argNumber} is negative.
   */
  static MethodContract nonnegativeArgumentContract(int argNumber) {
    return MethodContract
      .singleConditionContract(ContractValue.argument(argNumber), RelationType.LT, ContractValue.zero(), THROW_EXCEPTION);
  }

  /**
   * Contract: throw unless argument {@code index} relates to the qualifier's
   * special field (e.g. length/size) by {@code type} — the thrown condition is
   * the negation of the required relation.
   */
  static MethodContract specialFieldRangeContract(int index, RelationType type, SpecialField specialField) {
    return MethodContract.singleConditionContract(ContractValue.argument(index), type.getNegated(),
                                                  ContractValue.qualifier().specialField(specialField), THROW_EXCEPTION);
  }

  // Recognizes JUnit 3/4/5 assertion classes by package prefix.
  private static boolean isJunit(String className) {
    return className.startsWith("junit.framework.") || className.startsWith("org.junit.");
  }

  private static boolean isTestng(String className) {
    return className.startsWith("org.testng.");
  }

  /**
   * Returns true when the expression is a Hamcrest matcher that asserts
   * non-nullness: notNullValue(), not(equalTo(null)), or is(...) wrapping one
   * of those (checked recursively).
   */
  private static boolean isNotNullMatcher(PsiExpression expr) {
    if (expr instanceof PsiMethodCallExpression) {
      String calledName = ((PsiMethodCallExpression)expr).getMethodExpression().getReferenceName();
      if ("notNullValue".equals(calledName)) {
        return true;
      }
      if ("not".equals(calledName)) {
        // not(equalTo(null)) is equivalent to notNullValue().
        PsiExpression[] notArgs = ((PsiMethodCallExpression)expr).getArgumentList().getExpressions();
        if (notArgs.length == 1 &&
            notArgs[0] instanceof PsiMethodCallExpression &&
            "equalTo".equals(((PsiMethodCallExpression)notArgs[0]).getMethodExpression().getReferenceName())) {
          PsiExpression[] equalArgs = ((PsiMethodCallExpression)notArgs[0]).getArgumentList().getExpressions();
          if (equalArgs.length == 1 && ExpressionUtils.isNullLiteral(equalArgs[0])) {
            return true;
          }
        }
      }
      if ("is".equals(calledName)) {
        // is(matcher) delegates to the wrapped matcher.
        PsiExpression[] args = ((PsiMethodCallExpression)expr).getArgumentList().getExpressions();
        if (args.length == 1) return isNotNullMatcher(args[0]);
      }
    }
    return false;
  }

  /**
   * Contracts for JUnit/TestNG/Truth/AssertJ assertion methods. Note TestNG
   * puts the tested value first while JUnit puts it last (after the message).
   */
  private static List<MethodContract> handleTestFrameworks(int paramCount, String className, String methodName,
                                                           @Nullable PsiMethodCallExpression call) {
    if (("assertThat".equals(methodName) || "assumeThat".equals(methodName) || "that".equals(methodName)) && call != null) {
      return handleAssertThat(paramCount, call);
    }
    if (!isJunit(className) && !isTestng(className)) {
      return Collections.emptyList();
    }
    boolean testng = isTestng(className);
    if ("fail".equals(methodName)) {
      return Collections.singletonList(new StandardMethodContract(createConstraintArray(paramCount), THROW_EXCEPTION));
    }
    if (paramCount == 0) return Collections.emptyList();
    // TestNG: value-first; JUnit: value-last.
    int checkedParam = testng ? 0 : paramCount - 1;
    MethodContract.ValueConstraint[] constraints = createConstraintArray(paramCount);
    if ("assertTrue".equals(methodName) || "assumeTrue".equals(methodName)) {
      constraints[checkedParam] = FALSE_VALUE;
      return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
    }
    if ("assertFalse".equals(methodName) || "assumeFalse".equals(methodName)) {
      constraints[checkedParam] = TRUE_VALUE;
      return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
    }
    if ("assertNull".equals(methodName)) {
      constraints[checkedParam] = NOT_NULL_VALUE;
      return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
    }
    if ("assertNotNull".equals(methodName) || "assumeNotNull".equals(methodName)) {
      return failIfNull(checkedParam, paramCount);
    }
    return Collections.emptyList();
  }

  /**
   * Contracts for assertThat-style calls: fail-if-null when a not-null matcher
   * follows the tested value, or when the call is chained into isNotNull().
   */
  @NotNull
  private static List<MethodContract> handleAssertThat(int paramCount, @NotNull PsiMethodCallExpression call) {
    PsiExpression[] args = call.getArgumentList().getExpressions();
    if (args.length == paramCount) {
      // A not-null matcher at position i constrains the value at position i-1.
      for (int i = 1; i < args.length; i++) {
        if (isNotNullMatcher(args[i])) {
          return failIfNull(i - 1, paramCount);
        }
      }
      // AssertJ style: assertThat(x).isNotNull().
      if (args.length == 1 && hasNotNullChainCall(call)) {
        return failIfNull(0, 1);
      }
    }
    return Collections.emptyList();
  }

  // True when the call is followed (within the same statement/member) by a
  // chained .isNotNull() invocation.
  private static boolean hasNotNullChainCall(PsiMethodCallExpression call) {
    Iterable<PsiElement> exprParents = SyntaxTraverser.psiApi().parents(call).
      takeWhile(e -> !(e instanceof PsiStatement) && !(e instanceof PsiMember));
    return ContainerUtil.exists(exprParents, HardcodedContracts::isNotNullCall);
  }

  // Matches the "isNotNull" reference inside a method call expression.
  private static boolean isNotNullCall(PsiElement ref) {
    return ref instanceof PsiReferenceExpression &&
           "isNotNull".equals(((PsiReferenceExpression)ref).getReferenceName()) &&
           ref.getParent() instanceof PsiMethodCallExpression;
  }

  /**
   * Single contract: throw when argument {@code argIndex} (of {@code argCount}) is null.
   */
  @NotNull
  private static List<MethodContract> failIfNull(int argIndex, int argCount) {
    MethodContract.ValueConstraint[] constraints = createConstraintArray(argCount);
    constraints[argIndex] = NULL_VALUE;
    return Collections.singletonList(new StandardMethodContract(constraints, THROW_EXCEPTION));
  }

  /**
   * Returns whether a method with hardcoded contracts is also considered pure.
   * System.exit and the Supplier overload of Objects.requireNonNull (which may
   * invoke an arbitrary supplier) are the exceptions.
   */
  public static boolean isHardcodedPure(PsiMethod method) {
    String qName = PsiUtil.getMemberQualifiedName(method);
    if ("java.lang.System.exit".equals(qName)) {
      return false;
    }
    if ("java.util.Objects.requireNonNull".equals(qName)) {
      PsiParameter[] parameters = method.getParameterList().getParameters();
      if (parameters.length == 2 && parameters[1].getType().getCanonicalText().contains("Supplier")) {
        return false;
      }
    }
    return true;
  }

  /**
   * Returns true when the element is a method with hardcoded contracts, or a
   * parameter of such a method (resolved via its grandparent).
   */
  public static boolean hasHardcodedContracts(@Nullable PsiElement element) {
    if (element instanceof PsiMethod) {
      return !getHardcodedContracts((PsiMethod)element, null).isEmpty();
    }
    if (element instanceof PsiParameter) {
      PsiElement parent = element.getParent();
      return parent != null && hasHardcodedContracts(parent.getParent());
    }
    return false;
  }
}
| |
package CreateGraph;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.PriorityQueue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.utils.VertexObject;
/**
*
*This class is used to generate K nearest neighbors according to weight values. Usually the value of k will be around 50. This reduces
*the dataset size and computational complexity. The input to this class is the output from Neighboorhood job.
*
*Input format:
*
*user:itemsclicked \t Nei1:commonItems,nei2:commonItems...Nein:commonItems
*
*
*Output Format:
*
*user:itemsclickd:sum \t Nei1:weight,Nei2:weight...Nein:weight
*
* @author rohitp
*
*/
public class KNN_Weight extends Configured implements Tool{
	// Job settings, populated once from the command line in configure().
	// NOTE(review): public static mutable state — fine for a single driver invocation,
	// but not safe if two jobs were configured in the same JVM.
	public static String inputPath_;
	public static String outputPath_;
	public static String kNeighbors;
	public static int numReducers_;
	/**
	 * Reads job settings from the command line:
	 * args[0] = input path, args[1] = output path,
	 * args[2] = K (number of neighbors to keep), args[3] = number of reducers.
	 */
	public static void configure(String[] args) throws IOException {
		inputPath_ = args[0];
		outputPath_ = args[1];
		kNeighbors = args[2];
		numReducers_ = Integer.parseInt(args[3].trim());
	}
	/**
	 * Function to call the mapreduce jobs in this class.
	 * Loads the configuration, then runs the single KNN/weight job synchronously.
	 */
	public int run(String[] args) throws Exception {
		System.out.println("\n*****Loading the Configuration for this Job*****\n");
		configure(args);
		System.out.println("\n*****Done....*****\n");
		System.out.println("Computing the KNN and weights");
		JobConf conf1 = createJob1(args);
		RunningJob job1 = JobClient.runJob(conf1);
		System.out.println("\n*****Done....*****\n");
		return 0;
	}
	/**
	 * Builds the JobConf for the KNN/weight job: key-value text input, the
	 * KNN_Weights_Mapper, and an identity reducer. K is passed to the mapper
	 * via the "numNeighbors" configuration property. The output path is deleted
	 * first so re-runs do not fail on an existing directory.
	 */
	public static JobConf createJob1(String[] args) throws Exception
	{
		JobConf jobConf = new JobConf(KNN_Weight.class);
		jobConf.setJarByClass(KNN_Weight.class);
		jobConf.setJobName("KNN_Weights");
		jobConf.setInputFormat(KeyValueTextInputFormat.class);
		jobConf.setOutputFormat(TextOutputFormat.class);
		System.out.println("Input Path on HDFS: " +inputPath_);
		System.out.println("Output Path on HDFS: " + outputPath_);
		/*Pass the mappingsFile to mapper*/
		jobConf.setNumReduceTasks(numReducers_);
		jobConf.set("numNeighbors", kNeighbors);
		Path usersInput = new Path(inputPath_);
		FileInputFormat.setInputPaths(jobConf,usersInput);
		FileSystem.get(jobConf).delete(new Path(outputPath_), true);
		FileOutputFormat.setOutputPath(jobConf, new Path(outputPath_));
		jobConf.setMapperClass(KNN_Weights_Mapper.class);
		jobConf.setReducerClass(IdentityReducer.class);
		jobConf.setMapOutputKeyClass(Text.class);
		jobConf.setMapOutputValueClass(Text.class);
		jobConf.setOutputKeyClass(Text.class);
		jobConf.setOutputValueClass(Text.class);
		return jobConf;
	}
	/**
	 * Mapper that keeps, per user, only the K neighbors with the highest
	 * common-item counts and emits each kept neighbor's normalized weight
	 * (count / sum of kept counts).
	 * @author rohitp
	 *
	 */
	public static final class KNN_Weights_Mapper extends MapReduceBase implements Mapper<Text, Text, Text, Text> {
		// K, read from the "numNeighbors" job property set in createJob1().
		public static int kNeighbors_;
		public void configure(JobConf jobConf) {
			kNeighbors_ = Integer.parseInt(jobConf.get("numNeighbors"));
		}
		/**
		 * Class to compare two vertices based on the weight property
		 * (ascending by numCommonItems, so the priority queue acts as a
		 * min-heap whose head is the weakest kept neighbor).
		 * @author rohit
		 *
		 */
		public class VertexWeightComparator implements Comparator<VertexObject>
		{
			public int compare(VertexObject x, VertexObject y)
			{
				// Assume neither string is null. Real code should
				// probably be more robust
				if (x.numCommonItems < y.numCommonItems)
				{
					return -1;
				}
				if (x.numCommonItems > y.numCommonItems)
				{
					return 1;
				}
				return 0;
			}
		}
		/**
		 * Function to add the vertex objects to the priority queue.
		 * Keeps the queue capped at kNeighbors_: once full, a new object only
		 * replaces the head (current minimum) when it has more common items.
		 * @param queue
		 * @param obj
		 */
		public static void addToQueue(PriorityQueue<VertexObject> queue, VertexObject obj)
		{
			if(queue.size() == kNeighbors_)
			{
				VertexObject head = queue.peek();
				if(obj.numCommonItems > head.numCommonItems)
				{
					queue.poll();
					// System.out.println("Removing " + head.vertexName + " :-:" + head.numCommonItems + " to queue");
					queue.add(obj);
					// System.out.println("Adding " + obj.vertexName + " :-:" + obj.numCommonItems + " to queue");
				}
			}
			else
			{
				queue.add(obj);
				// System.out.println("Adding " + obj.vertexName + " :-:" + obj.numCommonItems + " to queue");
			}
		}
		/**
		 * <pre>
		 * Input -
		 * key - userID1:itemcClicked
		 * value - {userID2:commonItems,userID3:commonItems ... userIDn:commonItems}
		 * Output -
		 * key - userID1:itemcClicked:WeightSum
		 * value - {userID2:weight,userID3:weight, ... userIDk:weight}
		 * </pre>
		 * Parses the neighbor list, keeps the top-K by common-item count, then
		 * emits each kept neighbor with weight = count / sum-of-kept-counts.
		 * Any parse failure is logged and the record is skipped.
		 */
		public void map(Text key, Text value, OutputCollector<Text, Text> collector, Reporter reporter)
				throws IOException {
			try
			{
				Comparator<VertexObject> comparator = new VertexWeightComparator();
				PriorityQueue<VertexObject> queue = new PriorityQueue<VertexObject>(kNeighbors_, comparator);
				/*values will be just one.*/
				String neighbors = value.toString();
				String[] temp = neighbors.split(",");
				for(int i=0;i<temp.length;i++)
				{
					String user_commonCount = temp[i].trim();
					String[] temp1 = user_commonCount.split("\\:");
					VertexObject obj = new VertexObject();
					obj.vertexName = temp1[0].trim();
					obj.numCommonItems = Integer.parseInt(temp1[1].trim());
					/*Add it to the queue*/
					addToQueue(queue,obj);
				}
				// First pass: total common-item count of the kept neighbors (for normalization).
				double sum = 0.0;
				StringBuilder st = new StringBuilder();
				Iterator<VertexObject> it = queue.iterator();
				while(it.hasNext())
				{
					VertexObject obj = it.next();
					sum = sum+obj.numCommonItems;
				}
				// Second pass: drain the queue, emitting each neighbor's normalized weight.
				// NOTE(review): the value string ends with a trailing "," — downstream
				// consumers appear to tolerate this; confirm before changing the format.
				while(queue.size() != 0)
				{
					VertexObject obj = queue.remove();
					double weight = obj.numCommonItems/sum;
					st.append(obj.vertexName+":"+weight+",");
				}
				collector.collect(new Text(key.toString()+":"+sum), new Text(st.toString()));
			}catch(Exception e)
			{
				e.printStackTrace();
			}
		}
	}
	public static void main(String args[]) throws Exception
	{
		System.out.println("\nRunning the K Neighborhood creation job ***Updated***\n");
		ToolRunner.run(new Configuration(), new KNN_Weight(), args);
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.checks;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Collections;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.controller.PrereqCheckRequest;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
import org.apache.ambari.server.orm.dao.ServiceConfigDAO;
import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
import org.apache.ambari.server.orm.entities.ServiceConfigEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.stack.PrereqCheckStatus;
import org.apache.ambari.server.state.stack.PrerequisiteCheck;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Provider;
/**
 * Unit tests for {@code ServiceCheckValidityCheck}: the upgrade prerequisite check
 * must FAIL when a service's last service check is older than its last configuration
 * change, when no service check exists at all, or when a command has a null start time.
 */
public class ServiceCheckValidityCheckTest {
  private static final String CLUSTER_NAME = "cluster1";
  private static final long CLUSTER_ID = 1L;
  private static final String SERVICE_NAME = "HDFS";
  private static final long CONFIG_CREATE_TIMESTAMP = 1461518722202L;
  private static final String COMMAND_DETAIL = "HDFS service check";
  // Service check predates the config change, so it is considered outdated.
  private static final long SERVICE_CHECK_START_TIME = CONFIG_CREATE_TIMESTAMP - 2000L;
  private static final String SERVICE_COMPONENT_NAME = "service component";
  private ServiceCheckValidityCheck serviceCheckValidityCheck;
  private ServiceConfigDAO serviceConfigDAO;
  private HostRoleCommandDAO hostRoleCommandDAO;
  private Service service;
  private AmbariMetaInfo ambariMetaInfo;

  @Before
  public void setUp() throws Exception {
    final Clusters clusters = mock(Clusters.class);
    service = mock(Service.class);
    serviceConfigDAO = mock(ServiceConfigDAO.class);
    hostRoleCommandDAO = mock(HostRoleCommandDAO.class);
    ambariMetaInfo = mock(AmbariMetaInfo.class);
    serviceCheckValidityCheck = new ServiceCheckValidityCheck();
    // Wire the check's injected providers to our mocks.
    serviceCheckValidityCheck.hostRoleCommandDAOProvider = new Provider<HostRoleCommandDAO>() {
      @Override
      public HostRoleCommandDAO get() {
        return hostRoleCommandDAO;
      }
    };
    serviceCheckValidityCheck.serviceConfigDAOProvider = new Provider<ServiceConfigDAO>() {
      @Override
      public ServiceConfigDAO get() {
        return serviceConfigDAO;
      }
    };
    serviceCheckValidityCheck.clustersProvider = new Provider<Clusters>() {
      @Override
      public Clusters get() {
        return clusters;
      }
    };
    Cluster cluster = mock(Cluster.class);
    when(clusters.getCluster(CLUSTER_NAME)).thenReturn(cluster);
    when(cluster.getClusterId()).thenReturn(CLUSTER_ID);
    when(cluster.getServices()).thenReturn(ImmutableMap.of(SERVICE_NAME, service));
    when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP", "2.2"));
    when(service.getName()).thenReturn(SERVICE_NAME);
    when(service.getDesiredStackId()).thenReturn(new StackId("HDP", "2.2"));
    serviceCheckValidityCheck.ambariMetaInfo = new Provider<AmbariMetaInfo>() {
      @Override
      public AmbariMetaInfo get() {
        return ambariMetaInfo;
      }
    };
    when(ambariMetaInfo.isServiceWithNoConfigs(Mockito.anyString(), Mockito.anyString(),
        Mockito.anyString())).thenReturn(false);
  }

  /** Stubs the HDFS service as active (not in maintenance) with one version-advertising component. */
  private void mockActiveService() {
    ServiceComponent serviceComponent = mock(ServiceComponent.class);
    when(serviceComponent.isVersionAdvertised()).thenReturn(true);
    when(service.getMaintenanceState()).thenReturn(MaintenanceState.OFF);
    when(service.getServiceComponents()).thenReturn(ImmutableMap.of(SERVICE_COMPONENT_NAME, serviceComponent));
  }

  /** Builds the "last" HDFS service config entity, created at {@link #CONFIG_CREATE_TIMESTAMP}. */
  private static ServiceConfigEntity hdfsConfig() {
    ServiceConfigEntity serviceConfigEntity = new ServiceConfigEntity();
    serviceConfigEntity.setServiceName(SERVICE_NAME);
    serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
    return serviceConfigEntity;
  }

  /** Builds a SERVICE_CHECK command entity with the given detail, start time, and role. */
  private static HostRoleCommandEntity serviceCheckCommand(String commandDetail, Long startTime, Role role) {
    HostRoleCommandEntity entity = new HostRoleCommandEntity();
    entity.setRoleCommand(RoleCommand.SERVICE_CHECK);
    entity.setCommandDetail(commandDetail);
    entity.setStartTime(startTime);
    entity.setRole(role);
    return entity;
  }

  @Test
  public void testWithNullCommandDetailAtCommand() throws AmbariException {
    mockActiveService();
    // One command with a null detail must not blow up the check with an NPE.
    HostRoleCommandEntity nullDetailCommand =
        serviceCheckCommand(null, SERVICE_CHECK_START_TIME, Role.ZOOKEEPER_SERVER);
    HostRoleCommandEntity hdfsCheckCommand =
        serviceCheckCommand(COMMAND_DETAIL, SERVICE_CHECK_START_TIME, Role.HDFS_SERVICE_CHECK);
    when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(hdfsConfig());
    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(asList(nullDetailCommand, hdfsCheckCommand));
    PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
    try {
      serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
    } catch (NullPointerException ex){
      Assert.fail("serviceCheckValidityCheck failed due to null at start_time were not handled");
    }
    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
  }

  @Test
  public void testFailWhenServiceWithOutdatedServiceCheckExists() throws AmbariException {
    mockActiveService();
    HostRoleCommandEntity outdatedCheck =
        serviceCheckCommand(COMMAND_DETAIL, SERVICE_CHECK_START_TIME, Role.HDFS_SERVICE_CHECK);
    when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(hdfsConfig());
    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(singletonList(outdatedCheck));
    PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
    serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
  }

  @Test
  public void testFailWhenServiceWithNoServiceCheckExists() throws AmbariException {
    mockActiveService();
    when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(hdfsConfig());
    // No SERVICE_CHECK commands at all -> the check must fail.
    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(Collections.emptyList());
    PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
    serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
  }

  @Test
  public void testFailWhenServiceWithOutdatedServiceCheckExistsRepeated() throws AmbariException {
    mockActiveService();
    // Two checks for the same role; even the newer one predates the config change.
    HostRoleCommandEntity olderCheck =
        serviceCheckCommand(COMMAND_DETAIL, SERVICE_CHECK_START_TIME, Role.HDFS_SERVICE_CHECK);
    HostRoleCommandEntity newerButStillOutdatedCheck =
        serviceCheckCommand(COMMAND_DETAIL, CONFIG_CREATE_TIMESTAMP - 1L, Role.HDFS_SERVICE_CHECK);
    when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(hdfsConfig());
    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(asList(olderCheck, newerButStillOutdatedCheck));
    PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
    serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.manager.oracle.util;
import java.io.InputStream;
import java.net.URL;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.sqoop.manager.oracle.OraOopTestCase;
/**
* Class to load an Oracle table with data based on configuration file.
*/
/**
 * Class to load an Oracle table with data based on configuration file.
 * <p>
 * Loads PL/SQL generator package templates from the classpath, substitutes
 * table/column placeholders, installs the package, and invokes it to populate
 * the table. All JDBC statements, result sets, and resource streams are closed
 * via try-with-resources (the original leaked them).
 */
public final class OracleData {
  private OracleData() {
  }
  enum KeyType {
    PRIMARY, UNIQUE
  }
  private static ClassLoader classLoader;
  static {
    classLoader = Thread.currentThread().getContextClassLoader();
    if (classLoader == null) {
      classLoader = OraOopTestCase.class.getClassLoader();
    }
  }

  /** Reads a classpath resource into a String, closing the underlying stream. */
  private static String loadResource(String path) throws Exception {
    try (InputStream in = classLoader.getResource(path).openStream()) {
      return IOUtils.toString(in);
    }
  }

  /** Executes a single SQL/PLSQL statement, closing the statement afterwards. */
  private static void executeSql(Connection conn, String sql) throws Exception {
    try (PreparedStatement stmt = conn.prepareStatement(sql)) {
      stmt.execute();
    }
  }

  /** Builds the comma-and-newline separated "name type" column list for DDL substitution. */
  private static String getColumnList(OracleTableDefinition tableDefinition) {
    StringBuilder result = new StringBuilder();
    String delim = "";
    for (OracleDataDefinition column : tableDefinition.getColumnList()) {
      result.append(delim).append(column.getColumnName()).append(" ").append(
          column.getDataType());
      delim = ",\n";
    }
    return result.toString();
  }

  /** Builds the "l_ret_rec.<col> := <expr>;" assignment lines for the generator package body. */
  private static String
      getDataExpression(List<OracleDataDefinition> columnList) {
    StringBuilder result = new StringBuilder();
    for (OracleDataDefinition column : columnList) {
      result.append("l_ret_rec.").append(column.getColumnName()).append(" := ")
          .append(column.getDataExpression()).append(";\n");
    }
    return result.toString();
  }

  /**
   * Installs the generator package specification, substituting the column list
   * and table name into the .psk template.
   */
  private static void createPackageSpec(Connection conn,
      OracleTableDefinition tableDefinition) throws Exception {
    String pkgSql = loadResource("oraoop/pkg_tst_product_gen.psk");
    // NOTE(review): replaceAll treats '$'/'\' in replacement strings specially;
    // assumes table names and column DDL contain neither.
    pkgSql =
        pkgSql.replaceAll("\\$COLUMN_LIST", getColumnList(tableDefinition));
    pkgSql = pkgSql.replaceAll("\\$TABLE_NAME", tableDefinition.getTableName());
    executeSql(conn, pkgSql);
  }

  /**
   * Installs the generator package body, substituting columns, table name, data
   * expressions, organization clause (IOT or heap), and partition clause into
   * the .pbk template.
   */
  private static void createPackageBody(Connection conn,
      OracleTableDefinition tableDefinition) throws Exception {
    String pkgSql = loadResource("oraoop/pkg_tst_product_gen.pbk");
    String columnList = getColumnList(tableDefinition);
    if (tableDefinition.isIndexOrganizedTable()) {
      // Index-organized tables must declare their primary key inline.
      columnList += "\n," + getKeyString(KeyType.PRIMARY, tableDefinition);
    }
    pkgSql =
        pkgSql.replaceAll("\\$COLUMN_LIST", columnList);
    pkgSql = pkgSql.replaceAll("\\$TABLE_NAME", tableDefinition.getTableName());
    pkgSql =
        pkgSql.replaceAll("\\$DATA_EXPRESSION_LIST",
            getDataExpression(tableDefinition.getColumnList()));
    pkgSql =
        pkgSql.replaceAll("\\$TABLE_ORGANIZATION_CLAUSE",
            tableDefinition.isIndexOrganizedTable()
                ? "ORGANIZATION INDEX OVERFLOW NOLOGGING" : "");
    pkgSql =
        pkgSql.replaceAll("\\$PARTITION_CLAUSE", tableDefinition
            .getPartitionClause());
    executeSql(conn, pkgSql);
  }

  /**
   * Returns the comma-separated key column list for the requested key type,
   * or null if the definition declares no such key.
   */
  private static String getKeyColumns(KeyType keyType,
      OracleTableDefinition tableDefinition) {
    String result = null;
    List<String> columns = null;
    switch (keyType) {
      case PRIMARY:
        columns = tableDefinition.getPrimaryKeyColumns();
        break;
      case UNIQUE:
        columns = tableDefinition.getUniqueKeyColumns();
        break;
      default:
        throw new RuntimeException("Missing key type.");
    }
    if (columns != null && columns.size() > 0) {
      StringBuilder keyColumnList = new StringBuilder();
      String delim = "";
      for (String column : columns) {
        keyColumnList.append(delim).append(column);
        delim = ",";
      }
      result = keyColumnList.toString();
    }
    return result;
  }

  /**
   * Builds the "constraint ... primary key/unique (...)" clause for the given
   * key type, or null when the table declares no such key.
   */
  private static String getKeyString(KeyType keyType,
      OracleTableDefinition tableDefinition) {
    String keySql = null;
    String keyColumnList = getKeyColumns(keyType, tableDefinition);
    if (keyColumnList != null) {
      keySql = "constraint \"$TABLE_NAME_"
          + ((keyType == KeyType.PRIMARY) ? "PK\" primary key"
              : "UK\" unique") + "($PK_COLUMN_LIST) ";
      // Substitute columns first: $PK_COLUMN_LIST shares no prefix with $TABLE_NAME.
      keySql = keySql.replaceAll("\\$PK_COLUMN_LIST", keyColumnList);
      keySql =
          keySql.replaceAll("\\$TABLE_NAME", tableDefinition.getTableName());
    }
    return keySql;
  }

  /**
   * Adds the primary/unique key constraint (backed by a parallel, nologging
   * unique index) to the table, if the definition declares one.
   */
  private static void createKey(Connection conn, KeyType keyType,
      OracleTableDefinition tableDefinition) throws Exception {
    String keySql = getKeyString(keyType, tableDefinition);
    String keyColumnList = getKeyColumns(keyType, tableDefinition);
    if (keySql != null) {
      keySql = "alter table \"$TABLE_NAME\" add " + keySql
          + " using index (create unique index \"$TABLE_NAME_"
          + ((keyType == KeyType.PRIMARY) ? "PK\"" : "UK\"")
          + " on \"$TABLE_NAME\"($PK_COLUMN_LIST) " + "parallel nologging)";
      keySql = keySql.replaceAll("\\$PK_COLUMN_LIST", keyColumnList);
      keySql =
          keySql.replaceAll("\\$TABLE_NAME", tableDefinition.getTableName());
      executeSql(conn, keySql);
    }
  }

  /**
   * Returns the database's CPU count (v$parameter "cpu_count"), used by callers
   * to size the generator's parallel degree.
   */
  public static int getParallelProcesses(Connection conn) throws Exception {
    try (PreparedStatement stmt =
        conn.prepareStatement("SELECT cc.value value"
            + "\n"
            + "FROM"
            + "\n"
            + "  (SELECT to_number(value) value"
            + "\n"
            + "  FROM v$parameter"
            + "\n"
            + "  WHERE name='parallel_max_servers'"
            + "\n"
            + "  ) pms,"
            + "\n"
            + "  (SELECT to_number(value) value"
            + "\n"
            + "  FROM v$parameter"
            + "\n"
            + "  WHERE name='parallel_threads_per_cpu'"
            + "\n"
            + "  ) ptpc,"
            + "\n"
            + "  (SELECT to_number(value) value FROM v$parameter "
            + "  WHERE name='cpu_count'"
            + "\n" + "  ) cc");
        ResultSet res = stmt.executeQuery()) {
      res.next();
      return res.getInt(1);
    }
  }

  /**
   * Creates and populates the table: installs the generator package
   * spec and body, runs prc_load_table with the given parallel degree and
   * rows-per-slave, then adds the primary key (unless index-organized, where it
   * is inline) and the unique key, if declared.
   */
  public static void createTable(Connection conn,
      OracleTableDefinition tableDefinition, int parallelDegree,
      int rowsPerSlave) throws Exception {
    createPackageSpec(conn, tableDefinition);
    createPackageBody(conn, tableDefinition);
    try (CallableStatement procStmt =
        conn.prepareCall("begin \"PKG_ODG_" + tableDefinition.getTableName()
            + "\".prc_load_table(?,?); end;")) {
      procStmt.setInt(1, parallelDegree);
      procStmt.setInt(2, rowsPerSlave);
      procStmt.execute();
    }
    if (!tableDefinition.isIndexOrganizedTable()) {
      createKey(conn, KeyType.PRIMARY, tableDefinition);
    }
    createKey(conn, KeyType.UNIQUE, tableDefinition);
  }

  /** Convenience overload: loads the table definition from a classpath config file. */
  public static void createTable(Connection conn, String fileName,
      int parallelDegree, int rowsPerSlave) throws Exception {
    URL file = classLoader.getResource("oraoop/" + fileName);
    OracleTableDefinition tableDefinition = new OracleTableDefinition(file);
    createTable(conn, tableDefinition, parallelDegree, rowsPerSlave);
  }
}
| |
package infinitystorage.tile;
import infinitystorage.apiimpl.storage.NBTStorage;
import infinitystorage.inventory.IItemValidator;
import infinitystorage.tile.data.ITileDataConsumer;
import infinitystorage.tile.data.ITileDataProducer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.datasync.DataSerializers;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.ItemHandlerHelper;
import infinitystorage.InfinityStorageItems;
import infinitystorage.apiimpl.storage.fluid.FluidStorageNBT;
import infinitystorage.apiimpl.storage.fluid.FluidUtils;
import infinitystorage.apiimpl.storage.item.ItemStorageNBT;
import infinitystorage.block.EnumFluidStorageType;
import infinitystorage.block.EnumItemStorageType;
import infinitystorage.inventory.ItemHandlerBasic;
import infinitystorage.inventory.ItemHandlerFluid;
import infinitystorage.inventory.ItemValidatorBasic;
import infinitystorage.tile.config.IComparable;
import infinitystorage.tile.config.IFilterable;
import infinitystorage.tile.config.IType;
import infinitystorage.tile.data.TileDataParameter;
public class TileDiskManipulator extends TileNode implements IComparable, IFilterable, IType {
public static final TileDataParameter<Integer> COMPARE = IComparable.createParameter();
public static final TileDataParameter<Integer> MODE = IFilterable.createParameter();
public static final TileDataParameter<Integer> TYPE = IType.createParameter();
public static final int INSERT = 0, EXTRACT = 1;
public static final TileDataParameter<Integer> IO_MODE = new TileDataParameter<>(DataSerializers.VARINT, INSERT, new ITileDataProducer<Integer, TileDiskManipulator>() {
@Override
public Integer getValue(TileDiskManipulator tile) {
return tile.ioMode;
}
}, new ITileDataConsumer<Integer, TileDiskManipulator>() {
@Override
public void setValue(TileDiskManipulator tile, Integer value) {
tile.ioMode = value;
tile.markDirty();
}
});
private static final String NBT_COMPARE = "Compare";
private static final String NBT_MODE = "Mode";
private static final String NBT_TYPE = "Type";
private int compare = 0;
private int mode = IFilterable.WHITELIST;
private int type = IType.ITEMS;
private int ioMode = INSERT;
private ItemStorage[] itemStorages;
private FluidStorage[] fluidStorages;
public TileDiskManipulator() {
dataManager.addWatchedParameter(COMPARE);
dataManager.addWatchedParameter(MODE);
dataManager.addWatchedParameter(TYPE);
dataManager.addWatchedParameter(IO_MODE);
itemStorages = new ItemStorage[6];
fluidStorages = new FluidStorage[6];
}
private ItemHandlerBasic disks = new ItemHandlerBasic(12, this, IItemValidator.STORAGE_DISK) {
@Override
protected void onContentsChanged(int slot){
super.onContentsChanged(slot);
if(FMLCommonHandler.instance().getEffectiveSide() == Side.SERVER && slot < 6){
NBTStorage.constructFromDrive(getStackInSlot(slot), slot, itemStorages, fluidStorages, s -> new ItemStorage(s), s -> new FluidStorage(s));
}
}
@Override
public ItemStack extractItem(int slot, int amount, boolean simulate){
if (slot < 6){
if(itemStorages[slot] != null){
itemStorages[slot].writeToNBT();
}
if(fluidStorages[slot] != null){
fluidStorages[slot].writeToNBT();
}
}
return super.extractItem(slot, amount, simulate);
}
};
public class ItemStorage extends ItemStorageNBT {
public ItemStorage(ItemStack disk) {
super(disk.getTagCompound(), EnumItemStorageType.getById(disk.getItemDamage()).getCapacity(), TileDiskManipulator.this);
}
@Override
public int getPriority() {
return 0;
}
@Override
public ItemStack insertItem(ItemStack stack, int size, boolean simulate) {
if (!IFilterable.canTake(itemFilters, mode, getCompare(), stack)) {
return ItemHandlerHelper.copyStackWithSize(stack, size);
}
return super.insertItem(stack, size, simulate);
}
@Override
public ItemStack extractItem(ItemStack stack, int size, int flags){
if(!IFilterable.canTake(itemFilters, mode, getCompare(), stack)){
return null;
}
return super.extractItem(stack, size, flags);
}
}
public class FluidStorage extends FluidStorageNBT {
public FluidStorage(ItemStack disk) {
super(disk.getTagCompound(), EnumFluidStorageType.getById(disk.getItemDamage()).getCapacity(), TileDiskManipulator.this);
}
@Override
public int getPriority() {
return 0;
}
@Override
public FluidStack insertFluid(FluidStack stack, int size, boolean simulate) {
if (!IFilterable.canTakeFluids(fluidFilters, mode, getCompare(), stack)) {
return FluidUtils.copyStackWithSize(stack, size);
}
return super.insertFluid(stack, size, simulate);
}
@Override
public FluidStack extractFluid(FluidStack stack, int size, int flags){
if(IFilterable.canTakeFluids(fluidFilters, mode, getCompare(), stack)){
return null;
}
return super.extractFluid(stack, size, flags);
}
}
private ItemHandlerBasic itemFilters = new ItemHandlerBasic(9, this);
private ItemHandlerFluid fluidFilters = new ItemHandlerFluid(9, this);
@Override
public int getEnergyUsage() {
return 0;
}
@Override
public void updateNode() {
int slot = 0;
if (type == IType.ITEMS) {
while (slot < itemStorages.length && itemStorages[slot] == null) slot++;
if (slot == itemStorages.length) return;
ItemStorage storage = itemStorages[slot];
if (ioMode == INSERT) {
insertIntoNetwork(storage, slot);
} else if (ioMode == EXTRACT) {
extractFromNetwork(storage, slot);
}
} else if (type == IType.FLUIDS) {
while (slot < fluidStorages.length && fluidStorages[slot] == null) slot++;
if (slot == fluidStorages.length) return;
FluidStorage storage = fluidStorages[slot];
if (ioMode == INSERT) {
insertIntoNetwork(storage, slot);
} else if (ioMode == EXTRACT) {
extractFromNetwork(storage, slot);
}
}
}
private void insertIntoNetwork(ItemStorage storage, int slot) {
if (storage.getStored() == 0) {
moveDriveToOutput(slot);
return;
}
ItemStack extracted = null;
int ii = 0;
do {
ItemStack stack = null;
while (storage.getItems().size() > ii && stack == null) {
stack = storage.getItems().get(ii++);
}
if (stack != null){
extracted = storage.extractItem(stack, 1, compare);
}
} while (storage.getItems().size() > ii && extracted == null);
if (extracted == null) {
moveDriveToOutput(slot);
return;
}
ItemStack leftOver = network.insertItem(extracted, extracted.stackSize, false);
if (leftOver != null){
storage.insertItem(leftOver, leftOver.stackSize, false);
}
}
private void extractFromNetwork(ItemStorage storage, int slot) {
}
private void insertIntoNetwork(FluidStorage storage, int slot) {
if (storage.getStored() == 0) {
moveDriveToOutput(slot);
return;
}
FluidStack extracted = null;
int ii = 0;
do {
FluidStack stack = storage.getStacks().get(ii);
while (stack == null && storage.getStacks().size() > ii){
ii++;
}
if (stack != null){
extracted = storage.extractFluid(stack, 1, compare);
}
} while (extracted == null && storage.getStacks().size() > ii);
if (extracted == null) {
moveDriveToOutput(slot);
return;
}
FluidStack leftOver = network.insertFluid(extracted, extracted.amount, false);
if (leftOver != null){
storage.insertFluid(leftOver, leftOver.amount, false);
}
}
private void extractFromNetwork(FluidStorage storage, int slot) {
}
private void moveDriveToOutput(int slot) {
ItemStack disk = disks.getStackInSlot(slot);
if (disk != null) {
int i = 6;
while (disks.getStackInSlot(i) != null && i < 12){
i++;
}
if (i == 12){
return;
}
if (slot < 6) {
if (itemStorages[slot] != null) {
itemStorages[slot].writeToNBT();
itemStorages[slot] = null;
}
if (fluidStorages[slot] != null) {
fluidStorages[slot].writeToNBT();
fluidStorages[slot] = null;
}
}
disks.extractItem(slot, 1, false);
disks.insertItem(i, disk, false);
}
}
@Override
public int getCompare() {
return compare;
}
@Override
public void setCompare(int compare) {
this.compare = compare;
}
@Override
public int getType() {
return this.type;
}
@Override
public void setType(int type) {
this.type = type;
}
@Override
public IItemHandler getFilterInventory() {
return getType() == IType.ITEMS ? itemFilters : fluidFilters;
}
@Override
public void setMode(int mode) {
this.mode = mode;
}
@Override
public int getMode() {
return this.mode;
}
public IItemHandler getDisks() {
return disks;
}
// Restores this tile's state from NBT: disk and filter inventories, then the
// optional compare/mode/type settings. Keys and ordering must mirror write().
@Override
public void read(NBTTagCompound tag) {
super.read(tag);
// Inventory indices 0/1/2 match the writeItems calls in write().
readItems(disks, 0, tag);
readItems(itemFilters, 1, tag);
readItems(fluidFilters, 2, tag);
// Settings are optional in the tag; keep current values when absent.
if (tag.hasKey(NBT_COMPARE)) {
compare = tag.getInteger(NBT_COMPARE);
}
if (tag.hasKey(NBT_MODE)) {
mode = tag.getInteger(NBT_MODE);
}
if (tag.hasKey(NBT_TYPE)) {
type = tag.getInteger(NBT_TYPE);
}
}
// Serializes this tile's state to NBT: the three inventories (indices must
// match readItems in read()) plus the compare/mode/type settings.
@Override
public NBTTagCompound write(NBTTagCompound tag) {
super.write(tag);
writeItems(disks, 0, tag);
writeItems(itemFilters, 1, tag);
writeItems(fluidFilters, 2, tag);
tag.setInteger(NBT_COMPARE, compare);
tag.setInteger(NBT_MODE, mode);
tag.setInteger(NBT_TYPE, type);
return tag;
}
// Items dropped when the block is broken: the disk inventory itself.
@Override
public IItemHandler getDrops() {
    return this.disks;
}
// Exposes the disk inventory through the Forge item-handler capability;
// anything else is delegated to the superclass.
@Override
public <T> T getCapability(Capability<T> capability, EnumFacing facing) {
if (capability == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY) {
// Unchecked cast is safe only because T is the item-handler type here.
return (T) disks;
}
return super.getCapability(capability, facing);
}
// Advertises the item-handler capability (backed by the disk inventory in
// getCapability); defers everything else to the superclass.
@Override
public boolean hasCapability(Capability<?> capability, EnumFacing facing) {
    if (capability == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY) {
        return true;
    }
    return super.hasCapability(capability, facing);
}
// Flushes every live disk storage to NBT before the block goes away, so no
// stored contents are lost on break.
public void onBreak() {
    for (ItemStorage itemStorage : itemStorages) {
        if (itemStorage != null) {
            itemStorage.writeToNBT();
        }
    }
    for (FluidStorage fluidStorage : fluidStorages) {
        if (fluidStorage != null) {
            fluidStorage.writeToNBT();
        }
    }
}
}
| |
package com.asksunny.rpc.client;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.util.concurrent.atomic.AtomicLong;
import javax.net.SocketFactory;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.asksunny.cli.utils.CLICommand;
import com.asksunny.cli.utils.CLIOptionAnnotationBasedBinder;
import com.asksunny.cli.utils.annotation.CLIOptionBinding;
import com.asksunny.protocol.rpc.CLIRPCObjectFormatter;
import com.asksunny.protocol.rpc.ProtocolDecodeHandler;
import com.asksunny.protocol.rpc.RPCEnvelope;
import com.asksunny.protocol.rpc.RPCJavaEnvelope;
import com.asksunny.protocol.rpc.RPCObject;
import com.asksunny.protocol.rpc.RPCShellEnvelope;
import com.asksunny.protocol.rpc.RPCStreamEnvelope;
import com.asksunny.protocol.rpc.StreamProtocolDecoder;
import com.asksunny.protocol.rpc.StreamProtocolEncoder;
/**
 * One-shot batch RPC client: parses command-line options, connects to an RPC
 * server (plain TCP or SSL), converts the remaining arguments into an
 * {@link RPCEnvelope}, sends it, and prints the formatted response.
 */
public class RPCBatchClient {

    final static String SHELL = "shell";
    final static String ADMIN = "admin";
    final static String JAVA = "java";
    final static String STREAM = "stream";
    final static String TOREMOTE = "toRemote";
    final static String FROMREMOTE = "fromRemote";
    final static String HELP1 = "help";
    final static String HELP2 = "?";

    final static Logger log = LoggerFactory.getLogger(RPCBatchClient.class);

    @CLIOptionBinding(shortOption = 'S', longOption = "server-address", hasValue = true, description = "Server hostname or IP")
    String remoteHost;

    @CLIOptionBinding(shortOption = 'p', longOption = "remote-port", hasValue = true, description = "Server TCP port, no default value this is requested")
    int remotePort;

    @CLIOptionBinding(shortOption = 's', longOption = "ssl", hasValue = false, description = "Enable SSL or not, default no SSL. use Java keytool to genenerate self signed certificate \"keytool -genkey -keystore MtRpcServerKeystore -keyalg RSA\"")
    boolean ssl = false;

    @CLIOptionBinding(shortOption = 'h', longOption = "help", hasValue = false, description = "Print this menu")
    boolean showHelp;

    Socket clientSocket;
    InputStream sin = null;
    OutputStream sout = null;

    // 0 = never connected, 1 = connected; CAS makes connect() idempotent.
    // NOTE(review): shutdown() does not reset this, so a client instance cannot
    // reconnect after shutdown — presumably intentional for a one-shot batch
    // client; confirm before reusing instances.
    AtomicLong state = new AtomicLong(0L);

    StreamProtocolDecoder decoder = new StreamProtocolDecoder();
    StreamProtocolEncoder encoder = new StreamProtocolEncoder();

    public RPCBatchClient() {
        super();
    }

    /**
     * Opens the connection to the remote host on first call; subsequent calls
     * are no-ops.
     *
     * @throws IOException if the socket cannot be opened
     */
    public void connect() throws IOException {
        if (state.compareAndSet(0, 1)) {
            this.clientSocket = connectToRemoteHost();
            sin = this.clientSocket.getInputStream();
            sout = this.clientSocket.getOutputStream();
        }
    }

    /**
     * Sends a request envelope and waits synchronously for the response.
     *
     * @param req request to encode onto the wire
     * @return the decoded response, or {@code null} on any failure (the
     *         connection is torn down on I/O errors)
     * @throws IOException declared for API compatibility; I/O failures are
     *         currently handled by shutting down and returning {@code null}
     */
    public RPCEnvelope sendRequest(RPCEnvelope req) throws IOException {
        RPCEnvelope env = null;
        try {
            encoder.encode(sout, req);
            env = decoder.decodeNow(sin);
            return env;
        } catch (IOException iex) {
            // Connection is unusable after an I/O failure; release it.
            shutdown();
        } catch (Exception ex) {
            log.warn("Unexpected err", ex);
            env = null;
        }
        return env;
    }

    /**
     * Sends a request envelope and hands the decoded response to a callback.
     *
     * @param req     request to encode onto the wire
     * @param handler callback invoked with the decoded response
     * @throws IOException declared for API compatibility; see {@link #sendRequest(RPCEnvelope)}
     */
    public void sendRequest(RPCEnvelope req, ProtocolDecodeHandler handler)
            throws IOException {
        try {
            encoder.encode(sout, req);
            RPCEnvelope env = decoder.decodeNow(sin);
            handler.onReceive(env);
        } catch (IOException iex) {
            shutdown();
        } catch (Exception ex) {
            log.warn("Unexpected err", ex);
        }
    }

    /**
     * Best-effort teardown of the streams and socket; safe to call repeatedly.
     */
    public void shutdown() {
        closeQuietly(sin);
        sin = null;
        closeQuietly(sout);
        sout = null;
        closeQuietly(clientSocket);
        clientSocket = null;
    }

    /** Closes a resource, swallowing any error (teardown is best-effort). */
    private static void closeQuietly(java.io.Closeable resource) {
        if (resource != null) {
            try {
                resource.close();
            } catch (Exception ignored) {
                // nothing useful to do during shutdown
            }
        }
    }

    /**
     * Opens a socket to {@link #remoteHost}:{@link #remotePort}, over SSL when
     * {@link #ssl} is set, with a 30s read timeout and keep-alive enabled.
     *
     * @return the connected socket
     * @throws IOException if the connection cannot be established
     */
    protected Socket connectToRemoteHost() throws IOException {
        Socket client;
        if (ssl) {
            SSLSocketFactory sslsocketfactory = (SSLSocketFactory) SSLSocketFactory
                    .getDefault();
            client = sslsocketfactory.createSocket(this.remoteHost, this.remotePort);
        } else {
            // BUG FIX: the original cast the plain factory's socket to SSLSocket,
            // which threw ClassCastException on every non-SSL connection;
            // SocketFactory.getDefault() produces plain java.net.Socket instances.
            SocketFactory socketfactory = SocketFactory.getDefault();
            client = socketfactory.createSocket(this.remoteHost, this.remotePort);
        }
        client.setSoTimeout(30 * 1000);
        client.setKeepAlive(true);
        return client;
    }

    public boolean isSsl() {
        return ssl;
    }

    public void setSsl(boolean ssl) {
        this.ssl = ssl;
    }

    public String getRemoteHost() {
        return remoteHost;
    }

    public int getRemotePort() {
        return remotePort;
    }

    /**
     * Executes one remote command: builds the envelope from {@code args},
     * sends it, and prints each response object (or a blank line when there is
     * no response payload).
     */
    public void execute(String[] args) throws Exception {
        CLICommand remoteCommand = new CLICommand();
        remoteCommand.setCmdArray(args);
        RPCEnvelope request = createEnvelope(remoteCommand);
        RPCEnvelope response = sendRequest(request);
        if (response != null && response.getRpcObjects() != null) {
            CLIRPCObjectFormatter formatter = new CLIRPCObjectFormatter();
            for (RPCObject rpcObject : response.getRpcObjects()) {
                System.out.println(formatter.format(rpcObject));
            }
        } else {
            System.out.println();
        }
    }

    /**
     * Translates a parsed CLI command into the matching RPC envelope type
     * (java / stream / admin / shell); unknown commands fall through to a
     * shell envelope. May return {@code null} when only help was printed.
     */
    protected RPCEnvelope createEnvelope(CLICommand remoteCommand)
            throws IOException {
        RPCEnvelope envelope = null;
        String cmd = remoteCommand.peek();
        if (cmd.equalsIgnoreCase(JAVA)) {
            remoteCommand.shift();
            String ncmd = remoteCommand.peek();
            if (ncmd.equalsIgnoreCase(HELP1) || ncmd.equalsIgnoreCase(HELP2)) {
                printHelp(RPCEnvelope.RPC_ENVELOPE_TYPE_JAVA);
            } else {
                envelope = RPCJavaEnvelope.createJavaEnvelope(remoteCommand);
            }
        } else if (cmd.equalsIgnoreCase(STREAM)) {
            remoteCommand.shift();
            String action = remoteCommand.shift();
            if (action.equalsIgnoreCase(HELP1)
                    || action.equalsIgnoreCase(HELP2)) {
                printHelp(RPCEnvelope.RPC_ENVELOPE_TYPE_STREAM);
            } else if (action.equalsIgnoreCase(TOREMOTE)) {
                // Upload: stream a local file to the remote path.
                RPCStreamEnvelope env = new RPCStreamEnvelope();
                env.setRpcType(RPCEnvelope.RPC_TYPE_RESPONSE);
                String source = remoteCommand.shift();
                String destination = remoteCommand.shift();
                if (source != null && destination != null) {
                    File sf = new File(source);
                    long length = sf.length();
                    env.setSource(source);
                    env.setDestination(destination);
                    env.setStream(new FileInputStream(sf));
                    env.setLength(length);
                    envelope = env;
                } else {
                    System.err
                            .println("Please provide source and destination path");
                    printHelp(RPCEnvelope.RPC_ENVELOPE_TYPE_STREAM);
                }
            } else if (action.equalsIgnoreCase(FROMREMOTE)) {
                // Download: ask the remote side to send a file (length unknown).
                RPCStreamEnvelope env = new RPCStreamEnvelope();
                env.setRpcType(RPCEnvelope.RPC_TYPE_REQUEST);
                String source = remoteCommand.shift();
                String destination = remoteCommand.shift();
                if (source != null && destination != null) {
                    env.setSource(source);
                    env.setDestination(destination);
                    env.setLength(-1);
                    envelope = env;
                } else {
                    System.err
                            .println("Please provide source and destination path");
                    printHelp(RPCEnvelope.RPC_ENVELOPE_TYPE_STREAM);
                }
            } else {
                System.err.printf("Invalid stream command:%s\n", action);
            }
        } else if (cmd.equalsIgnoreCase(ADMIN)) {
            remoteCommand.shift();
            String action = remoteCommand.shift();
            if (action.equalsIgnoreCase(HELP1)
                    || action.equalsIgnoreCase(HELP2)) {
                printHelp(RPCEnvelope.RPC_ENVELOPE_TYPE_ADMIN);
            }
        } else if (cmd.equalsIgnoreCase(SHELL)) {
            remoteCommand.shift();
            String ncmd = remoteCommand.peek();
            if (ncmd.equalsIgnoreCase(HELP1) || ncmd.equalsIgnoreCase(HELP2)) {
                printHelp(RPCEnvelope.RPC_ENVELOPE_TYPE_SHELL);
            }
            envelope = RPCShellEnvelope.createShellCommand(remoteCommand);
        } else {
            // Bare commands default to remote shell execution.
            envelope = RPCShellEnvelope.createShellCommand(remoteCommand);
        }
        return envelope;
    }

    /** Prints usage text for the given envelope type to stderr. */
    protected void printHelp(int envelopeType) {
        switch (envelopeType) {
        case RPCEnvelope.RPC_ENVELOPE_TYPE_ADMIN:
            System.err
                    .println("Available command: admin ping|echo|heartbeat|shutdown");
            break;
        case RPCEnvelope.RPC_ENVELOPE_TYPE_STREAM:
            System.err
                    .println("Available command: stream toRemote|fromRemote");
            break;
        case RPCEnvelope.RPC_ENVELOPE_TYPE_SHELL:
            System.err
                    .println("Available command: shell shell_command|shell_script");
            break;
        case RPCEnvelope.RPC_ENVELOPE_TYPE_JAVA:
            System.err
                    .println("Available command: [java path_jar_file] class_name parameter_list_form_class");
            break;
        default:
            System.err
                    .println("Available command: java, shell, stream, admin. Type [command] help for detail of each command.");
            break;
        }
    }

    /**
     * CLI entry point: binds annotated options, prints usage when the port is
     * missing or help was requested, then connects, runs the remaining
     * arguments as one remote command, and always shuts the connection down.
     */
    public static void main(String[] args) throws Exception {
        RPCBatchClient client = new RPCBatchClient();
        Options opts = CLIOptionAnnotationBasedBinder.getOptions(client);
        CommandLine cmdline = CLIOptionAnnotationBasedBinder.bindPosix(opts,
                args, client);
        if (client.remotePort < 1 || client.showHelp) {
            HelpFormatter f = new HelpFormatter();
            f.printHelp(
                    "RPCBatchClient -S rpc_server_name -p port_number [options] remote_command_list",
                    opts);
            System.exit(1);
        }
        String[] rargs = cmdline.getArgs();
        try {
            client.connect();
            client.execute(rargs);
        } finally {
            client.shutdown();
        }
    }
}
| |
/**
* Copyright 2009 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.net.BindException;
import java.net.Socket;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper;
import org.apache.zookeeper.server.NIOServerCnxn;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnLog;
/**
 * TODO: Most of the code in this class is ripped from ZooKeeper tests. Instead
 * of redoing it, we should contribute updates to their code which let us more
 * easily access testing helper objects.
 */
public class MiniZooKeeperCluster {
  private static final Log LOG = LogFactory.getLog(MiniZooKeeperCluster.class);

  // TODO: make this more configurable?
  private static final int TICK_TIME = 2000;
  private static final int CONNECTION_TIMEOUT = 30000;

  /** Whether the standalone server is currently up. */
  private boolean started;

  /** First port to try; incremented on BindException until one is free. */
  private int clientPort = 21810; // use non-standard port

  private NIOServerCnxn.Factory standaloneServerFactory;

  /** Create mini ZooKeeper cluster. */
  public MiniZooKeeperCluster() {
    this.started = false;
  }

  // / XXX: From o.a.zk.t.ClientBase
  private static void setupTestEnv() {
    // during the tests we run with 100K prealloc in the logs.
    // on windows systems prealloc of 64M was seen to take ~15seconds
    // resulting in test failure (client timeout on first session).
    // set env and directly in order to handle static init/gc issues
    System.setProperty("zookeeper.preAllocSize", "100");
    FileTxnLog.setPreallocSize(100);
  }

  /**
   * Starts a standalone ZooKeeper server, retrying on successive ports until
   * one binds, and waits for it to answer "stat".
   *
   * @param baseDir directory under which the "zookeeper" data dir is created
   * @return ClientPort server bound to.
   * @throws IOException if the data dir cannot be created or the server does
   *           not come up within the connection timeout
   * @throws InterruptedException declared for API compatibility
   */
  public int startup(File baseDir) throws IOException,
      InterruptedException {
    setupTestEnv();

    // Tear down any previous instance before starting fresh.
    shutdown();

    File dir = new File(baseDir, "zookeeper").getAbsoluteFile();
    recreateDir(dir);

    // Same dir serves as both data and log directory.
    ZooKeeperServer server = new ZooKeeperServer(dir, dir, TICK_TIME);
    while (true) {
      try {
        standaloneServerFactory = new NIOServerCnxn.Factory(clientPort);
      } catch (BindException e) {
        LOG.info("Failed binding ZK Server to client port: " + clientPort);
        // this port is already in use. try to use another
        clientPort++;
        continue;
      }
      break;
    }
    standaloneServerFactory.startup(server);

    if (!waitForServerUp(clientPort, CONNECTION_TIMEOUT)) {
      throw new IOException("Waiting for startup of standalone server");
    }
    started = true;
    return clientPort;
  }

  /**
   * Deletes the directory if present and recreates it empty.
   *
   * @throws IOException if the directory cannot be (re)created
   */
  private void recreateDir(File dir) throws IOException {
    if (dir.exists()) {
      FileUtil.fullyDelete(dir);
    }
    try {
      // BUG FIX: File.mkdirs() reports failure via its boolean return value,
      // not an exception; the original ignored it and failed later with a
      // confusing error. isDirectory() guards the race where another thread
      // created it first.
      if (!dir.mkdirs() && !dir.isDirectory()) {
        throw new IOException("creating dir: " + dir);
      }
    } catch (SecurityException e) {
      throw new IOException("creating dir: " + dir, e);
    }
  }

  /**
   * Stops the standalone server (no-op when not started) and waits for its
   * port to stop answering.
   *
   * @throws IOException if the server does not go down within the timeout
   */
  public void shutdown() throws IOException {
    if (!started) {
      return;
    }

    standaloneServerFactory.shutdown();
    if (!waitForServerDown(clientPort, CONNECTION_TIMEOUT)) {
      throw new IOException("Waiting for shutdown of standalone server");
    }
    started = false;
  }

  // XXX: From o.a.zk.t.ClientBase
  // Polls the port with the "stat" four-letter command; the server is
  // considered down once the connection attempt itself fails.
  private static boolean waitForServerDown(int port, long timeout) {
    long start = System.currentTimeMillis();
    while (true) {
      try {
        Socket sock = new Socket("localhost", port);
        try {
          OutputStream outstream = sock.getOutputStream();
          outstream.write("stat".getBytes());
          outstream.flush();
        } finally {
          sock.close();
        }
      } catch (IOException e) {
        // Connection refused: the server is gone.
        return true;
      }

      if (System.currentTimeMillis() > start + timeout) {
        break;
      }
      try {
        Thread.sleep(250);
      } catch (InterruptedException e) {
        // ignore
      }
    }
    return false;
  }

  // XXX: From o.a.zk.t.ClientBase
  // Polls the port with "stat" until the server responds with its version
  // banner, or the timeout elapses.
  private static boolean waitForServerUp(int port, long timeout) {
    long start = System.currentTimeMillis();
    while (true) {
      try {
        Socket sock = new Socket("localhost", port);
        BufferedReader reader = null;
        try {
          OutputStream outstream = sock.getOutputStream();
          outstream.write("stat".getBytes());
          outstream.flush();

          Reader isr = new InputStreamReader(sock.getInputStream());
          reader = new BufferedReader(isr);
          String line = reader.readLine();
          if (line != null && line.startsWith("Zookeeper version:")) {
            return true;
          }
        } finally {
          sock.close();
          if (reader != null) {
            reader.close();
          }
        }
      } catch (IOException e) {
        // ignore as this is expected
        LOG.info("server localhost:" + port + " not up " + e);
      }

      if (System.currentTimeMillis() > start + timeout) {
        break;
      }
      try {
        Thread.sleep(250);
      } catch (InterruptedException e) {
        // ignore
      }
    }
    return false;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.server.util;
import java.util.HashSet;
import java.util.Map.Entry;
import junit.framework.TestCase;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.KeyExtent;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.io.Text;
// Tests for MetadataTableUtil.initializeClone/checkClone: each test writes
// tablet metadata for a source table "0" into a MockInstance-backed metadata
// table, starts a clone "1", mutates the source mid-clone (splits, file
// replacements, merges), and asserts how many fix-up passes checkClone needs
// (return code) and which files the clone ends up referencing.
public class CloneTest extends TestCase {

// Clone of a tablet with no data files: checkClone converges immediately.
public void testNoFiles() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
KeyExtent ke = new KeyExtent(new Text("0"), null, null);
Mutation mut = ke.getPrevRowUpdateMutation();
TabletsSection.ServerColumnFamily.TIME_COLUMN.put(mut, new Value("M0".getBytes()));
TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN.put(mut, new Value("/default_tablet".getBytes()));
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(mut);
bw1.close();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
int rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(0, rc);
// scan tables metadata entries and confirm the same
}

// Source file is swapped (0_0.rf -> 1_0.rf) after the clone starts: the
// first checkClone pass detects the change (rc 1), the second converges,
// and the clone references only the new file.
public void testFilesChange() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
KeyExtent ke = new KeyExtent(new Text("0"), null, null);
Mutation mut = ke.getPrevRowUpdateMutation();
TabletsSection.ServerColumnFamily.TIME_COLUMN.put(mut, new Value("M0".getBytes()));
TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN.put(mut, new Value("/default_tablet".getBytes()));
mut.put(DataFileColumnFamily.NAME.toString(), "/default_tablet/0_0.rf", "1,200");
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(mut);
bw1.flush();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
// Simulate a compaction replacing the original file.
Mutation mut2 = new Mutation(ke.getMetadataEntry());
mut2.putDelete(DataFileColumnFamily.NAME.toString(), "/default_tablet/0_0.rf");
mut2.put(DataFileColumnFamily.NAME.toString(), "/default_tablet/1_0.rf", "2,300");
bw1.addMutation(mut2);
bw1.flush();
int rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(1, rc);
rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(0, rc);
// Verify the clone's data-file entries.
Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
scanner.setRange(new KeyExtent(new Text("1"), null, null).toMetadataRange());
HashSet<String> files = new HashSet<String>();
for (Entry<Key,Value> entry : scanner) {
if (entry.getKey().getColumnFamily().equals(DataFileColumnFamily.NAME))
files.add(entry.getKey().getColumnQualifier().toString());
}
assertEquals(1, files.size());
assertTrue(files.contains("../0/default_tablet/1_0.rf"));
}

// test split where files of children are the same
public void testSplit1() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(createTablet("0", null, null, "/default_tablet", "/default_tablet/0_0.rf"));
bw1.flush();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
// Split the source tablet at "m"; both children keep the same file, so
// the clone needs no extra pass (rc 0) and still sees exactly one file.
bw1.addMutation(createTablet("0", "m", null, "/default_tablet", "/default_tablet/0_0.rf"));
bw1.addMutation(createTablet("0", null, "m", "/t-1", "/default_tablet/0_0.rf"));
bw1.flush();
int rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(0, rc);
Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
scanner.setRange(new KeyExtent(new Text("1"), null, null).toMetadataRange());
HashSet<String> files = new HashSet<String>();
int count = 0;
for (Entry<Key,Value> entry : scanner) {
if (entry.getKey().getColumnFamily().equals(DataFileColumnFamily.NAME)) {
files.add(entry.getKey().getColumnQualifier().toString());
count++;
}
}
assertEquals(1, count);
assertEquals(1, files.size());
assertTrue(files.contains("../0/default_tablet/0_0.rf"));
}

// test split where files of children differ... like majc and split occurred
public void testSplit2() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(createTablet("0", null, null, "/default_tablet", "/default_tablet/0_0.rf"));
bw1.flush();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
// Split plus compaction: children reference 1_0.rf and 0_0.rf is deleted,
// so one fix-up pass is required (rc 1) before convergence.
bw1.addMutation(createTablet("0", "m", null, "/default_tablet", "/default_tablet/1_0.rf"));
Mutation mut3 = createTablet("0", null, "m", "/t-1", "/default_tablet/1_0.rf");
mut3.putDelete(DataFileColumnFamily.NAME.toString(), "/default_tablet/0_0.rf");
bw1.addMutation(mut3);
bw1.flush();
int rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(1, rc);
rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(0, rc);
Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
scanner.setRange(new KeyExtent(new Text("1"), null, null).toMetadataRange());
HashSet<String> files = new HashSet<String>();
int count = 0;
for (Entry<Key,Value> entry : scanner) {
if (entry.getKey().getColumnFamily().equals(DataFileColumnFamily.NAME)) {
files.add(entry.getKey().getColumnQualifier().toString());
count++;
}
}
// Both clone tablets reference the same single file.
assertEquals(1, files.size());
assertEquals(2, count);
assertTrue(files.contains("../0/default_tablet/1_0.rf"));
}

// Helper: mutation removing a tablet's metadata entry (prev-row, time,
// directory columns and one data-file entry). NOTE(review): the dir
// parameter is unused; kept for signature symmetry with createTablet.
private static Mutation deleteTablet(String tid, String endRow, String prevRow, String dir, String file) throws Exception {
KeyExtent ke = new KeyExtent(new Text(tid), endRow == null ? null : new Text(endRow), prevRow == null ? null : new Text(prevRow));
Mutation mut = new Mutation(ke.getMetadataEntry());
TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.putDelete(mut);
TabletsSection.ServerColumnFamily.TIME_COLUMN.putDelete(mut);
TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN.putDelete(mut);
mut.putDelete(DataFileColumnFamily.NAME.toString(), file);
return mut;
}

// Helper: mutation creating a tablet's metadata entry with time "M0", the
// given directory, and one data-file entry with size/entries "10,200".
private static Mutation createTablet(String tid, String endRow, String prevRow, String dir, String file) throws Exception {
KeyExtent ke = new KeyExtent(new Text(tid), endRow == null ? null : new Text(endRow), prevRow == null ? null : new Text(prevRow));
Mutation mut = ke.getPrevRowUpdateMutation();
TabletsSection.ServerColumnFamily.TIME_COLUMN.put(mut, new Value("M0".getBytes()));
TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN.put(mut, new Value(dir.getBytes()));
mut.put(DataFileColumnFamily.NAME.toString(), file, "10,200");
return mut;
}

// test two tablets splitting into four
public void testSplit3() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(createTablet("0", "m", null, "/d1", "/d1/file1"));
bw1.addMutation(createTablet("0", null, "m", "/d2", "/d2/file2"));
bw1.flush();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
// Four children; file1/file2 still cover the clone, plus a new file3.
bw1.addMutation(createTablet("0", "f", null, "/d1", "/d1/file3"));
bw1.addMutation(createTablet("0", "m", "f", "/d3", "/d1/file1"));
bw1.addMutation(createTablet("0", "s", "m", "/d2", "/d2/file2"));
bw1.addMutation(createTablet("0", null, "s", "/d4", "/d2/file2"));
bw1.flush();
int rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(0, rc);
Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
scanner.setRange(new KeyExtent(new Text("1"), null, null).toMetadataRange());
HashSet<String> files = new HashSet<String>();
int count = 0;
for (Entry<Key,Value> entry : scanner) {
if (entry.getKey().getColumnFamily().equals(DataFileColumnFamily.NAME)) {
files.add(entry.getKey().getColumnQualifier().toString());
count++;
}
}
assertEquals(2, count);
assertEquals(2, files.size());
assertTrue(files.contains("../0/d1/file1"));
assertTrue(files.contains("../0/d2/file2"));
}

// test cloned marker
public void testClonedMarker() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(createTablet("0", "m", null, "/d1", "/d1/file1"));
bw1.addMutation(createTablet("0", null, "m", "/d2", "/d2/file2"));
bw1.flush();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
// Delete and recreate the source tablets with a different layout while the
// clone is in flight; first pass flags the change (rc 1).
bw1.addMutation(deleteTablet("0", "m", null, "/d1", "/d1/file1"));
bw1.addMutation(deleteTablet("0", null, "m", "/d2", "/d2/file2"));
bw1.flush();
bw1.addMutation(createTablet("0", "f", null, "/d1", "/d1/file3"));
bw1.addMutation(createTablet("0", "m", "f", "/d3", "/d1/file1"));
bw1.addMutation(createTablet("0", "s", "m", "/d2", "/d2/file3"));
bw1.addMutation(createTablet("0", null, "s", "/d4", "/d4/file3"));
bw1.flush();
int rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(1, rc);
// Swap one child's file between passes; second pass still converges.
bw1.addMutation(deleteTablet("0", "m", "f", "/d3", "/d1/file1"));
bw1.flush();
bw1.addMutation(createTablet("0", "m", "f", "/d3", "/d1/file3"));
bw1.flush();
rc = MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertEquals(0, rc);
Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
scanner.setRange(new KeyExtent(new Text("1"), null, null).toMetadataRange());
HashSet<String> files = new HashSet<String>();
int count = 0;
for (Entry<Key,Value> entry : scanner) {
if (entry.getKey().getColumnFamily().equals(DataFileColumnFamily.NAME)) {
files.add(entry.getKey().getColumnQualifier().toString());
count++;
}
}
assertEquals(3, count);
assertEquals(3, files.size());
assertTrue(files.contains("../0/d1/file1"));
assertTrue(files.contains("../0/d2/file3"));
assertTrue(files.contains("../0/d4/file3"));
}

// A merge of source tablets mid-clone is unrecoverable: checkClone must
// throw TabletDeletedException rather than return a count.
public void testMerge() throws Exception {
MockInstance mi = new MockInstance();
Connector conn = mi.getConnector("", new PasswordToken(""));
BatchWriter bw1 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
bw1.addMutation(createTablet("0", "m", null, "/d1", "/d1/file1"));
bw1.addMutation(createTablet("0", null, "m", "/d2", "/d2/file2"));
bw1.flush();
BatchWriter bw2 = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
MetadataTableUtil.initializeClone("0", "1", conn, bw2);
bw1.addMutation(deleteTablet("0", "m", null, "/d1", "/d1/file1"));
Mutation mut = createTablet("0", null, null, "/d2", "/d2/file2");
mut.put(DataFileColumnFamily.NAME.toString(), "/d1/file1", "10,200");
bw1.addMutation(mut);
bw1.flush();
try {
MetadataTableUtil.checkClone("0", "1", conn, bw2);
assertTrue(false);
} catch (TabletIterator.TabletDeletedException tde) {}
}
}
| |
/*-
* #%L
* PropertiesFramework :: Core
* %%
* Copyright (C) 2017 LeanFrameworks
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package com.github.leanframeworks.propertiesframework.base.binding;
import com.github.leanframeworks.propertiesframework.api.property.ReadableProperty;
import com.github.leanframeworks.propertiesframework.api.property.WritableProperty;
import com.github.leanframeworks.propertiesframework.api.transform.Transformer;
import com.github.leanframeworks.propertiesframework.base.property.CompositeReadableProperty;
import com.github.leanframeworks.propertiesframework.base.transform.ChainedTransformer;
import java.util.Arrays;
import java.util.Collection;
/**
* Utility class that can be used to help binding properties and transform their values.
* <p>
* This binder utility will create {@link SimpleBinding}s between properties. These bindings can be broken by calling
* their {@link SimpleBinding#dispose()} method.
*
* @see ReadableProperty
* @see WritableProperty
* @see SimpleBinding
*/
public final class Binder {
/**
 * Private constructor: this is a static utility class and must not be
 * instantiated.
 */
private Binder() {
// Nothing to be done
}
/**
 * Starts building a binding driven by a single master property.
 *
 * @param master Master property whose value feeds the binding.
 * @param <MO>   Type of value to be read from the master property.
 * @return DSL object used to add transformers and slave properties.
 */
public static <MO> SingleMasterBinding<MO, MO> from(ReadableProperty<MO> master) {
    SingleMasterBinding<MO, MO> dsl = new SingleMasterBinding<>(master, null);
    return dsl;
}
/**
 * Starts building a binding driven by a collection of master properties.
 *
 * @param masters Master properties whose values feed the binding.
 * @param <MO>    Type of value to be read from the master properties.
 * @return DSL object used to add transformers and slave properties.
 */
public static <MO> MultipleMasterBinding<MO, Collection<MO>> from(Collection<ReadableProperty<MO>> masters) {
    MultipleMasterBinding<MO, Collection<MO>> dsl = new MultipleMasterBinding<>(masters, null);
    return dsl;
}
/**
 * Starts building a binding driven by the given master properties.
 * <p>
 * Varargs convenience overload delegating to {@link #from(Collection)}.
 *
 * @param masters Master properties whose values feed the binding.
 * @param <MO>    Type of value to be read from the master properties.
 * @return DSL object used to add transformers and slave properties.
 */
@SafeVarargs
public static <MO> MultipleMasterBinding<MO, Collection<MO>> from(ReadableProperty<MO>... masters) {
    return from(Arrays.asList(masters));
}
/**
 * DSL builder for bindings that have exactly one master property.
 *
 * @param <MO> Type of data that can be read from the master property.
 * @param <SI> Type of data that can be written to slave properties.
 */
public static class SingleMasterBinding<MO, SI> {

    /** Property whose value drives the binding. */
    private final ReadableProperty<MO> master;

    /** Transformer chain applied to the master value before it reaches the slaves. */
    private final ChainedTransformer<MO, SI> transformer;

    /**
     * Constructor specifying the master property to be bound and the transformer to be applied.
     *
     * @param master      Master property that is part of the binding.
     * @param transformer Transformer to be applied.
     */
    public SingleMasterBinding(ReadableProperty<MO> master, Transformer<MO, SI> transformer) {
        this.master = master;
        this.transformer = new ChainedTransformer<>(transformer);
    }

    /**
     * Appends a transformer to the chain applied to the master property value.
     *
     * @param transformer Transformer to be used by the binding.
     * @param <TSI>       Type of output of the specified transformer.
     * @return Builder object to continue building the binding.
     */
    public <TSI> SingleMasterBinding<MO, TSI> transform(Transformer<? super SI, TSI> transformer) {
        ChainedTransformer<MO, TSI> extended = this.transformer.chain(transformer);
        return new SingleMasterBinding<>(master, extended);
    }

    /**
     * Finishes the binding with a single slave property.
     *
     * @param slave Slave property.
     * @return Binding between the master and the slave.
     */
    public SimpleBinding<MO, SI> to(WritableProperty<SI> slave) {
        return new SimpleBinding<>(master, transformer, slave);
    }

    /**
     * Finishes the binding with a collection of slave properties.
     *
     * @param slaves Slave properties.
     * @return Binding between the master and the slaves.
     */
    public SimpleBinding<MO, SI> to(Collection<WritableProperty<? super SI>> slaves) {
        return new SimpleBinding<>(master, transformer, slaves);
    }

    /**
     * Finishes the binding with the given slave properties.
     *
     * @param slaves Slave properties.
     * @return Binding between the master and the slaves.
     */
    @SafeVarargs
    public final SimpleBinding<MO, SI> to(WritableProperty<SI>... slaves) {
        return to(Arrays.asList(slaves));
    }
}
/**
* Builder class that is part of the DSL for binding properties.
*
* @param <MO> Type of data that can be read from the master properties.
* @param <SI> Type of data that can be written to slave properties.
*/
public static class MultipleMasterBinding<MO, SI> {
/**
* Master properties.
*/
private final Collection<ReadableProperty<MO>> masters;
/**
* Master properties values transformer.
*/
private final ChainedTransformer<Collection<MO>, SI> transformer;
/**
* Constructor specifying the master properties to be bound and the transformer to be applied.
*
* @param masters Master properties that are part of the binding.
* @param transformer Transformer to be applied.
*/
public MultipleMasterBinding(Collection<ReadableProperty<MO>> masters, Transformer<Collection<MO>,
SI> transformer) {
this.masters = masters;
this.transformer = new ChainedTransformer<>(transformer);
}
/**
* Specifies a transformer to be used to transform the collection of master properties values.
*
* @param transformer Transformer to be used by the binding.
* @param <TSI> Type of output of the specified transformer.
* @return Builder object to continue building the binding.
*/
public <TSI> MultipleMasterBinding<MO, TSI> transform(Transformer<? super SI, TSI> transformer) {
return new MultipleMasterBinding<>(masters, this.transformer.chain(transformer));
}
/**
* Specifies the slave property that is part of the bind and creates the binding between the masters and the
* slave.
*
* @param slave Slave property.
* @return Binding between the masters and the slave.
*/
public SimpleBinding<Collection<MO>, SI> to(WritableProperty<? super SI> slave) {
return new SimpleBinding<>(new CompositeReadableProperty<>(masters), transformer, slave);
}
/**
* Specifies the slave properties that are part of the bind and creates the binding between the masters and the
* slaves.
*
* @param slaves Slave properties.
* @return Binding between the masters and the slaves.
*/
public SimpleBinding<Collection<MO>, SI> to(Collection<WritableProperty<? super SI>> slaves) {
return new SimpleBinding<>(new CompositeReadableProperty<>(masters), transformer, slaves);
}
/**
* Specifies the slave properties that are part of the bind and creates the binding between the masters and the
* slaves.
*
* @param slaves Slave properties.
* @return Binding between the masters and the slaves.
*/
@SafeVarargs
public final SimpleBinding<Collection<MO>, SI> to(WritableProperty<? super SI>... slaves) {
return to(Arrays.asList(slaves));
}
}
}
| |
/*
* Copyright 2013 Michael Mackenzie High
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package autumn.lang.compiler.ast.nodes;
import autumn.lang.compiler.ast.commons.ConstructList;
import autumn.lang.compiler.ast.commons.IAnnotated;
import autumn.lang.compiler.ast.commons.IBinaryOperation;
import autumn.lang.compiler.ast.commons.IConstruct;
import autumn.lang.compiler.ast.commons.IConversionOperation;
import autumn.lang.compiler.ast.commons.IDatum;
import autumn.lang.compiler.ast.commons.IDirective;
import autumn.lang.compiler.ast.commons.IDocumented;
import autumn.lang.compiler.ast.commons.IExpression;
import autumn.lang.compiler.ast.commons.IRecord;
import autumn.lang.compiler.ast.commons.IStatement;
import autumn.lang.compiler.ast.commons.IUnaryOperation;
import autumn.lang.compiler.ast.literals.BigDecimalLiteral;
import autumn.lang.compiler.ast.literals.BigIntegerLiteral;
import autumn.lang.compiler.ast.literals.ByteLiteral;
import autumn.lang.compiler.ast.literals.CharLiteral;
import autumn.lang.compiler.ast.literals.DoubleLiteral;
import autumn.lang.compiler.ast.literals.FloatLiteral;
import autumn.lang.compiler.ast.literals.IntLiteral;
import autumn.lang.compiler.ast.literals.LongLiteral;
import autumn.lang.compiler.ast.literals.ShortLiteral;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
/**
* An instance of this class is an AST node that represents the greater-than-or-equal operator.
*
* <p>
* <table border="1">
* <tr> <td> <b>Property Name</b> </td> <td> <b>Property Description</b> </td> </tr>
* <tr> <td> <code>left_operand</code> </td> <td>This expression produces the operation's left-operand.</td> </tr>
* <tr> <td> <code>right_operand</code> </td> <td>This expression produces the operation's right-operand.</td> </tr>
* <tr> <td> <code>location</code> </td> <td>This is the source-location information regarding this construct.</td> </tr>
* </table>
* </p>
*
* <p> This file was auto-generated on (Sun May 31 11:54:12 EDT 2015).</p>
*/
@SuppressWarnings("unchecked")
public final class GreaterThanOrEqualsOperation extends Object implements IBinaryOperation
{
    private IExpression left_operand;

    private IExpression right_operand;

    private SourceLocation location = new SourceLocation();

    /**
     * Setter.
     *
     * @param value is the new value of property <code>left_operand</code>.
     * @return a copy of this object with property <code>left_operand</code> set to value.
     */
    public GreaterThanOrEqualsOperation setLeftOperand(final IExpression value)
    {
        // Copy-on-write: the receiver is never mutated.
        final GreaterThanOrEqualsOperation modified = this.copy();
        modified.left_operand = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>left_operand</code>.
     */
    public IExpression getLeftOperand()
    {
        return this.left_operand;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>right_operand</code>.
     * @return a copy of this object with property <code>right_operand</code> set to value.
     */
    public GreaterThanOrEqualsOperation setRightOperand(final IExpression value)
    {
        final GreaterThanOrEqualsOperation modified = this.copy();
        modified.right_operand = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>right_operand</code>.
     */
    public IExpression getRightOperand()
    {
        return this.right_operand;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>location</code>.
     * @return a copy of this object with property <code>location</code> set to value.
     */
    public GreaterThanOrEqualsOperation setLocation(final SourceLocation value)
    {
        final GreaterThanOrEqualsOperation modified = this.copy();
        modified.location = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>location</code>.
     */
    public SourceLocation getLocation()
    {
        return this.location;
    }

    /**
     * This method creates a new instance of this class.
     *
     * @param left_operand is the value for property <code>left_operand</code>.
     * @param right_operand is the value for property <code>right_operand</code>.
     * @param location is the value for property <code>location</code>.
     * @return a new instance of this class.
     */
    public static GreaterThanOrEqualsOperation create(IExpression left_operand, IExpression right_operand, SourceLocation location)
    {
        // Each setter returns a fresh copy, so the chain below builds the fully-populated node.
        return new GreaterThanOrEqualsOperation()
                .setLeftOperand(left_operand)
                .setRightOperand(right_operand)
                .setLocation(location);
    }

    /**
     * This method welcomes a visitor that wants to visit this object.
     *
     * @param visitor is the visitor that is visiting this object.
     */
    public void accept(final IAstVisitor visitor)
    {
        visitor.visit(this);
    }

    /**
     * This method creates a shallow copy of this object.
     *
     * @return a shallow copy of this object.
     */
    public GreaterThanOrEqualsOperation copy()
    {
        final GreaterThanOrEqualsOperation duplicate = new GreaterThanOrEqualsOperation();
        duplicate.left_operand = this.left_operand;
        duplicate.right_operand = this.right_operand;
        duplicate.location = this.location;
        return duplicate;
    }

    /**
     * This method creates a map representation of this struct.
     *
     * <p>
     * Each key is the name of a field.
     * Each value is the result of calling the key field's getter.
     * </p>
     *
     * @return a map containing the entries in this struct.
     */
    public Map<String, Object> toMap()
    {
        // TreeMap keeps the keys sorted, which makes toString() output deterministic.
        final Map<String, Object> map = new TreeMap<String, Object>();
        map.put("left_operand", this.getLeftOperand());
        map.put("right_operand", this.getRightOperand());
        map.put("location", this.getLocation());
        return map;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return this.toMap().toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.coordinator;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Inject;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntMaps;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2LongMap;
import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.utils.ZKPaths;
import org.apache.druid.client.DataSourcesSnapshot;
import org.apache.druid.client.DruidDataSource;
import org.apache.druid.client.DruidServer;
import org.apache.druid.client.ImmutableDruidDataSource;
import org.apache.druid.client.ImmutableDruidServer;
import org.apache.druid.client.ServerInventoryView;
import org.apache.druid.client.coordinator.Coordinator;
import org.apache.druid.client.indexing.IndexingServiceClient;
import org.apache.druid.common.config.JacksonConfigManager;
import org.apache.druid.curator.discovery.ServiceAnnouncer;
import org.apache.druid.discovery.DruidLeaderSelector;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.guice.annotations.CoordinatorIndexingServiceHelper;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.concurrent.ScheduledExecutorFactory;
import org.apache.druid.java.util.common.concurrent.ScheduledExecutors;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.metadata.MetadataRuleManager;
import org.apache.druid.metadata.MetadataSegmentManager;
import org.apache.druid.server.DruidNode;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorBalancer;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorCleanupOvershadowed;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorCleanupUnneeded;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorHelper;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorLogger;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorRuleRunner;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorSegmentCompactor;
import org.apache.druid.server.coordinator.helper.DruidCoordinatorSegmentInfoLoader;
import org.apache.druid.server.coordinator.rules.LoadRule;
import org.apache.druid.server.coordinator.rules.Rule;
import org.apache.druid.server.initialization.ZkPathsConfig;
import org.apache.druid.server.lookup.cache.LookupCoordinatorManager;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.stream.Collectors;
/**
 * Coordinates segment management across the cluster: takes part in leader election, and while
 * leader runs the periodic {@link DruidCoordinatorHelper}s (segment info loading, rule evaluation,
 * compaction, etc.) and manages the per-server {@link LoadQueuePeon}s that load and drop segments.
 */
@ManageLifecycle
public class DruidCoordinator
{
/**
* This comparator orders "freshest" segments first, i. e. segments with most recent intervals.
*
* It is used in historical nodes' {@link LoadQueuePeon}s to make historicals load more recent segment first.
*
* It is also used in {@link DruidCoordinatorRuntimeParams} for {@link
* DruidCoordinatorRuntimeParams#getUsedSegments()} - a collection of segments to be considered during some
* coordinator run for different {@link DruidCoordinatorHelper}s. The order matters only for {@link
* DruidCoordinatorRuleRunner}, which tries to apply the rules while iterating the segments in the order imposed by
* this comparator. In {@link LoadRule} the throttling limit may be hit (via {@link ReplicationThrottler}; see
* {@link CoordinatorDynamicConfig#getReplicationThrottleLimit()}). So before we potentially hit this limit, we want
* to schedule loading the more recent segments (among all of those that need to be loaded).
*
   * In both {@link LoadQueuePeon}s and {@link DruidCoordinatorRuleRunner}, we want to load more recent segments first
   * because presumably they are queried more often and contain more important data for users, so if the Druid
   * cluster has availability problems and is struggling to make all segments available immediately, at least we try
   * to make more "important" (more recent) segments available as soon as possible.
*/
  static final Comparator<DataSegment> SEGMENT_COMPARATOR_RECENT_FIRST = Ordering
      .from(Comparators.intervalsByEndThenStart())
      .onResultOf(DataSegment::getInterval)
      .compound(Ordering.<DataSegment>natural())
      .reverse();
  private static final EmittingLogger log = new EmittingLogger(DruidCoordinator.class);
  // Guards the started flag and the leader lifecycle transitions (start/stop/becomeLeader/stopBeingLeader).
  private final Object lock = new Object();
  private final DruidCoordinatorConfig config;
  private final ZkPathsConfig zkPaths;
  private final JacksonConfigManager configManager;
  private final MetadataSegmentManager segmentsMetadata;
  private final ServerInventoryView serverInventoryView;
  private final MetadataRuleManager metadataRuleManager;
  private final CuratorFramework curator;
  private final ServiceEmitter emitter;
  // May be null; becomeLeader() only schedules the indexing-service runnable when it is non-null.
  private final IndexingServiceClient indexingServiceClient;
  // Single-threaded executor ("Coordinator-Exec--%d") running the periodic coordination runnables.
  private final ScheduledExecutorService exec;
  private final LoadQueueTaskMaster taskMaster;
  // Server name -> load queue peon; populated lazily per historical server, stopped and emptied in stopBeingLeader().
  private final Map<String, LoadQueuePeon> loadManagementPeons;
  private final ServiceAnnouncer serviceAnnouncer;
  private final DruidNode self;
  private final Set<DruidCoordinatorHelper> indexingServiceHelpers;
  private final BalancerStrategyFactory factory;
  private final LookupCoordinatorManager lookupCoordinatorManager;
  private final DruidLeaderSelector coordLeaderSelector;
  private final DruidCoordinatorSegmentCompactor segmentCompactor;
  // True between start() and stop(); writes are guarded by lock, volatile for unsynchronized reads.
  private volatile boolean started = false;
  // Assigned during coordination runs (outside this chunk); methods below return empty results while null.
  private volatile SegmentReplicantLookup segmentReplicantLookup = null;
  /**
   * Injected constructor: delegates to the package-private constructor, supplying a fresh
   * {@link ConcurrentHashMap} to hold the per-server load queue peons.
   */
  @Inject
  public DruidCoordinator(
      DruidCoordinatorConfig config,
      ZkPathsConfig zkPaths,
      JacksonConfigManager configManager,
      MetadataSegmentManager segmentsMetadata,
      ServerInventoryView serverInventoryView,
      MetadataRuleManager metadataRuleManager,
      CuratorFramework curator,
      ServiceEmitter emitter,
      ScheduledExecutorFactory scheduledExecutorFactory,
      IndexingServiceClient indexingServiceClient,
      LoadQueueTaskMaster taskMaster,
      ServiceAnnouncer serviceAnnouncer,
      @Self DruidNode self,
      @CoordinatorIndexingServiceHelper Set<DruidCoordinatorHelper> indexingServiceHelpers,
      BalancerStrategyFactory factory,
      LookupCoordinatorManager lookupCoordinatorManager,
      @Coordinator DruidLeaderSelector coordLeaderSelector,
      DruidCoordinatorSegmentCompactor segmentCompactor
  )
  {
    this(
        config,
        zkPaths,
        configManager,
        segmentsMetadata,
        serverInventoryView,
        metadataRuleManager,
        curator,
        emitter,
        scheduledExecutorFactory,
        indexingServiceClient,
        taskMaster,
        serviceAnnouncer,
        self,
        new ConcurrentHashMap<>(),
        indexingServiceHelpers,
        factory,
        lookupCoordinatorManager,
        coordLeaderSelector,
        segmentCompactor
    );
  }
  /**
   * Package-private constructor that additionally accepts the load-queue-peon map, presumably so
   * tests can observe or pre-populate it — confirm against callers. Creates the single-threaded
   * "Coordinator-Exec" scheduled executor used to run the periodic coordination runnables.
   */
  DruidCoordinator(
      DruidCoordinatorConfig config,
      ZkPathsConfig zkPaths,
      JacksonConfigManager configManager,
      MetadataSegmentManager segmentsMetadata,
      ServerInventoryView serverInventoryView,
      MetadataRuleManager metadataRuleManager,
      CuratorFramework curator,
      ServiceEmitter emitter,
      ScheduledExecutorFactory scheduledExecutorFactory,
      IndexingServiceClient indexingServiceClient,
      LoadQueueTaskMaster taskMaster,
      ServiceAnnouncer serviceAnnouncer,
      DruidNode self,
      ConcurrentMap<String, LoadQueuePeon> loadQueuePeonMap,
      Set<DruidCoordinatorHelper> indexingServiceHelpers,
      BalancerStrategyFactory factory,
      LookupCoordinatorManager lookupCoordinatorManager,
      DruidLeaderSelector coordLeaderSelector,
      DruidCoordinatorSegmentCompactor segmentCompactor
  )
  {
    this.config = config;
    this.zkPaths = zkPaths;
    this.configManager = configManager;
    this.segmentsMetadata = segmentsMetadata;
    this.serverInventoryView = serverInventoryView;
    this.metadataRuleManager = metadataRuleManager;
    this.curator = curator;
    this.emitter = emitter;
    this.indexingServiceClient = indexingServiceClient;
    this.taskMaster = taskMaster;
    this.serviceAnnouncer = serviceAnnouncer;
    this.self = self;
    this.indexingServiceHelpers = indexingServiceHelpers;
    this.exec = scheduledExecutorFactory.create(1, "Coordinator-Exec--%d");
    this.loadManagementPeons = loadQueuePeonMap;
    this.factory = factory;
    this.lookupCoordinatorManager = lookupCoordinatorManager;
    this.coordLeaderSelector = coordLeaderSelector;
    this.segmentCompactor = segmentCompactor;
  }
  /** @return whether this coordinator instance currently holds leadership. */
  public boolean isLeader()
  {
    return coordLeaderSelector.isLeader();
  }
  /**
   * Returns the live server-name -> load-queue-peon map. Note: this is the internal mutable map,
   * not a copy; callers should treat it as read-only.
   */
  public Map<String, LoadQueuePeon> getLoadManagementPeons()
  {
    return loadManagementPeons;
  }
  /**
   * Computes, per tier and data source, how many replicants the first matching load rule still
   * requires beyond what is currently loaded (never negative).
   *
   * @return tier -> { dataSource -> underReplicationCount } map; empty until a coordination run has
   *         populated {@link #segmentReplicantLookup}
   */
  public Map<String, Object2LongMap<String>> computeUnderReplicationCountsPerDataSourcePerTier()
  {
    final Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier = new HashMap<>();
    // Before the first coordination run there is no replicant information to report.
    if (segmentReplicantLookup == null) {
      return underReplicationCountsPerDataSourcePerTier;
    }
    final Iterable<DataSegment> dataSegments = segmentsMetadata.iterateAllUsedSegments();
    final DateTime now = DateTimes.nowUtc();
    for (final DataSegment segment : dataSegments) {
      final List<Rule> rules = metadataRuleManager.getRulesWithDefault(segment.getDataSource());
      for (final Rule rule : rules) {
        if (!(rule instanceof LoadRule && rule.appliesTo(segment, now))) {
          continue;
        }
        ((LoadRule) rule)
            .getTieredReplicants()
            .forEach((final String tier, final Integer ruleReplicants) -> {
              int currentReplicants = segmentReplicantLookup.getLoadedReplicants(segment.getId(), tier);
              // Values are always created as Object2LongOpenHashMap below, so the cast is safe;
              // addTo() accumulates the per-dataSource deficit, clamped at zero by Math.max.
              Object2LongMap<String> underReplicationPerDataSource = underReplicationCountsPerDataSourcePerTier
                  .computeIfAbsent(tier, ignored -> new Object2LongOpenHashMap<>());
              ((Object2LongOpenHashMap<String>) underReplicationPerDataSource)
                  .addTo(segment.getDataSource(), Math.max(ruleReplicants - currentReplicants, 0));
            });
        break; // only the first matching rule applies
      }
    }
    return underReplicationCountsPerDataSourcePerTier;
  }
public Object2IntMap<String> computeNumsUnavailableUsedSegmentsPerDataSource()
{
if (segmentReplicantLookup == null) {
return Object2IntMaps.emptyMap();
}
final Object2IntOpenHashMap<String> numsUnavailableUsedSegmentsPerDataSource = new Object2IntOpenHashMap<>();
final Iterable<DataSegment> dataSegments = segmentsMetadata.iterateAllUsedSegments();
for (DataSegment segment : dataSegments) {
if (segmentReplicantLookup.getLoadedReplicants(segment.getId()) == 0) {
numsUnavailableUsedSegmentsPerDataSource.addTo(segment.getDataSource(), 1);
} else {
numsUnavailableUsedSegmentsPerDataSource.addTo(segment.getDataSource(), 0);
}
}
return numsUnavailableUsedSegmentsPerDataSource;
}
public Map<String, Double> getLoadStatus()
{
final Map<String, Double> loadStatus = new HashMap<>();
final Collection<ImmutableDruidDataSource> dataSources =
segmentsMetadata.getImmutableDataSourcesWithAllUsedSegments();
for (ImmutableDruidDataSource dataSource : dataSources) {
final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments());
final int numUsedSegments = segments.size();
// remove loaded segments
for (DruidServer druidServer : serverInventoryView.getInventory()) {
final DruidDataSource loadedView = druidServer.getDataSource(dataSource.getName());
if (loadedView != null) {
// This does not use segments.removeAll(loadedView.getSegments()) for performance reasons.
// Please see https://github.com/apache/incubator-druid/pull/5632 and LoadStatusBenchmark for more info.
for (DataSegment serverSegment : loadedView.getSegments()) {
segments.remove(serverSegment);
}
}
}
final int numUnloadedSegments = segments.size();
loadStatus.put(
dataSource.getName(),
100 * ((double) (numUsedSegments - numUnloadedSegments) / (double) numUsedSegments)
);
}
return loadStatus;
}
  /** @return bytes of the given data source still awaiting compaction, per the segment compactor. */
  public long remainingSegmentSizeBytesForCompaction(String dataSource)
  {
    return segmentCompactor.getRemainingSegmentSizeBytes(dataSource);
  }
  /** @return the current dynamic coordinator configuration, read through the config manager. */
  public CoordinatorDynamicConfig getDynamicConfigs()
  {
    return CoordinatorDynamicConfig.current(configManager);
  }
  /** @return the current compaction configuration, read through the config manager. */
  public CoordinatorCompactionConfig getCompactionConfig()
  {
    return CoordinatorCompactionConfig.current(configManager);
  }
  /** Marks the given segment as unused in the metadata store (logged for audit purposes). */
  public void markSegmentAsUnused(DataSegment segment)
  {
    log.info("Marking segment[%s] as unused", segment.getId());
    segmentsMetadata.markSegmentAsUnused(segment.getId().toString());
  }
  /** @return the identity of the current coordinator leader (not necessarily this node). */
  public String getCurrentLeader()
  {
    return coordLeaderSelector.getCurrentLeader();
  }
  /**
   * Moves a segment between two servers: queues a load on {@code toServer} and, once the segment is
   * confirmed loaded there (and its load-queue ZK node is gone), queues a drop on {@code fromServer}.
   * Failures are alerted and reported through {@code callback} rather than propagated, except for a
   * null {@code segment}, which throws.
   *
   * @param params     current coordinator run parameters; used for the data-sources snapshot
   * @param fromServer server currently holding the segment
   * @param toServer   destination server; must differ from {@code fromServer} and have capacity
   * @param segment    segment to move; must not be null
   * @param callback   invoked when the move completes or fails; may be null
   * @throws ISE if {@code segment} is null (after emitting an alert and invoking the callback)
   */
  public void moveSegment(
      DruidCoordinatorRuntimeParams params,
      ImmutableDruidServer fromServer,
      ImmutableDruidServer toServer,
      DataSegment segment,
      final LoadPeonCallback callback
  )
  {
    if (segment == null) {
      log.makeAlert(new IAE("Can not move null DataSegment"), "Exception moving null segment").emit();
      if (callback != null) {
        callback.execute();
      }
      throw new ISE("Cannot move null DataSegment");
    }
    SegmentId segmentId = segment.getId();
    try {
      if (fromServer.getMetadata().equals(toServer.getMetadata())) {
        throw new IAE("Cannot move [%s] to and from the same server [%s]", segmentId, fromServer.getName());
      }
      ImmutableDruidDataSource dataSource = params.getDataSourcesSnapshot().getDataSource(segment.getDataSource());
      if (dataSource == null) {
        throw new IAE("Unable to find dataSource for segment [%s] in metadata", segmentId);
      }
      // get segment information from MetadataSegmentManager instead of getting it from fromServer's.
      // This is useful when MetadataSegmentManager and fromServer DataSegment's are different for same
      // identifier (say loadSpec differs because of deep storage migration).
      final DataSegment segmentToLoad = dataSource.getSegment(segment.getId());
      if (segmentToLoad == null) {
        throw new IAE("No segment metadata found for segment Id [%s]", segment.getId());
      }
      // Both ends of the move need an already-created peon (created lazily when servers are first seen).
      final LoadQueuePeon loadPeon = loadManagementPeons.get(toServer.getName());
      if (loadPeon == null) {
        throw new IAE("LoadQueuePeon hasn't been created yet for path [%s]", toServer.getName());
      }
      final LoadQueuePeon dropPeon = loadManagementPeons.get(fromServer.getName());
      if (dropPeon == null) {
        throw new IAE("LoadQueuePeon hasn't been created yet for path [%s]", fromServer.getName());
      }
      final ServerHolder toHolder = new ServerHolder(toServer, loadPeon);
      if (toHolder.getAvailableSize() < segmentToLoad.getSize()) {
        throw new IAE(
            "Not enough capacity on server [%s] for segment [%s]. Required: %,d, available: %,d.",
            toServer.getName(),
            segmentToLoad,
            segmentToLoad.getSize(),
            toHolder.getAvailableSize()
        );
      }
      final String toLoadQueueSegPath =
          ZKPaths.makePath(zkPaths.getLoadQueuePath(), toServer.getName(), segmentId.toString());
      // Runs after the drop (or on failure): clears the drop marker, then notifies the caller.
      final LoadPeonCallback loadPeonCallback = () -> {
        dropPeon.unmarkSegmentToDrop(segmentToLoad);
        if (callback != null) {
          callback.execute();
        }
      };
      // mark segment to drop before it is actually loaded on server
      // to be able to account this information in DruidBalancerStrategy immediately
      dropPeon.markSegmentToDrop(segmentToLoad);
      try {
        loadPeon.loadSegment(
            segmentToLoad,
            () -> {
              try {
                // Only drop from the source once the destination actually serves the segment, its
                // load-queue ZK entry is gone, and the drop hasn't already been queued.
                if (serverInventoryView.isSegmentLoadedByServer(toServer.getName(), segment) &&
                    curator.checkExists().forPath(toLoadQueueSegPath) == null &&
                    !dropPeon.getSegmentsToDrop().contains(segment)) {
                  dropPeon.dropSegment(segment, loadPeonCallback);
                } else {
                  loadPeonCallback.execute();
                }
              }
              catch (Exception e) {
                throw new RuntimeException(e);
              }
            }
        );
      }
      catch (Exception e) {
        // Loading failed to even start: undo the drop marker before reporting.
        dropPeon.unmarkSegmentToDrop(segmentToLoad);
        throw new RuntimeException(e);
      }
    }
    catch (Exception e) {
      log.makeAlert(e, "Exception moving segment %s", segmentId).emit();
      if (callback != null) {
        callback.execute();
      }
    }
  }
  /** Lifecycle hook: registers this node as a leadership candidate. Idempotent. */
  @LifecycleStart
  public void start()
  {
    synchronized (lock) {
      if (started) {
        return;
      }
      started = true;
      // The listener callbacks drive the becomeLeader()/stopBeingLeader() lifecycle below.
      coordLeaderSelector.registerListener(
          new DruidLeaderSelector.Listener()
          {
            @Override
            public void becomeLeader()
            {
              DruidCoordinator.this.becomeLeader();
            }
            @Override
            public void stopBeingLeader()
            {
              DruidCoordinator.this.stopBeingLeader();
            }
          }
      );
    }
  }
  /** Lifecycle hook: withdraws from leader election and halts the coordination executor. Idempotent. */
  @LifecycleStop
  public void stop()
  {
    synchronized (lock) {
      if (!started) {
        return;
      }
      coordLeaderSelector.unregisterListener();
      started = false;
      // Cancels any scheduled coordination runnables; they also self-terminate on leadership loss.
      exec.shutdownNow();
    }
  }
  /**
   * Leader-election callback: starts metadata polling, rule management, lookups and service
   * announcement, then schedules the periodic coordination runnables on {@link #exec}. Each
   * scheduled task re-checks leadership (and the leadership term it was started under) before every
   * run, and stops itself once either no longer holds.
   */
  private void becomeLeader()
  {
    synchronized (lock) {
      // If stop() already ran, don't start leading.
      if (!started) {
        return;
      }
      log.info("I am the leader of the coordinators, all must bow!");
      log.info("Starting coordination in [%s]", config.getCoordinatorStartDelay());
      segmentsMetadata.startPollingDatabasePeriodically();
      metadataRuleManager.start();
      lookupCoordinatorManager.start();
      serviceAnnouncer.announce(self);
      // Capture the current term so runnables scheduled now stop if leadership is lost and regained.
      final int startingLeaderCounter = coordLeaderSelector.localTerm();
      final List<Pair<? extends CoordinatorRunnable, Duration>> coordinatorRunnables = new ArrayList<>();
      coordinatorRunnables.add(
          Pair.of(
              new CoordinatorHistoricalManagerRunnable(startingLeaderCounter),
              config.getCoordinatorPeriod()
          )
      );
      // The indexing-service runnable is only scheduled when an indexing service client is configured.
      if (indexingServiceClient != null) {
        coordinatorRunnables.add(
            Pair.of(
                new CoordinatorIndexingServiceRunnable(
                    makeIndexingServiceHelpers(),
                    startingLeaderCounter
                ),
                config.getCoordinatorIndexingPeriod()
            )
        );
      }
      for (final Pair<? extends CoordinatorRunnable, Duration> coordinatorRunnable : coordinatorRunnables) {
        ScheduledExecutors.scheduleWithFixedDelay(
            exec,
            config.getCoordinatorStartDelay(),
            coordinatorRunnable.rhs,
            new Callable<ScheduledExecutors.Signal>()
            {
              private final CoordinatorRunnable theRunnable = coordinatorRunnable.lhs;
              @Override
              public ScheduledExecutors.Signal call()
              {
                if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) {
                  theRunnable.run();
                }
                if (coordLeaderSelector.isLeader()
                    && startingLeaderCounter == coordLeaderSelector.localTerm()) { // (We might no longer be leader)
                  return ScheduledExecutors.Signal.REPEAT;
                } else {
                  return ScheduledExecutors.Signal.STOP;
                }
              }
            }
        );
      }
    }
  }
private void stopBeingLeader()
{
synchronized (lock) {
log.info("I am no longer the leader...");
for (String server : loadManagementPeons.keySet()) {
LoadQueuePeon peon = loadManagementPeons.remove(server);
peon.stop();
}
loadManagementPeons.clear();
serviceAnnouncer.unannounce(self);
lookupCoordinatorManager.stop();
metadataRuleManager.stop();
segmentsMetadata.stopPollingDatabasePeriodically();
}
}
private List<DruidCoordinatorHelper> makeIndexingServiceHelpers()
{
List<DruidCoordinatorHelper> helpers = new ArrayList<>();
helpers.add(new DruidCoordinatorSegmentInfoLoader(DruidCoordinator.this));
helpers.add(segmentCompactor);
helpers.addAll(indexingServiceHelpers);
log.info(
"Done making indexing service helpers [%s]",
helpers.stream().map(helper -> helper.getClass().getName()).collect(Collectors.toList())
);
return ImmutableList.copyOf(helpers);
}
  /**
   * Base class for the periodic coordination tasks scheduled in {@code becomeLeader()}. Each run
   * re-verifies leadership and prerequisite services, builds the run parameters, and threads them
   * through the configured {@link DruidCoordinatorHelper}s in order; any helper may cancel the rest
   * of the run by returning null.
   */
  public abstract class CoordinatorRunnable implements Runnable
  {
    // Captured at construction; passed to each run's params as the coordination start reference.
    private final long startTimeNanos = System.nanoTime();
    private final List<DruidCoordinatorHelper> helpers;
    // Leadership term this runnable was scheduled under; runs are skipped once the term changes.
    private final int startingLeaderCounter;
    protected CoordinatorRunnable(List<DruidCoordinatorHelper> helpers, final int startingLeaderCounter)
    {
      this.helpers = helpers;
      this.startingLeaderCounter = startingLeaderCounter;
    }
    @Override
    public void run()
    {
      ListeningExecutorService balancerExec = null;
      try {
        // Bail out (and tear down leader state) if leadership was lost since the last run.
        synchronized (lock) {
          if (!coordLeaderSelector.isLeader()) {
            log.info("LEGGO MY EGGO. [%s] is leader.", coordLeaderSelector.getCurrentLeader());
            stopBeingLeader();
            return;
          }
        }
        // Both the metadata poll and the server inventory must be up before coordinating.
        List<Boolean> allStarted = Arrays.asList(
            segmentsMetadata.isPollingDatabasePeriodically(),
            serverInventoryView.isStarted()
        );
        for (Boolean aBoolean : allStarted) {
          if (!aBoolean) {
            log.error("InventoryManagers not started[%s]", allStarted);
            stopBeingLeader();
            return;
          }
        }
        // Per-run balancer executor, sized from the current dynamic config; shut down in finally.
        balancerExec = MoreExecutors.listeningDecorator(Execs.multiThreaded(
            getDynamicConfigs().getBalancerComputeThreads(),
            "coordinator-cost-balancer-%s"
        ));
        BalancerStrategy balancerStrategy = factory.createBalancerStrategy(balancerExec);
        // Do coordinator stuff.
        DataSourcesSnapshot dataSourcesSnapshot = segmentsMetadata.getSnapshotOfDataSourcesWithAllUsedSegments();
        DruidCoordinatorRuntimeParams params =
            DruidCoordinatorRuntimeParams
                .newBuilder()
                .withStartTimeNanos(startTimeNanos)
                .withSnapshotOfDataSourcesWithAllUsedSegments(dataSourcesSnapshot)
                .withDynamicConfigs(getDynamicConfigs())
                .withCompactionConfig(getCompactionConfig())
                .withEmitter(emitter)
                .withBalancerStrategy(balancerStrategy)
                .build();
        for (DruidCoordinatorHelper helper : helpers) {
          // Don't read state and run state in the same helper otherwise racy conditions may exist
          if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) {
            params = helper.run(params);
            if (params == null) {
              // This helper wanted to cancel the run. No log message, since the helper should have logged a reason.
              return;
            }
          }
        }
      }
      catch (Exception e) {
        log.makeAlert(e, "Caught exception, ignoring so that schedule keeps going.").emit();
      }
      finally {
        if (balancerExec != null) {
          balancerExec.shutdownNow();
        }
      }
    }
  }
/**
 * Duty loop for managing historical nodes: builds the DruidCluster view, creates/destroys
 * load-queue peons as servers appear/disappear, then runs the rule/cleanup/balance helpers.
 */
private class CoordinatorHistoricalManagerRunnable extends CoordinatorRunnable
{
  public CoordinatorHistoricalManagerRunnable(final int startingLeaderCounter)
  {
    super(
        ImmutableList.of(
            new DruidCoordinatorSegmentInfoLoader(DruidCoordinator.this),
            // Inline helper: snapshot the historical servers and refresh peon bookkeeping.
            params -> {
              // Only segment-replicatable (historical) servers participate in load management.
              List<ImmutableDruidServer> servers = serverInventoryView
                  .getInventory()
                  .stream()
                  .filter(DruidServer::segmentReplicatable)
                  .map(DruidServer::toImmutableDruidServer)
                  .collect(Collectors.toList());

              if (log.isDebugEnabled()) {
                // Display info about all historical servers
                log.debug("Servers");
                for (ImmutableDruidServer druidServer : servers) {
                  log.debug(" %s", druidServer);
                  log.debug(" -- DataSources");
                  for (ImmutableDruidDataSource druidDataSource : druidServer.getDataSources()) {
                    log.debug(" %s", druidDataSource);
                  }
                }
              }

              // Find all historical servers, group them by subType and sort by ascending usage
              Set<String> decommissioningServers = params.getCoordinatorDynamicConfig().getDecommissioningNodes();
              final DruidCluster cluster = new DruidCluster();
              for (ImmutableDruidServer server : servers) {
                // Lazily create a peon for any server we have not seen before.
                if (!loadManagementPeons.containsKey(server.getName())) {
                  LoadQueuePeon loadQueuePeon = taskMaster.giveMePeon(server);
                  loadQueuePeon.start();
                  log.info("Created LoadQueuePeon for server[%s].", server.getName());

                  loadManagementPeons.put(server.getName(), loadQueuePeon);
                }

                cluster.add(
                    new ServerHolder(
                        server,
                        loadManagementPeons.get(server.getName()),
                        decommissioningServers.contains(server.getHost())
                    )
                );
              }

              segmentReplicantLookup = SegmentReplicantLookup.make(cluster);

              // Stop peons for servers that aren't there anymore.
              final Set<String> disappeared = Sets.newHashSet(loadManagementPeons.keySet());
              for (ImmutableDruidServer server : servers) {
                disappeared.remove(server.getName());
              }
              for (String name : disappeared) {
                log.info("Removing listener for server[%s] which is no longer there.", name);
                LoadQueuePeon peon = loadManagementPeons.remove(name);
                peon.stop();
              }

              return params.buildFromExisting()
                           .withDruidCluster(cluster)
                           .withDatabaseRuleManager(metadataRuleManager)
                           .withLoadManagementPeons(loadManagementPeons)
                           .withSegmentReplicantLookup(segmentReplicantLookup)
                           .withBalancerReferenceTimestamp(DateTimes.nowUtc())
                           .build();
            },
            new DruidCoordinatorRuleRunner(DruidCoordinator.this),
            new DruidCoordinatorCleanupUnneeded(),
            new DruidCoordinatorCleanupOvershadowed(DruidCoordinator.this),
            new DruidCoordinatorBalancer(DruidCoordinator.this),
            new DruidCoordinatorLogger(DruidCoordinator.this)
        ),
        startingLeaderCounter
    );
  }
}
/**
 * Duty loop for indexing-service helpers. Unlike the historical runnable, the helper
 * list is injected by the caller (see makeIndexingServiceHelpers) rather than fixed here.
 */
private class CoordinatorIndexingServiceRunnable extends CoordinatorRunnable
{
  public CoordinatorIndexingServiceRunnable(List<DruidCoordinatorHelper> helpers, final int startingLeaderCounter)
  {
    super(helpers, startingLeaderCounter);
  }
}
}
| |
package org.jsoup.nodes;
import org.jsoup.SerializationException;
import org.jsoup.helper.StringUtil;
import org.jsoup.helper.Validate;
import org.jsoup.parser.Parser;
import org.jsoup.select.NodeTraversor;
import org.jsoup.select.NodeVisitor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.jsoup.parser.Token;
/**
The base, abstract Node model. Elements, Documents, Comments etc are all Node instances.
@author Jonathan Hedley, jonathan@hedley.net */
public abstract class Node implements Cloneable {
    // Shared immutable sentinel for "no children"; swapped for a real list by ensureChildNodes().
    private static final List<Node> EMPTY_NODES = Collections.emptyList();
    Node parentNode;       // null for orphan / root nodes
    List<Node> childNodes; // EMPTY_NODES until a child is first added
    Attributes attributes; // may be null if the no-arg constructor was used
    String baseUri;
    int siblingIndex;      // 0-based position of this node in parentNode.childNodes
    //TODO modified
    private Token token;     // source token this node was parsed from, or null
    private int tokenIndex;  // index of that token; -1 when token is cleared
    /**
     Create a new Node.
     @param baseUri base URI
     @param attributes attributes (not null, but may be empty)
     */
    protected Node(String baseUri, Attributes attributes) {
        Validate.notNull(baseUri);
        Validate.notNull(attributes);

        childNodes = EMPTY_NODES;
        this.baseUri = baseUri.trim();
        this.attributes = attributes;
    }

    /**
     Create a new Node with the given base URI and an empty attribute set.
     @param baseUri base URI
     */
    protected Node(String baseUri) {
        this(baseUri, new Attributes());
    }

    /**
     * Default constructor. Doesn't setup base uri, children, or attributes; use with caution.
     * NOTE(review): leaves {@link #attributes} null — attribute accessors on such a node
     * will NPE unless the subclass initialises attributes itself.
     */
    protected Node() {
        childNodes = EMPTY_NODES;
        attributes = null;
    }

    /**
     Get the node name of this node. Use for debugging purposes and not logic switching (for that, use instanceof).
     @return node name
     */
    public abstract String nodeName();
/**
* Get an attribute's value by its key. <b>Case insensitive</b>
* <p>
* To get an absolute URL from an attribute that may be a relative URL, prefix the key with <code><b>abs</b></code>,
* which is a shortcut to the {@link #absUrl} method.
* </p>
* E.g.:
* <blockquote><code>String url = a.attr("abs:href");</code></blockquote>
*
* @param attributeKey The attribute key.
* @return The attribute, or empty string if not present (to avoid nulls).
* @see #attributes()
* @see #hasAttr(String)
* @see #absUrl(String)
*/
public String attr(String attributeKey) {
Validate.notNull(attributeKey);
String val = attributes.getIgnoreCase(attributeKey);
if (val.length() > 0)
return val;
else if (attributeKey.toLowerCase().startsWith("abs:"))
return absUrl(attributeKey.substring("abs:".length()));
else return "";
}
    /**
     * Get all of the element's attributes.
     * @return attributes (which implements iterable, in same order as presented in original HTML).
     */
    public Attributes attributes() {
        // Returns the live backing object; mutations are visible to this node.
        return attributes;
    }

    /**
     * Set an attribute (key=value). If the attribute already exists, it is replaced.
     * @param attributeKey The attribute key.
     * @param attributeValue The attribute value.
     * @return this (for chaining)
     */
    public Node attr(String attributeKey, String attributeValue) {
        attributes.put(attributeKey, attributeValue);
        return this;
    }

    /**
     * Test if this element has an attribute. <b>Case insensitive</b>
     * @param attributeKey The attribute key to check.
     * @return true if the attribute exists, false if not.
     */
    public boolean hasAttr(String attributeKey) {
        Validate.notNull(attributeKey);

        // "abs:" prefixed keys only count as present if the URL can actually be made absolute.
        if (attributeKey.startsWith("abs:")) {
            String key = attributeKey.substring("abs:".length());
            if (attributes.hasKeyIgnoreCase(key) && !absUrl(key).equals(""))
                return true;
        }
        return attributes.hasKeyIgnoreCase(attributeKey);
    }

    /**
     * Remove an attribute from this element.
     * @param attributeKey The attribute to remove.
     * @return this (for chaining)
     */
    public Node removeAttr(String attributeKey) {
        Validate.notNull(attributeKey);
        attributes.removeIgnoreCase(attributeKey);
        return this;
    }
    /**
     Get the base URI of this node.
     @return base URI
     */
    public String baseUri() {
        return baseUri;
    }

    /**
     Update the base URI of this node and all of its descendants.
     @param baseUri base URI to set
     */
    public void setBaseUri(final String baseUri) {
        Validate.notNull(baseUri);

        // Depth-first walk: overwrite baseUri on this node and every descendant.
        traverse(new NodeVisitor() {
            public void head(Node node, int depth) {
                node.baseUri = baseUri;
            }

            public void tail(Node node, int depth) {
                // nothing to do on exit
            }
        });
    }
/**
* Get an absolute URL from a URL attribute that may be relative (i.e. an <code><a href></code> or
* <code><img src></code>).
* <p>
* E.g.: <code>String absUrl = linkEl.absUrl("href");</code>
* </p>
* <p>
* If the attribute value is already absolute (i.e. it starts with a protocol, like
* <code>http://</code> or <code>https://</code> etc), and it successfully parses as a URL, the attribute is
* returned directly. Otherwise, it is treated as a URL relative to the element's {@link #baseUri}, and made
* absolute using that.
* </p>
* <p>
* As an alternate, you can use the {@link #attr} method with the <code>abs:</code> prefix, e.g.:
* <code>String absUrl = linkEl.attr("abs:href");</code>
* </p>
*
* @param attributeKey The attribute key
* @return An absolute URL if one could be made, or an empty string (not null) if the attribute was missing or
* could not be made successfully into a URL.
* @see #attr
* @see java.net.URL#URL(java.net.URL, String)
*/
public String absUrl(String attributeKey) {
Validate.notEmpty(attributeKey);
if (!hasAttr(attributeKey)) {
return ""; // nothing to make absolute with
} else {
return StringUtil.resolve(baseUri, attr(attributeKey));
}
}
    /**
     Get a child node by its 0-based index.
     @param index index of child node
     @return the child node at this index. Throws a {@code IndexOutOfBoundsException} if the index is out of bounds.
     */
    public Node childNode(int index) {
        return childNodes.get(index);
    }

    /**
     Get this node's children. Presented as an unmodifiable list: new children can not be added, but the child nodes
     themselves can be manipulated.
     @return list of children. If no children, returns an empty list.
     */
    public List<Node> childNodes() {
        // View, not copy: reflects later structural changes but rejects add/remove.
        return Collections.unmodifiableList(childNodes);
    }

    /**
     * Returns a deep copy of this node's children. Changes made to these nodes will not be reflected in the original
     * nodes
     * @return a deep copy of this node's children
     */
    public List<Node> childNodesCopy() {
        List<Node> children = new ArrayList<Node>(childNodes.size());
        for (Node node : childNodes) {
            children.add(node.clone()); // deep clone; copies are orphans (no parent)
        }
        return children;
    }

    /**
     * Get the number of child nodes that this node holds.
     * @return the number of child nodes that this node holds.
     */
    public final int childNodeSize() {
        return childNodes.size();
    }

    // Snapshot of the children as an array (safe to iterate while mutating the live list).
    protected Node[] childNodesAsArray() {
        return childNodes.toArray(new Node[childNodeSize()]);
    }
    /**
     Gets this node's parent node.
     @return parent node; or null if no parent.
     */
    public Node parent() {
        return parentNode;
    }

    /**
     Gets this node's parent node. Not overridable by extending classes, so useful if you really just need the Node type.
     @return parent node; or null if no parent.
     */
    public final Node parentNode() {
        return parentNode;
    }

    /**
     * Get this node's root node; that is, its topmost ancestor. If this node is the top ancestor, returns {@code this}.
     * @return topmost ancestor.
     */
    public Node root() {
        Node node = this;
        while (node.parentNode != null)
            node = node.parentNode;
        return node;
    }

    /**
     * Gets the Document associated with this Node.
     * @return the Document associated with this Node, or null if there is no such Document.
     */
    public Document ownerDocument() {
        // The owner document, if any, is the root of the tree.
        Node root = root();
        return (root instanceof Document) ? (Document) root : null;
    }
    /**
     * Remove (delete) this node from the DOM tree. If this node has children, they are also removed.
     */
    public void remove() {
        Validate.notNull(parentNode); // orphan nodes cannot be removed
        parentNode.removeChild(this);
    }

    /**
     * Insert the specified HTML into the DOM before this node (i.e. as a preceding sibling).
     * @param html HTML to add before this node
     * @return this node, for chaining
     * @see #after(String)
     */
    public Node before(String html) {
        addSiblingHtml(siblingIndex, html);
        return this;
    }

    /**
     * Insert the specified node into the DOM before this node (i.e. as a preceding sibling).
     * @param node to add before this node
     * @return this node, for chaining
     * @see #after(Node)
     */
    public Node before(Node node) {
        Validate.notNull(node);
        Validate.notNull(parentNode);

        // Inserting at our own index shifts this node (and later siblings) right by one.
        parentNode.addChildren(siblingIndex, node);
        return this;
    }

    /**
     * Insert the specified HTML into the DOM after this node (i.e. as a following sibling).
     * @param html HTML to add after this node
     * @return this node, for chaining
     * @see #before(String)
     */
    public Node after(String html) {
        addSiblingHtml(siblingIndex + 1, html);
        return this;
    }

    /**
     * Insert the specified node into the DOM after this node (i.e. as a following sibling).
     * @param node to add after this node
     * @return this node, for chaining
     * @see #before(Node)
     */
    public Node after(Node node) {
        Validate.notNull(node);
        Validate.notNull(parentNode);

        parentNode.addChildren(siblingIndex + 1, node);
        return this;
    }

    // Parse html as a fragment (using the parent element as context, when available)
    // and splice the resulting nodes into the parent's child list at the given index.
    private void addSiblingHtml(int index, String html) {
        Validate.notNull(html);
        Validate.notNull(parentNode);

        Element context = parent() instanceof Element ? (Element) parent() : null;
        List<Node> nodes = Parser.parseFragment(html, context, baseUri());
        parentNode.addChildren(index, nodes.toArray(new Node[nodes.size()]));
    }
/**
Wrap the supplied HTML around this node.
@param html HTML to wrap around this element, e.g. {@code <div class="head"></div>}. Can be arbitrarily deep.
@return this node, for chaining.
*/
public Node wrap(String html) {
Validate.notEmpty(html);
Element context = parent() instanceof Element ? (Element) parent() : null;
List<Node> wrapChildren = Parser.parseFragment(html, context, baseUri());
Node wrapNode = wrapChildren.get(0);
if (wrapNode == null || !(wrapNode instanceof Element)) // nothing to wrap with; noop
return null;
Element wrap = (Element) wrapNode;
Element deepest = getDeepChild(wrap);
parentNode.replaceChild(this, wrap);
deepest.addChildren(this);
// remainder (unbalanced wrap, like <div></div><p></p> -- The <p> is remainder
if (wrapChildren.size() > 0) {
for (int i = 0; i < wrapChildren.size(); i++) {
Node remainder = wrapChildren.get(i);
remainder.parentNode.removeChild(remainder);
wrap.appendChild(remainder);
}
}
return this;
}
    /**
     * Removes this node from the DOM, and moves its children up into the node's parent. This has the effect of dropping
     * the node but keeping its children.
     * <p>
     * For example, with the input html:
     * </p>
     * <p>{@code <div>One <span>Two <b>Three</b></span></div>}</p>
     * Calling {@code element.unwrap()} on the {@code span} element will result in the html:
     * <p>{@code <div>One Two <b>Three</b></div>}</p>
     * and the {@code "Two "} {@link TextNode} being returned.
     *
     * @return the first child of this node, after the node has been unwrapped. Null if the node had no children.
     * @see #remove()
     * @see #wrap(String)
     */
    public Node unwrap() {
        Validate.notNull(parentNode);

        Node firstChild = childNodes.size() > 0 ? childNodes.get(0) : null;
        // Splice the children into the parent at this node's position, then remove this node.
        // Order matters: childNodesAsArray() snapshots before addChildren reparents them.
        parentNode.addChildren(siblingIndex, this.childNodesAsArray());
        this.remove();

        return firstChild;
    }

    // Follow first children down to the deepest descendant element (used to find where
    // wrapped content should be re-attached).
    private Element getDeepChild(Element el) {
        List<Element> children = el.children();
        if (children.size() > 0)
            return getDeepChild(children.get(0));
        else
            return el;
    }
    /**
     * Replace this node in the DOM with the supplied node.
     * @param in the node that will will replace the existing node.
     */
    public void replaceWith(Node in) {
        Validate.notNull(in);
        Validate.notNull(parentNode);
        parentNode.replaceChild(this, in);
    }

    // Attach this node to a new parent, detaching it from any previous parent first.
    protected void setParentNode(Node parentNode) {
        if (this.parentNode != null)
            this.parentNode.removeChild(this);
        this.parentNode = parentNode;
    }

    // Swap child `out` for `in` at the same position; `in` is detached from its old parent
    // if it had one. No reindex needed: `in` takes over `out`'s sibling index exactly.
    protected void replaceChild(Node out, Node in) {
        Validate.isTrue(out.parentNode == this);
        Validate.notNull(in);
        if (in.parentNode != null)
            in.parentNode.removeChild(in);

        final int index = out.siblingIndex;
        childNodes.set(index, in);
        in.parentNode = this;
        in.setSiblingIndex(index);
        out.parentNode = null;
    }

    // Detach child `out`; siblings after it shift left, so their indices are rebuilt.
    protected void removeChild(Node out) {
        Validate.isTrue(out.parentNode == this);
        final int index = out.siblingIndex;
        childNodes.remove(index);
        reindexChildren(index);
        out.parentNode = null;
    }
protected void addChildren(Node... children) {
//most used. short circuit addChildren(int), which hits reindex children and array copy
for (Node child: children) {
reparentChild(child);
ensureChildNodes();
childNodes.add(child);
child.setSiblingIndex(childNodes.size()-1);
}
}
protected void addChildren(int index, Node... children) {
Validate.noNullElements(children);
ensureChildNodes();
for (int i = children.length - 1; i >= 0; i--) {
Node in = children[i];
reparentChild(in);
childNodes.add(index, in);
reindexChildren(index);
}
}
    // Lazily replace the shared EMPTY_NODES sentinel with a real, mutable list.
    protected void ensureChildNodes() {
        if (childNodes == EMPTY_NODES) {
            childNodes = new ArrayList<Node>(4); // small default: most nodes have few children
        }
    }

    // Detach `child` from its current parent (if any) and claim it for this node.
    protected void reparentChild(Node child) {
        if (child.parentNode != null)
            child.parentNode.removeChild(child);
        child.setParentNode(this);
    }

    // Recompute sibling indices for children at positions start..end (after an insert/remove).
    private void reindexChildren(int start) {
        for (int i = start; i < childNodes.size(); i++) {
            childNodes.get(i).setSiblingIndex(i);
        }
    }
    /**
     Retrieves this node's sibling nodes. Similar to {@link #childNodes()  node.parent.childNodes()}, but does not
     include this node (a node is not a sibling of itself).
     @return node siblings. If the node has no parent, returns an empty list.
     */
    public List<Node> siblingNodes() {
        if (parentNode == null)
            return Collections.emptyList();

        List<Node> nodes = parentNode.childNodes;
        List<Node> siblings = new ArrayList<Node>(nodes.size() - 1); // all children minus this one
        for (Node node: nodes)
            if (node != this)
                siblings.add(node);
        return siblings;
    }

    /**
     Get this node's next sibling.
     @return next sibling, or null if this is the last sibling
     */
    public Node nextSibling() {
        if (parentNode == null)
            return null; // root

        final List<Node> siblings = parentNode.childNodes;
        final int index = siblingIndex+1;
        if (siblings.size() > index)
            return siblings.get(index);
        else
            return null;
    }

    /**
     Get this node's previous sibling.
     @return the previous sibling, or null if this is the first sibling
     */
    public Node previousSibling() {
        if (parentNode == null)
            return null; // root

        if (siblingIndex > 0)
            return parentNode.childNodes.get(siblingIndex-1);
        else
            return null;
    }

    /**
     * Get the list index of this node in its node sibling list. I.e. if this is the first node
     * sibling, returns 0.
     * @return position in node sibling list
     * @see org.jsoup.nodes.Element#elementSiblingIndex()
     */
    public int siblingIndex() {
        return siblingIndex;
    }

    // Maintained by the parent's add/remove/reindex machinery; not public API.
    protected void setSiblingIndex(int siblingIndex) {
        this.siblingIndex = siblingIndex;
    }
    /**
     * Perform a depth-first traversal through this node and its descendants.
     * @param nodeVisitor the visitor callbacks to perform on each node
     * @return this node, for chaining
     */
    public Node traverse(NodeVisitor nodeVisitor) {
        Validate.notNull(nodeVisitor);
        NodeTraversor traversor = new NodeTraversor(nodeVisitor);
        traversor.traverse(this);
        return this;
    }

    /**
     Get the outer HTML of this node.
     @return HTML
     */
    public String outerHtml() {
        StringBuilder accum = new StringBuilder(128); // presized; most nodes serialise small
        outerHtml(accum);
        return accum.toString();
    }

    // Serialise this subtree into the accumulator via a traversal with OuterHtmlVisitor.
    protected void outerHtml(Appendable accum) {
        new NodeTraversor(new OuterHtmlVisitor(accum, getOutputSettings())).traverse(this);
    }

    // if this node has no document (or parent), retrieve the default output settings
    Document.OutputSettings getOutputSettings() {
        Document owner = ownerDocument();
        return owner != null ? owner.outputSettings() : (new Document("")).outputSettings();
    }

    /**
     Get the outer HTML of this node.
     @param accum accumulator to place HTML into
     @throws IOException if appending to the given accumulator fails.
     */
    abstract void outerHtmlHead(Appendable accum, int depth, Document.OutputSettings out) throws IOException;

    // Emitted after this node's children have been serialised (e.g. closing tags).
    abstract void outerHtmlTail(Appendable accum, int depth, Document.OutputSettings out) throws IOException;

    /**
     * Write this node and its children to the given {@link Appendable}.
     *
     * @param appendable the {@link Appendable} to write to.
     * @return the supplied {@link Appendable}, for chaining.
     */
    public <T extends Appendable> T html(T appendable) {
        outerHtml(appendable);
        return appendable;
    }

    // toString is defined as the node's outer HTML.
    public String toString() {
        return outerHtml();
    }

    // Write a newline plus the configured indent for the given depth (pretty-printing).
    protected void indent(Appendable accum, int depth, Document.OutputSettings out) throws IOException {
        accum.append("\n").append(StringUtil.padding(depth * out.indentAmount()));
    }
    /**
     * Check if this node is the same instance of another (object identity test).
     * @param o other object to compare to
     * @return true if the content of this node is the same as the other
     * @see Node#hasSameValue(Object) to compare nodes by their value
     */
    @Override
    public boolean equals(Object o) {
        // implemented just so that javadoc is clear this is an identity test
        // (hashCode is deliberately not overridden: the default identity hash is consistent
        // with identity equals)
        return this == o;
    }

    /**
     * Check if this node is has the same content as another node. A node is considered the same if its name, attributes and content match the
     * other node; particularly its position in the tree does not influence its similarity.
     * @param o other object to compare to
     * @return true if the content of this node is the same as the other
     */
    public boolean hasSameValue(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        // Value equality is defined by identical serialised outer HTML.
        return this.outerHtml().equals(((Node) o).outerHtml());
    }
    /**
     * Create a stand-alone, deep copy of this node, and all of its children. The cloned node will have no siblings or
     * parent node. As a stand-alone object, any changes made to the clone or any of its children will not impact the
     * original node.
     * <p>
     * The cloned node may be adopted into another Document or node structure using {@link Element#appendChild(Node)}.
     * @return stand-alone cloned node
     */
    @Override
    public Node clone() {
        Node thisClone = doClone(null); // splits for orphan

        // Queue up nodes that need their children cloned (BFS).
        LinkedList<Node> nodesToProcess = new LinkedList<Node>();
        nodesToProcess.add(thisClone);

        while (!nodesToProcess.isEmpty()) {
            Node currParent = nodesToProcess.remove();

            // doClone left currParent.childNodes pointing at the ORIGINAL children;
            // replace each slot with a clone parented to currParent, then recurse (via queue).
            for (int i = 0; i < currParent.childNodes.size(); i++) {
                Node childClone = currParent.childNodes.get(i).doClone(currParent);
                currParent.childNodes.set(i, childClone);
                nodesToProcess.add(childClone);
            }
        }

        return thisClone;
    }

    /*
     * Return a clone of the node using the given parent (which can be null).
     * Not a deep copy of children.
     */
    protected Node doClone(Node parent) {
        Node clone;
        try {
            clone = (Node) super.clone(); // shallow field copy
        } catch (CloneNotSupportedException e) {
            throw new RuntimeException(e); // cannot happen: Node implements Cloneable
        }

        clone.parentNode = parent; // can be null, to create an orphan split
        clone.siblingIndex = parent == null ? 0 : siblingIndex;
        clone.attributes = attributes != null ? attributes.clone() : null;
        clone.baseUri = baseUri;
        // Shallow child list copy: entries still reference the original children.
        // clone() above is responsible for swapping them out for deep clones.
        clone.childNodes = new ArrayList<Node>(childNodes.size());
        for (Node child: childNodes)
            clone.childNodes.add(child);

        return clone;
    }
public Token getToken() {
return token;
}
public void setToken(Token token) {
if(token != null) {
this.tokenIndex = token.getTokenIsndex();
} else {
this.tokenIndex = -1;
}
this.token = token;
}
public int getTokenIndex() {
return tokenIndex;
}
public void setTokenIndex(int tokenIndex) {
this.tokenIndex = tokenIndex;
}
private static class OuterHtmlVisitor implements NodeVisitor {
private Appendable accum;
private Document.OutputSettings out;
OuterHtmlVisitor(Appendable accum, Document.OutputSettings out) {
this.accum = accum;
this.out = out;
}
public void head(Node node, int depth) {
try {
node.outerHtmlHead(accum, depth, out);
} catch (IOException exception) {
throw new SerializationException(exception);
}
}
public void tail(Node node, int depth) {
if (!node.nodeName().equals("#text")) { // saves a void hit.
try {
node.outerHtmlTail(accum, depth, out);
} catch (IOException exception) {
throw new SerializationException(exception);
}
}
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/genomics/v1/variants.proto
package com.google.genomics.v1;
/**
* <pre>
* The delete variant set request.
* </pre>
*
* Protobuf type {@code google.genomics.v1.DeleteVariantSetRequest}
*/
public final class DeleteVariantSetRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.genomics.v1.DeleteVariantSetRequest)
DeleteVariantSetRequestOrBuilder {
  // Use DeleteVariantSetRequest.newBuilder() to construct.
  private DeleteVariantSetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: empty variant set id.
  private DeleteVariantSetRequest() {
    variantSetId_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    // proto3-style: unknown fields are dropped rather than retained.
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor (generated): reads tags until EOF (tag 0),
  // capturing field 1 (variant_set_id) and skipping everything else.
  private DeleteVariantSetRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // NOTE(review): `default` is listed before `case 10` — unusual ordering for
        // generated code, but semantically equivalent since switch matches by value
        // and every arm breaks.
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // field 1, wire type 2 (length-delimited): variant_set_id
            java.lang.String s = input.readStringRequireUtf8();

            variantSetId_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  // Generated descriptor plumbing: wires this class to its proto descriptor/reflection tables.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_DeleteVariantSetRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_DeleteVariantSetRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.genomics.v1.DeleteVariantSetRequest.class, com.google.genomics.v1.DeleteVariantSetRequest.Builder.class);
  }

  public static final int VARIANT_SET_ID_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached by the accessors below.
  private volatile java.lang.Object variantSetId_;
  /**
   * <pre>
   * The ID of the variant set to be deleted.
   * </pre>
   *
   * <code>optional string variant_set_id = 1;</code>
   */
  public java.lang.String getVariantSetId() {
    java.lang.Object ref = variantSetId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the cached ByteString and cache the String back.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      variantSetId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The ID of the variant set to be deleted.
   * </pre>
   *
   * <code>optional string variant_set_id = 1;</code>
   */
  public com.google.protobuf.ByteString
      getVariantSetIdBytes() {
    java.lang.Object ref = variantSetId_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode and cache the ByteString representation.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      variantSetId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // proto3 default-value elision: only emit the field when non-empty.
    if (!getVariantSetIdBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, variantSetId_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getVariantSetIdBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, variantSetId_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.genomics.v1.DeleteVariantSetRequest)) {
      return super.equals(obj);
    }
    com.google.genomics.v1.DeleteVariantSetRequest other = (com.google.genomics.v1.DeleteVariantSetRequest) obj;

    // Value equality on the single field.
    boolean result = true;
    result = result && getVariantSetId()
        .equals(other.getVariantSetId());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Generated hash recipe: descriptor, then (field number, field value) pairs.
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + VARIANT_SET_ID_FIELD_NUMBER;
    hash = (53 * hash) + getVariantSetId().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/newBuilder plumbing: every overload delegates to PARSER
  // or to the GeneratedMessageV3 stream helpers; no per-message logic lives here.
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.genomics.v1.DeleteVariantSetRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.genomics.v1.DeleteVariantSetRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else seeds the builder with this message.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* The delete variant set request.
* </pre>
*
* Protobuf type {@code google.genomics.v1.DeleteVariantSetRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.genomics.v1.DeleteVariantSetRequest)
com.google.genomics.v1.DeleteVariantSetRequestOrBuilder {
    // Generated builder plumbing: descriptor wiring, lifecycle, and delegating overrides.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_DeleteVariantSetRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_DeleteVariantSetRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.genomics.v1.DeleteVariantSetRequest.class, com.google.genomics.v1.DeleteVariantSetRequest.Builder.class);
    }

    // Construct using com.google.genomics.v1.DeleteVariantSetRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No nested-message field builders to force-initialize for this message.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      variantSetId_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_DeleteVariantSetRequest_descriptor;
    }

    public com.google.genomics.v1.DeleteVariantSetRequest getDefaultInstanceForType() {
      return com.google.genomics.v1.DeleteVariantSetRequest.getDefaultInstance();
    }

    public com.google.genomics.v1.DeleteVariantSetRequest build() {
      com.google.genomics.v1.DeleteVariantSetRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.genomics.v1.DeleteVariantSetRequest buildPartial() {
      com.google.genomics.v1.DeleteVariantSetRequest result = new com.google.genomics.v1.DeleteVariantSetRequest(this);
      result.variantSetId_ = variantSetId_;
      onBuilt();
      return result;
    }

    // The overrides below only narrow the return type to Builder; all work is in the superclass.
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.genomics.v1.DeleteVariantSetRequest) {
        return mergeFrom((com.google.genomics.v1.DeleteVariantSetRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
public Builder mergeFrom(com.google.genomics.v1.DeleteVariantSetRequest other) {
if (other == com.google.genomics.v1.DeleteVariantSetRequest.getDefaultInstance()) return this;
if (!other.getVariantSetId().isEmpty()) {
variantSetId_ = other.variantSetId_;
onChanged();
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.genomics.v1.DeleteVariantSetRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.genomics.v1.DeleteVariantSetRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object variantSetId_ = "";
/**
* <pre>
* The ID of the variant set to be deleted.
* </pre>
*
* <code>optional string variant_set_id = 1;</code>
*/
public java.lang.String getVariantSetId() {
java.lang.Object ref = variantSetId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
variantSetId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The ID of the variant set to be deleted.
* </pre>
*
* <code>optional string variant_set_id = 1;</code>
*/
public com.google.protobuf.ByteString
getVariantSetIdBytes() {
java.lang.Object ref = variantSetId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
variantSetId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The ID of the variant set to be deleted.
* </pre>
*
* <code>optional string variant_set_id = 1;</code>
*/
public Builder setVariantSetId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
variantSetId_ = value;
onChanged();
return this;
}
/**
* <pre>
* The ID of the variant set to be deleted.
* </pre>
*
* <code>optional string variant_set_id = 1;</code>
*/
public Builder clearVariantSetId() {
variantSetId_ = getDefaultInstance().getVariantSetId();
onChanged();
return this;
}
/**
* <pre>
* The ID of the variant set to be deleted.
* </pre>
*
* <code>optional string variant_set_id = 1;</code>
*/
public Builder setVariantSetIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
variantSetId_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.genomics.v1.DeleteVariantSetRequest)
}
// @@protoc_insertion_point(class_scope:google.genomics.v1.DeleteVariantSetRequest)
  // Singleton all-fields-default instance, created eagerly at class-load time.
  private static final com.google.genomics.v1.DeleteVariantSetRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.genomics.v1.DeleteVariantSetRequest();
  }
  /** Returns the shared immutable default instance of this message. */
  public static com.google.genomics.v1.DeleteVariantSetRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; delegates to the message's CodedInputStream constructor.
  private static final com.google.protobuf.Parser<DeleteVariantSetRequest>
      PARSER = new com.google.protobuf.AbstractParser<DeleteVariantSetRequest>() {
    public DeleteVariantSetRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new DeleteVariantSetRequest(input, extensionRegistry);
    }
  };
  /** Static accessor for the message parser. */
  public static com.google.protobuf.Parser<DeleteVariantSetRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DeleteVariantSetRequest> getParserForType() {
    return PARSER;
  }
  /** Instance-level accessor for the shared default instance. */
  public com.google.genomics.v1.DeleteVariantSetRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/**
* Copyright 2017-2019 The GreyCat Authors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package greycat.utility;
import greycat.Constants;
import greycat.struct.Buffer;
/**
 * Hashing utilities for greycat.
 *
 * <p>NOTE(review): the {@code {@native ts ...}} javadoc blocks below are
 * directives consumed by the greycat Java-to-TypeScript transpiler — they are
 * functional, not documentation, and must be kept in sync with the Java
 * bodies. Hash outputs are presumably persisted, so changing any hash
 * function here would break compatibility with existing data — TODO confirm
 * before altering.</p>
 */
public class HashHelper {
    // xxHash-style multiplicative primes used by tripleHash() (and by the
    // disabled longHash() implementation kept in comments below).
    private static final long PRIME1 = 2654435761L;
    private static final long PRIME2 = 2246822519L;
    private static final long PRIME3 = 3266489917L;
    private static final long PRIME4 = 668265263L;
    private static final long PRIME5 = 0x165667b1;
    // Constant "input length" folded into tripleHash()'s finalization rounds.
    private static final int len = 24;
    /**
     * Maps {@code number} into the bucket range {@code [0, max)} via an
     * absolute-value modulo.
     *
     * <p>NOTE(review): unlike the commented-out variant below, this path does
     * not validate {@code max}; {@code max == 0} raises ArithmeticException —
     * callers are presumed to pass max &gt; 0.</p>
     *
     * @param number the value to hash
     * @param max    the exclusive upper bound of the result
     * @return a value in {@code [0, max)}
     */
    public static long longHash(long number, long max) {
        long hash = number % max;
        return hash < 0 ? hash * -1 : hash;
        /*
        if (max <= 0) {
            throw new IllegalArgumentException("Max must be > 0");
        }
        long crc = PRIME5;
        crc += number;
        crc += crc << 17;
        crc *= PRIME4;
        crc *= PRIME1;
        crc += number;
        crc += crc << 17;
        crc *= PRIME4;
        crc *= PRIME1;
        crc += len;
        crc ^= crc >>> 15;
        crc *= PRIME2;
        crc += number;
        crc ^= crc >>> 13;
        crc *= PRIME3;
        crc ^= crc >>> 16;
        */
        /*
        //To check later if we can replace by somthing better
        crc = crc & 0x7FFFFFFFFFFFFFFFL; //convert positive
        crc = crc % max; // return between 0 and max
        */
        /*
        crc = (crc < 0 ? crc * -1 : crc); // positive
        crc = crc % max;
        return crc;
        */
    }
    /**
     * {@code int} flavour of {@link #longHash(long, long)}: maps {@code number}
     * into {@code [0, max)} via an absolute-value modulo.
     */
    public static int intHash(int number, int max) {
        int hash = number % max;
        return hash < 0 ? hash * -1 : hash;
    }
    /**
     * Cheap XOR-fold hash of a (byte, long, long, long) tuple into
     * {@code [0, max)}. Faster but weaker than {@link #tripleHash}.
     */
    public static long simpleTripleHash(byte p0, long p1, long p2, long p3, long max) {
        long hash = (((long) p0) ^ p1 ^ p2 ^ p3) % max;
        if (hash < 0) {
            hash = hash * -1;
        }
        return hash;
    }
    /**
     * Avalanche hash of a (byte, long, long, long) tuple, reduced into the
     * bucket range {@code [0, max)}. Mixes four lanes with the PRIME*
     * constants, then folds and finalizes them.
     *
     * @throws IllegalArgumentException if {@code max} is not strictly positive
     */
    public static long tripleHash(byte p0, long p1, long p2, long p3, long max) {
        if (max <= 0) {
            throw new IllegalArgumentException("Max must be > 0");
        }
        // Four independently mixed lanes, folded into one accumulator below.
        long v1 = PRIME5;
        long v2 = v1 * PRIME2 + len;
        long v3 = v2 * PRIME3;
        long v4 = v3 * PRIME4;
        long crc;
        v1 = ((v1 << 13) | (v1 >>> 51)) + p1;
        v2 = ((v2 << 11) | (v2 >>> 53)) + p2;
        v3 = ((v3 << 17) | (v3 >>> 47)) + p3;
        v4 = ((v4 << 19) | (v4 >>> 45)) + p0;
        v1 += ((v1 << 17) | (v1 >>> 47));
        v2 += ((v2 << 19) | (v2 >>> 45));
        v3 += ((v3 << 13) | (v3 >>> 51));
        v4 += ((v4 << 11) | (v4 >>> 53));
        v1 *= PRIME1;
        v2 *= PRIME1;
        v3 *= PRIME1;
        v4 *= PRIME1;
        v1 += p1;
        v2 += p2;
        v3 += p3;
        v4 += PRIME5;
        v1 *= PRIME2;
        v2 *= PRIME2;
        v3 *= PRIME2;
        v4 *= PRIME2;
        v1 += ((v1 << 11) | (v1 >>> 53));
        v2 += ((v2 << 17) | (v2 >>> 47));
        v3 += ((v3 << 19) | (v3 >>> 45));
        v4 += ((v4 << 13) | (v4 >>> 51));
        v1 *= PRIME3;
        v2 *= PRIME3;
        v3 *= PRIME3;
        v4 *= PRIME3;
        // Fold the four lanes, then apply xor-shift/multiply finalization.
        crc = v1 + ((v2 << 3) | (v2 >>> 61)) + ((v3 << 6) | (v3 >>> 58)) + ((v4 << 9) | (v4 >>> 55));
        crc ^= crc >>> 11;
        crc += (PRIME4 + len) * PRIME1;
        crc ^= crc >>> 15;
        crc *= PRIME2;
        crc ^= crc >>> 13;
        crc = (crc < 0 ? crc * -1 : crc); // positive
        crc = crc % max;
        return crc;
    }
    /**
     * Returns a random long number in {@code [0, Constants.END_OF_TIME)}.
     * Not suitable for security-sensitive use (backed by Math.random()).
     * @return a random number
     */
    /**
     * {@native ts
     * return Math.random() * 1000000
     * }
     */
    public static long rand() {
        return (long) (Math.random() * Constants.END_OF_TIME);
    }
    /**
     * Tests equality between two elements
     * @param src The first element
     * @param other The second element
     * @return True if equals, false otherwise
     * @deprecated
     * @see Constants#equals(String, String)
     */
    /**
     * {@native ts
     * return src === other
     * }
     */
    public static boolean equals(String src, String other) {
        return src.equals(other);
    }
    /**
     * Returns the minimum double value
     * @return the minimum double value
     */
    /**
     * {@native ts
     * return Number.MIN_VALUE;
     * }
     */
    public static double DOUBLE_MIN_VALUE() {
        return Double.MIN_VALUE;
    }
    /**
     * Returns the maximum double value
     * @return the maximum double value
     */
    /**
     * {@native ts
     * return Number.MAX_VALUE;
     * }
     */
    public static double DOUBLE_MAX_VALUE() {
        return Double.MAX_VALUE;
    }
    /**
     * Checks if an object is defined
     * @param param The element to check
     * @return True if defined, null otherwise.
     * @deprecated
     * @see Constants#isDefined(Object)
     */
    /**
     * {@native ts
     * return param != undefined && param != null;
     * }
     */
    public static boolean isDefined(Object param) {
        return param != null;
    }
    // 256-entry lookup table for the byte-wise Zobrist-style hashes below.
    /**
     * {@native ts
     * private static byteTable = function(){
     * var table = [];
     * var h = Long.fromBits(0xCAAF1684, 0x544B2FBA);
     * for (var i = 0; i < 256; i++) {
     * for (var j = 0; j < 31; j++) {
     * h = h.shiftRightUnsigned(7).xor(h);
     * h = h.shiftLeft(11).xor(h);
     * h = h.shiftRightUnsigned(10).xor(h);
     * }
     * table[i] = h.toSigned();
     * }
     * return table;
     * }();
     * }
     */
    private static final long[] byteTable = createLookupTable();
    /**
     * {@native ts
     * private static HSTART : Long = Long.fromBits(0xA205B064, 0xBB40E64D);
     * }
     */
    private static final long HSTART = 0xBB40E64DA205B064L;
    //0xBB40E64DA205B064
    /**
     * {@native ts
     * private static HMULT : Long = Long.fromBits(0xE116586D,0x6A5D39EA);
     * }
     */
    private static final long HMULT = 7664345821815920749L;
    //0x6A5D39EAE116586D
    /**
     * Hashes a String. Currently delegates to String.hashCode(); the older
     * table-based implementation is kept commented out in the body.
     * @param data The string to hash
     * @return The hash value
     */
    /**
     * {native ts
     * var h = HashHelper.HSTART;
     * var dataLength = data.length;
     * for (var i = 0; i < dataLength; i++) {
     * h = h.mul(HashHelper.HMULT).xor(HashHelper.byteTable[data.charCodeAt(i) & 0xff]);
     * }
     * return h.mod(greycat.internal.CoreConstants.END_OF_TIME).toNumber();
     * }
     */
    /**
     * {@native ts
     * var hash = 0, i, chr, len;
     * if (data.length === 0) return hash;
     * for (i = 0, len = data.length; i < len; i++) {
     * chr = data.charCodeAt(i);
     * hash = ((hash << 5) - hash) + chr;
     * hash |= 0; // Convert to 32bit integer
     * }
     * return hash;
     * }
     */
    public static int hash(String data) {
        /*
        long h = HSTART;
        final long hmult = HMULT;
        final long[] ht = byteTable;
        int dataLength = data.length();
        for (int i = 0; i < dataLength; i++) {
        h = (h * hmult) ^ ht[data.codePointAt(i) & 0xff];
        }
        return h % Constants.END_OF_TIME;
        */
        return data.hashCode();
    }
    /**
     * Hashes a byte array.
     * @param data The bytes to hash
     * @return The hash value (may be negative; reduced modulo END_OF_TIME)
     */
    /**
     * {@native ts
     * var h = HashHelper.HSTART;
     * var dataLength = data.length;
     * for (var i = 0; i < dataLength; i++) {
     * h = h.mul(HashHelper.HMULT).xor(HashHelper.byteTable[data[i] & 0xff]);
     * }
     * return h.mod(greycat.internal.CoreConstants.END_OF_TIME).toNumber();
     * }
     */
    public static long hashBytes(byte[] data) {
        long h = HSTART;
        final long hmult = HMULT;
        final long[] ht = byteTable;
        int dataLength = data.length;
        for (int i = 0; i < dataLength; i++) {
            h = (h * hmult) ^ ht[data[i] & 0xff];
        }
        return h % Constants.END_OF_TIME;
    }
    /**
     * Hashes the bytes of {@code data} in the half-open range
     * {@code [begin, end)}, same scheme as {@link #hashBytes(byte[])}.
     */
    /**
     * {@native ts
     * var h = HashHelper.HSTART;
     * for (var i = begin; i < end; i++) {
     * h = h.mul(HashHelper.HMULT).xor(HashHelper.byteTable[data.read(i) & 0xff]);
     * }
     * return h.mod(greycat.internal.CoreConstants.END_OF_TIME).toNumber();
     * }
     */
    public static long hashBuffer(final Buffer data, final long begin, final long end) {
        long h = HSTART;
        final long hmult = HMULT;
        final long[] ht = byteTable;
        for (long i = begin; i < end; i++) {
            h = (h * hmult) ^ ht[data.read(i) & 0xff];
        }
        return h % Constants.END_OF_TIME;
    }
    /**
     * Builds the 256-entry random table via a xorshift generator, mirroring
     * the TypeScript table defined on the byteTable field above.
     * @ignore ts
     */
    private static final long[] createLookupTable() {
        long[] byteTable = new long[256];
        long h = 0x544B2FBACAAF1684L;
        for (int i = 0; i < 256; i++) {
            // 31 xorshift rounds per entry.
            for (int j = 0; j < 31; j++) {
                h = (h >>> 7) ^ h;
                h = (h << 11) ^ h;
                h = (h >>> 10) ^ h;
            }
            byteTable[i] = h;
        }
        return byteTable;
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.daemon.lambda;
import com.intellij.JavaTestUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.search.JavaFunctionalExpressionSearcher;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.FunctionalExpressionSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.testFramework.LightProjectDescriptor;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
import java.util.function.Predicate;
/**
 * Tests {@link FunctionalExpressionSearch}: finding all lambdas / method
 * references whose target type is a given functional interface.
 *
 * <p>Each {@code testXxx} method loads {@code <TestName>.java} from
 * {@link #getBasePath()} via {@link #configure()}; test methods are discovered
 * reflectively by name (JUnit 3 style), so method names are part of the
 * contract.</p>
 */
public class FindFunctionalInterfaceTest extends LightCodeInsightFixtureTestCase {
  public void testMethodArgument() {
    doTestOneExpression();
  }
  public void testMethodArgumentByTypeParameter() {
    doTestOneExpression();
  }
  public void testFieldDeclaredInFileWithoutFunctionalInterfaces() {
    myFixture.addClass("class B {" +
                       "  void f(A a) {" +
                       "    a.r = () -> {};" +
                       "  }" +
                       "}");
    myFixture.addClass("public class A {" +
                       "  public I r;" +
                       "}");
    doTestOneExpression();
  }
  public void testVarargPosition() {
    myFixture.addClass("\n" +
                       "class A {  \n" +
                       "  <T> void foo(T... r) {}\n" +
                       "  void bar(J i){foo(i, i, () -> {});}\n" +
                       "}");
    doTestOneExpression();
  }
  /**
   * Asserts that searching for the interface at the caret finds exactly one
   * functional expression, and that it is the empty lambda {@code () -> {}}.
   */
  private void doTestOneExpression() {
    configure();
    final PsiClass psiClass = findClassAtCaret();
    final Collection<PsiFunctionalExpression> expressions = FunctionalExpressionSearch.search(psiClass).findAll();
    int size = expressions.size();
    assertEquals(1, size);
    final PsiFunctionalExpression next = expressions.iterator().next();
    assertNotNull(next);
    assertEquals("() -> {}", next.getText());
  }
  /** Returns the interface whose declaration surrounds the {@code <caret>} marker. */
  @NotNull
  private PsiClass findClassAtCaret() {
    final PsiElement elementAtCaret = myFixture.getElementAtCaret();
    assertNotNull(elementAtCaret);
    final PsiClass psiClass = PsiTreeUtil.getParentOfType(elementAtCaret, PsiClass.class, false);
    assertTrue(psiClass != null && psiClass.isInterface());
    return psiClass;
  }
  public void testFieldFromAnonymousClassScope() {
    configure();
    final PsiElement elementAtCaret = myFixture.getElementAtCaret();
    assertNotNull(elementAtCaret);
    final PsiField field = PsiTreeUtil.getParentOfType(elementAtCaret, PsiField.class, false);
    assertNotNull(field);
    final PsiClass aClass = field.getContainingClass();
    assertTrue(aClass instanceof PsiAnonymousClass);
    final Collection<PsiReference> references = ReferencesSearch.search(field).findAll();
    assertFalse(references.isEmpty());
    assertEquals(1, references.size());
  }
  public void testMethodWithClassTypeParameter() {
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("I")).findAll());
  }
  public void testFindSubInterfaceLambdas() {
    configure();
    // Counts include lambdas whose target type is a sub-interface of the
    // searched interface (see the fixture file for the hierarchy).
    assertSize(5, FunctionalExpressionSearch.search(findClass("DumbAwareRunnable")).findAll());
    assertSize(3, FunctionalExpressionSearch.search(findClass("DumbAwareRunnable2")).findAll());
    assertSize(6, FunctionalExpressionSearch.search(findClass("DumbAware")).findAll());
    assertSize(1, FunctionalExpressionSearch.search(findClass("WithDefaultMethods")).findAll());
    assertSize(1, FunctionalExpressionSearch.search(findClass("WithManyMethods")).findAll());
    assertSize(1, FunctionalExpressionSearch.search(findClass("WithManyMethods2")).findAll());
  }
  public void testArraysStreamLikeApi() {
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("I")).findAll());
  }
  public void testStreamOfLikeApiWithLocalVar() {
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("I")).findAll());
  }
  public void testStreamOfLikeApiWithField() {
    myFixture.addClass("class Base { StrType Stream = null; }");
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("I")).findAll());
  }
  public void testCallWithQualifiedName() {
    myFixture.addClass("package pkg.p1.p2.p3; public interface I { void run() {} }");
    myFixture.addClass("package pkg.p1.p2.p3; public class Util { public static void foo(I i) {} }");
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("pkg.p1.p2.p3.I")).findAll());
  }
  public void testInsideArrayInitializer() {
    myFixture.addClass("public interface Foo { void run() {}}");
    myFixture.addClass("public interface Bar { void run() {}}");
    configure();
    assertSize(3, FunctionalExpressionSearch.search(findClass("Foo")).findAll());
  }
  public void testCallOnGenericParameter() {
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("I")).findAll());
  }
  public void testChainStartingWithConstructor() {
    configure();
    assertSize(1, FunctionalExpressionSearch.search(findClass("IterHelper.MapIterCallback")).findAll());
  }
  public void testDontVisitInapplicableFiles() {
    // Classes whose names start with "_" contain expressions that can be ruled
    // out without resolving; the searcher must not visit those files at all.
    PsiClass sam = myFixture.addClass("interface I { void foo(); }");
    myFixture.addClass("class Some { " +
                       "{ I i = () -> {}; }" +
                       "void doTest(int a) {} " +
                       "void doTest(I i, I j) {} " +
                       "Some intermediate() {} " +
                       "Object intermediate(int a, int b) {} " +
                       "}");
    myFixture.addClass("class _WrongSignature {{ I i = a -> {}; I j = () -> true; }}");
    myFixture.addClass("class _CallArgumentCountMismatch extends Some {{ " +
                       "  doTest(() -> {}); " +
                       "  intermediate(4).doTest(() -> {}, () -> {}); " +
                       "}}");
    myFixture.addClass("class _KnownTypeVariableAssignment {" +
                       "static Runnable field;" +
                       "{ Runnable r = () -> {}; field = () -> {}; } " +
                       "}");
    myFixture.addClass("class _SuperFieldAssignment extends _KnownTypeVariableAssignment {" +
                       "{ field = () -> {}; } " +
                       "}");
    myFixture.addClass("import static _KnownTypeVariableAssignment.*; " +
                       "class _StaticallyImportedFieldAssignment {" +
                       "{ field = () -> {}; } " +
                       "}");
    assertSize(1, FunctionalExpressionSearch.search(sam).findAll());
    for (VirtualFile file : JavaFunctionalExpressionSearcher.getFilesToSearchInPsi(sam)) {
      assertFalse(file.getName(), file.getName().startsWith("_"));
    }
  }
  /** Resolves a class by qualified name in the whole project scope. */
  private PsiClass findClass(String i) {
    return JavaPsiFacade.getInstance(getProject()).findClass(i, GlobalSearchScope.allScope(getProject()));
  }
  /** Loads the fixture file named after the current test into the editor. */
  private void configure() {
    myFixture.configureByFile(getTestName(false) + ".java");
  }
  public void testClassFromJdk() {
    doTestIndexSearch("(e) -> true");
  }
  public void testClassFromJdkMethodRef() {
    doTestIndexSearch("this::bar");
  }
  /** Searches for usages of {@link java.util.function.Predicate} and expects a single match. */
  public void doTestIndexSearch(String expected) {
    configure();
    PsiClass predicate = findClass(Predicate.class.getName());
    assert predicate != null;
    final PsiFunctionalExpression next = assertOneElement(FunctionalExpressionSearch.search(predicate).findAll());
    assertEquals(expected, next.getText());
  }
  public void testConstructorReferences() {
    configure();
    myFixture.addClass("class Bar extends Foo {\n" +
                       "  public Bar() { super(() -> 1); }\n" +
                       "\n" +
                       "  {\n" +
                       "    new Foo(() -> 2) { };\n" +
                       "    new Foo(() -> 3);\n" +
                       "  }\n" +
                       "}");
    assertSize(5, FunctionalExpressionSearch.search(findClassAtCaret()).findAll());
  }
  @Override
  protected String getBasePath() {
    return JavaTestUtil.getRelativeJavaTestDataPath() + "/codeInsight/daemonCodeAnalyzer/lambda/findUsages/";
  }
  @NotNull
  @Override
  protected LightProjectDescriptor getProjectDescriptor() {
    return JAVA_8;
  }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.samples.apps.topeka.persistence;
import android.content.ContentValues;
import android.content.Context;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.text.TextUtils;
import android.util.Log;
import com.google.samples.apps.topeka.R;
import com.google.samples.apps.topeka.helper.JsonHelper;
import com.google.samples.apps.topeka.model.Category;
import com.google.samples.apps.topeka.model.JsonAttributes;
import com.google.samples.apps.topeka.model.Theme;
import com.google.samples.apps.topeka.model.quiz.AlphaPickerQuiz;
import com.google.samples.apps.topeka.model.quiz.FillBlankQuiz;
import com.google.samples.apps.topeka.model.quiz.FillTwoBlanksQuiz;
import com.google.samples.apps.topeka.model.quiz.FourQuarterQuiz;
import com.google.samples.apps.topeka.model.quiz.MultiSelectQuiz;
import com.google.samples.apps.topeka.model.quiz.PickerQuiz;
import com.google.samples.apps.topeka.model.quiz.Quiz;
import com.google.samples.apps.topeka.model.quiz.SelectItemQuiz;
import com.google.samples.apps.topeka.model.quiz.ToggleTranslateQuiz;
import com.google.samples.apps.topeka.model.quiz.TrueFalseQuiz;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Database for storing and retrieving info for categories and quizzes
*/
public class TopekaDatabaseHelper extends SQLiteOpenHelper {
private static final String TAG = "TopekaDatabaseHelper";
private static final String DB_NAME = "topeka";
private static final String DB_SUFFIX = ".db";
private static final int DB_VERSION = 1;
private static List<Category> mCategories;
private static TopekaDatabaseHelper mInstance;
private final Resources mResources;
    /**
     * Private constructor: instances are only obtained through
     * {@link #getInstance(Context)}.
     *
     * @param context The context used to open the database and read resources.
     */
    private TopekaDatabaseHelper(Context context) {
        //prevents external instance creation
        super(context, DB_NAME + DB_SUFFIX, null, DB_VERSION);
        mResources = context.getResources();
    }
private static TopekaDatabaseHelper getInstance(Context context) {
if (null == mInstance) {
mInstance = new TopekaDatabaseHelper(context);
}
return mInstance;
}
/**
* Gets all categories with their quizzes.
*
* @param context The context this is running in.
* @param fromDatabase <code>true</code> if a data refresh is needed, else <code>false</code>.
* @return All categories stored in the database.
*/
public static List<Category> getCategories(Context context, boolean fromDatabase) {
if (null == mCategories || fromDatabase) {
mCategories = loadCategories(context);
}
return mCategories;
}
private static List<Category> loadCategories(Context context) {
Cursor data = TopekaDatabaseHelper.getCategoryCursor(context);
List<Category> tmpCategories = new ArrayList<>(data.getCount());
final SQLiteDatabase readableDatabase = TopekaDatabaseHelper.getReadableDatabase(context);
do {
final Category category = getCategory(data, readableDatabase);
tmpCategories.add(category);
} while (data.moveToNext());
return tmpCategories;
}
/**
* Gets all categories wrapped in a {@link Cursor} positioned at it's first element.
* <p>There are <b>no quizzes</b> within the categories obtained from this cursor</p>
*
* @param context The context this is running in.
* @return All categories stored in the database.
*/
private static Cursor getCategoryCursor(Context context) {
SQLiteDatabase readableDatabase = getReadableDatabase(context);
Cursor data = readableDatabase
.query(CategoryTable.NAME, CategoryTable.PROJECTION, null, null, null, null, null);
data.moveToFirst();
return data;
}
/**
* Gets a category from the given position of the cursor provided.
*
* @param cursor The Cursor containing the data.
* @param readableDatabase The database that contains the quizzes.
* @return The found category.
*/
private static Category getCategory(Cursor cursor, SQLiteDatabase readableDatabase) {
// "magic numbers" based on CategoryTable#PROJECTION
final String id = cursor.getString(0);
final String name = cursor.getString(1);
final String themeName = cursor.getString(2);
final Theme theme = Theme.valueOf(themeName);
final String isSolved = cursor.getString(3);
final boolean solved = getBooleanFromDatabase(isSolved);
final int[] scores = JsonHelper.jsonArrayToIntArray(cursor.getString(4));
final List<Quiz> quizzes = getQuizzes(id, readableDatabase);
return new Category(name, id, theme, quizzes, scores, solved);
}
private static boolean getBooleanFromDatabase(String isSolved) {
// json stores booleans as true/false strings, whereas SQLite stores them as 0/1 values
return null != isSolved && isSolved.length() == 1 && Integer.valueOf(isSolved) == 1;
}
/**
* Looks for a category with a given id.
*
* @param context The context this is running in.
* @param categoryId Id of the category to look for.
* @return The found category.
*/
public static Category getCategoryWith(Context context, String categoryId) {
SQLiteDatabase readableDatabase = getReadableDatabase(context);
String[] selectionArgs = {categoryId};
Cursor data = readableDatabase
.query(CategoryTable.NAME, CategoryTable.PROJECTION, CategoryTable.COLUMN_ID + "=?",
selectionArgs, null, null, null);
data.moveToFirst();
return getCategory(data, readableDatabase);
}
/**
* Scooooooooooore!
*
* @param context The context this is running in.
* @return The score over all Categories.
*/
public static int getScore(Context context) {
final List<Category> categories = getCategories(context, false);
int score = 0;
for (Category cat : categories) {
score += cat.getScore();
}
return score;
}
/**
* Updates values for a category.
*
* @param context The context this is running in.
* @param category The category to update.
*/
public static void updateCategory(Context context, Category category) {
if (mCategories != null && mCategories.contains(category)) {
final int location = mCategories.indexOf(category);
mCategories.remove(location);
mCategories.add(location, category);
}
SQLiteDatabase writableDatabase = getWritableDatabase(context);
ContentValues categoryValues = createContentValuesFor(category);
writableDatabase.update(CategoryTable.NAME, categoryValues, CategoryTable.COLUMN_ID + "=?",
new String[]{category.getId()});
final List<Quiz> quizzes = category.getQuizzes();
updateQuizzes(writableDatabase, quizzes);
}
/**
* Updates a list of given quizzes.
*
* @param writableDatabase The database to write the quizzes to.
* @param quizzes The quizzes to write.
*/
private static void updateQuizzes(SQLiteDatabase writableDatabase, List<Quiz> quizzes) {
Quiz quiz;
ContentValues quizValues = new ContentValues();
String[] quizArgs = new String[1];
for (int i = 0; i < quizzes.size(); i++) {
quiz = quizzes.get(i);
quizValues.clear();
quizValues.put(QuizTable.COLUMN_SOLVED, quiz.isSolved());
quizArgs[0] = quiz.getQuestion();
writableDatabase.update(QuizTable.NAME, quizValues, QuizTable.COLUMN_QUESTION + "=?",
quizArgs);
}
}
/**
* Resets the contents of Topeka's database to it's initial state.
*
* @param context The context this is running in.
*/
public static void reset(Context context) {
SQLiteDatabase writableDatabase = getWritableDatabase(context);
writableDatabase.delete(CategoryTable.NAME, null, null);
writableDatabase.delete(QuizTable.NAME, null, null);
getInstance(context).preFillDatabase(writableDatabase);
}
/**
* Creates objects for quizzes according to a category id.
*
* @param categoryId The category to create quizzes for.
* @param database The database containing the quizzes.
* @return The found quizzes or an empty list if none were available.
*/
private static List<Quiz> getQuizzes(final String categoryId, SQLiteDatabase database) {
final List<Quiz> quizzes = new ArrayList<>();
final Cursor cursor = database.query(QuizTable.NAME, QuizTable.PROJECTION,
QuizTable.FK_CATEGORY + " LIKE ?", new String[]{categoryId}, null, null, null);
cursor.moveToFirst();
do {
quizzes.add(createQuizDueToType(cursor));
} while (cursor.moveToNext());
cursor.close();
return quizzes;
}
    /**
     * Creates a quiz corresponding to the projection provided from a cursor row.
     * Currently only {@link QuizTable#PROJECTION} is supported.
     *
     * @param cursor The Cursor containing the data.
     * @return The created quiz.
     * @throws IllegalArgumentException If the row's quiz type is unknown.
     */
    private static Quiz createQuizDueToType(Cursor cursor) {
        // "magic numbers" based on QuizTable#PROJECTION
        final String type = cursor.getString(2);
        final String question = cursor.getString(3);
        final String answer = cursor.getString(4);
        final String options = cursor.getString(5);
        // min/max/step are only meaningful for picker-style quizzes.
        final int min = cursor.getInt(6);
        final int max = cursor.getInt(7);
        final int step = cursor.getInt(8);
        final boolean solved = getBooleanFromDatabase(cursor.getString(11));
        // Dispatch on the stored type string (values from JsonAttributes.QuizType).
        switch (type) {
            case JsonAttributes.QuizType.ALPHA_PICKER: {
                return new AlphaPickerQuiz(question, answer, solved);
            }
            case JsonAttributes.QuizType.FILL_BLANK: {
                return createFillBlankQuiz(cursor, question, answer, solved);
            }
            case JsonAttributes.QuizType.FILL_TWO_BLANKS: {
                return createFillTwoBlanksQuiz(question, answer, solved);
            }
            case JsonAttributes.QuizType.FOUR_QUARTER: {
                return createFourQuarterQuiz(question, answer, options, solved);
            }
            case JsonAttributes.QuizType.MULTI_SELECT: {
                return createMultiSelectQuiz(question, answer, options, solved);
            }
            case JsonAttributes.QuizType.PICKER: {
                return new PickerQuiz(question, Integer.valueOf(answer), min, max, step, solved);
            }
            case JsonAttributes.QuizType.SINGLE_SELECT:
                //fall-through intended
            case JsonAttributes.QuizType.SINGLE_SELECT_ITEM: {
                return createSelectItemQuiz(question, answer, options, solved);
            }
            case JsonAttributes.QuizType.TOGGLE_TRANSLATE: {
                return createToggleTranslateQuiz(question, answer, options, solved);
            }
            case JsonAttributes.QuizType.TRUE_FALSE: {
                return createTrueFalseQuiz(question, answer, solved);
            }
            default: {
                throw new IllegalArgumentException("Quiz type " + type + " is not supported");
            }
        }
    }
private static Quiz createFillBlankQuiz(Cursor cursor, String question,
String answer, boolean solved) {
final String start = cursor.getString(9);
final String end = cursor.getString(10);
return new FillBlankQuiz(question, answer, start, end, solved);
}
private static Quiz createFillTwoBlanksQuiz(String question, String answer, boolean solved) {
final String[] answerArray = JsonHelper.jsonArrayToStringArray(answer);
return new FillTwoBlanksQuiz(question, answerArray, solved);
}
private static Quiz createFourQuarterQuiz(String question, String answer,
String options, boolean solved) {
final int[] answerArray = JsonHelper.jsonArrayToIntArray(answer);
final String[] optionsArray = JsonHelper.jsonArrayToStringArray(options);
return new FourQuarterQuiz(question, answerArray, optionsArray, solved);
}
private static Quiz createMultiSelectQuiz(String question, String answer,
String options, boolean solved) {
final int[] answerArray = JsonHelper.jsonArrayToIntArray(answer);
final String[] optionsArray = JsonHelper.jsonArrayToStringArray(options);
return new MultiSelectQuiz(question, answerArray, optionsArray, solved);
}
private static Quiz createSelectItemQuiz(String question, String answer,
String options, boolean solved) {
final int[] answerArray = JsonHelper.jsonArrayToIntArray(answer);
final String[] optionsArray = JsonHelper.jsonArrayToStringArray(options);
return new SelectItemQuiz(question, answerArray, optionsArray, solved);
}
private static Quiz createToggleTranslateQuiz(String question, String answer,
String options, boolean solved) {
final int[] answerArray = JsonHelper.jsonArrayToIntArray(answer);
final String[][] optionsArrays = extractOptionsArrays(options);
return new ToggleTranslateQuiz(question, answerArray, optionsArrays, solved);
}
private static Quiz createTrueFalseQuiz(String question, String answer, boolean solved) {
/*
* parsing json with the potential values "true" and "false"
* see res/raw/categories.json for reference
*/
final boolean answerValue = "true".equals(answer);
return new TrueFalseQuiz(question, answerValue, solved);
}
private static String[][] extractOptionsArrays(String options) {
final String[] optionsLvlOne = JsonHelper.jsonArrayToStringArray(options);
final String[][] optionsArray = new String[optionsLvlOne.length][];
for (int i = 0; i < optionsLvlOne.length; i++) {
optionsArray[i] = JsonHelper.jsonArrayToStringArray(optionsLvlOne[i]);
}
return optionsArray;
}
/**
* Creates the content values to update a category in the database.
*
* @param category The category to update.
* @return ContentValues containing updatable data.
*/
private static ContentValues createContentValuesFor(Category category) {
ContentValues contentValues = new ContentValues();
contentValues.put(CategoryTable.COLUMN_SOLVED, category.isSolved());
contentValues.put(CategoryTable.COLUMN_SCORES, Arrays.toString(category.getScores()));
return contentValues;
}
    /** Convenience accessor: a readable database handle from the singleton helper. */
    private static SQLiteDatabase getReadableDatabase(Context context) {
        return getInstance(context).getReadableDatabase();
    }
    /** Convenience accessor: a writable database handle from the singleton helper. */
    private static SQLiteDatabase getWritableDatabase(Context context) {
        return getInstance(context).getWritableDatabase();
    }
    @Override
    public void onCreate(SQLiteDatabase db) {
        /*
         * create the category table first, as quiz table has a foreign key
         * constraint on category id
         */
        db.execSQL(CategoryTable.CREATE);
        db.execSQL(QuizTable.CREATE);
        // seed the freshly created tables from the bundled JSON resource
        preFillDatabase(db);
    }
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        /* no-op: no schema migrations are performed between versions */
    }
    /**
     * Seeds the database with categories and quizzes inside a single
     * transaction. Read/parse failures are logged and swallowed, which
     * leaves the database empty (best-effort by design).
     */
    private void preFillDatabase(SQLiteDatabase db) {
        try {
            db.beginTransaction();
            try {
                fillCategoriesAndQuizzes(db);
                db.setTransactionSuccessful();
            } finally {
                // always end the transaction; without the success mark it rolls back
                db.endTransaction();
            }
        } catch (IOException | JSONException e) {
            Log.e(TAG, "preFillDatabase", e);
        }
    }
private void fillCategoriesAndQuizzes(SQLiteDatabase db) throws JSONException, IOException {
ContentValues values = new ContentValues(); // reduce, reuse
JSONArray jsonArray = new JSONArray(readCategoriesFromResources());
JSONObject category;
for (int i = 0; i < jsonArray.length(); i++) {
category = jsonArray.getJSONObject(i);
final String categoryId = category.getString(JsonAttributes.ID);
fillCategory(db, values, category, categoryId);
final JSONArray quizzes = category.getJSONArray(JsonAttributes.QUIZZES);
fillQuizzesForCategory(db, values, quizzes, categoryId);
}
}
private String readCategoriesFromResources() throws IOException {
StringBuilder categoriesJson = new StringBuilder();
InputStream rawCategories = mResources.openRawResource(R.raw.categories);
BufferedReader reader = new BufferedReader(new InputStreamReader(rawCategories));
String line;
while ((line = reader.readLine()) != null) {
categoriesJson.append(line);
}
return categoriesJson.toString();
}
    /**
     * Inserts one category row built from the given JSON object.
     * The shared ContentValues instance is cleared before being reused.
     */
    private void fillCategory(SQLiteDatabase db, ContentValues values, JSONObject category,
                              String categoryId) throws JSONException {
        values.clear();
        values.put(CategoryTable.COLUMN_ID, categoryId);
        values.put(CategoryTable.COLUMN_NAME, category.getString(JsonAttributes.NAME));
        values.put(CategoryTable.COLUMN_THEME, category.getString(JsonAttributes.THEME));
        values.put(CategoryTable.COLUMN_SOLVED, category.getString(JsonAttributes.SOLVED));
        values.put(CategoryTable.COLUMN_SCORES, category.getString(JsonAttributes.SCORES));
        db.insert(CategoryTable.NAME, null, values);
    }
    /**
     * Inserts one row per quiz in the given JSON array, linked to the owning
     * category via the foreign key column. Type/question/answer are mandatory;
     * the remaining columns are written only when present and non-empty.
     */
    private void fillQuizzesForCategory(SQLiteDatabase db, ContentValues values, JSONArray quizzes,
                                        String categoryId) throws JSONException {
        JSONObject quiz;
        for (int i = 0; i < quizzes.length(); i++) {
            quiz = quizzes.getJSONObject(i);
            // the shared ContentValues instance is reused across iterations
            values.clear();
            values.put(QuizTable.FK_CATEGORY, categoryId);
            values.put(QuizTable.COLUMN_TYPE, quiz.getString(JsonAttributes.TYPE));
            values.put(QuizTable.COLUMN_QUESTION, quiz.getString(JsonAttributes.QUESTION));
            values.put(QuizTable.COLUMN_ANSWER, quiz.getString(JsonAttributes.ANSWER));
            // optional, type-specific columns
            putNonEmptyString(values, quiz, JsonAttributes.OPTIONS, QuizTable.COLUMN_OPTIONS);
            putNonEmptyString(values, quiz, JsonAttributes.MIN, QuizTable.COLUMN_MIN);
            putNonEmptyString(values, quiz, JsonAttributes.MAX, QuizTable.COLUMN_MAX);
            putNonEmptyString(values, quiz, JsonAttributes.START, QuizTable.COLUMN_START);
            putNonEmptyString(values, quiz, JsonAttributes.END, QuizTable.COLUMN_END);
            putNonEmptyString(values, quiz, JsonAttributes.STEP, QuizTable.COLUMN_STEP);
            db.insert(QuizTable.NAME, null, values);
        }
    }
/**
* Puts a non-empty string to ContentValues provided.
*
* @param values The place where the data should be put.
* @param quiz The quiz potentially containing the data.
* @param jsonKey The key to look for.
     * @param contentKey The key used for placing the data in the database.
*/
private void putNonEmptyString(ContentValues values, JSONObject quiz, String jsonKey,
String contentKey) {
final String stringToPut = quiz.optString(jsonKey, null);
if (!TextUtils.isEmpty(stringToPut)) {
values.put(contentKey, stringToPut);
}
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.dmn.engine;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import org.activiti.dmn.api.DmnRepositoryService;
import org.activiti.dmn.api.DmnRuleService;
import org.activiti.dmn.engine.impl.DmnEngineImpl;
import org.activiti.dmn.engine.impl.DmnRepositoryServiceImpl;
import org.activiti.dmn.engine.impl.DmnRuleServiceImpl;
import org.activiti.dmn.engine.impl.RuleEngineExecutorImpl;
import org.activiti.dmn.engine.impl.ServiceImpl;
import org.activiti.dmn.engine.impl.cfg.CommandExecutorImpl;
import org.activiti.dmn.engine.impl.cfg.IdGenerator;
import org.activiti.dmn.engine.impl.cfg.StandaloneDmnEngineConfiguration;
import org.activiti.dmn.engine.impl.cfg.StandaloneInMemDmnEngineConfiguration;
import org.activiti.dmn.engine.impl.cfg.TransactionContextFactory;
import org.activiti.dmn.engine.impl.cfg.standalone.StandaloneMybatisTransactionContextFactory;
import org.activiti.dmn.engine.impl.db.DbSqlSessionFactory;
import org.activiti.dmn.engine.impl.deployer.CachingAndArtifactsManager;
import org.activiti.dmn.engine.impl.deployer.DmnDeployer;
import org.activiti.dmn.engine.impl.deployer.DmnDeploymentHelper;
import org.activiti.dmn.engine.impl.deployer.ParsedDeploymentBuilderFactory;
import org.activiti.dmn.engine.impl.interceptor.CommandConfig;
import org.activiti.dmn.engine.impl.interceptor.CommandContextFactory;
import org.activiti.dmn.engine.impl.interceptor.CommandContextInterceptor;
import org.activiti.dmn.engine.impl.interceptor.CommandExecutor;
import org.activiti.dmn.engine.impl.interceptor.CommandInterceptor;
import org.activiti.dmn.engine.impl.interceptor.CommandInvoker;
import org.activiti.dmn.engine.impl.interceptor.LogInterceptor;
import org.activiti.dmn.engine.impl.interceptor.SessionFactory;
import org.activiti.dmn.engine.impl.mvel.config.DefaultCustomExpressionFunctionRegistry;
import org.activiti.dmn.engine.impl.parser.DmnParseFactory;
import org.activiti.dmn.engine.impl.persistence.StrongUuidGenerator;
import org.activiti.dmn.engine.impl.persistence.deploy.DecisionTableCacheEntry;
import org.activiti.dmn.engine.impl.persistence.deploy.DefaultDeploymentCache;
import org.activiti.dmn.engine.impl.persistence.deploy.Deployer;
import org.activiti.dmn.engine.impl.persistence.deploy.DeploymentCache;
import org.activiti.dmn.engine.impl.persistence.deploy.DeploymentManager;
import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntityManager;
import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntityManagerImpl;
import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntityManager;
import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntityManagerImpl;
import org.activiti.dmn.engine.impl.persistence.entity.ResourceEntityManager;
import org.activiti.dmn.engine.impl.persistence.entity.ResourceEntityManagerImpl;
import org.activiti.dmn.engine.impl.persistence.entity.data.DecisionTableDataManager;
import org.activiti.dmn.engine.impl.persistence.entity.data.DmnDeploymentDataManager;
import org.activiti.dmn.engine.impl.persistence.entity.data.ResourceDataManager;
import org.activiti.dmn.engine.impl.persistence.entity.data.impl.MybatisDecisionTableDataManager;
import org.activiti.dmn.engine.impl.persistence.entity.data.impl.MybatisDmnDeploymentDataManager;
import org.activiti.dmn.engine.impl.persistence.entity.data.impl.MybatisResourceDataManager;
import org.activiti.dmn.engine.impl.util.DefaultClockImpl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.builder.xml.XMLConfigBuilder;
import org.apache.ibatis.builder.xml.XMLMapperBuilder;
import org.apache.ibatis.datasource.pooled.PooledDataSource;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.defaults.DefaultSqlSessionFactory;
import org.apache.ibatis.transaction.TransactionFactory;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
import org.apache.ibatis.transaction.managed.ManagedTransactionFactory;
import org.mvel2.integration.PropertyHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ClassLoaderResourceAccessor;
public class DmnEngineConfiguration {
    protected static final Logger logger = LoggerFactory.getLogger(DmnEngineConfiguration.class);
    /** The tenant id indicating 'no tenant' */
    public static final String NO_TENANT_ID = "";
    public static final String DEFAULT_MYBATIS_MAPPING_FILE = "org/activiti/dmn/db/mapping/mappings.xml";
    /** Prefix for the Liquibase changelog tables so they do not clash with other engines. */
    public static final String LIQUIBASE_CHANGELOG_PREFIX = "ACT_DMN_";
    /**
     * Checks the version of the DB schema against the library when the process engine is being created and throws an exception if the versions don't match.
     */
    public static final String DB_SCHEMA_UPDATE_FALSE = "false";
    /**
     * Creates the schema when the process engine is being created and drops the schema when the process engine is being closed.
     */
    public static final String DB_SCHEMA_UPDATE_DROP_CREATE = "create-drop";
    /**
     * Upon building of the process engine, a check is performed and an update of the schema is performed if it is necessary.
     */
    public static final String DB_SCHEMA_UPDATE_TRUE = "true";
    protected String dmnEngineName = DmnEngines.NAME_DEFAULT;
    // database type key resolved from JDBC metadata; see initDatabaseType()
    protected String databaseType;
    // JDBC settings; only used when neither dataSource nor dataSourceJndiName is set
    protected String jdbcDriver = "org.h2.Driver";
    protected String jdbcUrl = "jdbc:h2:tcp://localhost/~/activitidmn";
    protected String jdbcUsername = "sa";
    protected String jdbcPassword = "";
    protected String dataSourceJndiName;
    // pool tuning values; a value <= 0 leaves the pool default untouched (see initDataSource)
    protected int jdbcMaxActiveConnections;
    protected int jdbcMaxIdleConnections;
    protected int jdbcMaxCheckoutTime;
    protected int jdbcMaxWaitTime;
    protected boolean jdbcPingEnabled;
    protected String jdbcPingQuery;
    protected int jdbcPingConnectionNotUsedFor;
    protected int jdbcDefaultTransactionIsolationLevel;
    protected DataSource dataSource;
    // one of the DB_SCHEMA_UPDATE_* constants above
    protected String databaseSchemaUpdate = DB_SCHEMA_UPDATE_TRUE;
    protected String xmlEncoding = "UTF-8";
    protected BeanFactory beanFactory;
    // COMMAND EXECUTORS ///////////////////////////////////////////////
    protected CommandConfig defaultCommandConfig;
    protected CommandConfig schemaCommandConfig;
    protected CommandInterceptor commandInvoker;
    /**
     * the configurable list which will be {@link #initInterceptorChain(java.util.List) processed} to build the {@link #commandExecutor}
     */
    protected List<CommandInterceptor> customPreCommandInterceptors;
    protected List<CommandInterceptor> customPostCommandInterceptors;
    protected List<CommandInterceptor> commandInterceptors;
    /** this will be initialized during the configurationComplete() */
    protected CommandExecutor commandExecutor;
    // SERVICES
    // /////////////////////////////////////////////////////////////////
    protected DmnRepositoryService repositoryService = new DmnRepositoryServiceImpl();
    protected DmnRuleService ruleService = new DmnRuleServiceImpl();
    protected RuleEngineExecutor ruleEngineExecutor = new RuleEngineExecutorImpl();
    // DATA MANAGERS ///////////////////////////////////////////////////
    protected DmnDeploymentDataManager deploymentDataManager;
    protected DecisionTableDataManager decisionTableDataManager;
    protected ResourceDataManager resourceDataManager;
    // ENTITY MANAGERS /////////////////////////////////////////////////
    protected DmnDeploymentEntityManager deploymentEntityManager;
    protected DecisionTableEntityManager decisionTableEntityManager;
    protected ResourceEntityManager resourceEntityManager;
    protected CommandContextFactory commandContextFactory;
    protected TransactionContextFactory transactionContextFactory;
    // MYBATIS SQL SESSION FACTORY /////////////////////////////////////
    protected SqlSessionFactory sqlSessionFactory;
    protected TransactionFactory transactionFactory;
    protected Set<Class<?>> customMybatisMappers;
    protected Set<String> customMybatisXMLMappers;
    // SESSION FACTORIES ///////////////////////////////////////////////
    protected List<SessionFactory> customSessionFactories;
    protected DbSqlSessionFactory dbSqlSessionFactory;
    protected Map<Class<?>, SessionFactory> sessionFactories;
    protected boolean transactionsExternallyManaged;
    /**
     * Flag that can be set to configure whether or not a relational database is used. This is useful for custom implementations that do not use relational databases at all.
     *
     * If true (default), the {@link ProcessEngineConfiguration#getDatabaseSchemaUpdate()} value will be used to determine what needs to happen wrt the database schema.
     *
     * If false, no validation or schema creation will be done. That means that the database schema must have been created 'manually' before but the engine does not validate whether the schema is
     * correct. The {@link ProcessEngineConfiguration#getDatabaseSchemaUpdate()} value will not be used.
     */
    protected boolean usingRelationalDatabase = true;
    /**
     * Allows configuring a database table prefix which is used for all runtime operations of the process engine. For example, if you specify a prefix named 'PRE1.', activiti will query for executions
     * in a table named 'PRE1.ACT_RU_EXECUTION_'.
     *
     * <p />
     * <strong>NOTE: the prefix is not respected by automatic database schema management. If you use {@link ProcessEngineConfiguration#DB_SCHEMA_UPDATE_CREATE_DROP} or
     * {@link ProcessEngineConfiguration#DB_SCHEMA_UPDATE_TRUE}, activiti will create the database tables using the default names, regardless of the prefix configured here.</strong>
     */
    protected String databaseTablePrefix = "";
    /**
     * database catalog to use
     */
    protected String databaseCatalog = "";
    /**
     * In some situations you want to set the schema to use for table checks / generation if the database metadata doesn't return that correctly, see https://jira.codehaus.org/browse/ACT-1220,
     * https://jira.codehaus.org/browse/ACT-1062
     */
    protected String databaseSchema;
    /**
     * Set to true in case the defined databaseTablePrefix is a schema-name, instead of an actual table name prefix. This is relevant for checking if Activiti-tables exist, the databaseTablePrefix will
     * not be used here - since the schema is taken into account already, adding a prefix for the table-check will result in wrong table-names.
     */
    protected boolean tablePrefixIsSchema;
    // maps JDBC product names to the DATABASE_TYPE_* keys below
    protected static Properties databaseTypeMappings = getDefaultDatabaseTypeMappings();
    public static final String DATABASE_TYPE_H2 = "h2";
    public static final String DATABASE_TYPE_HSQL = "hsql";
    public static final String DATABASE_TYPE_MYSQL = "mysql";
    public static final String DATABASE_TYPE_ORACLE = "oracle";
    public static final String DATABASE_TYPE_POSTGRES = "postgres";
    public static final String DATABASE_TYPE_MSSQL = "mssql";
    public static final String DATABASE_TYPE_DB2 = "db2";
public static Properties getDefaultDatabaseTypeMappings() {
Properties databaseTypeMappings = new Properties();
databaseTypeMappings.setProperty("H2", DATABASE_TYPE_H2);
databaseTypeMappings.setProperty("HSQL Database Engine", DATABASE_TYPE_HSQL);
databaseTypeMappings.setProperty("MySQL", DATABASE_TYPE_MYSQL);
databaseTypeMappings.setProperty("Oracle", DATABASE_TYPE_ORACLE);
databaseTypeMappings.setProperty("PostgreSQL", DATABASE_TYPE_POSTGRES);
databaseTypeMappings.setProperty("Microsoft SQL Server", DATABASE_TYPE_MSSQL);
databaseTypeMappings.setProperty(DATABASE_TYPE_DB2, DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/NT", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/NT64", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2 UDP", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/LINUX", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/LINUX390", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/LINUXX8664", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/LINUXZ64", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/LINUXPPC64", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/400 SQL", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/6000", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2 UDB iSeries", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/AIX64", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/HPUX", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/HP64", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/SUN", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/SUN64", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/PTX", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2/2", DATABASE_TYPE_DB2);
databaseTypeMappings.setProperty("DB2 UDB AS400", DATABASE_TYPE_DB2);
return databaseTypeMappings;
}
public void initDatabaseType() {
Connection connection = null;
try {
connection = dataSource.getConnection();
DatabaseMetaData databaseMetaData = connection.getMetaData();
String databaseProductName = databaseMetaData.getDatabaseProductName();
logger.debug("database product name: '{}'", databaseProductName);
databaseType = databaseTypeMappings.getProperty(databaseProductName);
if (databaseType == null) {
throw new ActivitiDmnException("couldn't deduct database type from database product name '" + databaseProductName + "'");
}
logger.debug("using database type: {}", databaseType);
} catch (SQLException e) {
logger.error("Exception while initializing Database connection", e);
} finally {
try {
if (connection != null) {
connection.close();
}
} catch (SQLException e) {
logger.error("Exception while closing the Database connection", e);
}
}
}
    // DEPLOYERS
    // ////////////////////////////////////////////////////////////////
    protected DmnDeployer dmnDeployer;
    protected DmnParseFactory dmnParseFactory;
    protected ParsedDeploymentBuilderFactory parsedDeploymentBuilderFactory;
    protected DmnDeploymentHelper dmnDeploymentHelper;
    protected CachingAndArtifactsManager cachingAndArtifactsManager;
    protected List<Deployer> customPreDeployers;
    protected List<Deployer> customPostDeployers;
    protected List<Deployer> deployers;
    protected DeploymentManager deploymentManager;
    protected int decisionCacheLimit = -1; // By default, no limit
    protected DeploymentCache<DecisionTableCacheEntry> decisionCache;
    protected IdGenerator idGenerator;
    protected Clock clock;
    // CUSTOM EXPRESSION FUNCTIONS
    // ////////////////////////////////////////////////////////////////
    protected CustomExpressionFunctionRegistry customExpressionFunctionRegistry;
    protected CustomExpressionFunctionRegistry postCustomExpressionFunctionRegistry;
    protected Map<String, Method> customExpressionFunctions = new HashMap<String, Method>();
    protected Map<Class<?>, PropertyHandler> customPropertyHandlers = new HashMap<Class<?>, PropertyHandler>();
    /**
     * Set this to true if you want to have extra checks on the DMN xml that is parsed. See http://www.jorambarrez.be/blog/2013/02/19/uploading-a-funny-xml -can-bring-down-your-server/
     *
     * Unfortunately, this feature is not available on some platforms (JDK 6, JBoss), hence the reason why it is disabled by default. If your platform allows the use of StaxSource during XML parsing, do
     * enable it.
     */
    protected boolean enableSafeDmnXml;
    /** Builds a configuration from the default classpath resource {@code activiti.dmn.cfg.xml}. */
    public static DmnEngineConfiguration createDmnEngineConfigurationFromResourceDefault() {
        return createDmnEngineConfigurationFromResource("activiti.dmn.cfg.xml", "dmnEngineConfiguration");
    }
    /** Builds a configuration from a classpath resource using the default bean name. */
    public static DmnEngineConfiguration createDmnEngineConfigurationFromResource(String resource) {
        return createDmnEngineConfigurationFromResource(resource, "dmnEngineConfiguration");
    }
    /** Builds a configuration from a classpath resource, looking up the given bean name. */
    public static DmnEngineConfiguration createDmnEngineConfigurationFromResource(String resource, String beanName) {
        return parseProcessEngineConfigurationFromResource(resource, beanName);
    }
    /** Builds a configuration from an input stream using the default bean name. */
    public static DmnEngineConfiguration createDmnEngineConfigurationFromInputStream(InputStream inputStream) {
        return createDmnEngineConfigurationFromInputStream(inputStream, "dmnEngineConfiguration");
    }
    /** Builds a configuration from an input stream, looking up the given bean name. */
    public static DmnEngineConfiguration createDmnEngineConfigurationFromInputStream(InputStream inputStream, String beanName) {
        return parseProcessEngineConfigurationFromInputStream(inputStream, beanName);
    }
    /** Creates a standalone (non-Spring-managed) configuration with default settings. */
    public static DmnEngineConfiguration createStandaloneDmnEngineConfiguration() {
        return new StandaloneDmnEngineConfiguration();
    }
    /** Creates a standalone configuration backed by an in-memory database. */
    public static DmnEngineConfiguration createStandaloneInMemDmnEngineConfiguration() {
        return new StandaloneInMemDmnEngineConfiguration();
    }
public static DmnEngineConfiguration parseDmnEngineConfiguration(Resource springResource, String beanName) {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
XmlBeanDefinitionReader xmlBeanDefinitionReader = new XmlBeanDefinitionReader(beanFactory);
xmlBeanDefinitionReader.setValidationMode(XmlBeanDefinitionReader.VALIDATION_XSD);
xmlBeanDefinitionReader.loadBeanDefinitions(springResource);
DmnEngineConfiguration processEngineConfiguration = (DmnEngineConfiguration) beanFactory.getBean(beanName);
processEngineConfiguration.setBeanFactory(beanFactory);
return processEngineConfiguration;
}
    /** Wraps the stream as a Spring resource and delegates to {@link #parseDmnEngineConfiguration}. */
    public static DmnEngineConfiguration parseProcessEngineConfigurationFromInputStream(InputStream inputStream, String beanName) {
        Resource springResource = new InputStreamResource(inputStream);
        return parseDmnEngineConfiguration(springResource, beanName);
    }
    /** Wraps the classpath location as a Spring resource and delegates to {@link #parseDmnEngineConfiguration}. */
    public static DmnEngineConfiguration parseProcessEngineConfigurationFromResource(String resource, String beanName) {
        Resource springResource = new ClassPathResource(resource);
        return parseDmnEngineConfiguration(springResource, beanName);
    }
    // buildProcessEngine
    // ///////////////////////////////////////////////////////
    /** Initializes all engine components and returns a ready-to-use engine instance. */
    public DmnEngine buildDmnEngine() {
        init();
        return new DmnEngineImpl(this);
    }
    // init
    // /////////////////////////////////////////////////////////////////////
    /**
     * Wires up all engine components. The order matters: command
     * infrastructure first, then data source and schema, then the MyBatis
     * and session layers, and finally services, managers, deployers, clock
     * and expression functions.
     */
    protected void init() {
        initCommandContextFactory();
        initTransactionContextFactory();
        initCommandExecutors();
        initIdGenerator();
        initDataSource();
        initDbSchema();
        initTransactionFactory();
        initSqlSessionFactory();
        initSessionFactories();
        initServices();
        initDataManagers();
        initEntityManagers();
        initDeployers();
        initClock();
        initCustomExpressionFunctions();
    }
    // services
    // /////////////////////////////////////////////////////////////////
    /** Injects the command executor into both engine services. */
    protected void initServices() {
        initService(repositoryService);
        initService(ruleService);
    }
protected void initService(Object service) {
if (service instanceof ServiceImpl) {
((ServiceImpl) service).setCommandExecutor(commandExecutor);
}
}
// Data managers
///////////////////////////////////////////////////////////
public void initDataManagers() {
if (deploymentDataManager == null) {
deploymentDataManager = new MybatisDmnDeploymentDataManager(this);
}
if (decisionTableDataManager == null) {
decisionTableDataManager = new MybatisDecisionTableDataManager(this);
}
if (resourceDataManager == null) {
resourceDataManager = new MybatisResourceDataManager(this);
}
}
public void initEntityManagers() {
if (deploymentEntityManager == null) {
deploymentEntityManager = new DmnDeploymentEntityManagerImpl(this, deploymentDataManager);
}
if (decisionTableEntityManager == null) {
decisionTableEntityManager = new DecisionTableEntityManagerImpl(this, decisionTableDataManager);
}
if (resourceEntityManager == null) {
resourceEntityManager = new ResourceEntityManagerImpl(this, resourceDataManager);
}
}
    // DataSource
    // ///////////////////////////////////////////////////////////////
    /**
     * Resolves the data source, in order of precedence: an explicitly set
     * {@link #dataSource}, a JNDI lookup via {@link #dataSourceJndiName}, or a
     * MyBatis pooled data source built from the jdbc* properties. Afterwards
     * the database type is deduced if not set explicitly.
     */
    protected void initDataSource() {
        if (dataSource == null) {
            if (dataSourceJndiName != null) {
                try {
                    dataSource = (DataSource) new InitialContext().lookup(dataSourceJndiName);
                } catch (Exception e) {
                    throw new ActivitiDmnException("couldn't lookup datasource from " + dataSourceJndiName + ": " + e.getMessage(), e);
                }
            } else if (jdbcUrl != null) {
                if ((jdbcDriver == null) || (jdbcUsername == null)) {
                    throw new ActivitiDmnException("DataSource or JDBC properties have to be specified in a process engine configuration");
                }
                logger.debug("initializing datasource to db: {}", jdbcUrl);
                if (logger.isInfoEnabled()) {
                    logger.info("Configuring Datasource with following properties (omitted password for security)");
                    logger.info("datasource driver: " + jdbcDriver);
                    logger.info("datasource url : " + jdbcUrl);
                    logger.info("datasource user name : " + jdbcUsername);
                }
                PooledDataSource pooledDataSource = new PooledDataSource(this.getClass().getClassLoader(), jdbcDriver, jdbcUrl, jdbcUsername, jdbcPassword);
                // values <= 0 leave the MyBatis pool defaults untouched
                if (jdbcMaxActiveConnections > 0) {
                    pooledDataSource.setPoolMaximumActiveConnections(jdbcMaxActiveConnections);
                }
                if (jdbcMaxIdleConnections > 0) {
                    pooledDataSource.setPoolMaximumIdleConnections(jdbcMaxIdleConnections);
                }
                if (jdbcMaxCheckoutTime > 0) {
                    pooledDataSource.setPoolMaximumCheckoutTime(jdbcMaxCheckoutTime);
                }
                if (jdbcMaxWaitTime > 0) {
                    pooledDataSource.setPoolTimeToWait(jdbcMaxWaitTime);
                }
                if (jdbcPingEnabled == true) {
                    pooledDataSource.setPoolPingEnabled(true);
                    if (jdbcPingQuery != null) {
                        pooledDataSource.setPoolPingQuery(jdbcPingQuery);
                    }
                    pooledDataSource.setPoolPingConnectionsNotUsedFor(jdbcPingConnectionNotUsedFor);
                }
                if (jdbcDefaultTransactionIsolationLevel > 0) {
                    pooledDataSource.setDefaultTransactionIsolationLevel(jdbcDefaultTransactionIsolationLevel);
                }
                dataSource = pooledDataSource;
            }
            if (dataSource instanceof PooledDataSource) {
                // ACT-233: connection pool of Ibatis is not properly
                // initialized if this is not called!
                ((PooledDataSource) dataSource).forceCloseAll();
            }
        }
        if (databaseType == null) {
            initDatabaseType();
        }
    }
// data model
// ///////////////////////////////////////////////////////////////
public void initDbSchema() {
try {
DatabaseConnection connection = new JdbcConnection(dataSource.getConnection());
Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(connection);
database.setDatabaseChangeLogTableName(LIQUIBASE_CHANGELOG_PREFIX + database.getDatabaseChangeLogTableName());
database.setDatabaseChangeLogLockTableName(LIQUIBASE_CHANGELOG_PREFIX + database.getDatabaseChangeLogLockTableName());
if (StringUtils.isNotEmpty(databaseSchema)) {
database.setDefaultSchemaName(databaseSchema);
database.setLiquibaseSchemaName(databaseSchema);
}
if (StringUtils.isNotEmpty(databaseCatalog)) {
database.setDefaultCatalogName(databaseCatalog);
database.setLiquibaseCatalogName(databaseCatalog);
}
Liquibase liquibase = new Liquibase("org/activiti/dmn/db/liquibase/activiti-dmn-db-changelog.xml", new ClassLoaderResourceAccessor(), database);
if (DB_SCHEMA_UPDATE_DROP_CREATE.equals(databaseSchemaUpdate)) {
logger.debug("Dropping and creating schema DMN");
liquibase.dropAll();
liquibase.update("dmn");
} else if (DB_SCHEMA_UPDATE_TRUE.equals(databaseSchemaUpdate)) {
logger.debug("Updating schema DMN");
liquibase.update("dmn");
} else if (DB_SCHEMA_UPDATE_FALSE.equals(databaseSchemaUpdate)) {
logger.debug("Validating schema DMN");
liquibase.validate();
}
} catch (Exception e) {
throw new ActivitiDmnException("Error initialising dmn data model");
}
}
// session factories ////////////////////////////////////////////////////////
public void initSessionFactories() {
if (sessionFactories == null) {
sessionFactories = new HashMap<Class<?>, SessionFactory>();
if (usingRelationalDatabase) {
initDbSqlSessionFactory();
}
}
if (customSessionFactories != null) {
for (SessionFactory sessionFactory : customSessionFactories) {
addSessionFactory(sessionFactory);
}
}
}
    /**
     * Creates (if needed) and configures the DB SQL session factory with the
     * resolved database settings, then registers it as a session factory.
     */
    public void initDbSqlSessionFactory() {
        if (dbSqlSessionFactory == null) {
            dbSqlSessionFactory = createDbSqlSessionFactory();
        }
        dbSqlSessionFactory.setDatabaseType(databaseType);
        dbSqlSessionFactory.setSqlSessionFactory(sqlSessionFactory);
        dbSqlSessionFactory.setIdGenerator(idGenerator);
        dbSqlSessionFactory.setDatabaseTablePrefix(databaseTablePrefix);
        dbSqlSessionFactory.setTablePrefixIsSchema(tablePrefixIsSchema);
        dbSqlSessionFactory.setDatabaseCatalog(databaseCatalog);
        dbSqlSessionFactory.setDatabaseSchema(databaseSchema);
        addSessionFactory(dbSqlSessionFactory);
    }
    /** Factory hook; subclasses may override to supply a customized DbSqlSessionFactory. */
    public DbSqlSessionFactory createDbSqlSessionFactory() {
        return new DbSqlSessionFactory();
    }
// Registers a session factory keyed by its session type; a later
// registration for the same type replaces the earlier one.
public void addSessionFactory(SessionFactory sessionFactory) {
sessionFactories.put(sessionFactory.getSessionType(), sessionFactory);
}
// command executors
// ////////////////////////////////////////////////////////
// Builds the command-execution stack in dependency order: the command
// configs first, then the invoker, the interceptor list that wraps it,
// and finally the executor over the linked chain. Order matters:
// initCommandInterceptors reads commandInvoker, and initCommandExecutor
// reads commandInterceptors.
public void initCommandExecutors() {
initDefaultCommandConfig();
initSchemaCommandConfig();
initCommandInvoker();
initCommandInterceptors();
initCommandExecutor();
}
/** Lazily creates the default command configuration. */
public void initDefaultCommandConfig() {
    if (defaultCommandConfig != null) {
        return;
    }
    defaultCommandConfig = new CommandConfig();
}
/** Lazily creates the schema command configuration, with transactions disabled. */
public void initSchemaCommandConfig() {
    if (schemaCommandConfig != null) {
        return;
    }
    schemaCommandConfig = new CommandConfig().transactionNotSupported();
}
/** Lazily creates the command invoker that ends up last in the interceptor list. */
public void initCommandInvoker() {
    if (commandInvoker != null) {
        return;
    }
    commandInvoker = new CommandInvoker();
}
/**
 * Lazily assembles the interceptor list in execution order: custom
 * pre-interceptors, the defaults, custom post-interceptors, and the
 * command invoker last.
 */
public void initCommandInterceptors() {
    if (commandInterceptors != null) {
        return;
    }
    commandInterceptors = new ArrayList<CommandInterceptor>();
    if (customPreCommandInterceptors != null) {
        commandInterceptors.addAll(customPreCommandInterceptors);
    }
    commandInterceptors.addAll(getDefaultCommandInterceptors());
    if (customPostCommandInterceptors != null) {
        commandInterceptors.addAll(customPostCommandInterceptors);
    }
    commandInterceptors.add(commandInvoker);
}
/**
 * Default interceptors: logging, an optional transaction interceptor
 * (see {@link #createTransactionInterceptor()}), and the command-context
 * interceptor.
 */
public Collection<? extends CommandInterceptor> getDefaultCommandInterceptors() {
    final List<CommandInterceptor> defaults = new ArrayList<CommandInterceptor>();
    defaults.add(new LogInterceptor());
    final CommandInterceptor txInterceptor = createTransactionInterceptor();
    if (txInterceptor != null) {
        defaults.add(txInterceptor);
    }
    defaults.add(new CommandContextInterceptor(commandContextFactory, this));
    return defaults;
}
/** Lazily creates the command executor wrapping the linked interceptor chain. */
public void initCommandExecutor() {
    if (commandExecutor != null) {
        return;
    }
    final CommandInterceptor chainHead = initInterceptorChain(commandInterceptors);
    commandExecutor = new CommandExecutorImpl(getDefaultCommandConfig(), chainHead);
}
/**
 * Links each interceptor to its successor and returns the head of the chain.
 *
 * @throws ActivitiDmnException if the chain is null or empty
 */
public CommandInterceptor initInterceptorChain(List<CommandInterceptor> chain) {
    if (chain == null || chain.isEmpty()) {
        throw new ActivitiDmnException("invalid command interceptor chain configuration: " + chain);
    }
    int next = 1;
    while (next < chain.size()) {
        chain.get(next - 1).setNext(chain.get(next));
        next++;
    }
    return chain.get(0);
}
// Hook for subclasses: return a transaction interceptor to include in the
// default chain, or null (the default) to omit transaction handling.
public CommandInterceptor createTransactionInterceptor() {
return null;
}
// deployers
// ////////////////////////////////////////////////////////////////
/**
 * Wires the deployment infrastructure: the parse factory, the deployer
 * list (custom pre-deployers, defaults, custom post-deployers), the
 * decision-table cache, and the deployment manager tying them together.
 */
protected void initDeployers() {
    if (dmnParseFactory == null) {
        dmnParseFactory = new DmnParseFactory();
    }
    if (this.dmnDeployer == null) {
        this.deployers = new ArrayList<Deployer>();
        if (customPreDeployers != null) {
            this.deployers.addAll(customPreDeployers);
        }
        this.deployers.addAll(getDefaultDeployers());
        if (customPostDeployers != null) {
            this.deployers.addAll(customPostDeployers);
        }
    }
    // Decision cache: unbounded unless a positive limit was configured.
    if (decisionCache == null) {
        decisionCache = decisionCacheLimit <= 0
                ? new DefaultDeploymentCache<DecisionTableCacheEntry>()
                : new DefaultDeploymentCache<DecisionTableCacheEntry>(decisionCacheLimit);
    }
    deploymentManager = new DeploymentManager(decisionCache, this);
    deploymentManager.setDeployers(deployers);
    deploymentManager.setDeploymentEntityManager(deploymentEntityManager);
    deploymentManager.setDecisionTableEntityManager(decisionTableEntityManager);
}
/**
 * Creates (if needed) and fully wires the DMN deployer, returning it as
 * the single default deployer.
 */
public Collection<? extends Deployer> getDefaultDeployers() {
    if (dmnDeployer == null) {
        dmnDeployer = new DmnDeployer();
    }
    initDmnDeployerDependencies();
    dmnDeployer.setIdGenerator(idGenerator);
    dmnDeployer.setParsedDeploymentBuilderFactory(parsedDeploymentBuilderFactory);
    dmnDeployer.setDmnDeploymentHelper(dmnDeploymentHelper);
    dmnDeployer.setCachingAndArtifactsManager(cachingAndArtifactsManager);
    final List<Deployer> defaults = new ArrayList<Deployer>();
    defaults.add(dmnDeployer);
    return defaults;
}
// Lazily creates the deployer's collaborators and ensures the parsed
// deployment builder factory has a parse factory to work with. Each
// null-check is independent, so injected collaborators are preserved.
public void initDmnDeployerDependencies() {
if (parsedDeploymentBuilderFactory == null) {
parsedDeploymentBuilderFactory = new ParsedDeploymentBuilderFactory();
}
if (parsedDeploymentBuilderFactory.getDmnParseFactory() == null) {
parsedDeploymentBuilderFactory.setDmnParseFactory(dmnParseFactory);
}
if (dmnDeploymentHelper == null) {
dmnDeploymentHelper = new DmnDeploymentHelper();
}
if (cachingAndArtifactsManager == null) {
cachingAndArtifactsManager = new CachingAndArtifactsManager();
}
}
// id generator
// /////////////////////////////////////////////////////////////
/** Lazily creates the id generator; defaults to strong UUIDs. */
public void initIdGenerator() {
    if (idGenerator != null) {
        return;
    }
    idGenerator = new StrongUuidGenerator();
}
// OTHER
// ////////////////////////////////////////////////////////////////////
// Lazily creates the command-context factory. Note the configuration is
// (re)attached unconditionally, so even an externally injected factory is
// pointed back at this engine configuration.
public void initCommandContextFactory() {
if (commandContextFactory == null) {
commandContextFactory = new CommandContextFactory();
}
commandContextFactory.setDmnEngineConfiguration(this);
}
/** Lazily creates the transaction-context factory (standalone MyBatis by default). */
public void initTransactionContextFactory() {
    if (transactionContextFactory != null) {
        return;
    }
    transactionContextFactory = new StandaloneMybatisTransactionContextFactory();
}
/** Lazily creates the engine clock. */
public void initClock() {
    if (clock != null) {
        return;
    }
    clock = new DefaultClockImpl();
}
// custom expression functions
// ////////////////////////////////////////////////////////////////
/**
 * Populates the custom expression function map from the configured
 * registry (falling back to the default registry when none is set),
 * then overlays any entries from the post-registry.
 */
protected void initCustomExpressionFunctions() {
    final CustomExpressionFunctionRegistry registry = customExpressionFunctionRegistry != null
            ? customExpressionFunctionRegistry
            : new DefaultCustomExpressionFunctionRegistry();
    customExpressionFunctions.putAll(registry.getCustomExpressionMethods());
    if (postCustomExpressionFunctionRegistry != null) {
        customExpressionFunctions.putAll(postCustomExpressionFunctionRegistry.getCustomExpressionMethods());
    }
}
// myBatis SqlSessionFactory
// ////////////////////////////////////////////////
/**
 * Lazily chooses the MyBatis transaction factory: container-managed when
 * transactions are externally managed, plain JDBC otherwise.
 */
public void initTransactionFactory() {
    if (transactionFactory != null) {
        return;
    }
    transactionFactory = transactionsExternallyManaged
            ? new ManagedTransactionFactory()
            : new JdbcTransactionFactory();
}
/**
 * Lazily builds the MyBatis SqlSessionFactory from the engine's mapping
 * configuration, seeding the parser with table-prefix and paging
 * properties plus any database-specific overrides.
 *
 * Fix: the database-specific properties stream was previously never
 * closed (resource leak); it is now closed in a finally block, matching
 * how the main configuration stream is handled.
 */
public void initSqlSessionFactory() {
    if (sqlSessionFactory == null) {
        InputStream inputStream = null;
        try {
            inputStream = getMyBatisXmlConfigurationStream();
            Environment environment = new Environment("default", transactionFactory, dataSource);
            // NOTE(review): this reader uses the platform default charset --
            // confirm the mapping file is ASCII-safe or pass an explicit charset.
            Reader reader = new InputStreamReader(inputStream);
            Properties properties = new Properties();
            properties.put("prefix", databaseTablePrefix);
            // Defaults for paging/ordering fragments; overridden per database below.
            properties.put("limitBefore", "");
            properties.put("limitAfter", "");
            properties.put("limitBetween", "");
            properties.put("limitOuterJoinBetween", "");
            properties.put("limitBeforeNativeQuery", "");
            properties.put("orderBy", "order by ${orderByColumns}");
            properties.put("blobType", "BLOB");
            properties.put("boolValue", "TRUE");
            if (databaseType != null) {
                // Close the database-specific properties stream (was leaked before).
                InputStream propertiesStream = getResourceAsStream("org/activiti/dmn/db/properties/" + databaseType + ".properties");
                try {
                    properties.load(propertiesStream);
                } finally {
                    IOUtils.closeQuietly(propertiesStream);
                }
            }
            Configuration configuration = initMybatisConfiguration(environment, reader, properties);
            sqlSessionFactory = new DefaultSqlSessionFactory(configuration);
        } catch (Exception e) {
            throw new ActivitiDmnException("Error while building ibatis SqlSessionFactory: " + e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(inputStream);
        }
    }
}
/**
 * Creates the MyBatis Configuration from the given reader and properties,
 * applying the database id, the environment and any custom mappers before
 * running the XML parse.
 */
public Configuration initMybatisConfiguration(Environment environment, Reader reader, Properties properties) {
    final XMLConfigBuilder configBuilder = new XMLConfigBuilder(reader, "", properties);
    final Configuration mybatisConfiguration = configBuilder.getConfiguration();
    if (databaseType != null) {
        mybatisConfiguration.setDatabaseId(databaseType);
    }
    mybatisConfiguration.setEnvironment(environment);
    initCustomMybatisMappers(mybatisConfiguration);
    return parseMybatisConfiguration(mybatisConfiguration, configBuilder);
}
/** Registers any user-supplied annotated mapper classes with MyBatis. */
public void initCustomMybatisMappers(Configuration configuration) {
    if (getCustomMybatisMappers() == null) {
        return;
    }
    for (Class<?> mapperClass : getCustomMybatisMappers()) {
        configuration.addMapper(mapperClass);
    }
}
// Runs the XML parse and folds in any custom XML mapper resources.
// NOTE(review): the configuration parameter is unused here -- parser.parse()
// returns the configuration itself; the parameter exists for subclass hooks.
public Configuration parseMybatisConfiguration(Configuration configuration, XMLConfigBuilder parser) {
return parseCustomMybatisXMLMappers(parser.parse());
}
/**
 * Parses any user-supplied XML mapper resources into the configuration
 * (mirrors XMLConfigBuilder.mapperElement()).
 */
public Configuration parseCustomMybatisXMLMappers(Configuration configuration) {
    if (getCustomMybatisXMLMappers() != null) {
        for (String resource : getCustomMybatisXMLMappers()) {
            new XMLMapperBuilder(getResourceAsStream(resource), configuration, resource, configuration.getSqlFragments()).parse();
        }
    }
    return configuration;
}
// Resolves a classpath resource via this class's own class loader.
// May return null when the resource is absent (callers must handle this).
protected InputStream getResourceAsStream(String resource) {
return this.getClass().getClassLoader().getResourceAsStream(resource);
}
// Opens the default MyBatis mapping configuration from the classpath.
public InputStream getMyBatisXmlConfigurationStream() {
return getResourceAsStream(DEFAULT_MYBATIS_MAPPING_FILE);
}
// getters and setters
// //////////////////////////////////////////////////////
// ------------------------------------------------------------------------
// Getters and setters. Note: some setters are fluent (return this for
// chaining) while others return void; the mix is kept as-is for API
// compatibility with existing callers.
// ------------------------------------------------------------------------
// --- engine identity and datasource / JDBC settings ---
public String getDmnEngineName() {
return dmnEngineName;
}
public DmnEngineConfiguration setDmnEngineName(String dmnEngineName) {
this.dmnEngineName = dmnEngineName;
return this;
}
public String getDatabaseType() {
return databaseType;
}
public DmnEngineConfiguration setDatabaseType(String databaseType) {
this.databaseType = databaseType;
return this;
}
public DataSource getDataSource() {
return dataSource;
}
public DmnEngineConfiguration setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
return this;
}
public String getJdbcDriver() {
return jdbcDriver;
}
public DmnEngineConfiguration setJdbcDriver(String jdbcDriver) {
this.jdbcDriver = jdbcDriver;
return this;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public DmnEngineConfiguration setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
return this;
}
public String getJdbcUsername() {
return jdbcUsername;
}
public DmnEngineConfiguration setJdbcUsername(String jdbcUsername) {
this.jdbcUsername = jdbcUsername;
return this;
}
public String getJdbcPassword() {
return jdbcPassword;
}
public DmnEngineConfiguration setJdbcPassword(String jdbcPassword) {
this.jdbcPassword = jdbcPassword;
return this;
}
// --- JDBC connection-pool tuning ---
public int getJdbcMaxActiveConnections() {
return jdbcMaxActiveConnections;
}
public DmnEngineConfiguration setJdbcMaxActiveConnections(int jdbcMaxActiveConnections) {
this.jdbcMaxActiveConnections = jdbcMaxActiveConnections;
return this;
}
public int getJdbcMaxIdleConnections() {
return jdbcMaxIdleConnections;
}
public DmnEngineConfiguration setJdbcMaxIdleConnections(int jdbcMaxIdleConnections) {
this.jdbcMaxIdleConnections = jdbcMaxIdleConnections;
return this;
}
public int getJdbcMaxCheckoutTime() {
return jdbcMaxCheckoutTime;
}
public DmnEngineConfiguration setJdbcMaxCheckoutTime(int jdbcMaxCheckoutTime) {
this.jdbcMaxCheckoutTime = jdbcMaxCheckoutTime;
return this;
}
public int getJdbcMaxWaitTime() {
return jdbcMaxWaitTime;
}
public DmnEngineConfiguration setJdbcMaxWaitTime(int jdbcMaxWaitTime) {
this.jdbcMaxWaitTime = jdbcMaxWaitTime;
return this;
}
public boolean isJdbcPingEnabled() {
return jdbcPingEnabled;
}
public DmnEngineConfiguration setJdbcPingEnabled(boolean jdbcPingEnabled) {
this.jdbcPingEnabled = jdbcPingEnabled;
return this;
}
public int getJdbcPingConnectionNotUsedFor() {
return jdbcPingConnectionNotUsedFor;
}
public DmnEngineConfiguration setJdbcPingConnectionNotUsedFor(int jdbcPingConnectionNotUsedFor) {
this.jdbcPingConnectionNotUsedFor = jdbcPingConnectionNotUsedFor;
return this;
}
public int getJdbcDefaultTransactionIsolationLevel() {
return jdbcDefaultTransactionIsolationLevel;
}
public DmnEngineConfiguration setJdbcDefaultTransactionIsolationLevel(int jdbcDefaultTransactionIsolationLevel) {
this.jdbcDefaultTransactionIsolationLevel = jdbcDefaultTransactionIsolationLevel;
return this;
}
public String getJdbcPingQuery() {
return jdbcPingQuery;
}
public DmnEngineConfiguration setJdbcPingQuery(String jdbcPingQuery) {
this.jdbcPingQuery = jdbcPingQuery;
return this;
}
public String getDataSourceJndiName() {
return dataSourceJndiName;
}
public DmnEngineConfiguration setDataSourceJndiName(String dataSourceJndiName) {
this.dataSourceJndiName = dataSourceJndiName;
return this;
}
public String getXmlEncoding() {
return xmlEncoding;
}
public DmnEngineConfiguration setXmlEncoding(String xmlEncoding) {
this.xmlEncoding = xmlEncoding;
return this;
}
public BeanFactory getBeanFactory() {
return beanFactory;
}
public DmnEngineConfiguration setBeanFactory(BeanFactory beanFactory) {
this.beanFactory = beanFactory;
return this;
}
// --- command execution wiring ---
public CommandConfig getDefaultCommandConfig() {
return defaultCommandConfig;
}
public DmnEngineConfiguration setDefaultCommandConfig(CommandConfig defaultCommandConfig) {
this.defaultCommandConfig = defaultCommandConfig;
return this;
}
public CommandInterceptor getCommandInvoker() {
return commandInvoker;
}
public DmnEngineConfiguration setCommandInvoker(CommandInterceptor commandInvoker) {
this.commandInvoker = commandInvoker;
return this;
}
public List<CommandInterceptor> getCustomPreCommandInterceptors() {
return customPreCommandInterceptors;
}
public DmnEngineConfiguration setCustomPreCommandInterceptors(List<CommandInterceptor> customPreCommandInterceptors) {
this.customPreCommandInterceptors = customPreCommandInterceptors;
return this;
}
public List<CommandInterceptor> getCustomPostCommandInterceptors() {
return customPostCommandInterceptors;
}
public DmnEngineConfiguration setCustomPostCommandInterceptors(List<CommandInterceptor> customPostCommandInterceptors) {
this.customPostCommandInterceptors = customPostCommandInterceptors;
return this;
}
public List<CommandInterceptor> getCommandInterceptors() {
return commandInterceptors;
}
public DmnEngineConfiguration setCommandInterceptors(List<CommandInterceptor> commandInterceptors) {
this.commandInterceptors = commandInterceptors;
return this;
}
public CommandExecutor getCommandExecutor() {
return commandExecutor;
}
public DmnEngineConfiguration setCommandExecutor(CommandExecutor commandExecutor) {
this.commandExecutor = commandExecutor;
return this;
}
// --- public services and deployment infrastructure (read-only accessors) ---
public DmnRepositoryService getDmnRepositoryService() {
return repositoryService;
}
public DmnRuleService getDmnRuleService() {
return ruleService;
}
public RuleEngineExecutor getRuleEngineExecutor() {
return ruleEngineExecutor;
}
public DeploymentManager getDeploymentManager() {
return deploymentManager;
}
public DmnEngineConfiguration getDmnEngineConfiguration() {
return this;
}
public DmnDeployer getDmnDeployer() {
return dmnDeployer;
}
public DmnEngineConfiguration setDmnDeployer(DmnDeployer dmnDeployer) {
this.dmnDeployer = dmnDeployer;
return this;
}
public DmnParseFactory getDmnParseFactory() {
return dmnParseFactory;
}
public DmnEngineConfiguration setDmnParseFactory(DmnParseFactory dmnParseFactory) {
this.dmnParseFactory = dmnParseFactory;
return this;
}
// decisionCacheLimit <= 0 selects an unbounded decision cache (see initDeployers)
public int getDecisionCacheLimit() {
return decisionCacheLimit;
}
public DmnEngineConfiguration setDecisionCacheLimit(int decisionCacheLimit) {
this.decisionCacheLimit = decisionCacheLimit;
return this;
}
public DeploymentCache<DecisionTableCacheEntry> getDecisionCache() {
return decisionCache;
}
public DmnEngineConfiguration setDecisionCache(DeploymentCache<DecisionTableCacheEntry> decisionCache) {
this.decisionCache = decisionCache;
return this;
}
// --- low-level data managers and entity managers ---
public DmnDeploymentDataManager getDeploymentDataManager() {
return deploymentDataManager;
}
public void setDeploymentDataManager(DmnDeploymentDataManager deploymentDataManager) {
this.deploymentDataManager = deploymentDataManager;
}
public DecisionTableDataManager getDecisionTableDataManager() {
return decisionTableDataManager;
}
public void setDecisionTableDataManager(DecisionTableDataManager decisionTableDataManager) {
this.decisionTableDataManager = decisionTableDataManager;
}
public ResourceDataManager getResourceDataManager() {
return resourceDataManager;
}
public void setResourceDataManager(ResourceDataManager resourceDataManager) {
this.resourceDataManager = resourceDataManager;
}
public DmnDeploymentEntityManager getDeploymentEntityManager() {
return deploymentEntityManager;
}
public void setDeploymentEntityManager(DmnDeploymentEntityManager deploymentEntityManager) {
this.deploymentEntityManager = deploymentEntityManager;
}
public DecisionTableEntityManager getDecisionTableEntityManager() {
return decisionTableEntityManager;
}
public void setDecisionTableEntityManager(DecisionTableEntityManager decisionTableEntityManager) {
this.decisionTableEntityManager = decisionTableEntityManager;
}
public ResourceEntityManager getResourceEntityManager() {
return resourceEntityManager;
}
public void setResourceEntityManager(ResourceEntityManager resourceEntityManager) {
this.resourceEntityManager = resourceEntityManager;
}
// --- MyBatis / persistence plumbing ---
public CommandContextFactory getCommandContextFactory() {
return commandContextFactory;
}
public void setCommandContextFactory(CommandContextFactory commandContextFactory) {
this.commandContextFactory = commandContextFactory;
}
public SqlSessionFactory getSqlSessionFactory() {
return sqlSessionFactory;
}
public void setSqlSessionFactory(SqlSessionFactory sqlSessionFactory) {
this.sqlSessionFactory = sqlSessionFactory;
}
public TransactionFactory getTransactionFactory() {
return transactionFactory;
}
public void setTransactionFactory(TransactionFactory transactionFactory) {
this.transactionFactory = transactionFactory;
}
public Set<Class<?>> getCustomMybatisMappers() {
return customMybatisMappers;
}
public void setCustomMybatisMappers(Set<Class<?>> customMybatisMappers) {
this.customMybatisMappers = customMybatisMappers;
}
public Set<String> getCustomMybatisXMLMappers() {
return customMybatisXMLMappers;
}
public void setCustomMybatisXMLMappers(Set<String> customMybatisXMLMappers) {
this.customMybatisXMLMappers = customMybatisXMLMappers;
}
public List<SessionFactory> getCustomSessionFactories() {
return customSessionFactories;
}
public void setCustomSessionFactories(List<SessionFactory> customSessionFactories) {
this.customSessionFactories = customSessionFactories;
}
public DbSqlSessionFactory getDbSqlSessionFactory() {
return dbSqlSessionFactory;
}
public void setDbSqlSessionFactory(DbSqlSessionFactory dbSqlSessionFactory) {
this.dbSqlSessionFactory = dbSqlSessionFactory;
}
public boolean isUsingRelationalDatabase() {
return usingRelationalDatabase;
}
public void setUsingRelationalDatabase(boolean usingRelationalDatabase) {
this.usingRelationalDatabase = usingRelationalDatabase;
}
// --- database naming / schema handling ---
public String getDatabaseTablePrefix() {
return databaseTablePrefix;
}
public void setDatabaseTablePrefix(String databaseTablePrefix) {
this.databaseTablePrefix = databaseTablePrefix;
}
public String getDatabaseCatalog() {
return databaseCatalog;
}
public void setDatabaseCatalog(String databaseCatalog) {
this.databaseCatalog = databaseCatalog;
}
public String getDatabaseSchema() {
return databaseSchema;
}
public void setDatabaseSchema(String databaseSchema) {
this.databaseSchema = databaseSchema;
}
public boolean isTablePrefixIsSchema() {
return tablePrefixIsSchema;
}
public void setTablePrefixIsSchema(boolean tablePrefixIsSchema) {
this.tablePrefixIsSchema = tablePrefixIsSchema;
}
public Map<Class<?>, SessionFactory> getSessionFactories() {
return sessionFactories;
}
public DmnEngineConfiguration setSessionFactories(Map<Class<?>, SessionFactory> sessionFactories) {
this.sessionFactories = sessionFactories;
return this;
}
public TransactionContextFactory getTransactionContextFactory() {
return transactionContextFactory;
}
public DmnEngineConfiguration setTransactionContextFactory(TransactionContextFactory transactionContextFactory) {
this.transactionContextFactory = transactionContextFactory;
return this;
}
public boolean isEnableSafeDmnXml() {
return enableSafeDmnXml;
}
public DmnEngineConfiguration setEnableSafeDmnXml(boolean enableSafeDmnXml) {
this.enableSafeDmnXml = enableSafeDmnXml;
return this;
}
public Clock getClock() {
return clock;
}
public DmnEngineConfiguration setClock(Clock clock) {
this.clock = clock;
return this;
}
// --- custom expression functions and property handlers ---
public CustomExpressionFunctionRegistry getCustomExpressionFunctionRegistry() {
return customExpressionFunctionRegistry;
}
public void setCustomExpressionFunctionRegistry(CustomExpressionFunctionRegistry customExpressionFunctionRegistry) {
this.customExpressionFunctionRegistry = customExpressionFunctionRegistry;
}
public CustomExpressionFunctionRegistry getPostCustomExpressionFunctionRegistry() {
return postCustomExpressionFunctionRegistry;
}
public void setPostCustomExpressionFunctionRegistry(CustomExpressionFunctionRegistry postCustomExpressionFunctionRegistry) {
this.postCustomExpressionFunctionRegistry = postCustomExpressionFunctionRegistry;
}
public Map<String, Method> getCustomExpressionFunctions() {
return customExpressionFunctions;
}
public void setCustomExpressionFunctions(Map<String, Method> customExpressionFunctions) {
this.customExpressionFunctions = customExpressionFunctions;
}
public Map<Class<?>, PropertyHandler> getCustomPropertyHandlers() {
return customPropertyHandlers;
}
public void setCustomPropertyHandlers(Map<Class<?>, PropertyHandler> customPropertyHandlers) {
this.customPropertyHandlers = customPropertyHandlers;
}
// Write-only in this section: databaseSchemaUpdate drives schema handling
// (drop-create / update / validate) at engine bootstrap.
public DmnEngineConfiguration setDatabaseSchemaUpdate(String databaseSchemaUpdate) {
this.databaseSchemaUpdate = databaseSchemaUpdate;
return this;
}
}
| |
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.drools.workbench.models.guided.dtable.shared.model;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.drools.workbench.models.datamodel.rule.ActionInsertFact;
import org.drools.workbench.models.datamodel.rule.BaseSingleFieldConstraint;
import org.drools.workbench.models.datamodel.rule.FactPattern;
import org.drools.workbench.models.datamodel.rule.IAction;
import org.drools.workbench.models.datamodel.rule.IPattern;
import org.drools.workbench.models.datamodel.rule.RuleModel;
import org.drools.workbench.models.datamodel.rule.SingleFieldConstraint;
import org.drools.workbench.models.guided.dtable.shared.model.adaptors.ActionInsertFactCol52ActionInsertFactAdaptor;
import org.drools.workbench.models.guided.dtable.shared.model.adaptors.ActionInsertFactCol52ActionInsertLogicalFactAdaptor;
import org.drools.workbench.models.guided.dtable.shared.model.adaptors.ConditionCol52FieldConstraintAdaptor;
import org.drools.workbench.models.guided.dtable.shared.model.adaptors.Pattern52FactPatternAdaptor;
import org.uberfire.commons.validation.PortablePreconditions;
/**
* A RuleModel that can provide details of bound Facts and Fields from an
* associated Decision Table. This allows columns using BRL fragments to
* integrate with Decision Table columns
*/
public class BRLRuleModel extends RuleModel {
private static final long serialVersionUID = 540l;
private GuidedDecisionTable52 dtable;
public BRLRuleModel() {
}
public BRLRuleModel(final GuidedDecisionTable52 dtable) {
PortablePreconditions.checkNotNull("dtable",
dtable);
this.dtable = dtable;
}
@Override
public List<String> getLHSBoundFacts() {
final Set<String> facts = new HashSet<>();
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
if (p.isBound()) {
facts.add(p.getBoundName());
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
facts.addAll(rm.getLHSBoundFacts());
}
}
facts.addAll(super.getLHSBoundFacts());
return new ArrayList<>(facts);
}
@Override
public FactPattern getLHSBoundFact(final String var) {
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
if (p.isBound() && p.getBoundName().equals(var)) {
return new Pattern52FactPatternAdaptor(p);
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
final FactPattern fp = rm.getLHSBoundFact(var);
if (fp != null) {
return fp;
}
}
}
return super.getLHSBoundFact(var);
}
@Override
public SingleFieldConstraint getLHSBoundField(final String var) {
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
for (ConditionCol52 cc : p.getChildColumns()) {
if (cc.isBound() && cc.getBinding().equals(var)) {
final ConditionCol52FieldConstraintAdaptor sfcAdaptor = new ConditionCol52FieldConstraintAdaptor(cc);
sfcAdaptor.setFactType(p.getFactType());
return sfcAdaptor;
}
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
final SingleFieldConstraint sfc = rm.getLHSBoundField(var);
if (sfc != null) {
return sfc;
}
}
}
return super.getLHSBoundField(var);
}
@Override
public String getLHSBindingType(final String var) {
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
if (p.isBound() && p.getBoundName().equals(var)) {
return p.getFactType();
}
for (ConditionCol52 cc : p.getChildColumns()) {
if (cc.isBound() && cc.getBinding().equals(var)) {
return cc.getFieldType();
}
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
final String type = rm.getLHSBindingType(var);
if (type != null) {
return type;
}
}
}
return super.getLHSBindingType(var);
}
@Override
public FactPattern getLHSParentFactPatternForBinding(final String var) {
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
if (p.isBound() && p.getBoundName().equals(var)) {
return new Pattern52FactPatternAdaptor(p);
}
for (ConditionCol52 cc : p.getChildColumns()) {
if (cc.isBound() && cc.getBinding().equals(var)) {
return new Pattern52FactPatternAdaptor(p);
}
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
final FactPattern fp = rm.getLHSParentFactPatternForBinding(var);
if (fp != null) {
return fp;
}
}
}
return super.getLHSParentFactPatternForBinding(var);
}
@Override
public List<String> getAllVariables() {
final Set<String> variables = new HashSet<>();
variables.addAll(getAllLHSVariables());
variables.addAll(getAllRHSVariables());
return new ArrayList<>(variables);
}
@Override
public List<String> getAllLHSVariables() {
return getLHSVariables(true,
true);
}
@Override
public List<String> getLHSPatternVariables() {
return getLHSVariables(true,
false);
}
@Override
public List<String> getLHSVariables(final boolean includePatterns,
final boolean includeFields) {
final Set<String> variables = new HashSet<>();
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
if (p.isBound()) {
if (includePatterns) {
variables.add(p.getBoundName());
}
}
for (ConditionCol52 cc : p.getChildColumns()) {
if (cc.isBound()) {
if (includeFields) {
variables.add(cc.getBinding());
}
}
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
variables.addAll(rm.getLHSVariables(includePatterns,
includeFields));
}
}
variables.addAll(super.getLHSVariables(includePatterns,
includeFields));
return new ArrayList<>(variables);
}
@Override
public List<String> getAllRHSVariables() {
final Set<String> variables = new HashSet<>();
for (ActionCol52 col : dtable.getActionCols()) {
if (col instanceof ActionInsertFactCol52) {
final ActionInsertFactCol52 action = (ActionInsertFactCol52) col;
variables.add(action.getBoundName());
} else if (col instanceof BRLActionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLActionColumn brl = (BRLActionColumn) col;
rm.rhs = brl.getDefinition().toArray(new IAction[brl.getDefinition().size()]);
variables.addAll(rm.getAllRHSVariables());
}
}
variables.addAll(super.getAllRHSVariables());
return new ArrayList<>(variables);
}
@Override
public boolean isBoundFactUsed(final String binding) {
for (ActionCol52 col : dtable.getActionCols()) {
if (col instanceof ActionInsertFactCol52) {
final ActionInsertFactCol52 action = (ActionInsertFactCol52) col;
if (action.getBoundName().equals(binding)) {
return true;
}
} else if (col instanceof ActionSetFieldCol52) {
final ActionSetFieldCol52 action = (ActionSetFieldCol52) col;
if (action.getBoundName().equals(binding)) {
return true;
}
} else if (col instanceof ActionRetractFactCol52) {
if (col instanceof LimitedEntryActionRetractFactCol52) {
//Check whether Limited Entry retraction is bound to Pattern
final LimitedEntryActionRetractFactCol52 ler = (LimitedEntryActionRetractFactCol52) col;
if (ler.getValue().getStringValue().equals(binding)) {
return true;
}
} else {
//Check whether data for column contains Pattern binding
final int colIndex = dtable.getExpandedColumns().indexOf(col);
for (List<DTCellValue52> row : dtable.getData()) {
DTCellValue52 cell = row.get(colIndex);
if (cell != null && cell.getStringValue().equals(binding)) {
return true;
}
}
}
} else if (col instanceof BRLActionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLActionColumn brl = (BRLActionColumn) col;
rm.rhs = brl.getDefinition().toArray(new IAction[brl.getDefinition().size()]);
if (rm.isBoundFactUsed(binding)) {
return true;
}
}
}
return super.isBoundFactUsed(binding);
}
@Override
public List<String> getBoundVariablesInScope(final BaseSingleFieldConstraint con) {
final Set<String> variables = new HashSet<>();
for (CompositeColumn<? extends BaseColumn> col : dtable.getConditions()) {
if (col instanceof Pattern52) {
final Pattern52 p = (Pattern52) col;
if (p.isBound()) {
variables.add(p.getBoundName());
}
for (ConditionCol52 cc : p.getChildColumns()) {
if (cc.isBound()) {
variables.add(cc.getBinding());
}
}
} else if (col instanceof BRLConditionColumn) {
//Delegate to super class's implementation
final RuleModel rm = new RuleModel();
final BRLConditionColumn brl = (BRLConditionColumn) col;
rm.lhs = brl.getDefinition().toArray(new IPattern[brl.getDefinition().size()]);
variables.addAll(rm.getBoundVariablesInScope(con));
}
}
variables.addAll(super.getBoundVariablesInScope(con));
return new ArrayList<>(variables);
}
@Override
public boolean isVariableNameUsed(String s) {
return super.isVariableNameUsed(s);
}
/**
 * Returns the names of all facts bound on the RHS: facts inserted by
 * ActionInsertFactCol52 columns, facts bound inside BRL action fragments
 * (resolved through a scratch RuleModel) and those from the superclass's own
 * model.
 *
 * @return the distinct RHS-bound fact names (iteration order unspecified)
 */
@Override
public List<String> getRHSBoundFacts() {
    final Set<String> boundFacts = new HashSet<>();
    for (ActionCol52 actionColumn : dtable.getActionCols()) {
        if (actionColumn instanceof ActionInsertFactCol52) {
            boundFacts.add(((ActionInsertFactCol52) actionColumn).getBoundName());
        } else if (actionColumn instanceof BRLActionColumn) {
            // Wrap the BRL fragment in a throwaway RuleModel and reuse its lookup.
            final BRLActionColumn brlColumn = (BRLActionColumn) actionColumn;
            final RuleModel scratchModel = new RuleModel();
            scratchModel.rhs = brlColumn.getDefinition().toArray(new IAction[0]);
            boundFacts.addAll(scratchModel.getRHSBoundFacts());
        }
    }
    boundFacts.addAll(super.getRHSBoundFacts());
    return new ArrayList<>(boundFacts);
}
/**
 * Resolves the RHS "insert fact" action bound to the given variable name.
 * ActionInsertFactCol52 columns are adapted to the ActionInsertFact model
 * (logical-insert or plain-insert adaptor depending on the column); BRL action
 * fragments are searched via a scratch RuleModel; failing both, the lookup
 * falls through to the superclass.
 *
 * @param var the bound fact name to resolve
 * @return the matching action, or the superclass's result when none matches
 */
@Override
public ActionInsertFact getRHSBoundFact(final String var) {
    for (ActionCol52 actionColumn : dtable.getActionCols()) {
        if (actionColumn instanceof ActionInsertFactCol52) {
            final ActionInsertFactCol52 insertColumn = (ActionInsertFactCol52) actionColumn;
            if (!insertColumn.getBoundName().equals(var)) {
                continue;
            }
            return insertColumn.isInsertLogical()
                    ? new ActionInsertFactCol52ActionInsertLogicalFactAdaptor(insertColumn)
                    : new ActionInsertFactCol52ActionInsertFactAdaptor(insertColumn);
        } else if (actionColumn instanceof BRLActionColumn) {
            // Wrap the BRL fragment in a throwaway RuleModel and reuse its lookup.
            final BRLActionColumn brlColumn = (BRLActionColumn) actionColumn;
            final RuleModel scratchModel = new RuleModel();
            scratchModel.rhs = brlColumn.getDefinition().toArray(new IAction[0]);
            final ActionInsertFact found = scratchModel.getRHSBoundFact(var);
            if (found != null) {
                return found;
            }
        }
    }
    return super.getRHSBoundFact(var);
}
}
| |
package com.eidosmedia.eclipse.maven.resources.remote;
import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.Scanner;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.m2e.core.MavenPlugin;
import org.eclipse.m2e.core.embedder.ArtifactKey;
import org.eclipse.m2e.core.embedder.IMaven;
import org.eclipse.m2e.core.project.IMavenProjectFacade;
import org.eclipse.m2e.core.project.IMavenProjectRegistry;
import org.eclipse.m2e.core.project.configurator.MojoExecutionBuildParticipant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.plexus.build.incremental.BuildContext;
/**
*
* @author Maurizio Merli
*
*/
/**
 * m2e build participant for the Maven Remote Resources plugin. Routes the
 * "bundle" and "process" goals through incremental-build-aware handling and
 * delegates every other goal to the default mojo execution behaviour.
 *
 * @author Maurizio Merli
 */
public class BuildParticipant extends MojoExecutionBuildParticipant {

    private static final Logger log = LoggerFactory.getLogger(BuildParticipant.class);

    /**
     * @param execution the mojo execution this participant wraps
     */
    public BuildParticipant(MojoExecution execution) {
        // Both boolean flags enabled — presumably runOnIncremental and
        // runOnConfiguration; TODO confirm against the m2e API version in use.
        super(execution, true, true);
    }

    /**
     * Dispatches the build on the mojo goal: "bundle" and "process" get the
     * incremental-aware handling below, anything else is delegated unchanged.
     *
     * @param kind the Eclipse build kind (FULL_BUILD, INCREMENTAL_BUILD, ...)
     * @param monitor progress monitor
     * @return the projects this build depends on, or null when nothing was done
     * @throws Exception if the delegated build fails
     */
    @Override
    public Set<IProject> build(int kind, IProgressMonitor monitor) throws Exception {
        final MojoExecution mojoExecution = getMojoExecution();
        log.debug("execution: {}", mojoExecution);
        if (mojoExecution == null) {
            // Nothing to run for this participant.
            return null;
        }
        final String phase = mojoExecution.getLifecyclePhase();
        log.debug("phase: {}", phase);
        final String goal = mojoExecution.getGoal();
        log.debug("goal: {}", goal);
        if ("bundle".equalsIgnoreCase(goal)) {
            return buildBundle(kind, monitor);
        } else if ("process".equalsIgnoreCase(goal)) {
            return buildProcess(kind, monitor);
        } else {
            return super.build(kind, monitor);
        }
    }

    /**
     * Handles the "bundle" goal: on incremental/auto builds, uses the build
     * context's scanners to detect changed or deleted resources and skips the
     * mojo when neither is found; otherwise delegates to the mojo and refreshes
     * the output directory in the workspace.
     *
     * @param kind the Eclipse build kind
     * @param monitor progress monitor
     * @return the delegated mojo result, or null when the incremental check
     *     found nothing to do
     * @throws Exception if parameter resolution or the mojo execution fails
     */
    private Set<IProject> buildBundle(int kind, IProgressMonitor monitor) throws Exception {
        log.info("process \"bundle\" goal");
        final IMaven maven = MavenPlugin.getMaven();
        final IMavenProjectFacade currentProject = getMavenProjectFacade();
        final MavenProject mavenProject = currentProject.getMavenProject();
        final BuildContext buildContext = getBuildContext();
        // NOTE(review): projectRegistry and sourcesDirectory below are never
        // used in this method — candidates for removal.
        final IMavenProjectRegistry projectRegistry = MavenPlugin.getMavenProjectRegistry();
        ArtifactKey artifactKey = currentProject.getArtifactKey();
        String shortArtifactKey = artifactKey.getGroupId() + ":" + artifactKey.getArtifactId() + ":" + artifactKey.getVersion();
        log.debug("artifact key: {}", shortArtifactKey);
        File basedir = mavenProject.getBasedir();
        File sourcesDirectory = new File(basedir, "src");
        File resourcesDirectory = maven.getMojoParameterValue(getSession(), getMojoExecution(), "resourcesDirectory", File.class);
        File outputDirectory = maven.getMojoParameterValue(getSession(), getMojoExecution(), "outputDirectory", File.class);
        File remoteResourcesDescriptor = new File(outputDirectory, "META-INF/maven/remote-resources.xml");
        // Always null: the original lookup is commented out, so the
        // "preprocessedFiles == null" test below is currently always true.
        String preprocessedFiles = null; // (String) buildContext.getValue("preprocessedFiles");
        if (remoteResourcesDescriptor.exists()) {
            if ((INCREMENTAL_BUILD == kind || AUTO_BUILD == kind) && preprocessedFiles == null) {
                // Incremental build: look for changed resources first...
                log.debug("scan resources {}", resourcesDirectory);
                Scanner ds = buildContext.newScanner(resourcesDirectory);
                ds.scan();
                String[] files = ds.getIncludedFiles();
                if (files == null || files.length <= 0) {
                    log.debug("build check: no resource changes");
                    // ...then for deleted resources before deciding to skip.
                    log.debug("scan deleted resources {}", resourcesDirectory);
                    ds = buildContext.newDeleteScanner(resourcesDirectory);
                    ds.scan();
                    files = ds.getIncludedFiles();
                    if (files == null || files.length <= 0) {
                        // Neither changes nor deletions: skip the mojo entirely.
                        return null;
                    } else {
                        log.debug("build check: resources deleted");
                    }
                } else {
                    log.debug("build check: resources changed");
                }
            } else {
                log.debug("build check: full build");
            }
        } else {
            log.debug("build check: remote resources descriptor does not exists");
        }
        final Set<IProject> result = super.build(kind, monitor);
        if (outputDirectory != null && outputDirectory.exists()) {
            // Make the workspace aware of files (re)generated by the mojo.
            log.debug("refresh output directory: {}", outputDirectory);
            buildContext.refresh(outputDirectory);
        }
        return result;
    }

    /**
     * Handles the "process" goal: maps the configured remote resource bundles
     * to workspace projects, skips the mojo on incremental builds when no
     * bundle output is newer than the output directory, optionally cleans the
     * destination folder (guarded by a workspace preference AND an explicit
     * cleanOutputDirectory=true mojo property) and finally runs the mojo.
     *
     * @param kind the Eclipse build kind
     * @param monitor progress monitor
     * @return the dependency projects consumed by this build, merged with the
     *     delegated build result when there is one
     * @throws Exception if parameter resolution or the mojo execution fails
     */
    private Set<IProject> buildProcess(int kind, IProgressMonitor monitor) throws Exception {
        log.info("process \"process\" goal");
        final IMaven maven = MavenPlugin.getMaven();
        final IMavenProjectFacade currentProject = getMavenProjectFacade();
        final BuildContext buildContext = getBuildContext();
        final IMavenProjectRegistry projectRegistry = MavenPlugin.getMavenProjectRegistry();
        final IWorkspace workspace = ResourcesPlugin.getWorkspace();
        // "resourceBundles" holds groupId:artifactId:version keys of the
        // bundles this project consumes.
        List<String> bundles = maven.getMojoParameterValue(getSession(), getMojoExecution(), "resourceBundles", List.class);
        Set<String> bundleSet = new HashSet<String>(bundles.size());
        for (String bundle : bundles) {
            log.debug("remote bundle: {}", bundle);
            bundleSet.add(bundle);
        }
        File outputDirectory = maven.getMojoParameterValue(getSession(), getMojoExecution(), "outputDirectory", File.class);
        // Truncate to whole seconds so the comparison tolerates filesystems
        // with one-second timestamp resolution.
        long lastModified = (outputDirectory.lastModified() / 1000) * 1000; // remove millis part
        Set<IProject> dependencyProjects = new HashSet<IProject>();
        boolean skip = true;
        IMavenProjectFacade[] mavenProjects = projectRegistry.getProjects();
        for (IMavenProjectFacade mavenProject : mavenProjects) {
            if (mavenProject.equals(currentProject)) {
                continue;
            }
            ArtifactKey artifactKey = mavenProject.getArtifactKey();
            String shortArtifactKey = artifactKey.getGroupId() + ":" + artifactKey.getArtifactId() + ":" + artifactKey.getVersion();
            if (!bundleSet.contains(shortArtifactKey)) {
                log.debug("skip workspace bundle: {}", shortArtifactKey);
                continue;
            }
            log.debug("check workspace bundle: {}", shortArtifactKey);
            IProject dependencyProject = mavenProject.getProject();
            dependencyProjects.add(dependencyProject);
            if (skip) {
                // TODO visits only exported resources
                // Stop checking timestamps once one bundle proves newer than
                // our output directory.
                IPath path = mavenProject.getOutputLocation();
                IFolder outputLocation = workspace.getRoot().getFolder(path);
                CheckLastModifiedVisitor visitor = new CheckLastModifiedVisitor(lastModified);
                outputLocation.accept(visitor, IContainer.INCLUDE_PHANTOMS);
                if (visitor.getResult()) {
                    skip = false;
                }
            }
        }
        if (buildContext.isIncremental() && skip) {
            // Incremental build and nothing newer than our output: do nothing,
            // but still report the dependency projects.
            log.debug("check: no remote resources to process");
            return dependencyProjects;
        }
        boolean cleanDestinationFolder = false;
        IPreferenceStore preferenceStore = Activator.getDefault().getPreferenceStore();
        if (preferenceStore != null && preferenceStore.getBoolean(PreferenceConstants.P_CLEAN_DESTINATION_FOLDER)) {
            log.debug("cleanDestinationFolder option is active");
            final MavenProject mavenProject = currentProject.getMavenProject();
            String buildDirectoryPath = mavenProject.getBuild().getDirectory() + File.separatorChar;
            String outputDirectoryPath = outputDirectory.getCanonicalPath();
            // Safety guard: only ever clean below the project's build directory.
            if (outputDirectoryPath.startsWith(buildDirectoryPath)) {
                boolean enableClean = false;
                // The clean additionally requires an explicit opt-in in the
                // mojo configuration: <properties><cleanOutputDirectory>true</...>.
                Xpp3Dom conf = getMojoExecution().getConfiguration();
                if (conf != null) {
                    conf = conf.getChild("properties");
                    if (conf != null) {
                        conf = conf.getChild("cleanOutputDirectory");
                        if (conf != null && "true".equalsIgnoreCase(conf.getValue())) {
                            enableClean = true;
                        }
                    }
                }
                if (enableClean) {
                    cleanDestinationFolder = true;
                } else {
                    log.warn("cleanOutputDirectory not enable");
                }
            } else {
                log.warn("output directory path is not under 'target' folder, clean ignored");
            }
        }
        if (cleanDestinationFolder) {
            log.debug("clean destination folder");
            // Delete the contents only; keep the output directory itself.
            delete(outputDirectory, false);
        }
        log.debug("do mojo...");
        Set<IProject> result = super.build(kind, monitor);
        if (result == null) {
            result = dependencyProjects;
        } else {
            result.addAll(dependencyProjects);
        }
        log.debug("update destination folder timestamp");
        outputDirectory.setLastModified(System.currentTimeMillis()); // touch output folder
        log.debug("output resources: {}", outputDirectory);
        // NOTE(review): outputDirectory cannot be null here (it was
        // dereferenced above), so this null check is dead.
        if (outputDirectory != null) {
            buildContext.refresh(outputDirectory);
        }
        return result;
    }

    /**
     * Recursively deletes the contents of {@code file}; the node itself is
     * removed only when {@code deleteParent} is true. The top-level call in
     * {@link #buildProcess} passes false so the output directory survives.
     *
     * @param file file or directory whose children are deleted
     * @param deleteParent whether {@code file} itself is deleted as well
     */
    private void delete(File file, boolean deleteParent) {
        File[] children = file.listFiles();
        if (children != null) {
            for (File child : children) {
                delete(child, true);
            }
        }
        if (deleteParent) {
            // Deletion failures are silently ignored (return value unchecked).
            file.delete();
        }
    }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source;
import android.os.Handler;
import android.os.SystemClock;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.util.Assertions;
import java.io.IOException;
/**
* Interface for callbacks to be notified of adaptive {@link MediaSource} events.
*/
public interface AdaptiveMediaSourceEventListener {

  /**
   * Called when a load begins.
   *
   * @param dataSpec Defines the data being loaded.
   * @param dataType One of the {@link C} {@code DATA_TYPE_*} constants defining the type of data
   *     being loaded.
   * @param trackType One of the {@link C} {@code TRACK_TYPE_*} constants if the data corresponds
   *     to media of a specific type. {@link C#TRACK_TYPE_UNKNOWN} otherwise.
   * @param trackFormat The format of the track to which the data belongs. Null if the data does
   *     not belong to a track.
   * @param trackSelectionReason One of the {@link C} {@code SELECTION_REASON_*} constants if the
   *     data belongs to a track. {@link C#SELECTION_REASON_UNKNOWN} otherwise.
   * @param trackSelectionData Optional data associated with the selection of the track to which the
   *     data belongs. Null if the data does not belong to a track.
   * @param mediaStartTimeMs The start time of the media being loaded, or {@link C#TIME_UNSET} if
   *     the load is not for media data.
   * @param mediaEndTimeMs The end time of the media being loaded, or {@link C#TIME_UNSET} if the
   *     load is not for media data.
   * @param elapsedRealtimeMs The value of {@link SystemClock#elapsedRealtime} when the load began.
   */
  void onLoadStarted(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
      int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
      long mediaEndTimeMs, long elapsedRealtimeMs);

  /**
   * Called when a load ends.
   *
   * @param dataSpec Defines the data being loaded.
   * @param dataType One of the {@link C} {@code DATA_TYPE_*} constants defining the type of data
   *     being loaded.
   * @param trackType One of the {@link C} {@code TRACK_TYPE_*} constants if the data corresponds
   *     to media of a specific type. {@link C#TRACK_TYPE_UNKNOWN} otherwise.
   * @param trackFormat The format of the track to which the data belongs. Null if the data does
   *     not belong to a track.
   * @param trackSelectionReason One of the {@link C} {@code SELECTION_REASON_*} constants if the
   *     data belongs to a track. {@link C#SELECTION_REASON_UNKNOWN} otherwise.
   * @param trackSelectionData Optional data associated with the selection of the track to which the
   *     data belongs. Null if the data does not belong to a track.
   * @param mediaStartTimeMs The start time of the media being loaded, or {@link C#TIME_UNSET} if
   *     the load is not for media data.
   * @param mediaEndTimeMs The end time of the media being loaded, or {@link C#TIME_UNSET} if the
   *     load is not for media data.
   * @param elapsedRealtimeMs The value of {@link SystemClock#elapsedRealtime} when the load ended.
   * @param loadDurationMs The duration of the load.
   * @param bytesLoaded The number of bytes that were loaded.
   */
  void onLoadCompleted(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
      int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
      long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded);

  /**
   * Called when a load is canceled.
   *
   * @param dataSpec Defines the data being loaded.
   * @param dataType One of the {@link C} {@code DATA_TYPE_*} constants defining the type of data
   *     being loaded.
   * @param trackType One of the {@link C} {@code TRACK_TYPE_*} constants if the data corresponds
   *     to media of a specific type. {@link C#TRACK_TYPE_UNKNOWN} otherwise.
   * @param trackFormat The format of the track to which the data belongs. Null if the data does
   *     not belong to a track.
   * @param trackSelectionReason One of the {@link C} {@code SELECTION_REASON_*} constants if the
   *     data belongs to a track. {@link C#SELECTION_REASON_UNKNOWN} otherwise.
   * @param trackSelectionData Optional data associated with the selection of the track to which the
   *     data belongs. Null if the data does not belong to a track.
   * @param mediaStartTimeMs The start time of the media being loaded, or {@link C#TIME_UNSET} if
   *     the load is not for media data.
   * @param mediaEndTimeMs The end time of the media being loaded, or {@link C#TIME_UNSET} if the
   *     load is not for media data.
   * @param elapsedRealtimeMs The value of {@link SystemClock#elapsedRealtime} when the load was
   *     canceled.
   * @param loadDurationMs The duration of the load up to the point at which it was canceled.
   * @param bytesLoaded The number of bytes that were loaded prior to cancelation.
   */
  void onLoadCanceled(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
      int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
      long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded);

  /**
   * Called when a load error occurs.
   * <p>
   * The error may or may not have resulted in the load being canceled, as indicated by the
   * {@code wasCanceled} parameter. If the load was canceled, {@link #onLoadCanceled} will
   * <em>not</em> be called in addition to this method.
   *
   * @param dataSpec Defines the data being loaded.
   * @param dataType One of the {@link C} {@code DATA_TYPE_*} constants defining the type of data
   *     being loaded.
   * @param trackType One of the {@link C} {@code TRACK_TYPE_*} constants if the data corresponds
   *     to media of a specific type. {@link C#TRACK_TYPE_UNKNOWN} otherwise.
   * @param trackFormat The format of the track to which the data belongs. Null if the data does
   *     not belong to a track.
   * @param trackSelectionReason One of the {@link C} {@code SELECTION_REASON_*} constants if the
   *     data belongs to a track. {@link C#SELECTION_REASON_UNKNOWN} otherwise.
   * @param trackSelectionData Optional data associated with the selection of the track to which the
   *     data belongs. Null if the data does not belong to a track.
   * @param mediaStartTimeMs The start time of the media being loaded, or {@link C#TIME_UNSET} if
   *     the load is not for media data.
   * @param mediaEndTimeMs The end time of the media being loaded, or {@link C#TIME_UNSET} if the
   *     load is not for media data.
   * @param elapsedRealtimeMs The value of {@link SystemClock#elapsedRealtime} when the error
   *     occurred.
   * @param loadDurationMs The duration of the load up to the point at which the error occurred.
   * @param bytesLoaded The number of bytes that were loaded prior to the error.
   * @param error The load error.
   * @param wasCanceled Whether the load was canceled as a result of the error.
   */
  void onLoadError(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
      int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
      long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded,
      IOException error, boolean wasCanceled);

  /**
   * Called when data is removed from the back of a media buffer, typically so that it can be
   * re-buffered in a different format.
   *
   * @param trackType The type of the media. One of the {@link C} {@code TRACK_TYPE_*} constants.
   * @param mediaStartTimeMs The start time of the media being discarded.
   * @param mediaEndTimeMs The end time of the media being discarded.
   */
  void onUpstreamDiscarded(int trackType, long mediaStartTimeMs, long mediaEndTimeMs);

  /**
   * Called when a downstream format change occurs (i.e. when the format of the media being read
   * from one or more {@link SampleStream}s provided by the source changes).
   *
   * @param trackType The type of the media. One of the {@link C} {@code TRACK_TYPE_*} constants.
   * @param trackFormat The format of the track to which the data belongs. Null if the data does
   *     not belong to a track.
   * @param trackSelectionReason One of the {@link C} {@code SELECTION_REASON_*} constants if the
   *     data belongs to a track. {@link C#SELECTION_REASON_UNKNOWN} otherwise.
   * @param trackSelectionData Optional data associated with the selection of the track to which the
   *     data belongs. Null if the data does not belong to a track.
   * @param mediaTimeMs The media time at which the change occurred.
   */
  void onDownstreamFormatChanged(int trackType, Format trackFormat, int trackSelectionReason,
      Object trackSelectionData, long mediaTimeMs);

  /**
   * Dispatches events to a {@link AdaptiveMediaSourceEventListener}.
   * <p>
   * Note: the dispatch methods below take media times in <em>microseconds</em>
   * (parameters named {@code *Us}); {@link #adjustMediaTime} converts them to
   * milliseconds and applies {@code mediaTimeOffsetMs} before the listener is
   * invoked with millisecond values.
   */
  final class EventDispatcher {

    // Null iff listener is null (see constructor); otherwise the Handler the
    // listener callbacks are posted to.
    private final Handler handler;
    // May be null, in which case every dispatch method is a no-op.
    private final AdaptiveMediaSourceEventListener listener;
    // Offset added to all media times reported to the listener, in ms.
    private final long mediaTimeOffsetMs;

    /** Creates a dispatcher with a zero media time offset. */
    public EventDispatcher(Handler handler, AdaptiveMediaSourceEventListener listener) {
      this(handler, listener, 0);
    }

    /**
     * @param handler Handler used to post listener callbacks. Must be non-null
     *     when {@code listener} is non-null; ignored otherwise.
     * @param listener The listener to dispatch to, or null for a no-op dispatcher.
     * @param mediaTimeOffsetMs Offset applied to reported media times, in ms.
     */
    public EventDispatcher(Handler handler, AdaptiveMediaSourceEventListener listener,
        long mediaTimeOffsetMs) {
      this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
      this.listener = listener;
      this.mediaTimeOffsetMs = mediaTimeOffsetMs;
    }

    /** Returns a copy of this dispatcher with the given media time offset. */
    public EventDispatcher copyWithMediaTimeOffsetMs(long mediaTimeOffsetMs) {
      return new EventDispatcher(handler, listener, mediaTimeOffsetMs);
    }

    /** Convenience overload that fills in "unknown"/"unset" track parameters. */
    public void loadStarted(DataSpec dataSpec, int dataType, long elapsedRealtimeMs) {
      loadStarted(dataSpec, dataType, C.TRACK_TYPE_UNKNOWN, null, C.SELECTION_REASON_UNKNOWN,
          null, C.TIME_UNSET, C.TIME_UNSET, elapsedRealtimeMs);
    }

    /** Posts {@link #onLoadStarted} to the handler thread (media times in us). */
    public void loadStarted(final DataSpec dataSpec, final int dataType, final int trackType,
        final Format trackFormat, final int trackSelectionReason, final Object trackSelectionData,
        final long mediaStartTimeUs, final long mediaEndTimeUs, final long elapsedRealtimeMs) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onLoadStarted(dataSpec, dataType, trackType, trackFormat, trackSelectionReason,
                trackSelectionData, adjustMediaTime(mediaStartTimeUs),
                adjustMediaTime(mediaEndTimeUs), elapsedRealtimeMs);
          }
        });
      }
    }

    /** Convenience overload that fills in "unknown"/"unset" track parameters. */
    public void loadCompleted(DataSpec dataSpec, int dataType, long elapsedRealtimeMs,
        long loadDurationMs, long bytesLoaded) {
      loadCompleted(dataSpec, dataType, C.TRACK_TYPE_UNKNOWN, null, C.SELECTION_REASON_UNKNOWN,
          null, C.TIME_UNSET, C.TIME_UNSET, elapsedRealtimeMs, loadDurationMs, bytesLoaded);
    }

    /** Posts {@link #onLoadCompleted} to the handler thread (media times in us). */
    public void loadCompleted(final DataSpec dataSpec, final int dataType, final int trackType,
        final Format trackFormat, final int trackSelectionReason, final Object trackSelectionData,
        final long mediaStartTimeUs, final long mediaEndTimeUs, final long elapsedRealtimeMs,
        final long loadDurationMs, final long bytesLoaded) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onLoadCompleted(dataSpec, dataType, trackType, trackFormat,
                trackSelectionReason, trackSelectionData, adjustMediaTime(mediaStartTimeUs),
                adjustMediaTime(mediaEndTimeUs), elapsedRealtimeMs, loadDurationMs, bytesLoaded);
          }
        });
      }
    }

    /** Convenience overload that fills in "unknown"/"unset" track parameters. */
    public void loadCanceled(DataSpec dataSpec, int dataType, long elapsedRealtimeMs,
        long loadDurationMs, long bytesLoaded) {
      loadCanceled(dataSpec, dataType, C.TRACK_TYPE_UNKNOWN, null, C.SELECTION_REASON_UNKNOWN,
          null, C.TIME_UNSET, C.TIME_UNSET, elapsedRealtimeMs, loadDurationMs, bytesLoaded);
    }

    /** Posts {@link #onLoadCanceled} to the handler thread (media times in us). */
    public void loadCanceled(final DataSpec dataSpec, final int dataType, final int trackType,
        final Format trackFormat, final int trackSelectionReason, final Object trackSelectionData,
        final long mediaStartTimeUs, final long mediaEndTimeUs, final long elapsedRealtimeMs,
        final long loadDurationMs, final long bytesLoaded) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onLoadCanceled(dataSpec, dataType, trackType, trackFormat,
                trackSelectionReason, trackSelectionData, adjustMediaTime(mediaStartTimeUs),
                adjustMediaTime(mediaEndTimeUs), elapsedRealtimeMs, loadDurationMs, bytesLoaded);
          }
        });
      }
    }

    /** Convenience overload that fills in "unknown"/"unset" track parameters. */
    public void loadError(DataSpec dataSpec, int dataType, long elapsedRealtimeMs,
        long loadDurationMs, long bytesLoaded, IOException error, boolean wasCanceled) {
      loadError(dataSpec, dataType, C.TRACK_TYPE_UNKNOWN, null, C.SELECTION_REASON_UNKNOWN,
          null, C.TIME_UNSET, C.TIME_UNSET, elapsedRealtimeMs, loadDurationMs, bytesLoaded,
          error, wasCanceled);
    }

    /** Posts {@link #onLoadError} to the handler thread (media times in us). */
    public void loadError(final DataSpec dataSpec, final int dataType, final int trackType,
        final Format trackFormat, final int trackSelectionReason, final Object trackSelectionData,
        final long mediaStartTimeUs, final long mediaEndTimeUs, final long elapsedRealtimeMs,
        final long loadDurationMs, final long bytesLoaded, final IOException error,
        final boolean wasCanceled) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onLoadError(dataSpec, dataType, trackType, trackFormat, trackSelectionReason,
                trackSelectionData, adjustMediaTime(mediaStartTimeUs),
                adjustMediaTime(mediaEndTimeUs), elapsedRealtimeMs, loadDurationMs, bytesLoaded,
                error, wasCanceled);
          }
        });
      }
    }

    /** Posts {@link #onUpstreamDiscarded} to the handler thread (media times in us). */
    public void upstreamDiscarded(final int trackType, final long mediaStartTimeUs,
        final long mediaEndTimeUs) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onUpstreamDiscarded(trackType, adjustMediaTime(mediaStartTimeUs),
                adjustMediaTime(mediaEndTimeUs));
          }
        });
      }
    }

    /** Posts {@link #onDownstreamFormatChanged} to the handler thread (media time in us). */
    public void downstreamFormatChanged(final int trackType, final Format trackFormat,
        final int trackSelectionReason, final Object trackSelectionData,
        final long mediaTimeUs) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onDownstreamFormatChanged(trackType, trackFormat, trackSelectionReason,
                trackSelectionData, adjustMediaTime(mediaTimeUs));
          }
        });
      }
    }

    /**
     * Converts a media time from us to ms and applies {@code mediaTimeOffsetMs};
     * {@link C#TIME_UNSET} is propagated unchanged.
     */
    private long adjustMediaTime(long mediaTimeUs) {
      long mediaTimeMs = C.usToMs(mediaTimeUs);
      return mediaTimeMs == C.TIME_UNSET ? C.TIME_UNSET : mediaTimeOffsetMs + mediaTimeMs;
    }

  }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.runtime;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.errors.RetriableException;
import org.apache.kafka.connect.runtime.isolation.PluginClassLoader;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.sink.SinkConnector;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.sink.SinkTask;
import org.apache.kafka.connect.storage.Converter;
import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.kafka.connect.util.MockTime;
import org.easymock.Capture;
import org.easymock.CaptureType;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.easymock.PowerMock;
import org.powermock.api.easymock.annotation.Mock;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(PowerMockRunner.class)
@PrepareForTest(WorkerSinkTask.class)
@PowerMockIgnore("javax.management.*")
public class WorkerSinkTaskTest {
// These are fixed to keep this code simpler. In this example we assume byte[] raw values
// with mix of integer/string in Connect
private static final String TOPIC = "test";
private static final int PARTITION = 12;
private static final int PARTITION2 = 13;
private static final int PARTITION3 = 14;
private static final long FIRST_OFFSET = 45;
private static final Schema KEY_SCHEMA = Schema.INT32_SCHEMA;
private static final int KEY = 12;
private static final Schema VALUE_SCHEMA = Schema.STRING_SCHEMA;
private static final String VALUE = "VALUE";
private static final byte[] RAW_KEY = "key".getBytes();
private static final byte[] RAW_VALUE = "value".getBytes();
private static final TopicPartition TOPIC_PARTITION = new TopicPartition(TOPIC, PARTITION);
private static final TopicPartition TOPIC_PARTITION2 = new TopicPartition(TOPIC, PARTITION2);
private static final TopicPartition TOPIC_PARTITION3 = new TopicPartition(TOPIC, PARTITION3);
private static final Map<String, String> TASK_PROPS = new HashMap<>();
static {
TASK_PROPS.put(SinkConnector.TOPICS_CONFIG, TOPIC);
TASK_PROPS.put(TaskConfig.TASK_CLASS_CONFIG, TestSinkTask.class.getName());
}
private static final TaskConfig TASK_CONFIG = new TaskConfig(TASK_PROPS);
private ConnectorTaskId taskId = new ConnectorTaskId("job", 0);
private TargetState initialState = TargetState.STARTED;
private Time time;
private WorkerSinkTask workerTask;
@Mock
private SinkTask sinkTask;
private Capture<WorkerSinkTaskContext> sinkTaskContext = EasyMock.newCapture();
private WorkerConfig workerConfig;
@Mock
private PluginClassLoader pluginLoader;
@Mock
private Converter keyConverter;
@Mock
private Converter valueConverter;
@Mock
private TransformationChain<SinkRecord> transformationChain;
@Mock
private TaskStatus.Listener statusListener;
@Mock
private KafkaConsumer<byte[], byte[]> consumer;
private Capture<ConsumerRebalanceListener> rebalanceListener = EasyMock.newCapture();
private long recordsReturnedTp1;
private long recordsReturnedTp3;
/**
 * Builds a WorkerSinkTask wired to mocked collaborators. The task is a
 * partial mock with only createConsumer() stubbed, so tests can make it
 * return the mocked KafkaConsumer while exercising the real task logic.
 */
@Before
public void setUp() {
    time = new MockTime();
    // Minimal standalone worker config: JSON converters, schemas disabled for
    // the internal topics, file-backed offset storage.
    Map<String, String> workerProps = new HashMap<>();
    workerProps.put("key.converter", "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("value.converter", "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("internal.key.converter", "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("internal.value.converter", "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("internal.key.converter.schemas.enable", "false");
    workerProps.put("internal.value.converter.schemas.enable", "false");
    workerProps.put("offset.storage.file.filename", "/tmp/connect.offsets");
    workerConfig = new StandaloneConfig(workerProps);
    pluginLoader = PowerMock.createMock(PluginClassLoader.class);
    workerTask = PowerMock.createPartialMock(
        WorkerSinkTask.class, new String[]{"createConsumer"},
        taskId, sinkTask, statusListener, initialState, workerConfig, keyConverter, valueConverter, transformationChain, pluginLoader, time);
    // Per-test record counters (consumed by the expectConsumerPoll helpers).
    recordsReturnedTp1 = 0;
    recordsReturnedTp3 = 0;
}
/**
 * A task created with target state PAUSED must pause the consumer's whole
 * assignment immediately after the initial partition assignment.
 */
@Test
public void testStartPaused() throws Exception {
    // Re-create the task with TargetState.PAUSED instead of the default STARTED
    // used by setUp().
    workerTask = PowerMock.createPartialMock(
        WorkerSinkTask.class, new String[]{"createConsumer"},
        taskId, sinkTask, statusListener, TargetState.PAUSED, workerConfig, keyConverter, valueConverter, transformationChain, pluginLoader, time);
    expectInitializeTask();
    expectPollInitialAssignment();
    // Expect the full assignment to be paused after the initial poll.
    Set<TopicPartition> partitions = new HashSet<>(asList(TOPIC_PARTITION, TOPIC_PARTITION2));
    EasyMock.expect(consumer.assignment()).andReturn(partitions);
    consumer.pause(partitions);
    PowerMock.expectLastCall();
    PowerMock.replayAll();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    // Single iteration: handles the initial assignment and applies the pause.
    workerTask.iteration();
    PowerMock.verifyAll();
}
/**
 * Pausing a running task must notify the status listener, wake up and pause
 * the consumer, keep committing/polling (with empty puts) while paused, and
 * resume all partitions when transitioned back to STARTED.
 */
@Test
public void testPause() throws Exception {
    expectInitializeTask();
    expectPollInitialAssignment();
    // Normal running iteration: one record polled, converted and delivered.
    expectConsumerPoll(1);
    expectConversionAndTransformation(1);
    sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
    EasyMock.expectLastCall();
    Set<TopicPartition> partitions = new HashSet<>(asList(TOPIC_PARTITION, TOPIC_PARTITION2));
    // Pause
    statusListener.onPause(taskId);
    EasyMock.expectLastCall();
    // State transitions interrupt consumer.poll() via wakeup().
    expectConsumerWakeup();
    EasyMock.expect(consumer.assignment()).andReturn(partitions);
    consumer.pause(partitions);
    PowerMock.expectLastCall();
    // Offset commit as requested when pausing; No records returned by consumer.poll()
    sinkTask.preCommit(EasyMock.<Map<TopicPartition, OffsetAndMetadata>>anyObject());
    EasyMock.expectLastCall().andStubReturn(Collections.emptyMap());
    expectConsumerPoll(0);
    // While paused the task still calls put() with an empty batch.
    sinkTask.put(Collections.<SinkRecord>emptyList());
    EasyMock.expectLastCall();
    // And unpause
    statusListener.onResume(taskId);
    EasyMock.expectLastCall();
    expectConsumerWakeup();
    EasyMock.expect(consumer.assignment()).andReturn(new HashSet<>(asList(TOPIC_PARTITION, TOPIC_PARTITION2)));
    // Partitions are resumed individually.
    consumer.resume(singleton(TOPIC_PARTITION));
    PowerMock.expectLastCall();
    consumer.resume(singleton(TOPIC_PARTITION2));
    PowerMock.expectLastCall();
    expectConsumerPoll(1);
    expectConversionAndTransformation(1);
    sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
    EasyMock.expectLastCall();
    PowerMock.replayAll();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    workerTask.iteration(); // initial assignment
    workerTask.iteration(); // fetch some data
    workerTask.transitionTo(TargetState.PAUSED);
    workerTask.iteration(); // wakeup
    workerTask.iteration(); // now paused
    workerTask.transitionTo(TargetState.STARTED);
    workerTask.iteration(); // wakeup
    workerTask.iteration(); // now unpaused
    PowerMock.verifyAll();
}
/**
 * Verifies that when {@code SinkTask.put()} throws a {@link RetriableException}
 * the same batch is redelivered: the consumer is paused while retrying and
 * resumed once the retry succeeds.
 */
@Test
public void testPollRedelivery() throws Exception {
expectInitializeTask();
expectPollInitialAssignment();
// If a retriable exception is thrown, we should redeliver the same batch, pausing the consumer in the meantime
expectConsumerPoll(1);
expectConversionAndTransformation(1);
// Capture every put() batch so the redelivered batch could be inspected.
Capture<Collection<SinkRecord>> records = EasyMock.newCapture(CaptureType.ALL);
sinkTask.put(EasyMock.capture(records));
EasyMock.expectLastCall().andThrow(new RetriableException("retry"));
// Pause
HashSet<TopicPartition> partitions = new HashSet<>(asList(TOPIC_PARTITION, TOPIC_PARTITION2));
EasyMock.expect(consumer.assignment()).andReturn(partitions);
consumer.pause(partitions);
PowerMock.expectLastCall();
// Retry delivery should succeed
expectConsumerPoll(0);
sinkTask.put(EasyMock.capture(records));
EasyMock.expectLastCall();
// And unpause
EasyMock.expect(consumer.assignment()).andReturn(partitions);
consumer.resume(singleton(TOPIC_PARTITION));
PowerMock.expectLastCall();
consumer.resume(singleton(TOPIC_PARTITION2));
PowerMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration();
workerTask.iteration();
workerTask.iteration();
PowerMock.verifyAll();
}
/**
 * Verifies that an exception thrown from {@code SinkTask.close()} during
 * partition revocation propagates out of {@code iteration()} unchanged.
 */
@Test
public void testErrorInRebalancePartitionRevocation() throws Exception {
RuntimeException exception = new RuntimeException("Revocation error");
expectInitializeTask();
expectPollInitialAssignment();
expectRebalanceRevocationError(exception);
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration();
try {
workerTask.iteration();
fail("Poll should have raised the rebalance exception");
} catch (RuntimeException e) {
// The original exception instance must surface, not a wrapped copy.
assertEquals(exception, e);
}
PowerMock.verifyAll();
}
/**
 * Verifies that an exception thrown from {@code SinkTask.open()} during
 * partition assignment propagates out of {@code iteration()} unchanged.
 */
@Test
public void testErrorInRebalancePartitionAssignment() throws Exception {
RuntimeException exception = new RuntimeException("Assignment error");
expectInitializeTask();
expectPollInitialAssignment();
expectRebalanceAssignmentError(exception);
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration();
try {
workerTask.iteration();
fail("Poll should have raised the rebalance exception");
} catch (RuntimeException e) {
// The original exception instance must surface, not a wrapped copy.
assertEquals(exception, e);
}
PowerMock.verifyAll();
}
/**
 * Verifies that a {@link WakeupException} raised during the synchronous offset
 * commit performed on partition revocation causes the commit to be retried,
 * after which the rebalance (close/open/resume) completes normally.
 */
@Test
public void testWakeupInCommitSyncCausesRetry() throws Exception {
expectInitializeTask();
expectPollInitialAssignment();
expectConsumerPoll(1);
expectConversionAndTransformation(1);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
final List<TopicPartition> partitions = asList(TOPIC_PARTITION, TOPIC_PARTITION2);
// TP1 advanced by the single delivered record; TP2 unchanged.
final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
sinkTask.preCommit(offsets);
EasyMock.expectLastCall().andReturn(offsets);
// first one raises wakeup
consumer.commitSync(EasyMock.<Map<TopicPartition, OffsetAndMetadata>>anyObject());
EasyMock.expectLastCall().andThrow(new WakeupException());
// we should retry and complete the commit
consumer.commitSync(EasyMock.<Map<TopicPartition, OffsetAndMetadata>>anyObject());
EasyMock.expectLastCall();
// Standard revocation/assignment sequence after the commit succeeds.
sinkTask.close(new HashSet<>(partitions));
EasyMock.expectLastCall();
EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(FIRST_OFFSET);
EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(FIRST_OFFSET);
sinkTask.open(partitions);
EasyMock.expectLastCall();
// The poll drives the rebalance via the captured rebalance listener.
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(
new IAnswer<ConsumerRecords<byte[], byte[]>>() {
@Override
public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
rebalanceListener.getValue().onPartitionsRevoked(partitions);
rebalanceListener.getValue().onPartitionsAssigned(partitions);
return ConsumerRecords.empty();
}
});
EasyMock.expect(consumer.assignment()).andReturn(new HashSet<>(partitions));
consumer.resume(Collections.singleton(TOPIC_PARTITION));
EasyMock.expectLastCall();
consumer.resume(Collections.singleton(TOPIC_PARTITION2));
EasyMock.expectLastCall();
statusListener.onResume(taskId);
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration(); // poll for initial assignment
workerTask.iteration(); // first record delivered
workerTask.iteration(); // now rebalance with the wakeup triggered
PowerMock.verifyAll();
}
/**
 * Verifies that {@code SinkTaskContext.requestCommit()} triggers an async
 * offset commit on the next iteration, that the request flag is cleared once
 * the commit runs, and that lastCommittedOffsets is updated on success.
 */
@Test
public void testRequestCommit() throws Exception {
expectInitializeTask();
expectPollInitialAssignment();
expectConsumerPoll(1);
expectConversionAndTransformation(1);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
// TP1 advanced by the single delivered record; TP2 unchanged.
final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
sinkTask.preCommit(offsets);
EasyMock.expectLastCall().andReturn(offsets);
// Complete the async commit immediately by invoking the captured callback.
final Capture<OffsetCommitCallback> callback = EasyMock.newCapture();
consumer.commitAsync(EasyMock.eq(offsets), EasyMock.capture(callback));
EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
@Override
public Void answer() throws Throwable {
callback.getValue().onComplete(offsets, null);
return null;
}
});
expectConsumerPoll(0);
sinkTask.put(Collections.<SinkRecord>emptyList());
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration(); // initial assignment
workerTask.iteration(); // first record delivered
sinkTaskContext.getValue().requestCommit();
assertTrue(sinkTaskContext.getValue().isCommitRequested());
// Not yet committed before the next iteration runs.
assertNotEquals(offsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
workerTask.iteration(); // triggers the commit
assertFalse(sinkTaskContext.getValue().isCommitRequested()); // should have been cleared
assertEquals(offsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
assertEquals(0, workerTask.commitFailures());
PowerMock.verifyAll();
}
/**
 * Verifies that the offsets returned by {@code SinkTask.preCommit()} are
 * sanitized before committing: offsets greater than the current consumed
 * offset and offsets for unassigned partitions are dropped.
 */
@Test
public void testPreCommit() throws Exception {
expectInitializeTask();
// iter 1
expectPollInitialAssignment();
// iter 2
expectConsumerPoll(2);
expectConversionAndTransformation(2);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
// After two records are delivered on TP1.
final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 2));
workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
final Map<TopicPartition, OffsetAndMetadata> taskOffsets = new HashMap<>();
taskOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1)); // act like FIRST_OFFSET+2 has not yet been flushed by the task
taskOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET + 1)); // should be ignored because > current offset
taskOffsets.put(new TopicPartition(TOPIC, 3), new OffsetAndMetadata(FIRST_OFFSET)); // should be ignored because this partition is not assigned
// Only the valid subset of taskOffsets should actually be committed.
final Map<TopicPartition, OffsetAndMetadata> committableOffsets = new HashMap<>();
committableOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
committableOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
sinkTask.preCommit(workerCurrentOffsets);
EasyMock.expectLastCall().andReturn(taskOffsets);
// Expect extra invalid topic partition to be filtered, which causes the consumer assignment to be logged
EasyMock.expect(consumer.assignment()).andReturn(workerCurrentOffsets.keySet());
final Capture<OffsetCommitCallback> callback = EasyMock.newCapture();
consumer.commitAsync(EasyMock.eq(committableOffsets), EasyMock.capture(callback));
EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
@Override
public Void answer() throws Throwable {
callback.getValue().onComplete(committableOffsets, null);
return null;
}
});
expectConsumerPoll(0);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration(); // iter 1 -- initial assignment
assertEquals(workerStartingOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "currentOffsets"));
workerTask.iteration(); // iter 2 -- deliver 2 records
assertEquals(workerCurrentOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "currentOffsets"));
assertEquals(workerStartingOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 3 -- commit
assertEquals(committableOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
PowerMock.verifyAll();
}
/**
 * Verifies that when {@code SinkTask.preCommit()} returns offsets equal to the
 * already-committed offsets, no {@code consumer.commitAsync()} call is made.
 */
@Test
public void testIgnoredCommit() throws Exception {
expectInitializeTask();
// iter 1
expectPollInitialAssignment();
// iter 2
expectConsumerPoll(1);
expectConversionAndTransformation(1);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
// iter 3
// preCommit returns the starting (already-committed) offsets, so the commit is a no-op.
sinkTask.preCommit(workerCurrentOffsets);
EasyMock.expectLastCall().andReturn(workerStartingOffsets);
// no actual consumer.commit() triggered
expectConsumerPoll(0);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration(); // iter 1 -- initial assignment
assertEquals(workerStartingOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "currentOffsets"));
assertEquals(workerStartingOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
workerTask.iteration(); // iter 2 -- deliver 1 record
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 3 -- commit
PowerMock.verifyAll();
}
// Test that the commitTimeoutMs timestamp is correctly computed and checked in WorkerSinkTask.iteration()
// when there is a long running commit in process. See KAFKA-4942 for more information.
@Test
public void testLongRunningCommitWithoutTimeout() throws Exception {
expectInitializeTask();
// iter 1
expectPollInitialAssignment();
// iter 2
expectConsumerPoll(1);
expectConversionAndTransformation(1);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
// iter 3 - note that we return the current offset to indicate they should be committed
sinkTask.preCommit(workerCurrentOffsets);
EasyMock.expectLastCall().andReturn(workerCurrentOffsets);
// We need to delay the result of trying to commit offsets to Kafka via the consumer.commitAsync
// method. We do this so that we can test that we do not erroneously mark a commit as timed out
// while it is still running and under time. To fake this for tests we have the commit run in a
// separate thread and wait for a latch which we control back in the main thread.
final ExecutorService executor = Executors.newSingleThreadExecutor();
final CountDownLatch latch = new CountDownLatch(1);
consumer.commitAsync(EasyMock.eq(workerCurrentOffsets), EasyMock.<OffsetCommitCallback>anyObject());
EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
@SuppressWarnings("unchecked")
@Override
public Void answer() throws Throwable {
// Grab the arguments passed to the consumer.commitAsync method
final Object[] args = EasyMock.getCurrentArguments();
final Map<TopicPartition, OffsetAndMetadata> offsets = (Map<TopicPartition, OffsetAndMetadata>) args[0];
final OffsetCommitCallback callback = (OffsetCommitCallback) args[1];
// Complete the commit only after the main thread releases the latch.
executor.execute(new Runnable() {
@Override
public void run() {
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
callback.onComplete(offsets, null);
}
});
return null;
}
});
// no actual consumer.commit() triggered
expectConsumerPoll(0);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration(); // iter 1 -- initial assignment
assertEquals(workerStartingOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "currentOffsets"));
assertEquals(workerStartingOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
// Advance mock time so the commit deadline computation is exercised.
time.sleep(WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT);
workerTask.iteration(); // iter 2 -- deliver 1 record
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 3 -- commit in progress
// Make sure the "committing" flag didn't immediately get flipped back to false due to an incorrect timeout
assertTrue("Expected worker to be in the process of committing offsets", workerTask.isCommitting());
// Let the async commit finish and wait for it to end
latch.countDown();
executor.shutdown();
executor.awaitTermination(30, TimeUnit.SECONDS);
assertEquals(workerCurrentOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "currentOffsets"));
assertEquals(workerCurrentOffsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
PowerMock.verifyAll();
}
// Verify that when commitAsync is called but the supplied callback is not called by the consumer before a
// rebalance occurs, the async callback does not reset the last committed offset from the rebalance.
// See KAFKA-5731 for more information.
@Test
public void testCommitWithOutOfOrderCallback() throws Exception {
expectInitializeTask();
// iter 1
expectPollInitialAssignment();
// iter 2
expectConsumerPoll(1);
// 4 conversions: 1 record in iter 2, 1 in the rebalance poll for TP1 and 1 for TP3,
// plus 1 from the final expectConsumerPoll(1) before iter 4.
expectConversionAndTransformation(4);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
final List<TopicPartition> originalPartitions = asList(TOPIC_PARTITION, TOPIC_PARTITION2);
// The rebalance adds TP3 to the assignment.
final List<TopicPartition> rebalancedPartitions = asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3);
final Map<TopicPartition, OffsetAndMetadata> rebalanceOffsets = new HashMap<>();
rebalanceOffsets.put(TOPIC_PARTITION, workerCurrentOffsets.get(TOPIC_PARTITION));
rebalanceOffsets.put(TOPIC_PARTITION2, workerCurrentOffsets.get(TOPIC_PARTITION2));
rebalanceOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET));
final Map<TopicPartition, OffsetAndMetadata> postRebalanceCurrentOffsets = new HashMap<>();
postRebalanceCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 3));
postRebalanceCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
postRebalanceCurrentOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET + 2));
// iter 3 - note that we return the current offset to indicate they should be committed
sinkTask.preCommit(workerCurrentOffsets);
EasyMock.expectLastCall().andReturn(workerCurrentOffsets);
// We need to delay the result of trying to commit offsets to Kafka via the consumer.commitAsync
// method. We do this so that we can test that the callback is not called until after the rebalance
// changes the lastCommittedOffsets. To fake this for tests we have the commitAsync build a function
// that will call the callback with the appropriate parameters, and we'll run that function later.
final AtomicReference<Runnable> asyncCallbackRunner = new AtomicReference<>();
final AtomicBoolean asyncCallbackRan = new AtomicBoolean();
consumer.commitAsync(EasyMock.eq(workerCurrentOffsets), EasyMock.<OffsetCommitCallback>anyObject());
EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
@SuppressWarnings("unchecked")
@Override
public Void answer() throws Throwable {
// Grab the arguments passed to the consumer.commitAsync method
final Object[] args = EasyMock.getCurrentArguments();
final Map<TopicPartition, OffsetAndMetadata> offsets = (Map<TopicPartition, OffsetAndMetadata>) args[0];
final OffsetCommitCallback callback = (OffsetCommitCallback) args[1];
// Defer the callback invocation until the rebalance has happened.
asyncCallbackRunner.set(new Runnable() {
@Override
public void run() {
callback.onComplete(offsets, null);
asyncCallbackRan.set(true);
}
});
return null;
}
});
// Expect the next poll to discover and perform the rebalance, THEN complete the previous callback handler,
// and then return one record for TP1 and one for TP3.
final AtomicBoolean rebalanced = new AtomicBoolean();
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(
new IAnswer<ConsumerRecords<byte[], byte[]>>() {
@Override
public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
// Rebalance always begins with revoking current partitions ...
rebalanceListener.getValue().onPartitionsRevoked(originalPartitions);
// Respond to the rebalance
Map<TopicPartition, Long> offsets = new HashMap<>();
offsets.put(TOPIC_PARTITION, rebalanceOffsets.get(TOPIC_PARTITION).offset());
offsets.put(TOPIC_PARTITION2, rebalanceOffsets.get(TOPIC_PARTITION2).offset());
offsets.put(TOPIC_PARTITION3, rebalanceOffsets.get(TOPIC_PARTITION3).offset());
sinkTaskContext.getValue().offset(offsets);
rebalanceListener.getValue().onPartitionsAssigned(rebalancedPartitions);
rebalanced.set(true);
// Run the previous async commit handler
asyncCallbackRunner.get().run();
// And prep the two records to return
long timestamp = RecordBatch.NO_TIMESTAMP;
TimestampType timestampType = TimestampType.NO_TIMESTAMP_TYPE;
List<ConsumerRecord<byte[], byte[]>> records = new ArrayList<>();
records.add(new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturnedTp1 + 1, timestamp, timestampType, 0L, 0, 0, RAW_KEY, RAW_VALUE));
records.add(new ConsumerRecord<>(TOPIC, PARTITION3, FIRST_OFFSET + recordsReturnedTp3 + 1, timestamp, timestampType, 0L, 0, 0, RAW_KEY, RAW_VALUE));
recordsReturnedTp1 += 1;
recordsReturnedTp3 += 1;
return new ConsumerRecords<>(Collections.singletonMap(new TopicPartition(TOPIC, PARTITION), records));
}
});
// onPartitionsRevoked
sinkTask.preCommit(workerCurrentOffsets);
EasyMock.expectLastCall().andReturn(workerCurrentOffsets);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
sinkTask.close(workerCurrentOffsets.keySet());
EasyMock.expectLastCall();
// Revocation commits synchronously.
consumer.commitSync(workerCurrentOffsets);
EasyMock.expectLastCall();
// onPartitionsAssigned - step 1
final long offsetTp1 = rebalanceOffsets.get(TOPIC_PARTITION).offset();
final long offsetTp2 = rebalanceOffsets.get(TOPIC_PARTITION2).offset();
final long offsetTp3 = rebalanceOffsets.get(TOPIC_PARTITION3).offset();
EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(offsetTp1);
EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(offsetTp2);
EasyMock.expect(consumer.position(TOPIC_PARTITION3)).andReturn(offsetTp3);
// onPartitionsAssigned - step 2
sinkTask.open(rebalancedPartitions);
EasyMock.expectLastCall();
// onPartitionsAssigned - step 3 rewind
consumer.seek(TOPIC_PARTITION, offsetTp1);
EasyMock.expectLastCall();
consumer.seek(TOPIC_PARTITION2, offsetTp2);
EasyMock.expectLastCall();
consumer.seek(TOPIC_PARTITION3, offsetTp3);
EasyMock.expectLastCall();
// iter 4 - note that we return the current offset to indicate they should be committed
sinkTask.preCommit(postRebalanceCurrentOffsets);
EasyMock.expectLastCall().andReturn(postRebalanceCurrentOffsets);
final Capture<OffsetCommitCallback> callback = EasyMock.newCapture();
consumer.commitAsync(EasyMock.eq(postRebalanceCurrentOffsets), EasyMock.capture(callback));
EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
@Override
public Void answer() throws Throwable {
callback.getValue().onComplete(postRebalanceCurrentOffsets, null);
return null;
}
});
// no actual consumer.commit() triggered
expectConsumerPoll(1);
sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration(); // iter 1 -- initial assignment
assertEquals(workerStartingOffsets, Whitebox.getInternalState(workerTask, "currentOffsets"));
assertEquals(workerStartingOffsets, Whitebox.getInternalState(workerTask, "lastCommittedOffsets"));
time.sleep(WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT);
workerTask.iteration(); // iter 2 -- deliver 1 record
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 3 -- commit in progress
assertTrue(asyncCallbackRan.get());
assertTrue(rebalanced.get());
// Check that the offsets were not reset by the out-of-order async commit callback
assertEquals(postRebalanceCurrentOffsets, Whitebox.getInternalState(workerTask, "currentOffsets"));
assertEquals(rebalanceOffsets, Whitebox.getInternalState(workerTask, "lastCommittedOffsets"));
time.sleep(WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT);
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 4 -- commit in progress
// Check that the offsets were not reset by the out-of-order async commit callback
assertEquals(postRebalanceCurrentOffsets, Whitebox.getInternalState(workerTask, "currentOffsets"));
assertEquals(postRebalanceCurrentOffsets, Whitebox.getInternalState(workerTask, "lastCommittedOffsets"));
PowerMock.verifyAll();
}
/**
 * Verifies that Kafka's sentinel timestamp ({@code RecordBatch.NO_TIMESTAMP})
 * is translated to {@code null} on the {@link SinkRecord} handed to the task.
 */
@Test
public void testMissingTimestampPropagation() throws Exception {
expectInitializeTask();
expectConsumerPoll(1, RecordBatch.NO_TIMESTAMP, TimestampType.CREATE_TIME);
expectConversionAndTransformation(1);
Capture<Collection<SinkRecord>> records = EasyMock.newCapture(CaptureType.ALL);
sinkTask.put(EasyMock.capture(records));
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration();
SinkRecord record = records.getValue().iterator().next();
// we expect null for missing timestamp, the sentinel value of Record.NO_TIMESTAMP is Kafka's API
assertEquals(null, record.timestamp());
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
PowerMock.verifyAll();
}
/**
 * Verifies that a real record timestamp and its timestamp type are propagated
 * unchanged from the consumer record to the {@link SinkRecord}.
 */
@Test
public void testTimestampPropagation() throws Exception {
final Long timestamp = System.currentTimeMillis();
final TimestampType timestampType = TimestampType.CREATE_TIME;
expectInitializeTask();
expectConsumerPoll(1, timestamp, timestampType);
expectConversionAndTransformation(1);
Capture<Collection<SinkRecord>> records = EasyMock.newCapture(CaptureType.ALL);
sinkTask.put(EasyMock.capture(records));
PowerMock.replayAll();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
workerTask.iteration();
SinkRecord record = records.getValue().iterator().next();
assertEquals(timestamp, record.timestamp());
assertEquals(timestampType, record.timestampType());
PowerMock.verifyAll();
}
/**
 * Records the expectations for task startup: consumer creation (intercepted
 * via the partial mock's private createConsumer), topic subscription (capturing
 * the rebalance listener for later use), and SinkTask initialize/start.
 */
private void expectInitializeTask() throws Exception {
PowerMock.expectPrivate(workerTask, "createConsumer").andReturn(consumer);
consumer.subscribe(EasyMock.eq(asList(TOPIC)), EasyMock.capture(rebalanceListener));
PowerMock.expectLastCall();
// Capture the context so tests can call requestCommit()/offset() on it.
sinkTask.initialize(EasyMock.capture(sinkTaskContext));
PowerMock.expectLastCall();
sinkTask.start(TASK_PROPS);
PowerMock.expectLastCall();
}
/**
 * Records a poll that triggers partition revocation, with SinkTask.close()
 * throwing the supplied exception during the revocation handling.
 *
 * @param e the exception close() should throw
 */
private void expectRebalanceRevocationError(RuntimeException e) {
final List<TopicPartition> partitions = asList(TOPIC_PARTITION, TOPIC_PARTITION2);
sinkTask.close(new HashSet<>(partitions));
EasyMock.expectLastCall().andThrow(e);
sinkTask.preCommit(EasyMock.<Map<TopicPartition, OffsetAndMetadata>>anyObject());
EasyMock.expectLastCall().andReturn(Collections.emptyMap());
// Drive the revocation through the captured rebalance listener.
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(
new IAnswer<ConsumerRecords<byte[], byte[]>>() {
@Override
public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
rebalanceListener.getValue().onPartitionsRevoked(partitions);
return ConsumerRecords.empty();
}
});
}
/**
 * Records a poll that triggers a full revoke/assign rebalance, with
 * SinkTask.open() throwing the supplied exception during assignment.
 *
 * @param e the exception open() should throw
 */
private void expectRebalanceAssignmentError(RuntimeException e) {
final List<TopicPartition> partitions = asList(TOPIC_PARTITION, TOPIC_PARTITION2);
sinkTask.close(new HashSet<>(partitions));
EasyMock.expectLastCall();
sinkTask.preCommit(EasyMock.<Map<TopicPartition, OffsetAndMetadata>>anyObject());
EasyMock.expectLastCall().andReturn(Collections.emptyMap());
EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(FIRST_OFFSET);
EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(FIRST_OFFSET);
sinkTask.open(partitions);
EasyMock.expectLastCall().andThrow(e);
// Drive the revoke + assign through the captured rebalance listener.
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(
new IAnswer<ConsumerRecords<byte[], byte[]>>() {
@Override
public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
rebalanceListener.getValue().onPartitionsRevoked(partitions);
rebalanceListener.getValue().onPartitionsAssigned(partitions);
return ConsumerRecords.empty();
}
});
}
/**
 * Records the expectations for the first iteration: an empty poll that fires
 * onPartitionsAssigned for TP1/TP2, position lookups at FIRST_OFFSET,
 * SinkTask.open(), and an empty put().
 */
private void expectPollInitialAssignment() {
final List<TopicPartition> partitions = asList(TOPIC_PARTITION, TOPIC_PARTITION2);
sinkTask.open(partitions);
EasyMock.expectLastCall();
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(new IAnswer<ConsumerRecords<byte[], byte[]>>() {
@Override
public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
rebalanceListener.getValue().onPartitionsAssigned(partitions);
return ConsumerRecords.empty();
}
});
EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(FIRST_OFFSET);
EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(FIRST_OFFSET);
// The task is handed an empty batch for the assignment-only poll.
sinkTask.put(Collections.<SinkRecord>emptyList());
EasyMock.expectLastCall();
}
/**
 * Records a consumer.wakeup() followed by a poll that throws
 * {@link WakeupException}, simulating an interrupted poll cycle.
 */
private void expectConsumerWakeup() {
consumer.wakeup();
EasyMock.expectLastCall();
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andThrow(new WakeupException());
}
/**
 * Convenience overload: records a poll returning {@code numMessages} records
 * on TP1 with no timestamp (NO_TIMESTAMP / NO_TIMESTAMP_TYPE).
 */
private void expectConsumerPoll(final int numMessages) {
expectConsumerPoll(numMessages, RecordBatch.NO_TIMESTAMP, TimestampType.NO_TIMESTAMP_TYPE);
}
/**
 * Records a single consumer.poll() expectation that returns
 * {@code numMessages} consecutive records on TP1 with the given timestamp and
 * timestamp type, advancing the shared recordsReturnedTp1 offset counter.
 */
private void expectConsumerPoll(final int numMessages, final long timestamp, final TimestampType timestampType) {
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(
new IAnswer<ConsumerRecords<byte[], byte[]>>() {
@Override
public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
List<ConsumerRecord<byte[], byte[]>> records = new ArrayList<>();
for (int i = 0; i < numMessages; i++)
records.add(new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturnedTp1 + i, timestamp, timestampType, 0L, 0, 0, RAW_KEY, RAW_VALUE));
recordsReturnedTp1 += numMessages;
// An empty map (not an empty record list) mimics a poll with no data.
return new ConsumerRecords<>(
numMessages > 0 ?
Collections.singletonMap(new TopicPartition(TOPIC, PARTITION), records) :
Collections.<TopicPartition, List<ConsumerRecord<byte[], byte[]>>>emptyMap()
);
}
});
}
/**
 * Records {@code numMessages} key/value conversions plus a pass-through
 * transformation chain (the chain returns each captured record unchanged).
 */
private void expectConversionAndTransformation(final int numMessages) {
EasyMock.expect(keyConverter.toConnectData(TOPIC, RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY)).times(numMessages);
EasyMock.expect(valueConverter.toConnectData(TOPIC, RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE)).times(numMessages);
// Identity transformation: return whatever record was passed in.
final Capture<SinkRecord> recordCapture = EasyMock.newCapture();
EasyMock.expect(transformationChain.apply(EasyMock.capture(recordCapture)))
.andAnswer(new IAnswer<SinkRecord>() {
@Override
public SinkRecord answer() {
return recordCapture.getValue();
}
}).times(numMessages);
}
// Abstract placeholder SinkTask; only used as a type for the mocking framework,
// never instantiated directly.
private abstract static class TestSinkTask extends SinkTask {
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.testclassification.FlakeyTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MediumTests.class, FlakeyTests.class})
public class TestMultiParallel {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMultiParallel.class);
private static final Logger LOG = LoggerFactory.getLogger(TestMultiParallel.class);
// Shared mini-cluster used by every test in this class.
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
// Default cell value and qualifier written by constructPutRequests().
private static final byte[] VALUE = Bytes.toBytes("value");
private static final byte[] QUALIFIER = Bytes.toBytes("qual");
private static final String FAMILY = "family";
private static final TableName TEST_TABLE = TableName.valueOf("multi_test_table");
private static final byte[] BYTES_FAMILY = Bytes.toBytes(FAMILY);
// Single fixed row targeted by the "many columns in one row" style tests.
private static final byte[] ONE_ROW = Bytes.toBytes("xxx");
// Non-uniform, partly duplicated, unsorted key set; see makeKeys().
private static final byte [][] KEYS = makeKeys();
private static final int slaves = 5; // also used for testing HTable pool size
// Created once in beforeClass(), closed in afterClass().
private static Connection CONNECTION;
/**
 * Starts a mini-cluster with {@code slaves} regionservers, creates the
 * multi-region test table and the shared connection, and verifies that the
 * test master observer coprocessor was loaded.
 */
@BeforeClass
public static void beforeClass() throws Exception {
// Uncomment the following lines if more verbosity is needed for
// debugging (see HBASE-12285 for details).
//((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
//((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
//((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
UTIL.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
KeyValueCodec.class.getCanonicalName());
// Disable table on master for now as the feature is broken
//UTIL.getConfiguration().setBoolean(LoadBalancer.TABLES_ON_MASTER, true);
// We used to ask for system tables on Master exclusively but not needed by test and doesn't
// work anyways -- so commented out.
// UTIL.getConfiguration().setBoolean(LoadBalancer.SYSTEM_TABLES_ON_MASTER, true);
UTIL.getConfiguration()
.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, MyMasterObserver.class.getName());
UTIL.startMiniCluster(slaves);
Table t = UTIL.createMultiRegionTable(TEST_TABLE, Bytes.toBytes(FAMILY));
UTIL.waitTableEnabled(TEST_TABLE);
t.close();
CONNECTION = ConnectionFactory.createConnection(UTIL.getConfiguration());
// MyMasterObserver#start flips this flag when the coprocessor loads.
assertTrue(MyMasterObserver.start.get());
}
/** Closes the shared connection and shuts the mini-cluster down. */
@AfterClass
public static void afterClass() throws Exception {
CONNECTION.close();
UTIL.shutdownMiniCluster();
}
/**
 * Per-test setup: if a previous test killed a regionserver, restarts enough
 * servers, rebalances, and waits until all regions of the test table are
 * assigned so each test starts from a stable cluster.
 */
@Before
public void before() throws Exception {
final int balanceCount = MyMasterObserver.postBalanceCount.get();
LOG.info("before");
if (UTIL.ensureSomeRegionServersAvailable(slaves)) {
// Distribute regions
UTIL.getMiniHBaseCluster().getMaster().balance();
// Some plans are created.
if (MyMasterObserver.postBalanceCount.get() > balanceCount) {
// It is necessary to wait the move procedure to start.
// Otherwise, the next wait may pass immediately.
UTIL.waitFor(3 * 1000, 100, false, () ->
UTIL.getMiniHBaseCluster().getMaster().getAssignmentManager().hasRegionsInTransition()
);
}
// Wait until completing balance
UTIL.waitUntilAllRegionsAssigned(TEST_TABLE);
}
LOG.info("before done");
}
/**
 * Builds the shared test key set. The set is deliberately "non-uniform":
 * a) an unequal number of keys per region (a non-integer multiple of the
 *    starter keys, so the count is not a multiple of the region count),
 * b) duplicate keys (multiple Gets/Puts to the same row must work), and
 * c) keys not in sorted order within a region, to exercise the client-side
 *    sorting and index-mapping code.
 *
 * @return the generated row keys
 */
private static byte[][] makeKeys() {
  byte[][] starterKeys = HBaseTestingUtility.KEYS;
  // Don't use an integer multiple, so that the number of keys is not a
  // multiple of the number of regions.
  int numKeys = (int) (starterKeys.length * 10.33F);
  List<byte[]> keys = new ArrayList<>(numKeys + 100);
  appendSuffixedKeys(keys, starterKeys, numKeys);
  // Second pass re-adds the same generated keys (duplicates, unsorted).
  appendSuffixedKeys(keys, starterKeys, 100);
  return keys.toArray(new byte[0][]);
}

/**
 * Appends {@code count} keys to {@code keys}; each key is a starter key
 * cycled round-robin, extended by a one-byte suffix of {@code i % 256}.
 */
private static void appendSuffixedKeys(List<byte[]> keys, byte[][] starterKeys, int count) {
  for (int i = 0; i < count; i++) {
    byte[] k = starterKeys[i % starterKeys.length];
    byte[] cp = new byte[k.length + 1];
    System.arraycopy(k, 0, cp, 0, k.length);
    // Plain cast instead of the deprecated new Integer(i % 256).byteValue().
    cp[k.length] = (byte) (i % 256);
    keys.add(cp);
  }
}
/**
 * This is for testing the active number of threads that were used while
 * doing a batch operation. It inserts one row per region via the batch
 * operation, and then checks the number of active threads.
 * <p/>
 * For HBASE-3553
 */
@Test
public void testActiveThreadsCount() throws Exception {
UTIL.getConfiguration().setLong("hbase.htable.threads.coresize", slaves + 1);
// Make sure max is at least as big as coresize; can be smaller in test context where
// we tune down thread sizes -- max could be < slaves + 1.
UTIL.getConfiguration().setLong("hbase.htable.threads.max", slaves + 1);
try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration())) {
ThreadPoolExecutor executor = HTable.getDefaultExecutor(UTIL.getConfiguration());
try {
try (Table t = connection.getTable(TEST_TABLE, executor)) {
List<Put> puts = constructPutRequests(); // creates a Put for every region
t.batch(puts, null);
// Collect the distinct servers hosting the written rows; the pool should
// have grown to exactly one thread per target server.
HashSet<ServerName> regionservers = new HashSet<>();
try (RegionLocator locator = connection.getRegionLocator(TEST_TABLE)) {
for (Row r : puts) {
HRegionLocation location = locator.getRegionLocation(r.getRow());
regionservers.add(location.getServerName());
}
}
assertEquals(regionservers.size(), executor.getLargestPoolSize());
}
} finally {
executor.shutdownNow();
}
}
}
/**
 * Loads all test rows with a batch of Puts, then fetches them back both via
 * a batched Get and via individual Get calls, asserting both paths return
 * identical cells and values.
 */
@Test
public void testBatchWithGet() throws Exception {
LOG.info("test=testBatchWithGet");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
// load test data
List<Put> puts = constructPutRequests();
table.batch(puts, null);
// create a list of gets and run it
List<Row> gets = new ArrayList<>();
for (byte[] k : KEYS) {
Get get = new Get(k);
get.addColumn(BYTES_FAMILY, QUALIFIER);
gets.add(get);
}
Result[] multiRes = new Result[gets.size()];
table.batch(gets, multiRes);
// Same gets using individual call API
List<Result> singleRes = new ArrayList<>();
for (Row get : gets) {
singleRes.add(table.get((Get) get));
}
// Compare results
Assert.assertEquals(singleRes.size(), multiRes.length);
for (int i = 0; i < singleRes.size(); i++) {
Assert.assertTrue(singleRes.get(i).containsColumn(BYTES_FAMILY, QUALIFIER));
Cell[] singleKvs = singleRes.get(i).rawCells();
Cell[] multiKvs = multiRes[i].rawCells();
for (int j = 0; j < singleKvs.length; j++) {
// Cells must match both by identity semantics and by value bytes.
Assert.assertEquals(singleKvs[j], multiKvs[j]);
Assert.assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(singleKvs[j]),
CellUtil.cloneValue(multiKvs[j])));
}
}
table.close();
}
/**
 * Batches one Put against a nonexistent column family together with one
 * valid Put, and verifies that the batch throws
 * {@link RetriesExhaustedWithDetailsException}, that the failure is not a
 * cluster issue, and that the per-action results carry a Throwable for the
 * bad action and a Result for the good one.
 */
@Test
public void testBadFam() throws Exception {
LOG.info("test=testBadFam");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
List<Row> actions = new ArrayList<>();
Put p = new Put(Bytes.toBytes("row1"));
p.addColumn(Bytes.toBytes("bad_family"), Bytes.toBytes("qual"), Bytes.toBytes("value"));
actions.add(p);
p = new Put(Bytes.toBytes("row2"));
p.addColumn(BYTES_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
actions.add(p);
// row1 and row2 should be in the same region.
Object [] r = new Object[actions.size()];
try {
table.batch(actions, r);
fail();
} catch (RetriesExhaustedWithDetailsException ex) {
LOG.debug(ex.toString(), ex);
// good!
assertFalse(ex.mayHaveClusterIssues());
}
// Partial success: the bad action failed, the good one went through.
assertEquals(2, r.length);
assertTrue(r[0] instanceof Throwable);
assertTrue(r[1] instanceof Result);
table.close();
}
/** Runs the flush-commits scenario without aborting any regionserver. */
@Test
public void testFlushCommitsNoAbort() throws Exception {
LOG.info("test=testFlushCommitsNoAbort");
doTestFlushCommits(false);
}
/**
 * Only run one Multi test with a forced RegionServer abort. Otherwise, the
 * unit tests will take an unnecessarily long time to run.
 */
@Test
public void testFlushCommitsWithAbort() throws Exception {
LOG.info("test=testFlushCommitsWithAbort");
doTestFlushCommits(true);
}
/**
 * Set table auto flush to false and test flushing commits
 * @param doAbort true if abort one regionserver in the testing
 */
private void doTestFlushCommits(boolean doAbort) throws Exception {
// Load the data
LOG.info("get new table");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
LOG.info("constructPutRequests");
List<Put> puts = constructPutRequests();
table.put(puts);
LOG.info("puts");
final int liveRScount = UTIL.getMiniHBaseCluster().getLiveRegionServerThreads()
.size();
assert liveRScount > 0;
JVMClusterUtil.RegionServerThread liveRS = UTIL.getMiniHBaseCluster()
.getLiveRegionServerThreads().get(0);
if (doAbort) {
liveRS.getRegionServer().abort("Aborting for tests",
new Exception("doTestFlushCommits"));
// If we wait for no regions being online after we abort the server, we
// could ensure the master has re-assigned the regions on killed server
// after writing successfully. It means the server we aborted is dead
// and detected by master
while (liveRS.getRegionServer().getNumberOfOnlineRegions() != 0) {
Thread.sleep(100);
}
// try putting more keys after the abort. same key/qual... just validating
// no exceptions thrown
puts = constructPutRequests();
table.put(puts);
}
LOG.info("validating loaded data");
validateLoadedData(table);
// Validate server and region count
List<JVMClusterUtil.RegionServerThread> liveRSs = UTIL.getMiniHBaseCluster().getLiveRegionServerThreads();
int count = 0;
for (JVMClusterUtil.RegionServerThread t: liveRSs) {
count++;
LOG.info("Count=" + count + ", Alive=" + t.getRegionServer());
}
LOG.info("Count=" + count);
Assert.assertEquals("Server count=" + count + ", abort=" + doAbort,
(doAbort ? (liveRScount - 1) : liveRScount), count);
if (doAbort) {
UTIL.getMiniHBaseCluster().waitOnRegionServer(0);
UTIL.waitFor(15 * 1000, new Waiter.Predicate<Exception>() {
@Override
public boolean evaluate() throws Exception {
// We disable regions on master so the count should be liveRScount - 1
return UTIL.getMiniHBaseCluster().getMaster()
.getClusterMetrics().getLiveServerMetrics().size() == liveRScount - 1;
}
});
UTIL.waitFor(15 * 1000, UTIL.predicateNoRegionsInTransition());
}
table.close();
LOG.info("done");
}
/**
 * Loads all test rows via a batched Put, aborts one live regionserver, and
 * verifies that a second batch of the same Puts still completes once the
 * regions are reassigned.
 */
@Test
public void testBatchWithPut() throws Exception {
  LOG.info("test=testBatchWithPut");
  Table table = CONNECTION.getTable(TEST_TABLE);
  // put multiple rows using a batch
  List<Put> puts = constructPutRequests();
  Object[] results = new Object[puts.size()];
  table.batch(puts, results);
  validateSizeAndEmpty(results, KEYS.length);
  // Kill a regionserver and retry the batch; it must survive reassignment.
  // (The dead `if (true)` wrapper around this section was removed.)
  int liveRScount = UTIL.getMiniHBaseCluster().getLiveRegionServerThreads().size();
  assert liveRScount > 0;
  JVMClusterUtil.RegionServerThread liveRS =
      UTIL.getMiniHBaseCluster().getLiveRegionServerThreads().get(0);
  liveRS.getRegionServer().abort("Aborting for tests", new Exception("testBatchWithPut"));
  puts = constructPutRequests();
  try {
    results = new Object[puts.size()];
    table.batch(puts, results);
  } catch (RetriesExhaustedWithDetailsException ree) {
    // Log the full per-action failure detail before rethrowing.
    LOG.info(ree.getExhaustiveDescription());
    table.close();
    throw ree;
  }
  validateSizeAndEmpty(results, KEYS.length);
  validateLoadedData(table);
  table.close();
}
/**
 * Loads all test rows via a batched Put, deletes the family for every key
 * via a batched Delete, and verifies that no row exists afterwards.
 */
@Test
public void testBatchWithDelete() throws Exception {
LOG.info("test=testBatchWithDelete");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
// Load some data
List<Put> puts = constructPutRequests();
Object[] results = new Object[puts.size()];
table.batch(puts, results);
validateSizeAndEmpty(results, KEYS.length);
// Deletes
List<Row> deletes = new ArrayList<>();
for (int i = 0; i < KEYS.length; i++) {
Delete delete = new Delete(KEYS[i]);
delete.addFamily(BYTES_FAMILY);
deletes.add(delete);
}
results= new Object[deletes.size()];
table.batch(deletes, results);
validateSizeAndEmpty(results, KEYS.length);
// Get to make sure ...
for (byte[] k : KEYS) {
Get get = new Get(k);
get.addColumn(BYTES_FAMILY, QUALIFIER);
Assert.assertFalse(table.exists(get));
}
table.close();
}
/**
 * Same as {@link #testBatchWithDelete()} but uses the
 * {@code Table.delete(List)} API, which additionally removes successfully
 * deleted entries from the passed-in list (hence the isEmpty assertion).
 */
@Test
public void testHTableDeleteWithList() throws Exception {
LOG.info("test=testHTableDeleteWithList");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
// Load some data
List<Put> puts = constructPutRequests();
Object[] results = new Object[puts.size()];
table.batch(puts, results);
validateSizeAndEmpty(results, KEYS.length);
// Deletes
ArrayList<Delete> deletes = new ArrayList<>();
for (int i = 0; i < KEYS.length; i++) {
Delete delete = new Delete(KEYS[i]);
delete.addFamily(BYTES_FAMILY);
deletes.add(delete);
}
table.delete(deletes);
// Table.delete(List) drains the list as deletes succeed.
Assert.assertTrue(deletes.isEmpty());
// Get to make sure ...
for (byte[] k : KEYS) {
Get get = new Get(k);
get.addColumn(BYTES_FAMILY, QUALIFIER);
Assert.assertFalse(table.exists(get));
}
table.close();
}
/**
 * Writes 100 distinct columns to a single row via one batch, then reads
 * each column back with 100 batched Gets against the same row and verifies
 * every value.
 */
@Test
public void testBatchWithManyColsInOneRowGetAndPut() throws Exception {
LOG.info("test=testBatchWithManyColsInOneRowGetAndPut");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
List<Row> puts = new ArrayList<>();
for (int i = 0; i < 100; i++) {
Put put = new Put(ONE_ROW);
byte[] qual = Bytes.toBytes("column" + i);
put.addColumn(BYTES_FAMILY, qual, VALUE);
puts.add(put);
}
Object[] results = new Object[puts.size()];
table.batch(puts, results);
// validate
validateSizeAndEmpty(results, 100);
// get the data back and validate that it is correct
List<Row> gets = new ArrayList<>();
for (int i = 0; i < 100; i++) {
Get get = new Get(ONE_ROW);
byte[] qual = Bytes.toBytes("column" + i);
get.addColumn(BYTES_FAMILY, qual);
gets.add(get);
}
Object[] multiRes = new Object[gets.size()];
table.batch(gets, multiRes);
// Results come back in request order, so index i maps to "column" + i.
int idx = 0;
for (Object r : multiRes) {
byte[] qual = Bytes.toBytes("column" + idx);
validateResult(r, qual, VALUE);
idx++;
}
table.close();
}
/**
 * Batches one Increment and one Append against the same row and verifies
 * the returned Results: the Append concatenates/creates QUAL1 and QUAL4,
 * the Increment bumps QUAL2 (seeded with 1) to 2 and creates QUAL3 at 1.
 */
@Test
public void testBatchWithIncrementAndAppend() throws Exception {
LOG.info("test=testBatchWithIncrementAndAppend");
final byte[] QUAL1 = Bytes.toBytes("qual1");
final byte[] QUAL2 = Bytes.toBytes("qual2");
final byte[] QUAL3 = Bytes.toBytes("qual3");
final byte[] QUAL4 = Bytes.toBytes("qual4");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
// Clear the row first; other tests also write to ONE_ROW.
Delete d = new Delete(ONE_ROW);
table.delete(d);
Put put = new Put(ONE_ROW);
put.addColumn(BYTES_FAMILY, QUAL1, Bytes.toBytes("abc"));
put.addColumn(BYTES_FAMILY, QUAL2, Bytes.toBytes(1L));
table.put(put);
Increment inc = new Increment(ONE_ROW);
inc.addColumn(BYTES_FAMILY, QUAL2, 1);
inc.addColumn(BYTES_FAMILY, QUAL3, 1);
Append a = new Append(ONE_ROW);
a.addColumn(BYTES_FAMILY, QUAL1, Bytes.toBytes("def"));
a.addColumn(BYTES_FAMILY, QUAL4, Bytes.toBytes("xyz"));
List<Row> actions = new ArrayList<>();
actions.add(inc);
actions.add(a);
Object[] multiRes = new Object[actions.size()];
table.batch(actions, multiRes);
// multiRes[0] is the Increment result, multiRes[1] the Append result.
validateResult(multiRes[1], QUAL1, Bytes.toBytes("abcdef"));
validateResult(multiRes[1], QUAL4, Bytes.toBytes("xyz"));
validateResult(multiRes[0], QUAL2, Bytes.toBytes(2L));
validateResult(multiRes[0], QUAL3, Bytes.toBytes(1L));
table.close();
}
/**
 * Verifies nonce-based deduplication of increments: a NonceGenerator that
 * hands out every nonce twice is injected, so every second increment is a
 * retry-duplicate the server must reject. Sequentially, two increments with
 * the same nonce must only apply once; in parallel, exactly half of
 * {@code numRequests} increments must succeed.
 */
@Test
public void testNonceCollision() throws Exception {
  LOG.info("test=testNonceCollision");
  final Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
  try {
    Table table = connection.getTable(TEST_TABLE);
    Put put = new Put(ONE_ROW);
    put.addColumn(BYTES_FAMILY, QUALIFIER, Bytes.toBytes(0L));
    // Replace nonce manager with the one that returns each nonce twice.
    NonceGenerator cnm = new NonceGenerator() {
      private final PerClientRandomNonceGenerator delegate = PerClientRandomNonceGenerator.get();
      private long lastNonce = -1;
      @Override
      public synchronized long newNonce() {
        // Alternates: fresh nonce, then the same nonce again.
        long nonce = 0;
        if (lastNonce == -1) {
          lastNonce = nonce = delegate.newNonce();
        } else {
          nonce = lastNonce;
          lastNonce = -1L;
        }
        return nonce;
      }
      @Override
      public long getNonceGroup() {
        return delegate.getNonceGroup();
      }
    };
    NonceGenerator oldCnm =
      ConnectionUtils.injectNonceGeneratorForTesting((ClusterConnection)connection, cnm);
    // First test sequential requests.
    try {
      // Seed the counter at 0 (the Put above was previously built but never
      // written -- dead code).
      table.put(put);
      Increment inc = new Increment(ONE_ROW);
      inc.addColumn(BYTES_FAMILY, QUALIFIER, 1L);
      table.increment(inc);
      // duplicate increment
      inc = new Increment(ONE_ROW);
      inc.addColumn(BYTES_FAMILY, QUALIFIER, 1L);
      Result result = table.increment(inc);
      // The duplicate is rejected server-side; the value stays at 1.
      validateResult(result, QUALIFIER, Bytes.toBytes(1L));
      Get get = new Get(ONE_ROW);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      result = table.get(get);
      validateResult(result, QUALIFIER, Bytes.toBytes(1L));
      // Now run a bunch of requests in parallel, exactly half should succeed.
      int numRequests = 40;
      final CountDownLatch startedLatch = new CountDownLatch(numRequests);
      final CountDownLatch startLatch = new CountDownLatch(1);
      final CountDownLatch doneLatch = new CountDownLatch(numRequests);
      for (int i = 0; i < numRequests; ++i) {
        Runnable r = new Runnable() {
          @Override
          public void run() {
            Table table = null;
            try {
              table = connection.getTable(TEST_TABLE);
            } catch (IOException e) {
              fail("Not expected");
            }
            Increment inc = new Increment(ONE_ROW);
            inc.addColumn(BYTES_FAMILY, QUALIFIER, 1L);
            startedLatch.countDown();
            try {
              startLatch.await();
            } catch (InterruptedException e) {
              fail("Not expected");
            }
            try {
              table.increment(inc);
            } catch (IOException ioEx) {
              fail("Not expected");
            }
            doneLatch.countDown();
          }
        };
        Threads.setDaemonThreadRunning(new Thread(r));
      }
      startedLatch.await(); // Wait until all threads are ready...
      startLatch.countDown(); // ...and unleash the herd!
      doneLatch.await();
      // Now verify
      get = new Get(ONE_ROW);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      result = table.get(get);
      validateResult(result, QUALIFIER, Bytes.toBytes((numRequests / 2) + 1L));
      table.close();
    } finally {
      ConnectionImplementation.injectNonceGeneratorForTesting((ClusterConnection) connection, oldCnm);
    }
  } finally {
    // The connection was created locally and leaked before; close it.
    connection.close();
  }
}
/**
 * Runs a single batch mixing Gets, Puts, a Delete and a RowMutations, then
 * validates each slot of the result array and double-checks the mutations
 * with standalone Gets after the batch.
 */
@Test
public void testBatchWithMixedActions() throws Exception {
LOG.info("test=testBatchWithMixedActions");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
// Load some data to start
List<Put> puts = constructPutRequests();
Object[] results = new Object[puts.size()];
table.batch(puts, results);
validateSizeAndEmpty(results, KEYS.length);
// Batch: get, get, put(new col), delete, get, get of put, get of deleted,
// put
List<Row> actions = new ArrayList<>();
byte[] qual2 = Bytes.toBytes("qual2");
byte[] val2 = Bytes.toBytes("putvalue2");
// 0 get
Get get = new Get(KEYS[10]);
get.addColumn(BYTES_FAMILY, QUALIFIER);
actions.add(get);
// 1 get
get = new Get(KEYS[11]);
get.addColumn(BYTES_FAMILY, QUALIFIER);
actions.add(get);
// 2 put of new column
Put put = new Put(KEYS[10]);
put.addColumn(BYTES_FAMILY, qual2, val2);
actions.add(put);
// 3 delete
Delete delete = new Delete(KEYS[20]);
delete.addFamily(BYTES_FAMILY);
actions.add(delete);
// 4 get
get = new Get(KEYS[30]);
get.addColumn(BYTES_FAMILY, QUALIFIER);
actions.add(get);
// There used to be a 'get' of a previous put here, but removed
// since this API really cannot guarantee order in terms of mixed
// get/puts.
// 5 put of new column
put = new Put(KEYS[40]);
put.addColumn(BYTES_FAMILY, qual2, val2);
actions.add(put);
// 6 RowMutations
RowMutations rm = new RowMutations(KEYS[50]);
put = new Put(KEYS[50]);
put.addColumn(BYTES_FAMILY, qual2, val2);
rm.add((Mutation) put);
byte[] qual3 = Bytes.toBytes("qual3");
byte[] val3 = Bytes.toBytes("putvalue3");
put = new Put(KEYS[50]);
put.addColumn(BYTES_FAMILY, qual3, val3);
rm.add((Mutation) put);
actions.add(rm);
// 7 Add another Get to the mixed sequence after RowMutations
get = new Get(KEYS[10]);
get.addColumn(BYTES_FAMILY, QUALIFIER);
actions.add(get);
results = new Object[actions.size()];
table.batch(actions, results);
// Validation: Gets return data, mutations return empty Results.
validateResult(results[0]);
validateResult(results[1]);
validateEmpty(results[3]);
validateResult(results[4]);
validateEmpty(results[5]);
validateEmpty(results[6]);
validateResult(results[7]);
// validate last put, externally from the batch
get = new Get(KEYS[40]);
get.addColumn(BYTES_FAMILY, qual2);
Result r = table.get(get);
validateResult(r, qual2, val2);
// validate last RowMutations, externally from the batch
get = new Get(KEYS[50]);
get.addColumn(BYTES_FAMILY, qual2);
r = table.get(get);
validateResult(r, qual2, val2);
get = new Get(KEYS[50]);
get.addColumn(BYTES_FAMILY, qual3);
r = table.get(get);
validateResult(r, qual3, val3);
table.close();
}
// // Helper methods ////
/** Asserts {@code r} holds the default {@code VALUE} at FAMILY:QUALIFIER. */
private void validateResult(Object r) {
validateResult(r, QUALIFIER, VALUE);
}
/**
 * Asserts that {@code r1} is a {@link Result} containing exactly
 * {@code val} under {@code BYTES_FAMILY}:{@code qual}.
 */
private void validateResult(Object r1, byte[] qual, byte[] val) {
  Result result = (Result) r1;
  Assert.assertTrue(result.containsColumn(BYTES_FAMILY, qual));
  byte[] actual = result.getValue(BYTES_FAMILY, qual);
  if (Bytes.compareTo(val, actual) != 0) {
    fail("Expected [" + Bytes.toStringBinary(val)
    + "] but got [" + Bytes.toStringBinary(actual) + "]");
  }
}
/** Builds one Put per test key, writing {@code VALUE} at FAMILY:QUALIFIER. */
private List<Put> constructPutRequests() {
  List<Put> requests = new ArrayList<>(KEYS.length);
  for (int i = 0; i < KEYS.length; i++) {
    Put request = new Put(KEYS[i]);
    request.addColumn(BYTES_FAMILY, QUALIFIER, VALUE);
    requests.add(request);
  }
  return requests;
}
/**
 * Verifies that every key in {@code KEYS} is readable from {@code table}
 * with the expected VALUE, retrying up to 10 times (10ms apart) to allow
 * for in-flight region reassignment after a server abort.
 *
 * @param table the table to validate
 * @throws IOException if the batched get fails
 */
private void validateLoadedData(Table table) throws IOException {
  // get the data back and validate that it is correct
  LOG.info("Validating data on " + table);
  List<Get> gets = new ArrayList<>();
  for (byte[] k : KEYS) {
    Get get = new Get(k);
    get.addColumn(BYTES_FAMILY, QUALIFIER);
    gets.add(get);
  }
  int retryNum = 10;
  Result[] results = null;
  do {
    results = table.get(gets);
    boolean finished = true;
    for (Result result : results) {
      if (result.isEmpty()) {
        finished = false;
        break;
      }
    }
    if (finished) {
      break;
    }
    try {
      Thread.sleep(10);
    } catch (InterruptedException e) {
      // Restore the interrupt flag instead of silently swallowing it,
      // so callers can still observe the interruption.
      Thread.currentThread().interrupt();
    }
    retryNum--;
  } while (retryNum > 0);
  if (retryNum == 0) {
    fail("Timeout for validate data");
  }
  // fail() throws, so reaching here means at least one attempt succeeded.
  if (results != null) {
    for (Result r : results) {
      Assert.assertTrue(r.containsColumn(BYTES_FAMILY, QUALIFIER));
      Assert.assertEquals(0, Bytes.compareTo(VALUE, r
        .getValue(BYTES_FAMILY, QUALIFIER)));
    }
    LOG.info("Validating data on " + table + " successfully!");
  }
}
/** Asserts {@code r1} is a non-null, empty {@link Result}. */
private void validateEmpty(Object r1) {
  Result result = (Result) r1;
  Assert.assertNotNull(result);
  Assert.assertTrue(result.isEmpty());
}
/** Asserts there are exactly {@code expectedSize} results, all empty. */
private void validateSizeAndEmpty(Object[] results, int expectedSize) {
  // Validate got back the same number of Result objects, all empty
  Assert.assertEquals(expectedSize, results.length);
  for (int i = 0; i < results.length; i++) {
    validateEmpty(results[i]);
  }
}
/**
 * Master coprocessor used by the tests to observe cluster behavior:
 * {@code start} records that the coprocessor loaded, and
 * {@code postBalanceCount} counts balance runs that produced region plans.
 */
public static class MyMasterObserver implements MasterObserver, MasterCoprocessor {
// Number of postBalance callbacks that carried a non-empty plan list.
private static final AtomicInteger postBalanceCount = new AtomicInteger(0);
// Set to true once the coprocessor is started by the master.
private static final AtomicBoolean start = new AtomicBoolean(false);
@Override
public void start(CoprocessorEnvironment env) throws IOException {
start.set(true);
}
@Override
public Optional<MasterObserver> getMasterObserver() {
return Optional.of(this);
}
@Override
public void postBalance(final ObserverContext<MasterCoprocessorEnvironment> ctx,
BalanceRequest request, List<RegionPlan> plans) throws IOException {
// Only count balance runs that actually planned region moves.
if (!plans.isEmpty()) {
postBalanceCount.incrementAndGet();
}
}
}
}
| |
//========================================================================
//Copyright 2007-2010 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package io.protostuff;
import static io.protostuff.XmlIOFactoryUtil.DEFAULT_INPUT_FACTORY;
import static io.protostuff.XmlIOFactoryUtil.DEFAULT_OUTPUT_FACTORY;
import static javax.xml.stream.XMLStreamConstants.END_ELEMENT;
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
/**
* Utility for the XML serialization/deserialization of messages and objects tied to a schema.
*
* @author David Yu
* @created May 24, 2010
*/
public final class XmlIOUtil
{
// Utility class: private constructor prevents instantiation.
private XmlIOUtil()
{
}
// Encoding and version written in the XML declaration of serialized output,
// and assumed when decoding byte-based input.
public static final String XML_ENCODING = "UTF-8", XML_VERSION = "1.0";
/**
 * Creates an xml pipe reading the whole of the given byte array.
 */
public static Pipe newPipe(byte[] data) throws IOException
{
    return newPipe(new ByteArrayInputStream(data));
}
/**
 * Creates an xml pipe reading {@code length} bytes of the array starting at
 * {@code offset}.
 */
public static Pipe newPipe(byte[] data, int offset, int length) throws IOException
{
    final ByteArrayInputStream in = new ByteArrayInputStream(data, offset, length);
    return newPipe(in);
}
/**
 * Creates an xml pipe from an {@link InputStream}, decoded as UTF-8.
 *
 * @throws XmlInputException if the underlying StAX reader cannot be created
 */
public static Pipe newPipe(InputStream in) throws IOException
{
    final XMLStreamReader parser;
    try
    {
        parser = DEFAULT_INPUT_FACTORY.createXMLStreamReader(in, XML_ENCODING);
    }
    catch (XMLStreamException e)
    {
        throw new XmlInputException(e);
    }
    return newPipe(parser);
}
/**
 * Creates an xml pipe from a character {@link Reader}.
 *
 * @throws XmlInputException if the underlying StAX reader cannot be created
 */
public static Pipe newPipe(Reader reader) throws IOException
{
    final XMLStreamReader parser;
    try
    {
        parser = DEFAULT_INPUT_FACTORY.createXMLStreamReader(reader);
    }
    catch (XMLStreamException e)
    {
        throw new XmlInputException(e);
    }
    return newPipe(parser);
}
/**
 * Creates an xml pipe from an {@link XMLStreamReader}. The returned Pipe
 * consumes the root element named after the schema's message name and closes
 * the parser when the transfer ends (or on cleanup).
 */
public static Pipe newPipe(final XMLStreamReader parser)
{
final XmlInput xmlInput = new XmlInput(parser);
return new Pipe()
{
@Override
protected Input begin(Pipe.Schema<?> pipeSchema) throws IOException
{
// final String simpleName = pipeSchema.wrappedSchema.messageName();
try
{
// The first tag must be the root element matching the message name.
if (parser.nextTag() != START_ELEMENT ||
!pipeSchema.wrappedSchema.messageName().equals(parser.getLocalName()))
{
throw new XmlInputException("Expected token START_ELEMENT: " +
pipeSchema.wrappedSchema.messageName());
}
if (parser.nextTag() == END_ELEMENT)
{
// if(!simpleName.equals(parser.getLocalName()))
// throw new XmlInputException("Expecting token END_ELEMENT: " +
// simpleName);
// empty message; returning null signals "nothing to transfer".
return null;
}
}
catch (XMLStreamException e)
{
throw new XmlInputException(e);
}
return xmlInput;
}
@Override
protected void end(Pipe.Schema<?> pipeSchema, Input input,
boolean cleanupOnly) throws IOException
{
if (cleanupOnly)
{
try
{
parser.close();
}
catch (XMLStreamException e)
{
// ignore
}
return;
}
// A normal end must be paired with the input handed out by begin().
assert input == xmlInput;
// final String simpleName = pipeSchema.wrappedSchema.messageName();
// final String localName = parser.getLocalName();
try
{
parser.close();
}
catch (XMLStreamException e)
{
// end of pipe transfer ... ignore
}
/*
* if(!simpleName.equals(localName)) { throw new XmlInputException("Expecting token END_ELEMENT: " +
* simpleName); }
*/
}
};
}
/**
 * Merges the {@code message} with the whole byte array using the given
 * {@code schema} and the default input factory.
 */
public static <T> void mergeFrom(byte[] data, T message, Schema<T> schema)
{
    mergeFrom(data, 0, data.length, message, schema);
}
/**
 * Merges the {@code message} with a slice of the byte array using the given
 * {@code schema} and the default input factory.
 */
public static <T> void mergeFrom(byte[] data, int offset, int len, T message,
Schema<T> schema)
{
mergeFrom(data, offset, len, message, schema, DEFAULT_INPUT_FACTORY);
}
/**
 * Merges the {@code message} with a slice of the byte array using the given
 * {@code schema} and {@link XMLInputFactory}.
 */
public static <T> void mergeFrom(byte[] data, int offset, int len, T message,
Schema<T> schema, XMLInputFactory inFactory)
{
    try
    {
        // Reading from an in-memory stream; an IOException here is a bug.
        mergeFrom(new ByteArrayInputStream(data, offset, len), message, schema, inFactory);
    }
    catch (IOException e)
    {
        throw new RuntimeException(e);
    }
}
/**
 * Merges the {@code message} from the {@link InputStream} using the given
 * {@code schema} and the default input factory.
 */
public static <T> void mergeFrom(InputStream in, T message, Schema<T> schema)
throws IOException
{
mergeFrom(in, message, schema, DEFAULT_INPUT_FACTORY);
}
/**
 * Merges the {@code message} from the {@link InputStream} (decoded as UTF-8)
 * using the given {@code schema} and {@link XMLInputFactory}.
 *
 * @throws XmlInputException if the StAX reader cannot be created or parsing fails
 */
public static <T> void mergeFrom(InputStream in, T message, Schema<T> schema,
XMLInputFactory inFactory) throws IOException
{
    XMLStreamReader reader = null;
    try
    {
        reader = inFactory.createXMLStreamReader(in, XML_ENCODING);
        mergeFrom(reader, message, schema);
    }
    catch (XMLStreamException e)
    {
        throw new XmlInputException(e);
    }
    finally
    {
        // Close the StAX reader; the caller keeps ownership of the stream.
        if (reader != null)
        {
            try
            {
                reader.close();
            }
            catch (XMLStreamException ignored)
            {
                // best-effort close
            }
        }
    }
}
/**
 * Merges the {@code message} from the {@link Reader} using the given
 * {@code schema} and the default input factory.
 */
public static <T> void mergeFrom(Reader r, T message, Schema<T> schema)
throws IOException
{
mergeFrom(r, message, schema, DEFAULT_INPUT_FACTORY);
}
/**
 * Merges the {@code message} from the {@link Reader} using the given
 * {@code schema} and {@link XMLInputFactory}.
 *
 * @throws XmlInputException if the StAX reader cannot be created or parsing fails
 */
public static <T> void mergeFrom(Reader r, T message, Schema<T> schema,
XMLInputFactory inFactory) throws IOException
{
    XMLStreamReader streamReader = null;
    try
    {
        streamReader = inFactory.createXMLStreamReader(r);
        mergeFrom(streamReader, message, schema);
    }
    catch (XMLStreamException e)
    {
        throw new XmlInputException(e);
    }
    finally
    {
        // Close the StAX reader; the caller keeps ownership of the Reader.
        if (streamReader != null)
        {
            try
            {
                streamReader.close();
            }
            catch (XMLStreamException ignored)
            {
                // best-effort close
            }
        }
    }
}
/**
 * Merges the {@code message} from the {@link XMLStreamReader} using the given
 * {@code schema}. Expects the next tag to be the root element named after
 * the schema's message name; an immediately-closed root means an empty
 * message. The parser is NOT closed here -- callers own it.
 */
public static <T> void mergeFrom(XMLStreamReader parser, T message, Schema<T> schema)
throws IOException, XMLStreamException, XmlInputException
{
// final String simpleName = schema.messageName();
if (parser.nextTag() != START_ELEMENT ||
!schema.messageName().equals(parser.getLocalName()))
{
throw new XmlInputException("Expected token START_ELEMENT: " + schema.messageName());
}
if (parser.nextTag() == END_ELEMENT)
{
// if(!simpleName.equals(parser.getLocalName()))
// throw new XmlInputException("Expecting token END_ELEMENT: " + simpleName);
// empty message;
return;
}
schema.mergeFrom(new XmlInput(parser), message);
// if(!simpleName.equals(parser.getLocalName()))
// throw new XmlInputException("Expecting token END_ELEMENT: " + simpleName);
}
/**
 * Serializes the {@code message} into a byte array using the default output
 * factory.
 */
public static <T> byte[] toByteArray(T message, Schema<T> schema)
{
return toByteArray(message, schema, DEFAULT_OUTPUT_FACTORY);
}
/**
 * Serializes the {@code message} into a byte array using the given
 * {@code schema} and {@link XMLOutputFactory}.
 */
public static <T> byte[] toByteArray(T message, Schema<T> schema,
XMLOutputFactory outFactory)
{
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try
    {
        writeTo(baos, message, schema, outFactory);
    }
    catch (IOException e)
    {
        // In-memory stream: an IOException indicates a bug, not an I/O failure.
        throw new RuntimeException("Serializing to a byte array threw an IOException " +
                "(should never happen).", e);
    }
    return baos.toByteArray();
}
/**
 * Serializes the {@code message} into an {@link OutputStream} using the given
 * {@code schema} and the default output factory.
 */
public static <T> void writeTo(OutputStream out, T message, Schema<T> schema)
throws IOException
{
writeTo(out, message, schema, DEFAULT_OUTPUT_FACTORY);
}
/**
 * Serializes the {@code message} into an {@link OutputStream} (UTF-8) using
 * the given {@code schema} and {@link XMLOutputFactory}, wrapping it in a
 * full XML document (declaration included).
 *
 * @throws XmlOutputException if the StAX writer cannot be created or writing fails
 */
public static <T> void writeTo(OutputStream out, T message, Schema<T> schema,
XMLOutputFactory outFactory) throws IOException
{
    XMLStreamWriter xmlWriter = null;
    try
    {
        xmlWriter = outFactory.createXMLStreamWriter(out, XML_ENCODING);
        xmlWriter.writeStartDocument(XML_ENCODING, XML_VERSION);
        writeTo(xmlWriter, message, schema);
        xmlWriter.writeEndDocument();
        xmlWriter.flush();
    }
    catch (XMLStreamException e)
    {
        throw new XmlOutputException(e);
    }
    finally
    {
        // Close the StAX writer; the caller keeps ownership of the stream.
        if (xmlWriter != null)
        {
            try
            {
                xmlWriter.close();
            }
            catch (XMLStreamException ignored)
            {
                // best-effort close
            }
        }
    }
}
    /**
     * Serializes the {@code message} into a {@link Writer} using the given {@code schema}
     * and the default output factory.
     */
    public static <T> void writeTo(Writer w, T message, Schema<T> schema)
            throws IOException
    {
        writeTo(w, message, schema, DEFAULT_OUTPUT_FACTORY);
    }
/**
* Serializes the {@code message} into a {@link Writer} using the given {@code schema}.
*/
public static <T> void writeTo(Writer w, T message, Schema<T> schema,
XMLOutputFactory outFactory) throws IOException
{
XMLStreamWriter writer = null;
try
{
writer = outFactory.createXMLStreamWriter(w);
writer.writeStartDocument(XML_ENCODING, XML_VERSION);
writeTo(writer, message, schema);
writer.writeEndDocument();
writer.flush();
}
catch (XMLStreamException e)
{
throw new XmlOutputException(e);
}
finally
{
if (writer != null)
{
try
{
writer.close();
}
catch (XMLStreamException e)
{
// ignore
}
}
}
}
    /**
     * Serializes the {@code message} into an {@link XMLStreamWriter} using the given
     * {@code schema}. Only the message's root element (named {@code schema.messageName()})
     * and its fields are written; the caller owns the surrounding document and the
     * writer's lifecycle.
     */
    public static <T> void writeTo(XMLStreamWriter writer, T message, Schema<T> schema)
            throws IOException, XMLStreamException, XmlOutputException
    {
        writer.writeStartElement(schema.messageName());
        schema.writeTo(new XmlOutput(writer, schema), message);
        writer.writeEndElement();
    }
    /**
     * Serializes the {@code messages} into the {@link OutputStream} using the given schema
     * and the default output factory.
     */
    public static <T> void writeListTo(OutputStream out, List<T> messages, Schema<T> schema)
            throws IOException
    {
        writeListTo(out, messages, schema, DEFAULT_OUTPUT_FACTORY);
    }
/**
* Serializes the {@code messages} into the {@link OutputStream} using the given schema.
*/
public static <T> void writeListTo(OutputStream out, List<T> messages, Schema<T> schema,
XMLOutputFactory outFactory) throws IOException
{
XMLStreamWriter writer = null;
try
{
writer = outFactory.createXMLStreamWriter(out, XML_ENCODING);
writer.writeStartDocument(XML_ENCODING, XML_VERSION);
writeListTo(writer, messages, schema);
writer.writeEndDocument();
writer.flush();
}
catch (XMLStreamException e)
{
throw new XmlOutputException(e);
}
finally
{
if (writer != null)
{
try
{
writer.close();
}
catch (XMLStreamException e)
{
// ignore
}
}
}
}
    /**
     * Serializes the {@code messages} into the {@link XMLStreamWriter} using the given schema.
     * The output is a {@code <list>} element containing one child element per message,
     * each named {@code schema.messageName()}.
     */
    public static <T> void writeListTo(XMLStreamWriter writer, List<T> messages, Schema<T> schema)
            throws IOException, XMLStreamException
    {
        writer.writeStartElement("list");
        if (messages.isEmpty())
        {
            // emit an empty <list/> and return early
            writer.writeEndElement();
            return;
        }
        final String simpleName = schema.messageName();
        // a single XmlOutput instance is reused for every message in the list
        final XmlOutput output = new XmlOutput(writer, schema);
        for (T m : messages)
        {
            writer.writeStartElement(simpleName);
            schema.writeTo(output, m);
            writer.writeEndElement();
        }
        writer.writeEndElement();
    }
    /**
     * Parses the {@code messages} from the {@link InputStream} using the given {@code schema}
     * and the default input factory.
     */
    public static <T> List<T> parseListFrom(InputStream in, Schema<T> schema)
            throws IOException
    {
        return parseListFrom(in, schema, DEFAULT_INPUT_FACTORY);
    }
/**
* Parses the {@code messages} from the {@link InputStream} using the given {@code schema}.
*/
public static <T> List<T> parseListFrom(InputStream in, Schema<T> schema,
XMLInputFactory inFactory) throws IOException
{
XMLStreamReader parser = null;
try
{
parser = inFactory.createXMLStreamReader(in);
return parseListFrom(parser, schema);
}
catch (XMLStreamException e)
{
throw new XmlInputException(e);
}
finally
{
if (parser != null)
{
try
{
parser.close();
}
catch (XMLStreamException e)
{
// ignore
}
}
}
}
    /**
     * Parses the {@code messages} from the {@link XMLStreamReader} using the given
     * {@code schema}. Expects a {@code <list>} root element whose children are message
     * elements named {@code schema.messageName()}.
     *
     * @return the parsed messages, possibly empty
     * @throws XmlInputException if the expected elements are not found
     */
    public static <T> List<T> parseListFrom(XMLStreamReader parser, Schema<T> schema)
            throws IOException, XMLStreamException
    {
        if (parser.nextTag() != START_ELEMENT || !"list".equals(parser.getLocalName()))
            throw new XmlInputException("Expected token START_ELEMENT: list");
        // final String simpleName = schema.messageName();
        final ArrayList<T> list = new ArrayList<T>();
        // a single XmlInput instance is reused for every message in the list
        final XmlInput input = new XmlInput(parser);
        for (int tag = parser.nextTag(); tag != END_ELEMENT; tag = parser.nextTag())
        {
            if (tag != START_ELEMENT || !schema.messageName().equals(parser.getLocalName()))
                throw new XmlInputException("Expected token START_ELEMENT: " + schema.messageName());
            final T message = schema.newMessage();
            // an immediately following END_ELEMENT means an empty message element:
            // add the freshly created message with its default field values
            if (parser.nextTag() == END_ELEMENT)
            {
                // if(!simpleName.equals(parser.getLocalName()))
                // throw new XmlInputException("Expecting token END_ELEMENT: " + simpleName);
                // empty message
                list.add(message);
                continue;
            }
            schema.mergeFrom(input, message);
            // if(!simpleName.equals(parser.getLocalName()))
            // throw new XmlInputException("Expecting token END_ELEMENT: " + simpleName);
            list.add(message);
        }
        return list;
    }
}
| |
/*
* Copyright (c) 1996, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.pkcs;
import java.io.*;
import java.security.Key;
import java.security.KeyRep;
import java.security.PrivateKey;
import java.security.KeyFactory;
import java.security.MessageDigest;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Arrays;
import jdk.internal.access.SharedSecrets;
import sun.security.x509.*;
import sun.security.util.*;
/**
* Holds a PKCS#8 key, for example a private key
*
* According to https://tools.ietf.org/html/rfc5958:
*
* OneAsymmetricKey ::= SEQUENCE {
* version Version,
* privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,
* privateKey PrivateKey,
* attributes [0] Attributes OPTIONAL,
* ...,
* [[2: publicKey [1] PublicKey OPTIONAL ]],
* ...
* }
*
* We support this format but do not parse attributes and publicKey now.
*/
public class PKCS8Key implements PrivateKey {
/** use serialVersionUID from JDK 1.1. for interoperability */
@java.io.Serial
private static final long serialVersionUID = -3836890099307167124L;
/* The algorithm information (name, parameters, etc). */
protected AlgorithmId algid;
/* The key bytes, without the algorithm information */
protected byte[] key;
/* The encoded for the key. Created on demand by encode(). */
protected byte[] encodedKey;
/* The version for this key */
private static final int V1 = 0;
private static final int V2 = 1;
    /**
     * Default constructor. Constructors in sub-classes that create a new key
     * from its components require this. These constructors must initialize
     * {@link #algid} and {@link #key}; this class assumes both are set before
     * any encoding-related method is called.
     */
    protected PKCS8Key() { }
    /**
     * Another constructor. Constructors in sub-classes that create a new key
     * from an encoded byte array require this. We do not assign this
     * encoding to {@link #encodedKey} directly.
     *
     * This method is also used by {@link #parseKey} to create a raw key.
     *
     * @param input the DER-encoded OneAsymmetricKey (PKCS#8) value
     * @throws InvalidKeyException if the encoding is malformed
     */
    protected PKCS8Key(byte[] input) throws InvalidKeyException {
        decode(new ByteArrayInputStream(input));
    }
    /**
     * Parses a DER-encoded OneAsymmetricKey (RFC 5958) structure from the stream,
     * populating {@link #algid} and {@link #key}. The optional attributes and the
     * optional public key are tolerated but not retained.
     *
     * @param is the stream holding the DER encoding
     * @throws InvalidKeyException on any structural or I/O problem
     */
    private void decode(InputStream is) throws InvalidKeyException {
        DerValue val = null;
        try {
            val = new DerValue(is);
            if (val.tag != DerValue.tag_Sequence) {
                throw new InvalidKeyException("invalid key format");
            }
            int version = val.data.getInteger();
            if (version != V1 && version != V2) {
                throw new InvalidKeyException("unknown version: " + version);
            }
            algid = AlgorithmId.parse (val.data.getDerValue ());
            key = val.data.getOctetString();
            DerValue next;
            if (val.data.available() == 0) {
                return;
            }
            // [0] attributes OPTIONAL: recognized and skipped without parsing
            next = val.data.getDerValue();
            if (next.isContextSpecific((byte)0)) {
                if (val.data.available() == 0) {
                    return;
                }
                next = val.data.getDerValue();
            }
            // [1] publicKey OPTIONAL: only legal for v2; skipped without parsing
            if (next.isContextSpecific((byte)1)) {
                if (version == V1) {
                    throw new InvalidKeyException("publicKey seen in v1");
                }
                if (val.data.available() == 0) {
                    return;
                }
            }
            // anything left over is not part of the structure
            throw new InvalidKeyException("Extra bytes");
        } catch (IOException e) {
            throw new InvalidKeyException("IOException : " + e.getMessage());
        } finally {
            // wipe the temporary DER buffer that held key material
            if (val != null) {
                val.clear();
            }
        }
    }
    /**
     * Construct a PKCS#8 private key from a DER encoding. If a
     * security provider supports the key algorithm with a specific class,
     * a PrivateKey from the provider is returned. Otherwise, a raw
     * PKCS8Key object is returned.
     *
     * <P>This mechanism guarantees that keys (and algorithms) may be
     * freely manipulated and transferred, without risk of losing
     * information. Also, when a key (or algorithm) needs some special
     * handling, that specific need can be accommodated.
     *
     * @param encoded the DER-encoded PrivateKeyInfo (OneAsymmetricKey) value
     * @exception IOException on data format errors
     */
    public static PrivateKey parseKey(byte[] encoded) throws IOException {
        try {
            PKCS8Key rawKey = new PKCS8Key(encoded);
            byte[] internal = rawKey.getEncodedInternal();
            PKCS8EncodedKeySpec pkcs8KeySpec = new PKCS8EncodedKeySpec(internal);
            PrivateKey result = null;
            try {
                result = KeyFactory.getInstance(rawKey.algid.getName())
                    .generatePrivate(pkcs8KeySpec);
            } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
                // no provider handles this algorithm: fall back to the raw key
                result = rawKey;
            } finally {
                // wipe whichever intermediate copies of the key material are
                // not being returned to the caller
                if (result != rawKey) {
                    rawKey.clear();
                }
                SharedSecrets.getJavaSecuritySpecAccess()
                        .clearEncodedKeySpec(pkcs8KeySpec);
            }
            return result;
        } catch (InvalidKeyException e) {
            throw new IOException("corrupt private key", e);
        }
    }
    /**
     * Returns the standard name of the algorithm to be used with this key,
     * taken from the parsed algorithm identifier.
     */
    public String getAlgorithm() {
        return algid.getName();
    }
    /**
     * Returns the algorithm ID (name plus parameters) to be used with this key.
     */
    public AlgorithmId getAlgorithmId () {
        return algid;
    }
/**
* Returns the DER-encoded form of the key as a byte array,
* or {@code null} if an encoding error occurs.
*/
public byte[] getEncoded() {
byte[] b = getEncodedInternal();
return (b == null) ? null : b.clone();
}
    /**
     * Returns the encoding format name for this key: always "PKCS#8".
     */
    public String getFormat() {
        return "PKCS#8";
    }
    /**
     * DER-encodes this key as a byte array stored inside this object
     * and return it.
     *
     * @return the encoding, or null if there is an I/O error.
     */
    private synchronized byte[] getEncodedInternal() {
        if (encodedKey == null) {
            try {
                DerOutputStream tmp = new DerOutputStream();
                // always re-encode as version 1: attributes and public key
                // are not retained by this class (see decode())
                tmp.putInteger(V1);
                algid.encode(tmp);
                tmp.putOctetString(key);
                DerValue out = DerValue.wrap(DerValue.tag_Sequence, tmp);
                encodedKey = out.toByteArray();
                // wipe the temporary DER value holding key material
                out.clear();
            } catch (IOException e) {
                // encodedKey is still null
            }
        }
        return encodedKey;
    }
    // Serialization proxy: a PKCS8Key is written out as a KeyRep carrying the
    // algorithm, format and DER encoding, not as this object itself.
    @java.io.Serial
    protected Object writeReplace() throws java.io.ObjectStreamException {
        return new KeyRep(KeyRep.Type.PRIVATE,
                        getAlgorithm(),
                        getFormat(),
                        getEncodedInternal());
    }
/**
* We used to serialize a PKCS8Key as itself (instead of a KeyRep).
*/
@java.io.Serial
private void readObject(ObjectInputStream stream) throws IOException {
try {
decode(stream);
} catch (InvalidKeyException e) {
throw new IOException("deserialized key is invalid: " +
e.getMessage());
}
}
    /**
     * Compares two private keys. This returns false if the object with which
     * to compare is not of type <code>Key</code>.
     * Otherwise, the encoding of this key object is compared with the
     * encoding of the given key object.
     *
     * @param object the object with which to compare
     * @return {@code true} if this key has the same encoding as the
     *          object argument; {@code false} otherwise.
     */
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (object instanceof PKCS8Key) {
            // time-constant comparison
            return MessageDigest.isEqual(
                    getEncodedInternal(),
                    ((PKCS8Key)object).getEncodedInternal());
        } else if (object instanceof Key) {
            // time-constant comparison
            byte[] otherEncoded = ((Key)object).getEncoded();
            try {
                return MessageDigest.isEqual(
                        getEncodedInternal(),
                        otherEncoded);
            } finally {
                // wipe the other key's encoding copy before discarding it
                if (otherEncoded != null) {
                    Arrays.fill(otherEncoded, (byte) 0);
                }
            }
        }
        return false;
    }
    /**
     * Calculates a hash code value for this object. Objects
     * which are equal will also have the same hashcode.
     * The hash is derived from the cached DER encoding.
     */
    public int hashCode() {
        return Arrays.hashCode(getEncodedInternal());
    }
public void clear() {
if (encodedKey != null) {
Arrays.fill(encodedKey, (byte)0);
}
Arrays.fill(key, (byte)0);
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.javaeditor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.ResourceBundle;
import org.eclipse.swt.SWTError;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.swt.dnd.ByteArrayTransfer;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.RTFTransfer;
import org.eclipse.swt.dnd.TextTransfer;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.dnd.TransferData;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.text.IRewriteTarget;
import org.eclipse.jface.text.ITextOperationTarget;
import org.eclipse.jface.text.ITextSelection;
import org.eclipse.jface.text.Region;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbenchCommandConstants;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchPartSite;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.progress.IProgressService;
import org.eclipse.ui.progress.IWorkbenchSiteProgressService;
import org.eclipse.ui.texteditor.IAbstractTextEditorHelpContextIds;
import org.eclipse.ui.texteditor.ITextEditor;
import org.eclipse.ui.texteditor.ITextEditorExtension3;
import org.eclipse.ui.texteditor.TextEditorAction;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.ITypeRoot;
import org.eclipse.jdt.core.Signature;
import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.core.dom.IBinding;
import org.eclipse.jdt.core.dom.ITypeBinding;
import org.eclipse.jdt.core.dom.ImportDeclaration;
import org.eclipse.jdt.core.dom.Name;
import org.eclipse.jdt.core.dom.SimpleName;
import org.eclipse.jdt.core.dom.rewrite.ImportRewrite;
import org.eclipse.jdt.internal.corext.codemanipulation.ImportReferencesCollector;
import org.eclipse.jdt.internal.corext.codemanipulation.StubUtility;
import org.eclipse.jdt.internal.corext.dom.Bindings;
import org.eclipse.jdt.internal.corext.util.JavaModelUtil;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.jdt.ui.PreferenceConstants;
import org.eclipse.jdt.ui.SharedASTProvider;
import org.eclipse.jdt.internal.ui.IJavaStatusConstants;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.JavaUIMessages;
/**
* Action for cut/copy and paste with support for adding imports on paste.
*/
public final class ClipboardOperationAction extends TextEditorAction {
	/**
	 * Payload attached to the clipboard on cut/copy: the handle of the element the text
	 * was copied from plus the fully qualified type and static imports referenced by the
	 * copied code, so that a later paste into a different compilation unit can re-add
	 * the required imports.
	 */
	public static class ClipboardData {
		// handle identifier of the Java element the text originated from
		private String fOriginHandle;
		private String[] fTypeImports;
		private String[] fStaticImports;
		/**
		 * Creates the data from its components.
		 */
		public ClipboardData(IJavaElement origin, String[] typeImports, String[] staticImports) {
			Assert.isNotNull(origin);
			Assert.isNotNull(typeImports);
			Assert.isNotNull(staticImports);
			fTypeImports= typeImports;
			fStaticImports= staticImports;
			fOriginHandle= origin.getHandleIdentifier();
		}
		/**
		 * Restores the data from the byte form produced by {@link #serialize()}.
		 */
		public ClipboardData(byte[] bytes) throws IOException {
			DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(bytes));
			try {
				fOriginHandle= dataIn.readUTF();
				fTypeImports= readArray(dataIn);
				fStaticImports= readArray(dataIn);
			} finally {
				dataIn.close();
			}
		}
		// reads a length-prefixed array of UTF strings
		private static String[] readArray(DataInputStream dataIn) throws IOException {
			int count= dataIn.readInt();
			String[] array= new String[count];
			for (int i = 0; i < count; i++) {
				array[i]= dataIn.readUTF();
			}
			return array;
		}
		// writes a length-prefixed array of UTF strings (inverse of readArray)
		private static void writeArray(DataOutputStream dataOut, String[] array) throws IOException {
			dataOut.writeInt(array.length);
			for (int i = 0; i < array.length; i++) {
				dataOut.writeUTF(array[i]);
			}
		}
		public String[] getTypeImports() {
			return fTypeImports;
		}
		public String[] getStaticImports() {
			return fStaticImports;
		}
		/**
		 * Returns whether the clipboard content originated from the given element,
		 * in which case no imports need to be added on paste.
		 */
		public boolean isFromSame(IJavaElement elem) {
			return fOriginHandle.equals(elem.getHandleIdentifier());
		}
		/**
		 * Serializes this data to bytes: the origin handle followed by the two arrays.
		 */
		public byte[] serialize() throws IOException {
			ByteArrayOutputStream out = new ByteArrayOutputStream();
			DataOutputStream dataOut = new DataOutputStream(out);
			try {
				dataOut.writeUTF(fOriginHandle);
				writeArray(dataOut, fTypeImports);
				writeArray(dataOut, fStaticImports);
			} finally {
				dataOut.close();
				out.close();
			}
			return out.toByteArray();
		}
	}
	/**
	 * SWT transfer type used to put the {@link ClipboardData} on the clipboard next to
	 * the plain-text content. The type name includes the current time so each session
	 * registers its own private format.
	 */
	private static class ClipboardTransfer extends ByteArrayTransfer {
		private static final String TYPE_NAME = "source-with-imports-transfer-format" + System.currentTimeMillis(); //$NON-NLS-1$
		private static final int TYPEID = registerType(TYPE_NAME);
		/* (non-Javadoc)
		 * @see org.eclipse.swt.dnd.Transfer#getTypeIds()
		 */
		@Override
		protected int[] getTypeIds() {
			return new int[] { TYPEID };
		}
		/* (non-Javadoc)
		 * @see org.eclipse.swt.dnd.Transfer#getTypeNames()
		 */
		@Override
		protected String[] getTypeNames() {
			return new String[] { TYPE_NAME };
		}
		/* (non-Javadoc)
		 * @see org.eclipse.swt.dnd.Transfer#javaToNative(java.lang.Object, org.eclipse.swt.dnd.TransferData)
		 */
		@Override
		protected void javaToNative(Object data, TransferData transferData) {
			if (data instanceof ClipboardData) {
				try {
					super.javaToNative(((ClipboardData) data).serialize(), transferData);
				} catch (IOException e) {
					//it's best to send nothing if there were problems
				}
			}
		}
		/* (non-Javadoc)
		 * Method declared on Transfer.
		 */
		@Override
		protected Object nativeToJava(TransferData transferData) {
			byte[] bytes = (byte[]) super.nativeToJava(transferData);
			if (bytes != null) {
				try {
					return new ClipboardData(bytes);
				} catch (IOException e) {
					// corrupt or incompatible data: fall through and report no content
				}
			}
			return null;
		}
	}
private static final ClipboardTransfer fgTransferInstance = new ClipboardTransfer();
/** The text operation code */
private int fOperationCode= -1;
/** The text operation target */
private ITextOperationTarget fOperationTarget;
	/**
	 * Creates the action.
	 * @param bundle the resource bundle
	 * @param prefix a prefix to be prepended to the various resource keys
	 *   (described in <code>ResourceAction</code> constructor), or
	 *   <code>null</code> if none
	 * @param editor the text editor
	 * @param operationCode the operation code (must be CUT, COPY or PASTE)
	 */
	public ClipboardOperationAction(ResourceBundle bundle, String prefix, ITextEditor editor, int operationCode) {
		super(bundle, prefix, editor);
		fOperationCode= operationCode;
		// wire up the help context and the workbench command for the chosen operation
		if (operationCode == ITextOperationTarget.CUT) {
			setHelpContextId(IAbstractTextEditorHelpContextIds.CUT_ACTION);
			setActionDefinitionId(IWorkbenchCommandConstants.EDIT_CUT);
		} else if (operationCode == ITextOperationTarget.COPY) {
			setHelpContextId(IAbstractTextEditorHelpContextIds.COPY_ACTION);
			setActionDefinitionId(IWorkbenchCommandConstants.EDIT_COPY);
		} else if (operationCode == ITextOperationTarget.PASTE) {
			setHelpContextId(IAbstractTextEditorHelpContextIds.PASTE_ACTION);
			setActionDefinitionId(IWorkbenchCommandConstants.EDIT_PASTE);
		} else {
			Assert.isTrue(false, "Invalid operation code"); //$NON-NLS-1$
		}
		update();
	}
	// COPY is the only operation that does not modify the document
	private boolean isReadOnlyOperation() {
		return fOperationCode == ITextOperationTarget.COPY;
	}
	/* (non-Javadoc)
	 * @see org.eclipse.jface.action.IAction#run()
	 */
	@Override
	public void run() {
		// not hooked up to an editor/operation target yet
		if (fOperationCode == -1 || fOperationTarget == null)
			return;
		ITextEditor editor= getTextEditor();
		if (editor == null)
			return;
		// cut/paste modify the document: bail out if the input cannot be edited
		if (!isReadOnlyOperation() && !validateEditorInputState())
			return;
		// show the busy cursor while the (possibly import-aware) operation runs
		BusyIndicator.showWhile(getDisplay(), new Runnable() {
			public void run() {
				internalDoOperation();
			}
		});
	}
private Shell getShell() {
ITextEditor editor= getTextEditor();
if (editor != null) {
IWorkbenchPartSite site= editor.getSite();
Shell shell= site.getShell();
if (shell != null && !shell.isDisposed()) {
return shell;
}
}
return null;
}
private Display getDisplay() {
Shell shell= getShell();
if (shell != null) {
return shell.getDisplay();
}
return null;
}
	/**
	 * Returns whether the Smart Insert Mode is selected.
	 *
	 * @return <code>true</code> if the Smart Insert Mode is selected
	 * @since 3.7
	 */
	private boolean isSmartInsertMode() {
		IWorkbenchPage page= JavaPlugin.getActivePage();
		if (page != null) {
			IEditorPart part= page.getActiveEditor();
			if (part instanceof ITextEditorExtension3) {
				ITextEditorExtension3 extension= (ITextEditorExtension3)part;
				return extension.getInsertMode() == ITextEditorExtension3.SMART_INSERT;
			} else if (part != null && EditorUtility.isCompareEditorInput(part.getEditorInput())) {
				// compare editors don't implement the extension directly; ask via adapter
				ITextEditorExtension3 extension= (ITextEditorExtension3)part.getAdapter(ITextEditorExtension3.class);
				if (extension != null)
					return extension.getInsertMode() == ITextEditorExtension3.SMART_INSERT;
			}
		}
		return false;
	}
	/**
	 * Performs the clipboard operation, routing to the import-aware variants
	 * only when the "imports on paste" preference is on and the editor is in
	 * smart insert mode; otherwise delegates to the plain text operation.
	 */
	protected final void internalDoOperation() {
		if (PreferenceConstants.getPreferenceStore().getBoolean(PreferenceConstants.EDITOR_IMPORTS_ON_PASTE) && isSmartInsertMode()) {
			if (fOperationCode == ITextOperationTarget.PASTE) {
				doPasteWithImportsOperation();
			} else {
				doCutCopyWithImportsOperation();
			}
		} else {
			fOperationTarget.doOperation(fOperationCode);
		}
	}
	/* (non-Javadoc)
	 * @see org.eclipse.ui.texteditor.IUpdate#update()
	 */
	@Override
	public void update() {
		super.update();
		// cut/paste require a modifiable editor input
		if (!isReadOnlyOperation() && !canModifyEditor()) {
			setEnabled(false);
			return;
		}
		ITextEditor editor= getTextEditor();
		// lazily resolve the operation target from the current editor
		if (fOperationTarget == null && editor!= null && fOperationCode != -1)
			fOperationTarget= (ITextOperationTarget) editor.getAdapter(ITextOperationTarget.class);
		boolean isEnabled= (fOperationTarget != null && fOperationTarget.canDoOperation(fOperationCode));
		setEnabled(isEnabled);
	}
	/* (non-Javadoc)
	 * @see org.eclipse.ui.texteditor.TextEditorAction#setEditor(org.eclipse.ui.texteditor.ITextEditor)
	 */
	@Override
	public void setEditor(ITextEditor editor) {
		super.setEditor(editor);
		// drop the cached target so it is re-resolved for the new editor
		fOperationTarget= null;
	}
	/**
	 * Performs cut/copy and, when the selection is non-trivial, additionally puts a
	 * {@link ClipboardData} record with the referenced imports on the clipboard so a
	 * later paste can re-create them.
	 */
	private void doCutCopyWithImportsOperation() {
		ITextEditor editor= getTextEditor();
		ITypeRoot inputElement= JavaUI.getEditorInputTypeRoot(editor.getEditorInput());
		ISelection selection= editor.getSelectionProvider().getSelection();
		Object clipboardData= null;
		if (inputElement != null && selection instanceof ITextSelection && !selection.isEmpty()) {
			ITextSelection textSelection= (ITextSelection) selection;
			if (isNonTrivialSelection(textSelection)) {
				clipboardData= getClipboardData(inputElement, textSelection.getOffset(), textSelection.getLength());
			}
		}
		// let the widget perform the actual cut/copy first (it sets text + RTF)
		fOperationTarget.doOperation(fOperationCode);
		if (clipboardData != null) {
			/*
			 * We currently make assumptions about what the styled text widget sets,
			 * see https://bugs.eclipse.org/bugs/show_bug.cgi?id=61876
			 */
			Clipboard clipboard= new Clipboard(getDisplay());
			try {
				Object textData= clipboard.getContents(TextTransfer.getInstance());
				/*
				 * Don't add if we didn't get any text data from the clipboard, see:
				 * - https://bugs.eclipse.org/bugs/show_bug.cgi?id=70077
				 * - https://bugs.eclipse.org/bugs/show_bug.cgi?id=200743
				 */
				if (textData == null)
					return;
				// re-set the clipboard with text (+RTF if present) plus our import data
				ArrayList<Object> datas= new ArrayList<Object>(3);
				ArrayList<ByteArrayTransfer> transfers= new ArrayList<ByteArrayTransfer>(3);
				datas.add(textData);
				transfers.add(TextTransfer.getInstance());
				Object rtfData= clipboard.getContents(RTFTransfer.getInstance());
				if (rtfData != null) {
					datas.add(rtfData);
					transfers.add(RTFTransfer.getInstance());
				}
				datas.add(clipboardData);
				transfers.add(fgTransferInstance);
				Transfer[] dataTypes= transfers.toArray(new Transfer[transfers.size()]);
				Object[] data= datas.toArray();
				setClipboardContents(clipboard, data, dataTypes);
			} finally {
				clipboard.dispose();
			}
		}
	}
	/**
	 * Sets the clipboard contents, tolerating the clipboard being temporarily
	 * locked by another application.
	 */
	private void setClipboardContents(Clipboard clipboard, Object[] datas, Transfer[] transfers) {
		try {
			clipboard.setContents(datas, transfers);
		} catch (SWTError e) {
			if (e.code != DND.ERROR_CANNOT_SET_CLIPBOARD) {
				throw e;
			}
			// silently fail. see e.g. https://bugs.eclipse.org/bugs/show_bug.cgi?id=65975
		}
	}
private boolean isNonTrivialSelection(ITextSelection selection) {
if (selection.getLength() < 30) {
String text= selection.getText();
if (text != null) {
for (int i= 0; i < text.length(); i++) {
if (!Character.isJavaIdentifierPart(text.charAt(i))) {
return true;
}
}
}
return false;
}
return true;
}
	/**
	 * Computes the {@link ClipboardData} for the selected region: the qualified names of
	 * the types and the static members referenced inside it. Returns <code>null</code>
	 * when no AST is available, the selection overlaps the import/package area, or no
	 * imports are needed.
	 */
	private ClipboardData getClipboardData(ITypeRoot inputElement, int offset, int length) {
		CompilationUnit astRoot= SharedASTProvider.getAST(inputElement, SharedASTProvider.WAIT_ACTIVE_ONLY, null);
		if (astRoot == null) {
			return null;
		}
		// do process import if selection spans over import declaration or package
		List<ImportDeclaration> list= astRoot.imports();
		if (!list.isEmpty()) {
			if (offset < ((ASTNode) list.get(list.size() - 1)).getStartPosition()) {
				return null;
			}
		} else if (astRoot.getPackage() != null) {
			if (offset < ((ASTNode) astRoot.getPackage()).getStartPosition()) {
				return null;
			}
		}
		// collect the simple names in the region that would need an import
		ArrayList<SimpleName> typeImportsRefs= new ArrayList<SimpleName>();
		ArrayList<SimpleName> staticImportsRefs= new ArrayList<SimpleName>();
		ImportReferencesCollector.collect(astRoot, inputElement.getJavaProject(), new Region(offset, length), typeImportsRefs, staticImportsRefs);
		if (typeImportsRefs.isEmpty() && staticImportsRefs.isEmpty()) {
			return null;
		}
		// resolve type references to raw qualified names
		HashSet<String> namesToImport= new HashSet<String>(typeImportsRefs.size());
		for (int i= 0; i < typeImportsRefs.size(); i++) {
			Name curr= typeImportsRefs.get(i);
			IBinding binding= curr.resolveBinding();
			if (binding != null && binding.getKind() == IBinding.TYPE) {
				ITypeBinding typeBinding= (ITypeBinding) binding;
				if (typeBinding.isArray()) {
					typeBinding= typeBinding.getElementType();
				}
				if (typeBinding.isTypeVariable() || typeBinding.isCapture() || typeBinding.isWildcardType()) { // can be removed when bug 98473 is fixed
					continue;
				}
				if (typeBinding.isMember() || typeBinding.isTopLevel()) {
					String name= Bindings.getRawQualifiedName(typeBinding);
					if (name.length() > 0) {
						namesToImport.add(name);
					}
				}
			}
		}
		// resolve static references; methods are marked with a trailing "()"
		HashSet<String> staticsToImport= new HashSet<String>(staticImportsRefs.size());
		for (int i= 0; i < staticImportsRefs.size(); i++) {
			Name curr= staticImportsRefs.get(i);
			IBinding binding= curr.resolveBinding();
			if (binding != null) {
				StringBuffer buf= new StringBuffer(Bindings.getImportName(binding));
				if (binding.getKind() == IBinding.METHOD) {
					buf.append("()"); //$NON-NLS-1$
				}
				staticsToImport.add(buf.toString());
			}
		}
		if (namesToImport.isEmpty() && staticsToImport.isEmpty()) {
			return null;
		}
		String[] typeImports= namesToImport.toArray(new String[namesToImport.size()]);
		String[] staticImports= staticsToImport.toArray(new String[staticsToImport.size()]);
		return new ClipboardData(inputElement, typeImports, staticImports);
	}
	/**
	 * Performs paste and, when the clipboard carries {@link ClipboardData} from a
	 * different compilation unit, adds the recorded imports as part of the same
	 * compound (undoable) change.
	 */
	private void doPasteWithImportsOperation() {
		ITextEditor editor= getTextEditor();
		IJavaElement inputElement= JavaUI.getEditorInputTypeRoot(editor.getEditorInput());
		Clipboard clipboard= new Clipboard(getDisplay());
		try {
			ClipboardData importsData= (ClipboardData)clipboard.getContents(fgTransferInstance);
			// skip import handling when pasting back into the origin unit
			if (importsData != null && inputElement instanceof ICompilationUnit && !importsData.isFromSame(inputElement)) {
				// combine operation and adding of imports
				IRewriteTarget target= (IRewriteTarget)editor.getAdapter(IRewriteTarget.class);
				if (target != null) {
					target.beginCompoundChange();
				}
				try {
					fOperationTarget.doOperation(fOperationCode);
					addImports((ICompilationUnit)inputElement, importsData);
				} catch (CoreException e) {
					JavaPlugin.log(e);
				} finally {
					if (target != null) {
						target.endCompoundChange();
					}
				}
			} else {
				fOperationTarget.doOperation(fOperationCode);
			}
		} finally {
			clipboard.dispose();
		}
	}
	/**
	 * Adds the type and static imports recorded in {@code data} to the given
	 * compilation unit, applying the rewrite under the busy cursor.
	 *
	 * @throws CoreException if applying the import rewrite fails
	 */
	private void addImports(final ICompilationUnit unit, ClipboardData data) throws CoreException {
		final ImportRewrite rewrite= StubUtility.createImportRewrite(unit, true);
		String[] imports= data.getTypeImports();
		for (int i= 0; i < imports.length; i++) {
			rewrite.addImport(imports[i]);
		}
		String[] staticImports= data.getStaticImports();
		for (int i= 0; i < staticImports.length; i++) {
			String name= Signature.getSimpleName(staticImports[i]);
			// a trailing "()" marks a method import (see getClipboardData)
			boolean isField= !name.endsWith("()"); //$NON-NLS-1$
			if (!isField) {
				name= name.substring(0, name.length() - 2);
			}
			String qualifier= Signature.getQualifier(staticImports[i]);
			rewrite.addStaticImport(qualifier, name, isField);
		}
		try {
			getProgressService().busyCursorWhile(new IRunnableWithProgress() {
				public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
					try {
						JavaModelUtil.applyEdit(unit, rewrite.rewriteImports(monitor), false, null);
					} catch (CoreException e) {
						throw new InvocationTargetException(e);
					}
				}
			});
		} catch (InvocationTargetException e) {
			// unwrap a CoreException thrown inside the runnable
			Throwable cause= e.getCause();
			if (cause instanceof CoreException)
				throw (CoreException) cause;
			throw new CoreException(new Status(IStatus.ERROR, JavaUI.ID_PLUGIN, IJavaStatusConstants.INTERNAL_ERROR, JavaUIMessages.JavaPlugin_internal_error, cause));
		} catch (InterruptedException e) {
			// Canceled by the user
		}
	}
private IProgressService getProgressService() {
IEditorPart editor= getTextEditor();
if (editor != null) {
IWorkbenchPartSite site= editor.getSite();
if (site != null)
return (IWorkbenchSiteProgressService) editor.getSite().getAdapter(IWorkbenchSiteProgressService.class);
}
return PlatformUI.getWorkbench().getProgressService();
}
}
| |
/*
* Copyright 2017-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.registry.service;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.function.BiFunction;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.dataflow.audit.service.AuditRecordService;
import org.springframework.cloud.dataflow.audit.service.AuditServiceUtils;
import org.springframework.cloud.dataflow.core.AppRegistration;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.core.AuditActionType;
import org.springframework.cloud.dataflow.core.AuditOperationType;
import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository;
import org.springframework.cloud.dataflow.registry.support.AppResourceCommon;
import org.springframework.cloud.dataflow.registry.support.NoSuchAppRegistrationException;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
/**
* Convenience wrapper for the {@link AppRegistryService} that operates on higher level
* {@link DefaultAppRegistryService} objects and supports on-demand loading of
* {@link Resource}s.
* <p>
* Stores AppRegistration with up to two keys:
* </p>
* <ul>
* <li>{@literal <type>.<name>}: URI for the actual app</li>
* <li>{@literal <type>.<name>.metadata}: Optional URI for the app metadata</li>
* </ul>
*
* @author Mark Fisher
* @author Gunnar Hillert
* @author Thomas Risberg
* @author Eric Bottard
* @author Ilayaperumal Gopinathan
* @author Oleg Zhurakousky
* @author Christian Tzolov
* @author Chris Schaefer
*/
@Transactional
public class DefaultAppRegistryService implements AppRegistryService {
public static final String METADATA_KEY_SUFFIX = "metadata";
protected static final Logger logger = LoggerFactory.getLogger(DefaultAppRegistryService.class);
private final AppRegistrationRepository appRegistrationRepository;
private AppResourceCommon appResourceCommon;
protected final AuditRecordService auditRecordService;
protected final AuditServiceUtils auditServiceUtils;
/**
 * Creates the registry service.
 *
 * @param appRegistrationRepository repository handling all {@link AppRegistration} persistence
 * @param appResourceCommon resolves app/metadata URIs to {@link Resource}s and versions
 * @param auditRecordService records audit entries for registry mutations
 */
public DefaultAppRegistryService(AppRegistrationRepository appRegistrationRepository,
AppResourceCommon appResourceCommon, AuditRecordService auditRecordService) {
Assert.notNull(appResourceCommon, "'appResourceCommon' must not be null");
Assert.notNull(appRegistrationRepository, "'appRegistrationRepository' must not be null");
Assert.notNull(auditRecordService, "'auditRecordService' must not be null");
this.appResourceCommon = appResourceCommon;
this.appRegistrationRepository = appRegistrationRepository;
this.auditRecordService = auditRecordService;
this.auditServiceUtils = new AuditServiceUtils();
}
/** Returns the default-version registration for the given name/type (null if none). */
@Override
public AppRegistration find(String name, ApplicationType type) {
return this.getDefaultApp(name, type);
}
/** Returns the registration matching name, type and exact version (null if none). */
@Override
public AppRegistration find(String name, ApplicationType type, String version) {
return this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(name, type, version);
}
/** Returns the registration flagged as the default version for name/type (null if none). */
@Override
public AppRegistration getDefaultApp(String name, ApplicationType type) {
return this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndDefaultVersionIsTrue(name, type);
}
/**
 * Verifies that a new (uri, version) pair is a mere version bump of the
 * existing registration: both URIs must be identical once their respective
 * version substrings are stripped.
 *
 * @throws IllegalArgumentException if the URIs differ by more than the version
 */
@Override
public void validate(AppRegistration registration, String uri, String version) {
if (registration != null && StringUtils.hasText(version)) {
String defaultAppUri = registration.getUri().toString();
// Strip each URI's own version before comparing the remainders.
String defaultAppUriNoVersion = removeLastMatch(defaultAppUri, registration.getVersion());
String newAppUriNoVersion = removeLastMatch(uri, version);
if (!ObjectUtils.nullSafeEquals(defaultAppUriNoVersion, newAppUriNoVersion)) {
throw new IllegalArgumentException("Existing default application [" + defaultAppUri
+ "] can only differ by a version but is [" + uri + "]");
}
}
}
/**
 * Removes the last occurrence of {@code match} from {@code original}.
 * If {@code match} does not occur, {@code original} is returned unchanged
 * (the previous implementation threw StringIndexOutOfBoundsException because
 * it used lastIndexOf's -1 result as a substring index).
 *
 * @param original string to strip from
 * @param match substring to remove (last occurrence only)
 * @return original without its last occurrence of match
 */
private static String removeLastMatch(String original, String match) {
    int start = original.lastIndexOf(match);
    if (start < 0) {
        return original;
    }
    return original.substring(0, start) + original.substring(start + match.length());
}
/**
 * Makes the given version the default for name/type: clears the flag on the
 * previous default (if any), sets it on the requested version, and writes an
 * UPDATE audit record.
 *
 * @throws NoSuchAppRegistrationException if no registration exists for the requested version
 */
@Override
public void setDefaultApp(String name, ApplicationType type, String version) {
AppRegistration oldDefault = this.appRegistrationRepository
.findAppRegistrationByNameAndTypeAndDefaultVersionIsTrue(name, type);
if (oldDefault != null) {
oldDefault.setDefaultVersion(false);
this.appRegistrationRepository.save(oldDefault);
}
AppRegistration newDefault = this.appRegistrationRepository
.findAppRegistrationByNameAndTypeAndVersion(name, type, version);
if (newDefault == null) {
throw new NoSuchAppRegistrationException(name, type, version);
}
newDefault.setDefaultVersion(true);
this.appRegistrationRepository.save(newDefault);
this.auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.APP_REGISTRATION,
AuditActionType.UPDATE, newDefault.getName(),
this.auditServiceUtils.convertAppRegistrationToAuditData(newDefault), null);
}
/** Returns every registration in the repository (all versions, unpaged). */
@Override
public List<AppRegistration> findAll() {
return this.appRegistrationRepository.findAll();
}
/**
 * Pages registrations filtered by the optional type and optional name
 * substring (case-insensitive). A blank name or null type means "no filter".
 */
@Override
public Page<AppRegistration> findAllByTypeAndNameIsLike(ApplicationType type, String name, Pageable pageable) {
    boolean nameGiven = StringUtils.hasText(name);
    // Dispatch straight to the repository query matching the supplied filters.
    if (type == null) {
        return nameGiven
                ? this.appRegistrationRepository.findAllByNameContainingIgnoreCase(name, pageable)
                : this.appRegistrationRepository.findAll(pageable);
    }
    return nameGiven
            ? this.appRegistrationRepository.findAllByTypeAndNameContainingIgnoreCase(type, name, pageable)
            : this.appRegistrationRepository.findAllByType(type, pageable);
}
/**
 * Pages default-version registrations filtered by optional type and optional
 * case-insensitive name substring, then back-fills each result's
 * {@code versions} set with every registered version of the same name/type.
 */
@Override
public Page<AppRegistration> findAllByTypeAndNameIsLikeAndDefaultVersionIsTrue(ApplicationType type, String name,
Pageable pageable) {
    Page<AppRegistration> result;
    if (!StringUtils.hasText(name) && type == null) {
        result = this.appRegistrationRepository.findAllByDefaultVersionIsTrue(pageable);
    }
    else if (StringUtils.hasText(name) && type == null) {
        result = this.appRegistrationRepository.findAllByNameContainingIgnoreCaseAndDefaultVersionIsTrue(name,
                pageable);
    }
    else if (StringUtils.hasText(name)) {
        result = this.appRegistrationRepository
                .findAllByTypeAndNameContainingIgnoreCaseAndDefaultVersionIsTrue(type, name, pageable);
    }
    else {
        result = this.appRegistrationRepository.findAllByTypeAndDefaultVersionIsTrue(type, pageable);
    }
    // Hoisted loop-invariant: the original called this.findAll() (a full
    // repository query) once per element of the page, i.e. O(page * all)
    // database round-trips. One fetch is sufficient and behavior-identical.
    List<AppRegistration> allRegistrations = this.findAll();
    for (AppRegistration pagedAppRegistration : result.getContent()) {
        for (AppRegistration appRegistration : allRegistrations) {
            if (pagedAppRegistration.getName().equals(appRegistration.getName())
                    && pagedAppRegistration.getType().equals(appRegistration.getType())) {
                if (pagedAppRegistration.getVersions() == null) {
                    HashSet<String> versions = new HashSet<>();
                    versions.add(appRegistration.getVersion());
                    pagedAppRegistration.setVersions(versions);
                }
                else {
                    pagedAppRegistration.getVersions().add(appRegistration.getVersion());
                }
            }
        }
    }
    return result;
}
/** Returns one page of all registrations (all versions). */
@Override
public Page<AppRegistration> findAll(Pageable pageable) {
return this.appRegistrationRepository.findAll(pageable);
}
/** Convenience overload: builds an {@link AppRegistration} and delegates to {@link #save(AppRegistration)}. */
@Override
public AppRegistration save(String name, ApplicationType type, String version, URI uri, URI metadataUri) {
return this.save(new AppRegistration(name, type, version, uri, metadataUri));
}
/**
 * Creates or updates a registration. An existing (name, type, version) entry
 * has its URIs overwritten (audit: UPDATE); a new entry is inserted and
 * becomes the default version when no default exists yet (audit: CREATE).
 *
 * @param app registration to persist
 * @return the saved entity as returned by the repository
 */
@Override
public AppRegistration save(AppRegistration app) {
AppRegistration createdApp;
AppRegistration appRegistration = this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(
app.getName(), app.getType(), app.getVersion());
if (appRegistration != null) {
// Same name/type/version already registered: update its URIs in place.
appRegistration.setUri(app.getUri());
appRegistration.setMetadataUri(app.getMetadataUri());
createdApp = this.appRegistrationRepository.save(appRegistration);
populateAuditData(AuditActionType.UPDATE, createdApp);
}
else {
// First version registered for this name/type becomes the default.
if (getDefaultApp(app.getName(), app.getType()) == null) {
app.setDefaultVersion(true);
}
createdApp = this.appRegistrationRepository.save(app);
populateAuditData(AuditActionType.CREATE, createdApp);
}
return createdApp;
}
/**
 * Writes an audit record for a registry mutation; logs an error instead when
 * the repository returned null (registration was not persisted).
 */
private void populateAuditData(AuditActionType auditActionType, AppRegistration appRegistration) {
if (appRegistration == null) {
logger.error("App registration failed, app not saved into database!");
}
else {
this.auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.APP_REGISTRATION,
auditActionType, appRegistration.getName(),
this.auditServiceUtils.convertAppRegistrationToAuditData(appRegistration), null);
}
}
/**
 * Deletes an {@link AppRegistration}. If the {@link AppRegistration} does not exist, a
 * {@link NoSuchAppRegistrationException} will be thrown.
 *
 * @param name Name of the AppRegistration to delete
 * @param type Type of the AppRegistration to delete
 * @param version Version of the AppRegistration to delete
 */
public void delete(String name, ApplicationType type, String version) {
this.appRegistrationRepository.deleteAppRegistrationByNameAndTypeAndVersion(name, type, version);
// Audit entry is built from the key fields only; URIs are intentionally empty
// because the deleted entity is no longer available here.
populateAuditData(AuditActionType.DELETE,
new AppRegistration(name, type, version, URI.create(""), URI.create("")));
}
/** Bulk-deletes the given registrations. NOTE(review): unlike delete(), no audit records are written here. */
@Override
public void deleteAll(Iterable<AppRegistration> appRegistrations) {
this.appRegistrationRepository.deleteAll(appRegistrations);
}
/**
 * Returns true when the app may be written: either overwrite was requested or
 * no registration exists yet for its (name, type, version).
 */
protected boolean isOverwrite(AppRegistration app, boolean overwrite) {
return overwrite || this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(app.getName(),
app.getType(), app.getVersion()) == null;
}
/** True when a default-version registration exists for name/type. */
@Override
public boolean appExist(String name, ApplicationType type) {
return getDefaultApp(name, type) != null;
}
/** True when a registration exists for the exact name/type/version. */
@Override
public boolean appExist(String name, ApplicationType type, String version) {
return find(name, type, version) != null;
}
/** Resolves the registration's app URI to a loadable {@link Resource}. */
@Override
public Resource getAppResource(AppRegistration appRegistration) {
return this.appResourceCommon.getResource(appRegistration.getUri().toString());
}
/** Resolves the metadata {@link Resource}; falls back per AppResourceCommon when metadataUri is absent. */
@Override
public Resource getAppMetadataResource(AppRegistration appRegistration) {
return this.appResourceCommon.getMetadataResource(appRegistration.getUri(), appRegistration.getMetadataUri());
}
/** Extracts the version embedded in the resource's URI/coordinates. */
@Override
public String getResourceVersion(Resource resource) {
return this.appResourceCommon.getResourceVersion(resource);
}
/** Returns the resource's URI string with its version component stripped. */
@Override
public String getResourceWithoutVersion(Resource resource) {
return this.appResourceCommon.getResourceWithoutVersion(resource);
}
/**
 * Returns the version for the given resource URI string.
 *
 * @param uriString String representation of the resource URI
 * @return the resource version
 */
@Override
public String getResourceVersion(String uriString) {
// Resolve the URI to a Resource first so all URI schemes share one version-parsing path.
return this.getResourceVersion(this.appResourceCommon.getResource(uriString));
}
/** Pages registrations filtered by type, name pattern, version and default-version flag. */
@Override
public Page<AppRegistration> findAllByTypeAndNameIsLikeAndVersionAndDefaultVersion(ApplicationType type,
String name, String version, boolean defaultVersion, Pageable pageable) {
return appRegistrationRepository.findAllByTypeAndNameIsLikeAndVersionAndDefaultVersion(type, name, version,
defaultVersion, pageable);
}
/**
 * Loads the resource as java.util.Properties.
 *
 * @throws RuntimeException wrapping the IOException when the resource cannot be read
 */
protected Properties loadProperties(Resource resource) {
try {
return PropertiesLoaderUtils.loadProperties(resource);
}
catch (IOException e) {
throw new RuntimeException("Error reading from " + resource.getDescription(), e);
}
}
/**
 * Logs a warning for a missing/partial URI but never rejects it — the URI is
 * always returned so registration proceeds (best-effort validation only).
 *
 * @param key app key being registered, used in the log message
 * @param uri candidate URI, possibly null
 * @return the same uri, unchanged
 */
protected URI warnOnMalformedURI(String key, URI uri) {
if (StringUtils.isEmpty(uri)) {
logger.warn(String.format("Error when registering '%s': URI is required", key));
}
else if (!StringUtils.hasText(uri.getScheme())) {
logger.warn(
String.format("Error when registering '%s' with URI %s: URI scheme must be specified", key, uri));
}
else if (!StringUtils.hasText(uri.getSchemeSpecificPart())) {
logger.warn(String.format("Error when registering '%s' with URI %s: URI scheme-specific part must be " +
"specified", key, uri));
}
return uri;
}
/**
 * Imports app registrations from property-style resources
 * ({@literal <type>.<name>[.metadata]=<uri>} per line), merging app URI and
 * metadata URI lines into single registrations, then persisting those allowed
 * by the overwrite policy.
 *
 * @param overwrite when false, existing (name, type, version) entries are left untouched
 * @param resources one or more line-oriented resources to import
 * @return the registrations actually saved
 */
@Override
public List<AppRegistration> importAll(boolean overwrite, Resource... resources) {
List<AppRegistration> registrations = new ArrayList<>();
Stream.of(resources)
// parallel takes effect if multiple resources
.parallel()
// take lines
.flatMap(this::resourceAsLines)
// take valid splitted lines
.flatMap(this::splitValidLines)
// reduce to AppRegistration map key'd by <type><name><version>
.reduce(new HashMap<String, AppRegistration>(), reduceToAppRegistrations(), (left, right) -> {
// combiner is used if multiple resources caused parallel stream,
// then just let last processed resource to override.
left.putAll(right);
return left;
})
// don't care about keys anymore
.values()
// back to stream
.stream()
// drop registration if it doesn't have main uri as user only had metadata
.filter(ar -> ar.getUri() != null)
// filter by overriding, save to repo and collect updated registrations
.filter(ar -> isOverwrite(ar, overwrite))
.map(ar -> {
// NOTE(review): results are collected via the side-effecting list rather
// than the (discarded) collect() result; kept as-is to preserve behavior.
save(ar);
registrations.add(ar);
return ar;
}).collect(Collectors.toList());
return registrations;
}
/**
 * Accumulator for {@link #importAll}: folds one parsed {@code [key, uri]}
 * line into the map of in-progress registrations keyed by
 * {@literal <type><name><version>}, so a later {@literal .metadata} line joins
 * the registration created by its app-URI line (and vice versa).
 *
 * @throws IllegalArgumentException on a malformed key or unparsable URI
 */
private BiFunction<HashMap<String, AppRegistration>, ? super String[], HashMap<String, AppRegistration>> reduceToAppRegistrations() {
return (map, lineSplit) -> {
String[] typeName = lineSplit[0].split("\\.");
if (typeName.length < 2 || typeName.length > 3) {
throw new IllegalArgumentException("Invalid format for app key '" + lineSplit[0]
+ "'in file. Must be <type>.<name> or <type>.<name>.metadata");
}
String type = typeName[0].trim();
String name = typeName[1].trim();
String version = getResourceVersion(lineSplit[1]);
// This is now versioned key
String key = type + name + version;
// A metadata-only URI may resolve to version "latest"; reuse the entry
// created under the "latest" key so both URIs land on one registration.
if (!map.containsKey(key) && map.containsKey(type + name + "latest")) {
key = type + name + "latest";
}
AppRegistration ar = map.getOrDefault(key, new AppRegistration());
ar.setName(name);
ar.setType(ApplicationType.valueOf(type));
ar.setVersion(version);
if (typeName.length == 2) {
// normal app uri
try {
ar.setUri(new URI(lineSplit[1]));
warnOnMalformedURI(lineSplit[0], ar.getUri());
}
catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
else if (typeName.length == 3) {
// metadata app uri
try {
ar.setMetadataUri(new URI(lineSplit[1]));
warnOnMalformedURI(lineSplit[0], ar.getMetadataUri());
}
catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
map.put(key, ar);
return map;
};
}
/**
 * Streams the resource's lines.
 * NOTE(review): the BufferedReader is never closed — BufferedReader.lines() is
 * lazy, so closing here would break the stream; the underlying stream leaks
 * until GC. Consider Stream.onClose + closing at the terminal operation.
 *
 * @throws RuntimeException when the resource cannot be opened
 */
private Stream<String> resourceAsLines(Resource resource) {
try {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(resource.getInputStream()));
return bufferedReader.lines();
}
catch (Exception e) {
throw new RuntimeException("Error reading from " + resource.getDescription(), e);
}
}
/**
 * Splits one input line into a trimmed {@code [key, value]} pair; yields an
 * empty stream for comment lines and lines that do not split into exactly two
 * parts.
 * NOTE(review): split("=") with no limit rejects any line whose value itself
 * contains '=' (e.g. a URI with query parameters); split("=", 2) would accept
 * those — confirm intended input format before changing.
 */
private Stream<String[]> splitValidLines(String line) {
// split to key/value, filter out non valid lines and trim key and value.
return Stream.of(line)
.filter(skipCommentLines())
.map(l -> l.split("="))
.filter(split -> split.length == 2)
.map(split -> new String[] { split[0].trim(), split[1].trim() });
}
/**
 * Predicate that keeps only lines worth parsing: non-null, non-blank, and not
 * a comment ('#' or '/' prefix).
 *
 * Bug fix: the original condition used '||' —
 * {@code (!starts("#") || !starts("/"))} — which is true for every line (no
 * line starts with both characters), so comment lines were never skipped and
 * could reach the key/value parser. De Morgan: "not (# or /)" requires '&&'.
 */
private Predicate<String> skipCommentLines() {
    // skipping obvious lines which we don't even try to parse
    return line -> line != null
            && StringUtils.hasText(line)
            && !line.startsWith("#")
            && !line.startsWith("/");
}
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2012, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.keycloak.testsuite.forms;
import org.junit.*;
import org.keycloak.events.Details;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.PasswordPolicy;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.testsuite.AssertEvents;
import org.keycloak.testsuite.OAuthClient;
import org.keycloak.testsuite.pages.AppPage;
import org.keycloak.testsuite.pages.AppPage.RequestType;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.pages.RegisterPage;
import org.keycloak.testsuite.rule.KeycloakRule;
import org.keycloak.testsuite.rule.WebResource;
import org.keycloak.testsuite.rule.WebRule;
import org.openqa.selenium.WebDriver;
import static org.junit.Assert.assertEquals;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class RegisterTest {
@ClassRule
public static KeycloakRule keycloakRule = new KeycloakRule();
@Rule
public AssertEvents events = new AssertEvents(keycloakRule);
@Rule
public WebRule webRule = new WebRule(this);
@WebResource
protected WebDriver driver;
@WebResource
protected AppPage appPage;
@WebResource
protected LoginPage loginPage;
@WebResource
protected RegisterPage registerPage;
@WebResource
protected OAuthClient oauth;
/** Registering an already-taken username must fail, preserve the other form fields, and fire a username_in_use event. */
@Test
public void registerExistingUser() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerExistingUser@email", "test-user@localhost", "password", "password");
registerPage.assertCurrent();
assertEquals("Username already exists.", registerPage.getError());
// assert form keeps form fields on error
assertEquals("firstName", registerPage.getFirstName());
assertEquals("lastName", registerPage.getLastName());
assertEquals("registerExistingUser@email", registerPage.getEmail());
assertEquals("", registerPage.getUsername());
assertEquals("", registerPage.getPassword());
assertEquals("", registerPage.getPasswordConfirm());
events.expectRegister("test-user@localhost", "registerExistingUser@email")
.removeDetail(Details.EMAIL)
.user((String) null).error("username_in_use").assertEvent();
}
/** Mismatched password confirmation must fail, keep non-password fields, and fire an invalid_registration event. */
@Test
public void registerUserInvalidPasswordConfirm() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerUserInvalidPasswordConfirm@email", "registerUserInvalidPasswordConfirm", "password", "invalid");
registerPage.assertCurrent();
assertEquals("Password confirmation doesn't match.", registerPage.getError());
// assert form keeps form fields on error
assertEquals("firstName", registerPage.getFirstName());
assertEquals("lastName", registerPage.getLastName());
assertEquals("registerUserInvalidPasswordConfirm@email", registerPage.getEmail());
assertEquals("registerUserInvalidPasswordConfirm", registerPage.getUsername());
assertEquals("", registerPage.getPassword());
assertEquals("", registerPage.getPasswordConfirm());
events.expectRegister("registerUserInvalidPasswordConfirm", "registerUserInvalidPasswordConfirm@email")
.removeDetail(Details.USERNAME)
.removeDetail(Details.EMAIL)
.user((String) null).error("invalid_registration").assertEvent();
}
/** Omitting both password fields must fail with the password-required message and an invalid_registration event. */
@Test
public void registerUserMissingPassword() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerUserMissingPassword@email", "registerUserMissingPassword", null, null);
registerPage.assertCurrent();
assertEquals("Please specify password.", registerPage.getError());
events.expectRegister("registerUserMissingPassword", "registerUserMissingPassword@email")
.removeDetail(Details.USERNAME)
.removeDetail(Details.EMAIL)
.user((String) null).error("invalid_registration").assertEvent();
}
/**
 * With a "length" password policy enabled, a too-short password must be
 * rejected and a long-enough one must succeed; the policy is restored in the
 * finally block so other tests are unaffected.
 */
@Test
public void registerPasswordPolicy() {
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
appRealm.setPasswordPolicy(new PasswordPolicy("length"));
}
});
try {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerPasswordPolicy@email", "registerPasswordPolicy", "pass", "pass");
registerPage.assertCurrent();
assertEquals("Invalid password: minimum length 8.", registerPage.getError());
events.expectRegister("registerPasswordPolicy", "registerPasswordPolicy@email")
.removeDetail(Details.USERNAME)
.removeDetail(Details.EMAIL)
.user((String) null).error("invalid_registration").assertEvent();
registerPage.register("firstName", "lastName", "registerPasswordPolicy@email", "registerPasswordPolicy", "password", "password");
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
String userId = events.expectRegister("registerPasswordPolicy", "registerPasswordPolicy@email").assertEvent().getUserId();
events.expectLogin().user(userId).detail(Details.USERNAME, "registerpasswordpolicy").assertEvent();
} finally {
// Always reset the realm's password policy, even if assertions above fail.
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
appRealm.setPasswordPolicy(new PasswordPolicy(null));
}
});
}
}
/** Omitting the username must fail with the username-required message and an invalid_registration event. */
@Test
public void registerUserMissingUsername() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerUserMissingUsername@email", null, "password", "password");
registerPage.assertCurrent();
assertEquals("Please specify username.", registerPage.getError());
events.expectRegister(null, "registerUserMissingUsername@email")
.removeDetail(Details.USERNAME)
.removeDetail(Details.EMAIL)
.error("invalid_registration").assertEvent();
}
/**
 * Submitting a fully empty form must report every field error in one message.
 * NOTE(review): the expected event email "registerUserMissingUsername@email"
 * looks copy-pasted from registerUserMissingUsername() — all form fields were
 * null here; verify what the register event actually carries.
 */
@Test
public void registerUserManyErrors() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register(null, null, null, null, null, null);
registerPage.assertCurrent();
assertEquals("Please specify username.\n" +
"Please specify first name.\n" +
"Please specify last name.\n" +
"Please specify email.\n" +
"Please specify password.", registerPage.getError());
events.expectRegister(null, "registerUserMissingUsername@email")
.removeDetail(Details.USERNAME)
.removeDetail(Details.EMAIL)
.error("invalid_registration").assertEvent();
}
/** Omitting the email must fail with the email-required message and an invalid_registration event. */
@Test
public void registerUserMissingEmail() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", null, "registerUserMissingEmail", "password", "password");
registerPage.assertCurrent();
assertEquals("Please specify email.", registerPage.getError());
events.expectRegister("registerUserMissingEmail", null)
.removeDetail("email")
.error("invalid_registration").assertEvent();
}
/** A syntactically invalid email must be rejected while the entered value stays in the form. */
@Test
public void registerUserInvalidEmail() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerUserInvalidEmailemail", "registerUserInvalidEmail", "password", "password");
registerPage.assertCurrent();
assertEquals("registerUserInvalidEmailemail", registerPage.getEmail());
assertEquals("Invalid email address.", registerPage.getError());
events.expectRegister("registerUserInvalidEmail", "registerUserInvalidEmailemail")
.error("invalid_registration").assertEvent();
}
/**
 * Happy-path registration: user is created (username/email lowercased by the
 * server), creation timestamp is recent, and register + login events fire.
 */
@Test
public void registerUserSuccess() {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.register("firstName", "lastName", "registerUserSuccess@email", "registerUserSuccess", "password", "password");
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
String userId = events.expectRegister("registerUserSuccess", "registerUserSuccess@email").assertEvent().getUserId();
events.expectLogin().detail("username", "registerusersuccess").user(userId).assertEvent();
UserModel user = getUser(userId);
Assert.assertNotNull(user);
Assert.assertNotNull(user.getCreatedTimestamp());
// test that timestamp is current with 10s tollerance
Assert.assertTrue((System.currentTimeMillis() - user.getCreatedTimestamp()) < 10000);
// test user info is set from form
assertEquals("registerusersuccess", user.getUsername());
assertEquals("registerusersuccess@email", user.getEmail());
assertEquals("firstName", user.getFirstName());
assertEquals("lastName", user.getLastName());
}
/**
 * Loads a user by id from the "test" realm in a fresh Keycloak session; the
 * session is stopped without committing (read-only lookup).
 */
protected UserModel getUser(String userId) {
KeycloakSession samlServerSession = keycloakRule.startSession();
try {
RealmModel brokerRealm = samlServerSession.realms().getRealm("test");
return samlServerSession.users().getUserById(userId, brokerRealm);
} finally {
keycloakRule.stopSession(samlServerSession, false);
}
}
/** With email-as-username enabled, registering a taken email must fail with email_in_use; the realm setting is restored afterwards. */
@Test
public void registerExistingUser_emailAsUsername() {
configureRelamRegistrationEmailAsUsername(true);
try {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.registerWithEmailAsUsername("firstName", "lastName", "test-user@localhost", "password", "password");
registerPage.assertCurrent();
assertEquals("Email already exists.", registerPage.getError());
events.expectRegister("test-user@localhost", "test-user@localhost").user((String) null).error("email_in_use").assertEvent();
} finally {
configureRelamRegistrationEmailAsUsername(false);
}
}
/** With email-as-username enabled, a missing or invalid email must be rejected; the realm setting is restored afterwards. */
@Test
public void registerUserMissingOrInvalidEmail_emailAsUsername() {
configureRelamRegistrationEmailAsUsername(true);
try {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.registerWithEmailAsUsername("firstName", "lastName", null, "password", "password");
registerPage.assertCurrent();
assertEquals("Please specify email.", registerPage.getError());
events.expectRegister(null, null).removeDetail("username").removeDetail("email").error("invalid_registration").assertEvent();
registerPage.registerWithEmailAsUsername("firstName", "lastName", "registerUserInvalidEmailemail", "password", "password");
registerPage.assertCurrent();
assertEquals("Invalid email address.", registerPage.getError());
events.expectRegister("registerUserInvalidEmailemail", "registerUserInvalidEmailemail").error("invalid_registration").assertEvent();
} finally {
configureRelamRegistrationEmailAsUsername(false);
}
}
/** With email-as-username enabled, a valid registration succeeds and the email (lowercased) becomes the username. */
@Test
public void registerUserSuccess_emailAsUsername() {
configureRelamRegistrationEmailAsUsername(true);
try {
loginPage.open();
loginPage.clickRegister();
registerPage.assertCurrent();
registerPage.registerWithEmailAsUsername("firstName", "lastName", "registerUserSuccessE@email", "password", "password");
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType());
String userId = events.expectRegister("registerUserSuccessE@email", "registerUserSuccessE@email").assertEvent().getUserId();
events.expectLogin().detail("username", "registerusersuccesse@email").user(userId).assertEvent();
UserModel user = getUser(userId);
Assert.assertNotNull(user);
Assert.assertNotNull(user.getCreatedTimestamp());
// test that timestamp is current with 10s tollerance
Assert.assertTrue((System.currentTimeMillis() - user.getCreatedTimestamp()) < 10000);
} finally {
configureRelamRegistrationEmailAsUsername(false);
}
}
/**
 * Toggles the app realm's registration-email-as-username setting.
 * (Name keeps the existing "Relam" typo to avoid breaking subclasses/callers.)
 */
protected void configureRelamRegistrationEmailAsUsername(final boolean value) {
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
appRealm.setRegistrationEmailAsUsername(value);
}
});
}
}
| |
package com.ibuildapp.romanblack.CustomFormPlugin.views;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Rect;
import android.graphics.drawable.GradientDrawable;
import android.os.Build;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.GridLayoutManager;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.Transformation;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.support.v7.widget.RecyclerView;
import com.ibuildapp.romanblack.CustomFormPlugin.PhotoViewActivity;
import com.ibuildapp.romanblack.CustomFormPlugin.R;
import com.ibuildapp.romanblack.CustomFormPlugin.groups.GroupItemPhotoPicker;
import com.ibuildapp.romanblack.CustomFormPlugin.utils.ImageUtils;
import com.ibuildapp.romanblack.CustomFormPlugin.utils.Statics;
public class PhotoPickerLayout extends LinearLayout{
public interface OnPreparedListener{
void onPrepared();
}
private static float DENSITY = 0;
private static int displayWidth;
private static int itemSize;
public static int COLUMN_COUNT = 0;
private static int counter;
// Hands out process-unique ids for picker instances. Not synchronized —
// assumes all views are constructed on the main/UI thread (TODO confirm).
private static int newId(){
return counter++;
}
private int uniqueId;
private static final int SPACE = 1;
private GroupItemPhotoPicker item;
private TextView label;
private RecyclerView photosView;
private Button button;
private PhotoPickerAdapter adapter;
/** Programmatic-construction entry point; delegates all setup to init(). */
public PhotoPickerLayout(Context context) {
super(context);
init();
}
/** XML-inflation entry point; delegates all setup to init(). */
public PhotoPickerLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
/**
 * One-time view setup: computes shared display metrics (cached in statics on
 * first construction), inflates the layout, applies Statics colors, and wires
 * the photo grid with its adapter, spacing decoration and animator.
 */
private void init(){
// Static layout metrics are computed once per process, by the first instance.
if (displayWidth == 0) {
DisplayMetrics displaymetrics = new DisplayMetrics();
((Activity) getContext()).getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
displayWidth = displaymetrics.widthPixels;
DENSITY = getResources().getDisplayMetrics().density;
itemSize = (int) (80*DENSITY);
COLUMN_COUNT = displayWidth/itemSize;
}
uniqueId = newId();
LayoutInflater.from(getContext()).inflate(R.layout.custom_form_photo_picker_layout, this, true);
setBackgroundColor(Statics.color1);
setOrientation(VERTICAL);
GradientDrawable drawable = new GradientDrawable();
drawable.setShape(GradientDrawable.RECTANGLE);
label = (TextView) findViewById(R.id.custom_form_photo_picker_label);
label.setTextColor(Statics.color3);
button = (Button) findViewById(R.id.custom_form_photo_picker_button);
// setAllCaps is only applied on Lollipop+ in this code path.
if(android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
button.setAllCaps(false);
float borderSize = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 1, getResources().getDisplayMetrics());
drawable.setStroke((int) borderSize, Statics.color5);
drawable.setColor(Statics.color5);
button.setBackgroundDrawable(drawable);
button.setTextColor(Statics.color1);
photosView = (RecyclerView) findViewById(R.id.custom_form_photo_picker_photos_grid);
photosView.setLayoutManager(new GridLayoutManager(getContext(), COLUMN_COUNT, GridLayoutManager.VERTICAL, false));
adapter = new PhotoPickerAdapter();
photosView.setAdapter(adapter);
// Uniform 1dp spacing around each cell, with extra 5dp below.
photosView.addItemDecoration(new RecyclerView.ItemDecoration() {
@Override
public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
outRect.bottom = (int) (5*SPACE * DENSITY);
outRect.top = (int) (SPACE * DENSITY);
outRect.right = (int) (SPACE * DENSITY);
outRect.left = (int) (SPACE * DENSITY);
}
});
DefaultItemAnimator animator = new DefaultItemAnimator();
photosView.setItemAnimator(animator);
}
/**
 * Binds a picker model: hides the label when the model has no label text,
 * and shows the model's value on the add-photo button.
 */
public void setData(GroupItemPhotoPicker item) {
this.item = item;
if (item.getLabel() == null || "".equals(item.getLabel()))
label.setVisibility(GONE);
else
label.setVisibility(VISIBLE);
this.label.setText(item.getLabel());
this.button.setText(item.getValue());
}
/** Returns this picker's process-unique id (assigned in init()). */
public int getUniqueId() {
return uniqueId;
}
/** True while the number of picked photos is below the model's limit. */
public boolean canAddPhoto() {
return item.getPhotos().size()< item.getLimit();
}
/**
 * Prepares the grid for one more photo. When the insert would start a new
 * grid row ((size+1) % COLUMN_COUNT == 1), the RecyclerView is first grown by
 * one row height via ResizeAnimation and the listener fires on animation end;
 * otherwise the listener fires immediately.
 */
public void prepareInsert(final OnPreparedListener listener) {
int size = item.getPhotos().size();
if ((size+1)%COLUMN_COUNT == 1){
ResizeAnimation anim = new ResizeAnimation(photosView, photosView.getWidth(), photosView.getHeight(), photosView.getWidth(), photosView.getHeight()+itemSize);
anim.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
listener.onPrepared();
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
photosView.startAnimation(anim);
}
else
listener.onPrepared();
}
/**
 * View holder for one photo cell: the thumbnail image and its delete overlay.
 */
public class PhotoPickerHolder extends RecyclerView.ViewHolder{
private SquareImageView mainView;
private SquareLinearLayout deletePic;
public PhotoPickerHolder(View itemView) {
super(itemView);
mainView = (SquareImageView) itemView.findViewById(R.id.custom_form_photo_picker_main_image);
deletePic = (SquareLinearLayout) itemView.findViewById(R.id.custom_form_photo_picker_delete);
}
}
/**
 * Adapter showing one thumbnail per attached photo. Tapping a thumbnail opens
 * the full image; tapping the delete overlay removes the photo and shrinks the
 * grid by one row when a row becomes empty.
 */
public class PhotoPickerAdapter extends RecyclerView.Adapter<PhotoPickerHolder> {
    @Override
    public PhotoPickerHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View v = LayoutInflater.from(parent.getContext()).inflate(R.layout.custom_form_photo_picker_item, parent, false);
        return new PhotoPickerHolder(v);
    }

    @Override
    public void onBindViewHolder(final PhotoPickerHolder holder, int position) {
        holder.mainView.setImageBitmap(item.getPhotos().get(position).getThumbnail());
        holder.mainView.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Re-read the position at click time: the position captured at
                // bind time goes stale once earlier items are removed.
                int pos = holder.getAdapterPosition();
                if (pos == RecyclerView.NO_POSITION) return;
                Intent intent = new Intent(getContext(), PhotoViewActivity.class);
                intent.putExtra(ImageUtils.BITMAP_EXTRA, item.getPhotos().get(pos).getImageSource());
                getContext().startActivity(intent);
            }
        });
        holder.deletePic.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Use the live adapter position so repeated deletions cannot
                // remove the wrong photo or index out of bounds.
                int pos = holder.getAdapterPosition();
                if (pos == RecyclerView.NO_POSITION) return;
                if (item.getPhotos().size() == 1) {
                    item.getPhotos().clear();
                } else {
                    item.getPhotos().remove(pos);
                }
                notifyItemRemoved(pos);
                notifyItemRangeChanged(0, item.getPhotos().size());
                int size = item.getPhotos().size();
                // When the last photo of a row was removed, shrink the grid by one row.
                if (size % COLUMN_COUNT == 0) {
                    ResizeAnimation anim = new ResizeAnimation(photosView, photosView.getWidth(), photosView.getHeight(),
                            photosView.getWidth(), photosView.getHeight() - itemSize);
                    anim.setStartOffset(300);
                    photosView.startAnimation(anim);
                }
            }
        });
    }

    @Override
    public int getItemCount() {
        return item.getPhotos().size();
    }
}
/** Exposes the action button, e.g. for attaching a click handler. */
public Button getButton(){
return button;
}
/** Returns the form item currently bound to this picker (set via setData). */
public GroupItemPhotoPicker getItem(){
return item;
}
/** Returns the adapter backing the photo grid. */
public PhotoPickerAdapter getAdapter(){
return adapter;
}
/**
 * Animation that linearly interpolates a view's LayoutParams width and height
 * between the given start and end sizes over a fixed 300 ms duration.
 */
public class ResizeAnimation extends Animation {
    private View target;
    private float startWidth;
    private float startHeight;
    private float endWidth;
    private float endHeight;

    public ResizeAnimation(View v, float fromWidth, float fromHeight, float toWidth, float toHeight) {
        target = v;
        startWidth = fromWidth;
        startHeight = fromHeight;
        endWidth = toWidth;
        endHeight = toHeight;
        setDuration(300);
    }

    @Override
    protected void applyTransformation(float interpolatedTime, Transformation t) {
        LayoutParams params = (LayoutParams) target.getLayoutParams();
        // Linear interpolation: start + (end - start) * t
        params.width = (int) (startWidth + (endWidth - startWidth) * interpolatedTime);
        params.height = (int) (startHeight + (endHeight - startHeight) * interpolatedTime);
        target.requestLayout();
    }
}
}
| |
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.webapp.alertsapi.servlet.enunciate;
import java.io.UnsupportedEncodingException;
import com.deleidos.rtws.commons.exception.PermissionDeniedException;
import com.deleidos.rtws.commons.model.response.StandardResponse;
import com.deleidos.rtws.commons.model.user.Filter;
import com.deleidos.rtws.webapp.alertsapi.client.NamedFilterRestClient;
import com.deleidos.rtws.webapp.alertsapi.client.NamedFilterWatchlistRestClient;
import javax.ws.rs.Path;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
/**
* FilterServiceImpl is an implementation of the FilterService interface.
* It is used to manipulate the filters used by a NamedFilterRestClient.
*/
@Path ( "/filter" )
public class FilterServiceImpl implements FilterService {

    // Loggers are conventionally per-class singletons; static final avoids one
    // Logger lookup per service instance.
    private static final Logger logger = Logger.getLogger(FilterServiceImpl.class);

    private NamedFilterRestClient filterClient;
    private NamedFilterWatchlistRestClient watchlistClient;

    /**
     * Lazily creates and returns the shared NamedFilterRestClient.
     */
    public synchronized NamedFilterRestClient getFilterClient() {
        if (filterClient == null) {
            filterClient = new NamedFilterRestClient();
        }
        return filterClient;
    }

    /**
     * Lazily creates and returns the shared NamedFilterWatchlistRestClient.
     */
    public synchronized NamedFilterWatchlistRestClient getWatchlistClient() {
        if (watchlistClient == null) {
            watchlistClient = new NamedFilterWatchlistRestClient();
        }
        return watchlistClient;
    }

    /**
     * Delete a filter. On success (HTTP 200) any watchlist entries referencing
     * the deleted filter are removed as well.
     *
     * @param id Filter ID of the filter to delete
     * @return Return status of deletion
     */
    public StandardResponse<?> deleteFilter(Long id)
    {
        StandardResponse<?> response = getFilterClient().deleteFilter(id);
        if (response.getStandardHeader().getCode() == 200) {
            getWatchlistClient().deleteWatchListFilter(null, id);
        }
        return response;
    }

    /**
     * Update a filter.
     *
     * @param id Filter ID of the filter to update
     * @param name Filter Name of the filter to update
     * @param model Filter Model of the filter to update
     * @param jsonDefinition JSON Definition of the update
     * @param emailSubject subject format for emailed alerts
     * @param emailBody body format for emailed alerts
     * @return Return Filter object of PUT-ed filter.
     * @throws PermissionDeniedException
     * @throws UnsupportedEncodingException
     */
    public StandardResponse<?> putFilter(Long id, String name, String model,
            String jsonDefinition, String emailSubject, String emailBody)
            throws PermissionDeniedException, UnsupportedEncodingException {
        requireJsonDefinition(jsonDefinition);
        Filter filter = buildFilter(name, model, emailSubject, emailBody);
        filter.setKey(id);
        return getFilterClient().updateFilter(filter, jsonDefinition);
    }

    /**
     * Insert a filter.
     *
     * @param name Filter Name of the filter to update
     * @param model Filter Model of the filter to update
     * @param jsonDefinition JSON Definition of the filter
     * @param emailSubject subject format for emailed alerts
     * @param emailBody body format for emailed alerts
     * @return Return Filter object of POST-ed filter.
     * @throws PermissionDeniedException
     * @throws UnsupportedEncodingException
     */
    public StandardResponse<?> postFilter(String name, String model,
            String jsonDefinition, String emailSubject, String emailBody)
            throws PermissionDeniedException, UnsupportedEncodingException {
        requireJsonDefinition(jsonDefinition);
        Filter filter = buildFilter(name, model, emailSubject, emailBody);
        return getFilterClient().createFilter(filter, jsonDefinition);
    }

    /** Rejects a blank jsonDefinition with a 400 response, as both PUT and POST require it. */
    private static void requireJsonDefinition(String jsonDefinition) {
        if (StringUtils.isBlank(jsonDefinition)) {
            throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity("Invalid form parameter - jsonDefinition").type("text/plain").build());
        }
    }

    /** Builds a Filter carrying the metadata fields shared by create and update. */
    private static Filter buildFilter(String name, String model, String emailSubject, String emailBody) {
        Filter filter = new Filter();
        filter.setName(name);
        filter.setModel(model);
        filter.setEmailSubject(emailSubject);
        filter.setEmailMessage(emailBody);
        return filter;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2021 the original author or authors.
*/
package org.assertj.core.perf;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.assertj.core.test.ErrorMessagesForTest.shouldBeEqualMessage;
import static org.assertj.core.test.Maps.mapOf;
import static org.assertj.core.util.DateUtil.parseDatetime;
import static org.assertj.core.util.Sets.newLinkedHashSet;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.math.BigDecimal;
import java.net.URI;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.ZoneOffset;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.concurrent.CompletableFuture;
import java.util.function.DoublePredicate;
import java.util.function.IntPredicate;
import java.util.function.LongPredicate;
import java.util.function.Predicate;
import org.assertj.core.api.BaseAssertionsTest;
import org.assertj.core.api.SoftAssertions;
import org.assertj.core.data.MapEntry;
import org.assertj.core.test.CartoonCharacter;
import org.assertj.core.util.Lists;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
* results in 3.9.0 : ~3000ms
* results in 3.9.1+ : ~9300ms
* results in 3.10.0 : ~6000ms
* results in 3.10.0 with Raphael changes < 1.8.10: ~5500ms
* results in 3.10.0 with 1.8.10: ~5100ms
* results in 3.10.0 with 1.8.11: ~5000ms
*/
@Disabled
class SoftAssertionsPerfTest extends BaseAssertionsTest {
// Fresh SoftAssertions per test (see setup()).
private SoftAssertions softly;
private CartoonCharacter homer;
private CartoonCharacter fred;
private CartoonCharacter lisa;
private CartoonCharacter maggie;
private CartoonCharacter bart;
private Map<String, Object> iterableMap;
// Wall-clock start of the whole class run; afterAll() prints the elapsed time.
private static long start;
@BeforeAll
public static void beforeAll() {
start = System.currentTimeMillis();
}
@AfterAll
public static void afterAll() {
// Report total execution time — this class is a performance probe, not a
// correctness suite (see the timings in the class javadoc).
long duration = System.currentTimeMillis() - start;
System.out.println("SoftAssertionsTest execution time (ms): " + duration);
}
@BeforeEach
public void setup() {
// Rebuild all fixtures so each test starts from identical state.
softly = new SoftAssertions();
bart = new CartoonCharacter("Bart Simpson");
lisa = new CartoonCharacter("Lisa Simpson");
maggie = new CartoonCharacter("Maggie Simpson");
homer = new CartoonCharacter("Homer Simpson");
homer.getChildren().add(bart);
homer.getChildren().add(lisa);
homer.getChildren().add(maggie);
CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
fred = new CartoonCharacter("Fred Flintstone");
fred.getChildren().add(pebbles);
List<String> names = asList("Dave", "Jeff");
LinkedHashSet<String> jobs = newLinkedHashSet("Plumber", "Builder");
Iterable<String> cities = asList("Dover", "Boston", "Paris");
int[] ranks = { 1, 2, 3 };
iterableMap = new LinkedHashMap<>();
iterableMap.put("name", names);
iterableMap.put("job", jobs);
iterableMap.put("city", cities);
iterableMap.put("rank", ranks);
}
// The four identical *_pass tests below are intentional duplicates: they add
// repeated proxy-creation work to the measured run.
@Test
void all_assertions_should_pass() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void all_assertions_should_pass2() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void all_assertions_should_pass3() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
@Test
void all_assertions_should_pass4() {
softly.assertThat(1).isEqualTo(1);
softly.assertThat(Lists.newArrayList(1, 2)).containsOnly(1, 2);
softly.assertAll();
}
// wasSuccess() reflects only the most recent assertion, not the whole session.
@Test
void should_return_success_of_last_assertion() {
softly.assertThat(true).isFalse();
softly.assertThat(true).isEqualTo(true);
assertThat(softly.wasSuccess()).isTrue();
}
@Test
void should_return_success_of_last_assertion_with_nested_calls() {
softly.assertThat(true).isFalse();
softly.assertThat(true).isTrue(); // isTrue() calls isEqualTo(true)
assertThat(softly.wasSuccess()).isTrue();
}
@Test
void should_return_failure_of_last_assertion() {
softly.assertThat(true).isTrue();
softly.assertThat(true).isEqualTo(false);
assertThat(softly.wasSuccess()).isFalse();
}
@Test
void should_return_failure_of_last_assertion_with_nested_calls() {
softly.assertThat(true).isTrue();
softly.assertThat(true).isFalse(); // isFalse() calls isEqualTo(false)
assertThat(softly.wasSuccess()).isFalse();
}
@Test
void should_be_able_to_catch_exceptions_thrown_by_map_assertions() {
// GIVEN
Map<String, String> map = mapOf(MapEntry.entry("54", "55"));
// WHEN — both chained assertions fail; soft mode must collect both errors.
softly.assertThat(map).contains(MapEntry.entry("1", "2")).isEmpty();
// THEN
List<Throwable> errors = softly.errorsCollected();
assertThat(errors).hasSize(2);
}
@Test
void should_be_able_to_catch_exceptions_thrown_by_all_proxied_methods() {
// Every assertion below is deliberately false: the point is to exercise one
// proxied assertion type per line and measure proxy/error-collection cost.
// perform a bunch of soft assertions
softly.assertThat(BigDecimal.ZERO).isEqualTo(BigDecimal.ONE);
softly.assertThat(Boolean.FALSE).isTrue();
softly.assertThat(false).isTrue();
softly.assertThat(new boolean[] { false }).isEqualTo(new boolean[] { true });
softly.assertThat(new Byte((byte) 0)).isEqualTo((byte) 1);
softly.assertThat((byte) 2).inHexadecimal().isEqualTo((byte) 3);
softly.assertThat(new byte[] { 4 }).isEqualTo(new byte[] { 5 });
softly.assertThat(new Character((char) 65)).isEqualTo(new Character((char) 66));
softly.assertThat((char) 67).isEqualTo((char) 68);
softly.assertThat(new char[] { 69 }).isEqualTo(new char[] { 70 });
softly.assertThat(new StringBuilder("a")).isEqualTo(new StringBuilder("b"));
softly.assertThat(Object.class).isEqualTo(String.class);
softly.assertThat(parseDatetime("1999-12-31T23:59:59")).isEqualTo(parseDatetime("2000-01-01T00:00:01"));
softly.assertThat(new Double(6.0d)).isEqualTo(new Double(7.0d));
softly.assertThat(8.0d).isEqualTo(9.0d);
softly.assertThat(new double[] { 10.0d }).isEqualTo(new double[] { 11.0d });
softly.assertThat(new File("a"))
.overridingErrorMessage(shouldBeEqualMessage("File(a)", "File(b)"))
.isEqualTo(new File("b"));
softly.assertThat(new Float(12f)).isEqualTo(new Float(13f));
softly.assertThat(14f).isEqualTo(15f);
softly.assertThat(new float[] { 16f }).isEqualTo(new float[] { 17f });
softly.assertThat(new ByteArrayInputStream(new byte[] { (byte) 65 }))
.hasSameContentAs(new ByteArrayInputStream(new byte[] { (byte) 66 }));
softly.assertThat(new Integer(20)).isEqualTo(new Integer(21));
softly.assertThat(22).isEqualTo(23);
softly.assertThat(new int[] { 24 }).isEqualTo(new int[] { 25 });
softly.assertThat((Iterable<String>) Lists.newArrayList("26")).isEqualTo(Lists.newArrayList("27"));
softly.assertThat(Lists.newArrayList("28").iterator()).hasNext();
softly.assertThat(Lists.newArrayList("30")).isEqualTo(Lists.newArrayList("31"));
softly.assertThat(new Long(32L)).isEqualTo(new Long(33L));
softly.assertThat(34L).isEqualTo(35L);
softly.assertThat(new long[] { 36L }).isEqualTo(new long[] { 37L });
softly.assertThat(mapOf(MapEntry.entry("38", "39"))).isEqualTo(mapOf(MapEntry.entry("40", "41")));
softly.assertThat(new Short((short) 42)).isEqualTo(new Short((short) 43));
softly.assertThat((short) 44).isEqualTo((short) 45);
softly.assertThat(new short[] { (short) 46 }).isEqualTo(new short[] { (short) 47 });
softly.assertThat("48").isEqualTo("49");
softly.assertThat(new Object() {
@Override
public String toString() {
return "50";
}
}).isEqualTo(new Object() {
@Override
public String toString() {
return "51";
}
});
softly.assertThat(new Object[] { new Object() {
@Override
public String toString() {
return "52";
}
} }).isEqualTo(new Object[] { new Object() {
@Override
public String toString() {
return "53";
}
} });
final IllegalArgumentException illegalArgumentException = new IllegalArgumentException("IllegalArgumentException message");
softly.assertThat(illegalArgumentException).hasMessage("NullPointerException message");
softly.assertThatThrownBy(() -> {
throw new Exception("something was wrong");
}).hasMessage("something was good");
softly.assertThat(mapOf(MapEntry.entry("54", "55"))).contains(MapEntry.entry("1", "2"));
softly.assertThat(LocalTime.of(12, 00)).isEqualTo(LocalTime.of(13, 00));
softly.assertThat(OffsetTime.of(12, 0, 0, 0, ZoneOffset.UTC))
.isEqualTo(OffsetTime.of(13, 0, 0, 0, ZoneOffset.UTC));
softly.assertThat(Optional.of("not empty")).isEqualTo("empty");
softly.assertThat(OptionalInt.of(0)).isEqualTo(1);
softly.assertThat(OptionalDouble.of(0.0)).isEqualTo(1.0);
softly.assertThat(OptionalLong.of(0L)).isEqualTo(1L);
softly.assertThat(URI.create("http://assertj.org")).hasPort(8888);
softly.assertThat(CompletableFuture.completedFuture("done")).isCompletedExceptionally();
softly.assertThat((Predicate<String>) s -> s.equals("something")).accepts("something else");
softly.assertThat((IntPredicate) s -> s == 1).accepts(2);
softly.assertThat((LongPredicate) s -> s == 1).accepts(2);
softly.assertThat((DoublePredicate) s -> s == 1).accepts(2);
// assert everything, but catch the error since it is a perf test
catchThrowable(() -> softly.assertAll());
}
}
| |
/*
* Copyright 2014-2022 TNG Technology Consulting GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tngtech.archunit.core.importer;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.LinkedHashMultimap;
import com.tngtech.archunit.Internal;
import com.tngtech.archunit.base.Optional;
import com.tngtech.archunit.core.domain.AccessTarget;
import com.tngtech.archunit.core.domain.AccessTarget.CodeUnitAccessTarget;
import com.tngtech.archunit.core.domain.AccessTarget.ConstructorCallTarget;
import com.tngtech.archunit.core.domain.AccessTarget.ConstructorReferenceTarget;
import com.tngtech.archunit.core.domain.AccessTarget.FieldAccessTarget;
import com.tngtech.archunit.core.domain.AccessTarget.MethodCallTarget;
import com.tngtech.archunit.core.domain.AccessTarget.MethodReferenceTarget;
import com.tngtech.archunit.core.domain.JavaClass;
import com.tngtech.archunit.core.domain.JavaClassDescriptor;
import com.tngtech.archunit.core.domain.JavaCodeUnit;
import com.tngtech.archunit.core.domain.JavaConstructor;
import com.tngtech.archunit.core.domain.JavaField;
import com.tngtech.archunit.core.domain.JavaFieldAccess.AccessType;
import com.tngtech.archunit.core.domain.JavaMethod;
import com.tngtech.archunit.core.importer.DomainBuilders.CodeUnitAccessTargetBuilder;
import com.tngtech.archunit.core.importer.DomainBuilders.FieldAccessTargetBuilder;
import com.tngtech.archunit.core.importer.RawAccessRecord.CodeUnit;
import com.tngtech.archunit.core.importer.RawAccessRecord.TargetInfo;
import static com.tngtech.archunit.core.domain.JavaModifier.STATIC;
import static com.tngtech.archunit.core.importer.DomainBuilders.newConstructorCallTargetBuilder;
import static com.tngtech.archunit.core.importer.DomainBuilders.newConstructorReferenceTargetBuilder;
import static com.tngtech.archunit.core.importer.DomainBuilders.newMethodCallTargetBuilder;
import static com.tngtech.archunit.core.importer.DomainBuilders.newMethodReferenceTargetBuilder;
interface AccessRecord<TARGET extends AccessTarget> {
// The code unit (method or constructor) from which the access originates.
JavaCodeUnit getOrigin();
// The target of the access (field, method or constructor, per TARGET).
TARGET getTarget();
// Source line number at which the access occurs.
int getLineNumber();
/**
 * An {@link AccessRecord} specialized for field accesses, additionally
 * carrying whether the field was read or written.
 */
@Internal
interface FieldAccessRecord extends AccessRecord<FieldAccessTarget> {
// GET for a field read, SET for a field write (see JavaFieldAccess.AccessType).
AccessType getAccessType();
}
@Internal
abstract class Factory<RAW_RECORD, PROCESSED_RECORD> {
// Turns a raw, unresolved access record into a processed record whose target
// is resolved against the given imported classes.
abstract PROCESSED_RECORD create(RAW_RECORD record, ImportedClasses classes);
// Factory producing processed constructor-call records.
static Factory<RawAccessRecord, AccessRecord<ConstructorCallTarget>> forConstructorCallRecord() {
return new Factory<RawAccessRecord, AccessRecord<ConstructorCallTarget>>() {
@Override
AccessRecord<ConstructorCallTarget> create(RawAccessRecord record, ImportedClasses classes) {
return new RawAccessRecordProcessed<>(record, classes, CONSTRUCTOR_CALL_TARGET_FACTORY);
}
};
}
// Factory producing processed constructor-reference records.
static Factory<RawAccessRecord, AccessRecord<ConstructorReferenceTarget>> forConstructorReferenceRecord() {
return new Factory<RawAccessRecord, AccessRecord<ConstructorReferenceTarget>>() {
@Override
AccessRecord<ConstructorReferenceTarget> create(RawAccessRecord record, ImportedClasses classes) {
return new RawAccessRecordProcessed<>(record, classes, CONSTRUCTOR_REFERENCE_TARGET_FACTORY);
}
};
}
// Factory producing processed method-call records.
static Factory<RawAccessRecord, AccessRecord<MethodCallTarget>> forMethodCallRecord() {
return new Factory<RawAccessRecord, AccessRecord<MethodCallTarget>>() {
@Override
AccessRecord<MethodCallTarget> create(RawAccessRecord record, ImportedClasses classes) {
return new RawAccessRecordProcessed<>(record, classes, METHOD_CALL_TARGET_FACTORY);
}
};
}
// Factory producing processed method-reference records.
static Factory<RawAccessRecord, AccessRecord<MethodReferenceTarget>> forMethodReferenceRecord() {
return new Factory<RawAccessRecord, AccessRecord<MethodReferenceTarget>>() {
@Override
AccessRecord<MethodReferenceTarget> create(RawAccessRecord record, ImportedClasses classes) {
return new RawAccessRecordProcessed<>(record, classes, METHOD_REFERENCE_TARGET_FACTORY);
}
};
}
// Factory producing processed field-access records (which also carry the access type).
static Factory<RawAccessRecord.ForField, FieldAccessRecord> forFieldAccessRecord() {
return new Factory<RawAccessRecord.ForField, FieldAccessRecord>() {
@Override
FieldAccessRecord create(RawAccessRecord.ForField record, ImportedClasses classes) {
return new RawFieldAccessRecordProcessed(record, classes);
}
};
}
private static final Supplier<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget>> CONSTRUCTOR_CALL_TARGET_BUILDER_SUPPLIER =
new Supplier<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget>>() {
@Override
public CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget> get() {
return newConstructorCallTargetBuilder();
}
};
private static final Supplier<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget>> CONSTRUCTOR_REFERENCE_TARGET_BUILDER_SUPPLIER =
new Supplier<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget>>() {
@Override
public CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget> get() {
return newConstructorReferenceTargetBuilder();
}
};
private static final Supplier<CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget>> METHOD_CALL_TARGET_BUILDER_SUPPLIER =
new Supplier<CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget>>() {
@Override
public CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget> get() {
return newMethodCallTargetBuilder();
}
};
private static final Supplier<CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget>> METHOD_REFERENCE_TARGET_BUILDER_SUPPLIER =
new Supplier<CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget>>() {
@Override
public CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget> get() {
return newMethodReferenceTargetBuilder();
}
};
private static final AccessTargetFactory<ConstructorCallTarget> CONSTRUCTOR_CALL_TARGET_FACTORY = new ConstructorAccessTargetFactory<>(CONSTRUCTOR_CALL_TARGET_BUILDER_SUPPLIER);
private static final AccessTargetFactory<ConstructorReferenceTarget> CONSTRUCTOR_REFERENCE_TARGET_FACTORY = new ConstructorAccessTargetFactory<>(CONSTRUCTOR_REFERENCE_TARGET_BUILDER_SUPPLIER);
private static final AccessTargetFactory<MethodCallTarget> METHOD_CALL_TARGET_FACTORY = new MethodAccessTargetFactory<>(METHOD_CALL_TARGET_BUILDER_SUPPLIER);
private static final AccessTargetFactory<MethodReferenceTarget> METHOD_REFERENCE_TARGET_FACTORY = new MethodAccessTargetFactory<>(METHOD_REFERENCE_TARGET_BUILDER_SUPPLIER);
private static final AccessTargetFactory<FieldAccessTarget> FIELD_ACCESS_TARGET_FACTORY = new FieldAccessTargetFactory();
private interface AccessTargetFactory<TARGET extends AccessTarget> {
TARGET create(JavaClass targetOwner, TargetInfo targetInfo, ImportedClasses classes);
}
private static class ConstructorAccessTargetFactory<TARGET extends CodeUnitAccessTarget> implements AccessTargetFactory<TARGET> {
private final Supplier<CodeUnitAccessTargetBuilder<JavaConstructor, TARGET>> targetBuilderSupplier;
private ConstructorAccessTargetFactory(Supplier<CodeUnitAccessTargetBuilder<JavaConstructor, TARGET>> targetBuilderSupplier) {
this.targetBuilderSupplier = targetBuilderSupplier;
}
@Override
public TARGET create(JavaClass targetOwner, TargetInfo target, ImportedClasses classes) {
Supplier<Optional<JavaConstructor>> memberSupplier = new ConstructorSupplier(targetOwner, target);
List<JavaClass> paramTypes = getArgumentTypesFrom(target.desc, classes);
JavaClass returnType = classes.getOrResolve(void.class.getName());
return targetBuilderSupplier.get()
.withOwner(targetOwner)
.withParameters(paramTypes)
.withReturnType(returnType)
.withMember(memberSupplier)
.build();
}
private static class ConstructorSupplier implements Supplier<Optional<JavaConstructor>> {
private final JavaClass targetOwner;
private final TargetInfo target;
ConstructorSupplier(JavaClass targetOwner, TargetInfo target) {
this.targetOwner = targetOwner;
this.target = target;
}
@Override
public Optional<JavaConstructor> get() {
for (JavaConstructor constructor : targetOwner.getConstructors()) {
if (constructor.getDescriptor().equals(target.desc)) {
return Optional.of(constructor);
}
}
return Optional.empty();
}
}
}
private static class MethodAccessTargetFactory<TARGET extends CodeUnitAccessTarget> implements AccessTargetFactory<TARGET> {
private final Supplier<CodeUnitAccessTargetBuilder<JavaMethod, TARGET>> targetBuilderSupplier;
private MethodAccessTargetFactory(Supplier<CodeUnitAccessTargetBuilder<JavaMethod, TARGET>> targetBuilderSupplier) {
this.targetBuilderSupplier = targetBuilderSupplier;
}
@Override
public TARGET create(JavaClass targetOwner, TargetInfo target, ImportedClasses classes) {
Supplier<Optional<JavaMethod>> methodsSupplier = new MethodSupplier(targetOwner, target);
List<JavaClass> parameters = getArgumentTypesFrom(target.desc, classes);
JavaClass returnType = classes.getOrResolve(JavaClassDescriptorImporter.importAsmMethodReturnType(target.desc).getFullyQualifiedClassName());
return targetBuilderSupplier.get()
.withOwner(targetOwner)
.withName(target.name)
.withParameters(parameters)
.withReturnType(returnType)
.withMember(methodsSupplier)
.build();
}
private static class MethodSupplier implements Supplier<Optional<JavaMethod>> {
private final JavaClass targetOwner;
private final TargetInfo target;
MethodSupplier(JavaClass targetOwner, TargetInfo target) {
this.targetOwner = targetOwner;
this.target = target;
}
@Override
public Optional<JavaMethod> get() {
return searchTargetMethod(targetOwner, target);
}
}
}
private static class FieldAccessTargetFactory implements AccessTargetFactory<FieldAccessTarget> {
@Override
public FieldAccessTarget create(JavaClass targetOwner, TargetInfo target, ImportedClasses classes) {
Supplier<Optional<JavaField>> fieldSupplier = new FieldSupplier(targetOwner, target);
JavaClass fieldType = classes.getOrResolve(JavaClassDescriptorImporter.importAsmTypeFromDescriptor(target.desc).getFullyQualifiedClassName());
return new FieldAccessTargetBuilder()
.withOwner(targetOwner)
.withName(target.name)
.withType(fieldType)
.withMember(fieldSupplier)
.build();
}
private static class FieldSupplier implements Supplier<Optional<JavaField>> {
private final JavaClass targetOwner;
private final TargetInfo target;
FieldSupplier(JavaClass targetOwner, TargetInfo target) {
this.targetOwner = targetOwner;
this.target = target;
}
@Override
public Optional<JavaField> get() {
return searchTargetField(targetOwner, target);
}
}
}
private static class RawAccessRecordProcessed<TARGET extends AccessTarget> implements AccessRecord<TARGET> {
private final RawAccessRecord record;
private final ImportedClasses classes;
private final JavaClass targetOwner;
private final AccessTargetFactory<TARGET> accessTargetFactory;
private final Supplier<JavaCodeUnit> originSupplier;
RawAccessRecordProcessed(RawAccessRecord record, ImportedClasses classes, AccessTargetFactory<TARGET> accessTargetFactory) {
this.record = record;
this.classes = classes;
targetOwner = this.classes.getOrResolve(record.target.owner.getFullyQualifiedClassName());
this.accessTargetFactory = accessTargetFactory;
originSupplier = createOriginSupplier(record.caller, classes);
}
@Override
public JavaCodeUnit getOrigin() {
return originSupplier.get();
}
@Override
public TARGET getTarget() {
return accessTargetFactory.create(targetOwner, record.target, classes);
}
@Override
public int getLineNumber() {
return record.lineNumber;
}
}
private static class RawFieldAccessRecordProcessed extends RawAccessRecordProcessed<FieldAccessTarget> implements FieldAccessRecord {
private final AccessType accessType;
RawFieldAccessRecordProcessed(RawAccessRecord.ForField record, ImportedClasses classes) {
super(record, classes, FIELD_ACCESS_TARGET_FACTORY);
accessType = record.accessType;
}
@Override
public AccessType getAccessType() {
return accessType;
}
}
private static Supplier<JavaCodeUnit> createOriginSupplier(final CodeUnit origin, final ImportedClasses classes) {
return Suppliers.memoize(new Supplier<JavaCodeUnit>() {
@Override
public JavaCodeUnit get() {
return Factory.getOrigin(origin, classes);
}
});
}
private static JavaCodeUnit getOrigin(CodeUnit rawOrigin, ImportedClasses classes) {
for (JavaCodeUnit method : classes.getOrResolve(rawOrigin.getDeclaringClassName()).getCodeUnits()) {
if (rawOrigin.is(method)) {
return method;
}
}
throw new IllegalStateException("Never found a " + JavaCodeUnit.class.getSimpleName() +
" that matches supposed origin " + rawOrigin);
}
private static List<JavaClass> getArgumentTypesFrom(String descriptor, ImportedClasses classes) {
ImmutableList.Builder<JavaClass> result = ImmutableList.builder();
for (JavaClassDescriptor type : JavaClassDescriptorImporter.importAsmMethodArgumentTypes(descriptor)) {
result.add(classes.getOrResolve(type.getFullyQualifiedClassName()));
}
return result.build();
}
private static Optional<JavaField> searchTargetField(JavaClass targetOwner, TargetInfo targetInfo) {
Optional<JavaField> directlyFound = targetOwner.tryGetField(targetInfo.name);
if (directlyFound.isPresent()) {
return directlyFound;
}
// if a matching field has been found in an interface, it must be the one and only matching field,
// since it is public static final and the compiler would forbid the call without disambiguation otherwise
Optional<JavaField> foundOnInterface = searchFieldInInterfaces(targetOwner, targetInfo);
if (foundOnInterface.isPresent()) {
return foundOnInterface;
}
return searchFieldInSuperClass(targetOwner, targetInfo);
}
private static Optional<JavaField> searchFieldInInterfaces(JavaClass targetOwner, TargetInfo targetInfo) {
for (JavaClass rawInterface : targetOwner.getRawInterfaces()) {
Optional<JavaField> foundOnInterface = searchTargetField(rawInterface, targetInfo);
if (foundOnInterface.isPresent()) {
return foundOnInterface;
}
}
return Optional.empty();
}
private static Optional<JavaField> searchFieldInSuperClass(JavaClass targetOwner, TargetInfo targetInfo) {
return targetOwner.getRawSuperclass().isPresent()
? searchTargetField(targetOwner.getRawSuperclass().get(), targetInfo)
: Optional.<JavaField>empty();
}
private static Optional<JavaMethod> searchTargetMethod(JavaClass targetOwner, TargetInfo targetInfo) {
MatchingMethods matchingMethods = new MatchingMethods(targetInfo);
matchingMethods.addMatching(targetOwner.getMethods(), true);
return matchingMethods.hasMatch()
// shortcut -> if we found it directly in the class we don't need to look further up the hierarchy
? matchingMethods.determineMostSpecificMethod()
: searchTargetMethodInHierarchy(targetOwner, matchingMethods);
}
private static Optional<JavaMethod> searchTargetMethodInHierarchy(JavaClass targetOwner, MatchingMethods matchingMethods) {
Optional<JavaClass> superclass = targetOwner.getRawSuperclass();
if (superclass.isPresent()) {
matchingMethods.addMatching(superclass.get().getMethods(), true);
searchTargetMethodInHierarchy(superclass.get(), matchingMethods);
}
for (JavaClass interfaceType : targetOwner.getRawInterfaces()) {
matchingMethods.addMatching(interfaceType.getMethods(), false);
searchTargetMethodInHierarchy(interfaceType, matchingMethods);
}
return matchingMethods.determineMostSpecificMethod();
}
private static class MatchingMethods {
private final TargetInfo target;
private final LinkedHashMultimap<JavaClass, JavaMethod> matchingMethodsByReturnType = LinkedHashMultimap.create();
private MatchingMethods(TargetInfo target) {
this.target = target;
}
void addMatching(Collection<JavaMethod> methods, boolean includeStatic) {
for (JavaMethod method : methods) {
if (matches(method, includeStatic)) {
matchingMethodsByReturnType.put(method.getRawReturnType(), method);
}
}
}
private boolean matches(JavaMethod method, boolean includeStatic) {
return method.getName().equals(target.name)
&& method.getDescriptor().equals(target.desc)
&& (includeStatic || !method.getModifiers().contains(STATIC));
}
boolean hasMatch() {
return !matchingMethodsByReturnType.isEmpty();
}
/**
* We roughly follow the algorithm of {@link Class#getMethod(String, Class[])}. We look for the most specific return type,
* if there should be return types without a hierarchical correlation we simply pick the first. If there should be methods
* with the same return type, but declaring classes without hierarchical correlation we will try to follow the JDK version,
* even though it does not seem to be specified clearly (thus it could change with a different JDK implementation, but
* unit tests should tell us).
*/
Optional<JavaMethod> determineMostSpecificMethod() {
if (!hasMatch()) {
return Optional.empty();
}
if (matchingMethodsByReturnType.size() == 1) {
return determineMostSpecificMethodWithSameReturnType(matchingMethodsByReturnType.values());
}
Collection<JavaMethod> methodsWithMostSpecificReturnType = determineMethodsWithMostSpecificReturnType(matchingMethodsByReturnType);
return determineMostSpecificMethodWithSameReturnType(methodsWithMostSpecificReturnType);
}
private static Optional<JavaMethod> determineMostSpecificMethodWithSameReturnType(Collection<JavaMethod> methods) {
JavaMethod result = null;
for (JavaMethod method : methods) {
if (result == null || method.getOwner().isAssignableTo(result.getOwner().getName())) {
result = method;
}
}
return Optional.ofNullable(result);
}
private static Collection<JavaMethod> determineMethodsWithMostSpecificReturnType(LinkedHashMultimap<JavaClass, JavaMethod> matchingMethodsByReturnType) {
Map.Entry<JavaClass, Collection<JavaMethod>> result = null;
for (Map.Entry<JavaClass, Collection<JavaMethod>> entry : matchingMethodsByReturnType.asMap().entrySet()) {
if (result == null || entry.getKey().isAssignableTo(result.getKey().getName())) {
result = entry;
}
}
return result != null ? result.getValue() : Collections.<JavaMethod>emptySet();
}
}
}
}
| |
package edu.harvard.iq.dataverse.authorization;
import edu.harvard.iq.dataverse.DataverseLocaleBean;
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
import edu.harvard.iq.dataverse.UserServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
import edu.harvard.iq.dataverse.affiliation.AffiliationServiceBean;
import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationProviderFactoryNotFoundException;
import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException;
import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory;
import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProviderFactory;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GitHubOAuth2AP;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GoogleOAuth2AP;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP;
import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData;
import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.ejb.EJBException;
import javax.ejb.Singleton;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.NonUniqueResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
/**
* The AuthenticationManager is responsible for registering and listing
* AuthenticationProviders. There's a single instance per application.
*
* Register the providers in the {@link #startup()} method.
*/
@Named
@Singleton
public class AuthenticationServiceBean {
private static final Logger logger = Logger.getLogger(AuthenticationServiceBean.class.getName());
/**
 * Where all registered authentication providers live.
 */
final Map<String, AuthenticationProvider> authenticationProviders = new HashMap<>();
/**
 * Index of all OAuth2 providers. They also live in {@link #authenticationProviders}.
 */
final Map<String, AbstractOAuth2AuthenticationProvider> oAuth2authenticationProviders = new HashMap<>();
// Provider factories, keyed by factory alias; populated in startup() and registerProviderFactory().
final Map<String, AuthenticationProviderFactory> providerFactories = new HashMap<>();
@EJB
BuiltinUserServiceBean builtinUserServiceBean;
@EJB
IndexServiceBean indexService;
@EJB
protected ActionLogServiceBean actionLogSvc;
@EJB
UserNotificationServiceBean userNotificationService;
@EJB
ConfirmEmailServiceBean confirmEmailService;
@EJB
PasswordResetServiceBean passwordResetServiceBean;
@EJB
UserServiceBean userService;
@EJB
PasswordValidatorServiceBean passwordValidatorService;
@EJB
AffiliationServiceBean affiliationBean;
// Container-managed entity manager for the Dataverse persistence unit.
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
/**
 * Container lifecycle hook: registers the built-in provider factories, then loads and
 * registers all enabled authentication providers from the database. Factories must be
 * registered before the provider rows are loaded, since {@link #loadProvider} looks the
 * factory up by alias.
 */
@PostConstruct
public void startup() {
    // First, set up the factories
    try {
        registerProviderFactory( new BuiltinAuthenticationProviderFactory(builtinUserServiceBean, passwordValidatorService, this) );
        registerProviderFactory( new ShibAuthenticationProviderFactory() );
        registerProviderFactory( new OAuth2AuthenticationProviderFactory() );
    } catch (AuthorizationSetupException ex) {
        logger.log(Level.SEVERE, "Exception setting up the authentication provider factories: " + ex.getMessage(), ex);
    }
    // Now, load the providers.
    // A failure to load one provider is logged but does not stop the remaining ones from loading.
    em.createNamedQuery("AuthenticationProviderRow.findAllEnabled", AuthenticationProviderRow.class)
            .getResultList().forEach((row) -> {
                try {
                    registerProvider( loadProvider(row) );
                } catch ( AuthenticationProviderFactoryNotFoundException e ) {
                    logger.log(Level.SEVERE, "Cannot find authentication provider factory with alias '" + e.getFactoryAlias() + "'",e);
                } catch (AuthorizationSetupException ex) {
                    logger.log(Level.SEVERE, "Exception setting up the authentication provider '" + row.getId() + "': " + ex.getMessage(), ex);
                }
            });
}
/**
 * Registers an {@link AuthenticationProviderFactory} under its alias.
 *
 * @param aFactory the factory to register
 * @throws AuthorizationSetupException if a factory with the same alias is already registered
 */
public void registerProviderFactory(AuthenticationProviderFactory aFactory)
        throws AuthorizationSetupException
{
    String alias = aFactory.getAlias();
    // putIfAbsent returns the previously registered factory, if any - in that case we refuse.
    if ( providerFactories.putIfAbsent(alias, aFactory) != null ) {
        throw new AuthorizationSetupException(
                "Duplicate alias " + alias + " for authentication provider factory.");
    }
    logger.log( Level.FINE, "Registered Authentication Provider Factory {0} as {1}",
            new Object[]{aFactory.getInfo(), alias});
}
/**
 * Tries to load an {@link AuthenticationProvider} using the passed {@link AuthenticationProviderRow}.
 *
 * @param aRow The row to load the provider from.
 * @return The provider, if successful
 * @throws AuthenticationProviderFactoryNotFoundException If the row specifies a non-existent factory
 * @throws AuthorizationSetupException If the factory failed to instantiate a provider from the row.
 */
public AuthenticationProvider loadProvider( AuthenticationProviderRow aRow )
        throws AuthenticationProviderFactoryNotFoundException, AuthorizationSetupException {
    AuthenticationProviderFactory factory = getProviderFactory(aRow.getFactoryAlias());
    if ( factory != null ) {
        return factory.buildProvider(aRow);
    }
    throw new AuthenticationProviderFactoryNotFoundException(aRow.getFactoryAlias());
}
/**
 * Registers an authentication provider under its id and action-logs the registration.
 * OAuth2 providers are additionally indexed in {@link #oAuth2authenticationProviders}.
 *
 * @param aProvider the provider to register
 * @throws AuthorizationSetupException if a provider with the same id is already registered
 */
public void registerProvider(AuthenticationProvider aProvider) throws AuthorizationSetupException {
    String id = aProvider.getId();
    if ( authenticationProviders.containsKey(id) ) {
        throw new AuthorizationSetupException(
                "Duplicate id " + id + " for authentication provider.");
    }
    authenticationProviders.put(id, aProvider);
    actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "registerProvider")
            .setInfo(id + ":" + aProvider.getInfo().getTitle()));
    if ( aProvider instanceof AbstractOAuth2AuthenticationProvider ) {
        oAuth2authenticationProviders.put(id, (AbstractOAuth2AuthenticationProvider) aProvider);
    }
}
/** @return the OAuth2 provider registered under {@code id}, or {@code null} if there is none. */
public AbstractOAuth2AuthenticationProvider getOAuth2Provider( String id ) {
    AbstractOAuth2AuthenticationProvider provider = oAuth2authenticationProviders.get(id);
    return provider;
}
/** @return a fresh set containing all currently registered OAuth2 providers. */
public Set<AbstractOAuth2AuthenticationProvider> getOAuth2Providers() {
    Set<AbstractOAuth2AuthenticationProvider> providers = new HashSet<>();
    providers.addAll(oAuth2authenticationProviders.values());
    return providers;
}
/**
 * Removes the provider with the given id from both provider indexes.
 * The removal is action-logged only if a provider was actually registered under that id.
 */
public void deregisterProvider( String id ) {
    oAuth2authenticationProviders.remove( id );
    AuthenticationProvider removed = authenticationProviders.remove(id);
    if ( removed == null ) {
        return;
    }
    actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "deregisterProvider")
            .setInfo(id));
    logger.log(Level.INFO,"Deregistered provider {0}", new Object[]{id});
    logger.log(Level.INFO,"Providers left {0}", new Object[]{getAuthenticationProviderIds()});
}
/**
 * @return the ids of all registered authentication providers.
 *         NOTE(review): this is the live key-set view of the internal map, not a copy —
 *         confirm no caller mutates it or relies on it changing as providers (de)register.
 */
public Set<String> getAuthenticationProviderIds() {
    return authenticationProviders.keySet();
}
/**
 * @return all registered authentication providers.
 *         NOTE(review): this is the live values view of the internal map, not a copy —
 *         confirm no caller mutates it.
 */
public Collection<AuthenticationProvider> getAuthenticationProviders() {
    return authenticationProviders.values();
}
/**
 * Collects the ids of all registered providers assignable to the given type.
 *
 * @param aClass the provider (super)type to filter by
 * @return the matching provider ids, sorted (TreeSet)
 */
public <T extends AuthenticationProvider> Set<String> getAuthenticationProviderIdsOfType( Class<T> aClass ) {
    Set<String> matchingIds = new TreeSet<>();
    authenticationProviders.forEach((id, provider) -> {
        if ( aClass.isAssignableFrom(provider.getClass()) ) {
            matchingIds.add(id);
        }
    });
    return matchingIds;
}
/** @return the provider factory registered under {@code alias}, or {@code null} if there is none. */
public AuthenticationProviderFactory getProviderFactory( String alias ) {
    AuthenticationProviderFactory factory = providerFactories.get(alias);
    return factory;
}
/** @return the authentication provider registered under {@code id}, or {@code null} if there is none. */
public AuthenticationProvider getAuthenticationProvider( String id ) {
    AuthenticationProvider provider = authenticationProviders.get(id);
    return provider;
}
/**
 * Finds an {@link AuthenticatedUser} by primary key.
 *
 * @param pk the primary key; {@code null} yields {@code null}
 * @return the user, or {@code null} if not found or {@code pk} is {@code null}
 */
public AuthenticatedUser findByID(Object pk){
    return (pk == null) ? null : em.find(AuthenticatedUser.class, pk);
}
/**
 * Deletes the API token of the given user, if the user has one.
 * A {@code null} user or a user without a token is a no-op.
 */
public void removeApiToken(AuthenticatedUser user){
    if (user == null) {
        return;
    }
    ApiToken token = findApiTokenByUser(user);
    if (token == null) {
        return;
    }
    em.remove(token);
}
/**
 * Reports whether any registered OAuth2 provider looks like an ORCID provider.
 *
 * @return {@code true} iff at least one OAuth2 provider id contains "orcid", ignoring case.
 */
public boolean isOrcidEnabled() {
    // Locale.ROOT makes the lower-casing locale-independent; with the default locale this
    // check can fail, e.g. under Turkish locales where "I" lower-cases to a dotless i.
    return oAuth2authenticationProviders.values().stream().anyMatch( s -> s.getId().toLowerCase(Locale.ROOT).contains("orcid") );
}
/**
 * Use with care! This method was written primarily for developers
 * interested in API testing who want to:
 *
 * 1. Create a temporary user and get an API token.
 *
 * 2. Do some work with that API token.
 *
 * 3. Delete all the stuff that was created with the API token.
 *
 * 4. Delete the temporary user.
 *
 * Before calling this method, make sure you've deleted all the stuff tied
 * to the user, including stuff they've created, role assignments, group
 * assignments, etc.
 *
 * Longer term, the intention is to have a "disableAuthenticatedUser"
 * method/command. See https://github.com/IQSS/dataverse/issues/2419
 */
public void deleteAuthenticatedUser(Object pk) {
    AuthenticatedUser user = em.find(AuthenticatedUser.class, pk);
    if (user != null) {
        // Remove dependent rows before the user itself: API token first...
        ApiToken apiToken = findApiTokenByUser(user);
        if (apiToken != null) {
            em.remove(apiToken);
        }
        // ...then any pending email-confirmation data...
        ConfirmEmailData confirmEmailData = confirmEmailService.findSingleConfirmEmailDataByUser(user);
        if (confirmEmailData != null) {
            /**
             * @todo This could probably be a cascade delete instead.
             */
            em.remove(confirmEmailData);
        }
        // ...then all of the user's notifications.
        userNotificationService.findByUser(user.getId()).forEach(userNotificationService::delete);
        // Let the user's authentication provider delete its own record, if it supports that
        // (e.g. the builtin provider).
        AuthenticationProvider prv = lookupProvider(user);
        if ( prv != null && prv.isUserDeletionAllowed() ) {
            prv.deleteUser(user.getAuthenticatedUserLookup().getPersistentUserId());
        }
        actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "deleteUser")
                .setInfo(user.getUserIdentifier()));
        // Finally remove the provider lookup row and the user row itself.
        em.remove(user.getAuthenticatedUserLookup());
        em.remove(user);
    }
}
/**
 * Finds an {@link AuthenticatedUser} by its user identifier.
 *
 * @param identifier the user identifier to look up
 * @return the matching user, or {@code null} if there is none
 */
public AuthenticatedUser getAuthenticatedUser( String identifier ) {
    TypedQuery<AuthenticatedUser> query =
            em.createNamedQuery("AuthenticatedUser.findByIdentifier", AuthenticatedUser.class);
    query.setParameter("identifier", identifier);
    try {
        return query.getSingleResult();
    } catch ( NoResultException nre ) {
        return null;
    }
}
/**
 * @return some admin user (the query is capped at one result), or {@code null}
 *         if the lookup fails for any reason.
 */
public AuthenticatedUser getAdminUser() {
    TypedQuery<AuthenticatedUser> query =
            em.createNamedQuery("AuthenticatedUser.findAdminUser", AuthenticatedUser.class);
    query.setMaxResults(1);
    try {
        return query.getSingleResult();
    } catch (Exception ex) {
        // Deliberately broad: any lookup problem is treated as "no admin user found".
        return null;
    }
}
/**
 * Finds an {@link AuthenticatedUser} by email address.
 *
 * @param email the email address to look up
 * @return the unique matching user, or {@code null} when none or multiple users match
 */
public AuthenticatedUser getAuthenticatedUserByEmail( String email ) {
    TypedQuery<AuthenticatedUser> query =
            em.createNamedQuery("AuthenticatedUser.findByEmail", AuthenticatedUser.class);
    query.setParameter("email", email);
    try {
        return query.getSingleResult();
    } catch ( NoResultException ex ) {
        logger.log(Level.INFO, "no user found using {0}", email);
        return null;
    } catch ( NonUniqueResultException ex ) {
        logger.log(Level.INFO, "multiple users found using {0}: {1}", new Object[]{email, ex});
        return null;
    }
}
/**
 * Returns an {@link AuthenticatedUser} matching the passed provider id and the authentication request. If
 * no such user exist, it is created and then returned.
 *
 * <strong>Invariant:</strong> upon successful return from this call, an {@link AuthenticatedUser} record
 * matching the request and provider exists in the database.
 *
 * @param authenticationProviderId
 * @param req
 * @return The authenticated user for the passed provider id and authentication request.
 * @throws AuthenticationFailedException
 */
public AuthenticatedUser getUpdateAuthenticatedUser( String authenticationProviderId, AuthenticationRequest req ) throws AuthenticationFailedException {
    AuthenticationProvider prv = getAuthenticationProvider(authenticationProviderId);
    // Only credentials-based providers can serve this call.
    if ( prv == null ) throw new IllegalArgumentException("No authentication provider listed under id " + authenticationProviderId );
    if ( ! (prv instanceof CredentialsAuthenticationProvider) ) {
        throw new IllegalArgumentException( authenticationProviderId + " does not support credentials-based authentication." );
    }
    AuthenticationResponse resp = ((CredentialsAuthenticationProvider)prv).authenticate(req);
    if ( resp.getStatus() == AuthenticationResponse.Status.SUCCESS ) {
        // yay! see if we already have this user.
        AuthenticatedUser user = lookupUser(authenticationProviderId, resp.getUserId());
        // Record the login timestamp for existing users.
        if (user != null){
            user = userService.updateLastLogin(user);
        }
        if ( user == null ) {
            // Auto-creation at this point was intentionally removed; see the commented-out code below.
            throw new IllegalStateException("Authenticated user does not exist. The functionality to support creating one at this point in authentication has been removed.");
            //return createAuthenticatedUser(
            //        new UserRecordIdentifier(authenticationProviderId, resp.getUserId()), resp.getUserId(), resp.getUserDisplayInfo(), true );
        } else {
            // Builtin users manage their own display info; for other providers, sync it
            // from the provider's response.
            if (BuiltinAuthenticationProvider.PROVIDER_ID.equals(user.getAuthenticatedUserLookup().getAuthenticationProviderId())) {
                return user;
            } else {
                return updateAuthenticatedUser(user, resp.getUserDisplayInfo());
            }
        }
    } else {
        throw new AuthenticationFailedException(resp, "Authentication Failed: " + resp.getMessage());
    }
}
/**
 * @param email
 * @return {@code true} iff the none of the authenticated users has the passed email address.
 */
public boolean isEmailAddressAvailable(String email) {
    List<AuthenticatedUser> usersWithThisEmail =
            em.createNamedQuery("AuthenticatedUser.findByEmail", AuthenticatedUser.class)
                    .setParameter("email", email)
                    .getResultList();
    return usersWithThisEmail.isEmpty();
}
/**
 * Looks up an {@link AuthenticatedUser} by the (provider id, user-id-in-provider) pair
 * carried by the passed record identifier. Delegates to {@link #lookupUser(String, String)}.
 */
public AuthenticatedUser lookupUser(UserRecordIdentifier id) {
    return lookupUser(id.repoId, id.userIdInRepo);
}
/**
 * Looks up an {@link AuthenticatedUser} via its provider lookup row.
 *
 * @param authPrvId        id of the authentication provider
 * @param userPersistentId the user's persistent id within that provider
 * @return the user, or {@code null} when no unique lookup row matches
 */
public AuthenticatedUser lookupUser(String authPrvId, String userPersistentId) {
    try {
        return em.createNamedQuery("AuthenticatedUserLookup.findByAuthPrvID_PersUserId", AuthenticatedUserLookup.class)
                .setParameter("authPrvId", authPrvId)
                .setParameter("persUserId", userPersistentId)
                .getSingleResult()
                .getAuthenticatedUser();
    } catch (NoResultException | NonUniqueResultException ex) {
        return null;
    }
}
/** @return the provider the given user is associated with, or {@code null} if it is not registered. */
public AuthenticationProvider lookupProvider( AuthenticatedUser user ) {
    String providerId = user.getAuthenticatedUserLookup().getAuthenticationProviderId();
    return authenticationProviders.get(providerId);
}
/**
 * Finds an {@link ApiToken} by its token string.
 *
 * @param token the token string
 * @return the token entity, or {@code null} if there is none
 */
public ApiToken findApiToken(String token) {
    TypedQuery<ApiToken> query = em.createNamedQuery("ApiToken.findByTokenString", ApiToken.class);
    query.setParameter("tokenString", token);
    try {
        return query.getSingleResult();
    } catch (NoResultException ex) {
        return null;
    }
}
/**
 * Finds the {@link ApiToken} of the given user.
 *
 * @param au the user; {@code null} yields {@code null}
 * @return the user's token, or {@code null} when the user has none or multiple tokens
 */
public ApiToken findApiTokenByUser(AuthenticatedUser au) {
    if (au == null) {
        return null;
    }
    try {
        return em.createNamedQuery("ApiToken.findByUser", ApiToken.class)
                .setParameter("user", au)
                .getSingleResult();
    } catch (NoResultException | NonUniqueResultException ex) {
        logger.log(Level.INFO, "When looking up API token for {0} caught {1}", new Object[]{au, ex});
        return null;
    }
}
// A method for generating a new API token;
// TODO: this is a simple, one-size-fits-all solution; we'll need
// to expand this system, to be able to generate tokens with different
// lifecycles/valid for specific actions only, etc.
// -- L.A. 4.0 beta12
/**
 * Generates, persists and action-logs a new random API token for the given user,
 * valid for one year from creation.
 *
 * @param au the user to issue the token for; {@code null} yields {@code null}
 * @return the persisted token, or {@code null} when {@code au} is {@code null}
 */
public ApiToken generateApiTokenForUser(AuthenticatedUser au) {
    if (au == null) {
        return null;
    }
    ApiToken apiToken = new ApiToken();
    apiToken.setTokenString(java.util.UUID.randomUUID().toString());
    apiToken.setAuthenticatedUser(au);
    Calendar c = Calendar.getInstance();
    apiToken.setCreateTime(new Timestamp(c.getTimeInMillis()));
    // Use add(), not roll(): add() is the "calendar arithmetic" operation, while roll()
    // is specified to wrap within a single field without changing larger fields.
    c.add(Calendar.YEAR, 1);
    apiToken.setExpireTime(new Timestamp(c.getTimeInMillis()));
    save(apiToken);
    // NOTE(review): the raw token string ends up in the action log below — confirm that
    // log is access-restricted, since the token grants API access.
    actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "generateApiToken")
            .setInfo("user:" + au.getIdentifier() + " token:" + apiToken.getTokenString()));
    return apiToken;
}
/**
 * Resolves an API token string to its user. Disabled tokens yield {@code null};
 * expired tokens are deleted as a side effect and also yield {@code null}.
 *
 * @param apiToken the token string presented by the caller
 * @return the token's user, or {@code null} when the token is unknown, disabled or expired
 */
public AuthenticatedUser lookupUser( String apiToken ) {
    ApiToken token = findApiToken(apiToken);
    if ( token == null || token.isDisabled() ) {
        return null;
    }
    Timestamp now = new Timestamp(new Date().getTime());
    if ( token.getExpireTime() != null && token.getExpireTime().before(now) ) {
        // Expired: clean the token up and treat the lookup as a miss.
        em.remove(token);
        return null;
    }
    return token.getAuthenticatedUser();
}
/**
 * Persists a new {@link AuthenticatedUser} and flushes immediately, so database
 * errors (e.g. constraint violations) surface to the caller here rather than at
 * transaction commit.
 */
public AuthenticatedUser save( AuthenticatedUser user ) {
    em.persist(user);
    em.flush();
    return user;
}
/**
 * Merges the state of the (possibly detached) user into the persistence context.
 *
 * @return the managed instance — note this may be a different object than the argument.
 */
public AuthenticatedUser update( AuthenticatedUser user ) {
    return em.merge(user);
}
/**
 * Saves an {@link ApiToken}: new tokens (no id yet) are persisted, existing ones merged.
 *
 * @return the managed token instance
 */
public ApiToken save( ApiToken aToken ) {
    if ( aToken.getId() != null ) {
        // Already persisted before - merge the detached state.
        return em.merge( aToken );
    }
    em.persist(aToken);
    return aToken;
}
/**
 * Associates the passed {@link AuthenticatedUser} with a new provider.
 * @param authenticatedUser the authenticated being re-associated
 * @param authenticationProviderId Id of the new provider
 * @param persistentIdInProvider Id of the user in the new provider
 * @return {@code true} iff the change was successful.
 */
public boolean updateProvider( AuthenticatedUser authenticatedUser, String authenticationProviderId, String persistentIdInProvider ) {
    try {
        // The managed lookup row is mutated in place; JPA persists the change on commit.
        AuthenticatedUserLookup aul = em.createNamedQuery("AuthenticatedUserLookup.findByAuthUser", AuthenticatedUserLookup.class)
                .setParameter("authUser", authenticatedUser)
                .getSingleResult();
        aul.setAuthenticationProviderId(authenticationProviderId);
        aul.setPersistentUserId(persistentIdInProvider);
        actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth,
                authenticatedUser.getIdentifier() + " now associated with provider " + authenticationProviderId + " id: " + persistentIdInProvider) );
        return true;
    } catch ( NoResultException | NonUniqueResultException ex ) {
        // No (or no unique) lookup row for this user - log and report failure.
        logger.log(Level.WARNING, "Error converting user " + authenticatedUser.getUserIdentifier() + ": " + ex.getMessage(), ex);
        return false;
    }
}
/**
 * Creates an authenticated user based on the passed
 * {@code userDisplayInfo}, a lookup entry for them based
 * UserIdentifier.getLookupStringPerAuthProvider (within the supplied
 * authentication provider), and internal user identifier (used for role
 * assignments, etc.) based on UserIdentifier.getInternalUserIdentifer.
 *
 * @param userRecordId identifies the user within the external provider
 * @param proposedAuthenticatedUserIdentifier preferred username; surrounding whitespace is trimmed
 * @param userDisplayInfo display data (name, email, affiliation, ...) applied to the new user
 * @param generateUniqueIdentifier if {@code true}, create a new, unique user identifier for the created user, if the suggested one exists.
 * @return the newly created user, or {@code null} if the proposed identifier exists and {@code generateUniqueIdentifier} was {@code false}.
 * @throws EJBException which may wrap an ConstraintViolationException if the proposed user does not pass bean validation.
 */
public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecordId,
        String proposedAuthenticatedUserIdentifier,
        AuthenticatedUserDisplayInfo userDisplayInfo,
        boolean generateUniqueIdentifier) {
    AuthenticatedUser authenticatedUser = new AuthenticatedUser();
    // set account creation time & initial login time (same timestamp)
    authenticatedUser.setCreatedTime(new Timestamp(new Date().getTime()));
    authenticatedUser.setLastLoginTime(authenticatedUser.getCreatedTime());
    DataverseLocaleBean d = new DataverseLocaleBean();
    String localeCode = d.getLocaleCode();
    if (!localeCode.equalsIgnoreCase("en")) {
        // Convert the localized affiliation back to its English bundle value before storing.
        String affProp = "affiliation";
        Locale enLocale = new Locale("en");
        ResourceBundle fromBundle = BundleUtil.getResourceBundle(affProp + "_" + localeCode);
        ResourceBundle toBundle = ResourceBundle.getBundle(affProp, enLocale);
        String affiliation = userDisplayInfo.getAffiliation();
        String newAffiliation = affiliationBean.convertAffiliation(affiliation, fromBundle, toBundle);
        userDisplayInfo.setAffiliation(newAffiliation);
    }
    authenticatedUser.applyDisplayInfo(userDisplayInfo);
    // we have no desire for leading or trailing whitespace in identifiers
    if (proposedAuthenticatedUserIdentifier != null) {
        proposedAuthenticatedUserIdentifier = proposedAuthenticatedUserIdentifier.trim();
    }
    // we now select a username for the generated AuthenticatedUser, or give up
    String internalUserIdentifer = proposedAuthenticatedUserIdentifier;
    // TODO should lock table authenticated users for write here
    if (identifierExists(internalUserIdentifer)) {
        if (!generateUniqueIdentifier) {
            return null;
        }
        // Append an increasing numeric suffix until we hit a free identifier.
        int i = 1;
        String identifier = internalUserIdentifer + i;
        while (identifierExists(identifier)) {
            i += 1;
            // BUG FIX: the candidate must be recomputed each iteration;
            // previously only the counter advanced and the loop never terminated
            // once "name1" was taken.
            identifier = internalUserIdentifer + i;
        }
        authenticatedUser.setUserIdentifier(identifier);
    } else {
        authenticatedUser.setUserIdentifier(internalUserIdentifer);
    }
    authenticatedUser = save(authenticatedUser);
    // TODO should unlock table authenticated users for write here
    AuthenticatedUserLookup auusLookup = userRecordId.createAuthenticatedUserLookup(authenticatedUser);
    em.persist(auusLookup);
    authenticatedUser.setAuthenticatedUserLookup(auusLookup);
    if (ShibAuthenticationProvider.PROVIDER_ID.equals(auusLookup.getAuthenticationProviderId())) {
        Timestamp emailConfirmedNow = new Timestamp(new Date().getTime());
        // Email addresses for Shib users are confirmed by the Identity Provider.
        authenticatedUser.setEmailConfirmed(emailConfirmedNow);
        authenticatedUser = save(authenticatedUser);
    } else {
        /* @todo Rather than creating a token directly here it might be
         * better to do something like "startConfirmEmailProcessForNewUser". */
        confirmEmailService.createToken(authenticatedUser);
    }
    actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.Auth, "createUser")
            .setInfo(authenticatedUser.getIdentifier()));
    return authenticatedUser;
}
/**
 * Checks whether the {@code idtf} is already taken by another {@link AuthenticatedUser}.
 * @param idtf the candidate identifier
 * @return {@code true} iff there's already a user by that username.
 */
public boolean identifierExists( String idtf ) {
    Number count = em.createNamedQuery("AuthenticatedUser.countOfIdentifier", Number.class)
            .setParameter("identifier", idtf)
            .getSingleResult();
    return count.intValue() > 0;
}
/**
 * Applies {@code userDisplayInfo} to {@code user} (localizing the
 * affiliation when the current locale is not English), logs the action,
 * and merges the change into the persistence context.
 *
 * @param user the user being updated
 * @param userDisplayInfo the new display data to apply
 * @return the managed, updated user
 */
public AuthenticatedUser updateAuthenticatedUser(AuthenticatedUser user, AuthenticatedUserDisplayInfo userDisplayInfo) {
    String localeCode = new DataverseLocaleBean().getLocaleCode();
    if (!localeCode.equalsIgnoreCase("en")) {
        affiliationBean.updateAuthenticatedUserAffiliation(userDisplayInfo, localeCode);
    }
    user.applyDisplayInfo(userDisplayInfo);
    actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "updateUser")
            .setInfo(user.getIdentifier()));
    return update(user);
}
/** @return every {@link AuthenticatedUser}, via the {@code AuthenticatedUser.findAll} named query. */
public List<AuthenticatedUser> findAllAuthenticatedUsers() {
    TypedQuery<AuthenticatedUser> query =
            em.createNamedQuery("AuthenticatedUser.findAll", AuthenticatedUser.class);
    return query.getResultList();
}
/** @return all users flagged as superusers, via the {@code AuthenticatedUser.findSuperUsers} named query. */
public List<AuthenticatedUser> findSuperUsers() {
    TypedQuery<AuthenticatedUser> query =
            em.createNamedQuery("AuthenticatedUser.findSuperUsers", AuthenticatedUser.class);
    return query.getResultList();
}
/** @return a defensive copy of the registered authentication provider factories. */
public Set<AuthenticationProviderFactory> listProviderFactories() {
    Set<AuthenticationProviderFactory> factories = new HashSet<>();
    factories.addAll(providerFactories.values());
    return factories;
}
/** @return the current wall-clock time as a {@link Timestamp}. */
public Timestamp getCurrentTimestamp() {
    return new Timestamp(System.currentTimeMillis());
}
// TODO should probably be moved to the Shib provider - this is a classic Shib-specific
// use case. This class should deal with general authentications.
/**
 * Converts a builtin account to a Shibboleth one by re-pointing its
 * authenticateduserlookup row at the Shib provider and then deleting the
 * builtinuser row (plus any stale password-reset tokens).
 *
 * @deprecated Switch to {@link #convertBuiltInUserToRemoteUser} instead.
 */
@Deprecated
public AuthenticatedUser convertBuiltInToShib(AuthenticatedUser builtInUserToConvert, String shibProviderId, UserIdentifier newUserIdentifierInLookupTable) {
    // NOTE: the Javadoc above is now placed before @Deprecated; previously it
    // sat between the annotation and the method, so it did not attach.
    logger.info("converting user " + builtInUserToConvert.getId() + " from builtin to shib");
    String builtInUserIdentifier = builtInUserToConvert.getIdentifier();
    logger.info("builtin user identifier: " + builtInUserIdentifier);
    TypedQuery<AuthenticatedUserLookup> typedQuery = em.createQuery("SELECT OBJECT(o) FROM AuthenticatedUserLookup AS o WHERE o.authenticatedUser = :auid", AuthenticatedUserLookup.class);
    typedQuery.setParameter("auid", builtInUserToConvert);
    AuthenticatedUserLookup authuserLookup;
    try {
        authuserLookup = typedQuery.getSingleResult();
    } catch (NoResultException | NonUniqueResultException ex) {
        logger.info("exception caught: " + ex);
        return null;
    }
    if (authuserLookup == null) {
        return null;
    }
    String oldProviderId = authuserLookup.getAuthenticationProviderId();
    logger.info("we expect this to be 'builtin': " + oldProviderId);
    authuserLookup.setAuthenticationProviderId(shibProviderId);
    String oldUserLookupIdentifier = authuserLookup.getPersistentUserId();
    logger.info("this should be 'pete' or whatever the old builtin username was: " + oldUserLookupIdentifier);
    String perUserShibIdentifier = newUserIdentifierInLookupTable.getLookupStringPerAuthProvider();
    authuserLookup.setPersistentUserId(perUserShibIdentifier);
    /**
     * @todo this should be a transaction of some kind. We want to update
     * the authenticateduserlookup and also delete the row from the
     * builtinuser table in a single transaction.
     */
    em.persist(authuserLookup);
    String builtinUsername = builtInUserIdentifier.replaceFirst(AuthenticatedUser.IDENTIFIER_PREFIX, "");
    BuiltinUser builtin = builtinUserServiceBean.findByUserName(builtinUsername);
    if (builtin != null) {
        // These were created by AuthenticationResponse.Status.BREAKOUT in canLogInAsBuiltinUser
        List<PasswordResetData> oldTokens = passwordResetServiceBean.findPasswordResetDataByDataverseUser(builtin);
        for (PasswordResetData oldToken : oldTokens) {
            em.remove(oldToken);
        }
        em.remove(builtin);
    } else {
        // BUG FIX: message previously read "because could find it", inverting its meaning.
        logger.info("Couldn't delete builtin user because couldn't find it based on username " + builtinUsername);
    }
    // lookupUser already returns null when no Shib user matches; just propagate.
    return lookupUser(shibProviderId, perUserShibIdentifier);
}
/**
 * Converts a builtin account to a remote (Shib/OAuth/...) one by
 * re-pointing its authenticateduserlookup row at {@code newProviderId} and
 * then deleting the builtinuser row (plus any stale password-reset tokens).
 *
 * @param builtInUserToConvert the builtin account to convert
 * @param newProviderId id of the remote authentication provider
 * @param newUserIdentifierInLookupTable the user's identifier within that provider
 * @return the converted user as resolved through the new provider, or
 *         {@code null} when the lookup row is missing/ambiguous or the
 *         post-conversion lookup fails
 */
public AuthenticatedUser convertBuiltInUserToRemoteUser(AuthenticatedUser builtInUserToConvert, String newProviderId, UserIdentifier newUserIdentifierInLookupTable) {
    logger.info("converting user " + builtInUserToConvert.getId() + " from builtin to remote");
    String builtInUserIdentifier = builtInUserToConvert.getIdentifier();
    logger.info("builtin user identifier: " + builtInUserIdentifier);
    TypedQuery<AuthenticatedUserLookup> typedQuery = em.createQuery("SELECT OBJECT(o) FROM AuthenticatedUserLookup AS o WHERE o.authenticatedUser = :auid", AuthenticatedUserLookup.class);
    typedQuery.setParameter("auid", builtInUserToConvert);
    AuthenticatedUserLookup authuserLookup;
    try {
        authuserLookup = typedQuery.getSingleResult();
    } catch (NoResultException | NonUniqueResultException ex) {
        logger.info("exception caught: " + ex);
        return null;
    }
    if (authuserLookup == null) {
        return null;
    }
    String oldProviderId = authuserLookup.getAuthenticationProviderId();
    logger.info("we expect this to be 'builtin': " + oldProviderId);
    authuserLookup.setAuthenticationProviderId(newProviderId);
    String oldUserLookupIdentifier = authuserLookup.getPersistentUserId();
    logger.info("this should be 'pete' or whatever the old builtin username was: " + oldUserLookupIdentifier);
    String perUserIdentifier = newUserIdentifierInLookupTable.getLookupStringPerAuthProvider();
    authuserLookup.setPersistentUserId(perUserIdentifier);
    /**
     * @todo this should be a transaction of some kind. We want to update
     * the authenticateduserlookup and also delete the row from the
     * builtinuser table in a single transaction.
     */
    em.persist(authuserLookup);
    String builtinUsername = builtInUserIdentifier.replaceFirst(AuthenticatedUser.IDENTIFIER_PREFIX, "");
    BuiltinUser builtin = builtinUserServiceBean.findByUserName(builtinUsername);
    if (builtin != null) {
        // These were created by AuthenticationResponse.Status.BREAKOUT in canLogInAsBuiltinUser
        List<PasswordResetData> oldTokens = passwordResetServiceBean.findPasswordResetDataByDataverseUser(builtin);
        for (PasswordResetData oldToken : oldTokens) {
            em.remove(oldToken);
        }
        em.remove(builtin);
    } else {
        // BUG FIX: message previously read "because could find it", inverting its meaning.
        logger.info("Couldn't delete builtin user because couldn't find it based on username " + builtinUsername);
    }
    // lookupUser already returns null when no remote user matches; just propagate.
    return lookupUser(newProviderId, perUserIdentifier);
}
/**
 * Converts a remote (Shibboleth or OAuth) account into a BuiltinUser:
 * validates and saves the new builtin row, then re-points the user's
 * lookup row at the builtin provider and swaps in the new email.
 *
 * @param idOfAuthUserToConvert The id of the remote AuthenticatedUser
 * (Shibboleth user or OAuth user) to convert to a BuiltinUser.
 * @param newEmailAddress The new email address that will be used instead of
 * the user's old email address from the institution that they have left.
 * @return BuiltinUser
 * @throws java.lang.Exception You must catch and report back to the user (a
 * superuser) any Exceptions.
 */
public BuiltinUser convertRemoteToBuiltIn(Long idOfAuthUserToConvert, String newEmailAddress) throws Exception {
    AuthenticatedUser authenticatedUser = findByID(idOfAuthUserToConvert);
    if (authenticatedUser == null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " not found.");
    }
    // Refuse the conversion when the new email is already owned by another account.
    AuthenticatedUser existingUserWithSameEmail = getAuthenticatedUserByEmail(newEmailAddress);
    if (existingUserWithSameEmail != null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " (" + authenticatedUser.getIdentifier() + ") cannot be converted from remote to BuiltIn because the email address " + newEmailAddress + " is already in use by user id " + existingUserWithSameEmail.getId() + " (" + existingUserWithSameEmail.getIdentifier() + ").");
    }
    BuiltinUser builtinUser = new BuiltinUser();
    builtinUser.setUserName(authenticatedUser.getUserIdentifier());
    // Run bean validation on the prospective BuiltinUser up front so every
    // violation can be reported in one message instead of failing on flush.
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    Set<ConstraintViolation<BuiltinUser>> violations = validator.validate(builtinUser);
    int numViolations = violations.size();
    if (numViolations > 0) {
        StringBuilder logMsg = new StringBuilder();
        for (ConstraintViolation<?> violation : violations) {
            logMsg.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
        }
        throw new Exception("User id " + idOfAuthUserToConvert + " cannot be converted from remote to BuiltIn because of constraint violations on the BuiltIn user that would be created: " + numViolations + ". Details: " + logMsg);
    }
    try {
        builtinUser = builtinUserServiceBean.save(builtinUser);
    } catch (IllegalArgumentException ex) {
        throw new Exception("User id " + idOfAuthUserToConvert + " cannot be converted from remote to BuiltIn because of an IllegalArgumentException creating the row in the builtinuser table: " + ex);
    }
    // Sanity-check the existing lookup row before re-pointing it.
    AuthenticatedUserLookup lookup = authenticatedUser.getAuthenticatedUserLookup();
    if (lookup == null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " does not have an 'authenticateduserlookup' row");
    }
    String providerId = lookup.getAuthenticationProviderId();
    if (providerId == null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " provider id is null.");
    }
    String builtinProviderId = BuiltinAuthenticationProvider.PROVIDER_ID;
    if (providerId.equals(builtinProviderId)) {
        throw new Exception("User id " + idOfAuthUserToConvert + " cannot be converted from remote to BuiltIn because current provider id is '" + providerId + "' which is the same as '" + builtinProviderId + "'. This user is already a BuiltIn user.");
    }
    // Re-point the lookup row at the builtin provider and swap in the new email.
    lookup.setAuthenticationProviderId(BuiltinAuthenticationProvider.PROVIDER_ID);
    lookup.setPersistentUserId(authenticatedUser.getUserIdentifier());
    em.persist(lookup);
    authenticatedUser.setEmail(newEmailAddress);
    em.persist(authenticatedUser);
    em.flush();
    return builtinUser;
}
/**
 * Attempts to authenticate {@code username}/{@code password} against the
 * builtin provider. On a BREAKOUT failure (typically an account that has
 * not yet upgraded to the current password encryption) the legacy
 * password-encryption versions are checked directly as a fallback.
 *
 * @param username builtin username (or email)
 * @param password plain-text password; {@code null} fails immediately
 * @return the authenticated user, or {@code null} when authentication fails
 */
public AuthenticatedUser canLogInAsBuiltinUser(String username, String password) {
    logger.fine("checking to see if " + username + " knows the password...");
    if (password == null) {
        logger.info("password was null");
        return null;
    }
    AuthenticationRequest authReq = new AuthenticationRequest();
    /**
     * @todo Should the credential key really be a Bundle key?
     * BuiltinAuthenticationProvider.KEY_USERNAME_OR_EMAIL, for example, is
     * "login.builtin.credential.usernameOrEmail" as of this writing.
     */
    authReq.putCredential(BuiltinAuthenticationProvider.KEY_USERNAME_OR_EMAIL, username);
    authReq.putCredential(BuiltinAuthenticationProvider.KEY_PASSWORD, password);
    /**
     * @todo Should probably set IP address here.
     */
    // authReq.setIpAddress(session.getUser().getRequestMetadata().getIpAddress());
    String credentialsAuthProviderId = BuiltinAuthenticationProvider.PROVIDER_ID;
    try {
        AuthenticatedUser au = getUpdateAuthenticatedUser(credentialsAuthProviderId, authReq);
        logger.fine("User authenticated:" + au.getEmail());
        return au;
    } catch (AuthenticationFailedException ex) {
        logger.info("The username and/or password entered is invalid: " + ex.getResponse().getMessage());
        if (AuthenticationResponse.Status.BREAKOUT.equals(ex.getResponse().getStatus())) {
            /**
             * Note that this "BREAKOUT" status creates PasswordResetData!
             * We'll delete it just before blowing away the BuiltinUser in
             * AuthenticationServiceBean.convertBuiltInToShib
             */
            logger.info("AuthenticationFailedException caught in canLogInAsBuiltinUser: The username and/or password entered is invalid: " + ex.getResponse().getMessage() + " - Maybe the user (" + username + ") hasn't upgraded their password? Checking the old password...");
            BuiltinUser builtinUser = builtinUserServiceBean.findByUserName(username);
            if (builtinUser != null) {
                boolean userAuthenticated = PasswordEncryption.getVersion(builtinUser.getPasswordEncryptionVersion()).check(password, builtinUser.getEncryptedPassword());
                if (userAuthenticated) {
                    AuthenticatedUser authUser = lookupUser(BuiltinAuthenticationProvider.PROVIDER_ID, builtinUser.getUserName());
                    if (authUser != null) {
                        return authUser;
                    } else {
                        logger.info("canLogInAsBuiltinUser: Couldn't find AuthenticatedUser based on BuiltinUser username " + builtinUser.getUserName());
                    }
                } else {
                    logger.info("canLogInAsBuiltinUser: User doesn't know old pre-bcrypt password either.");
                }
            } else {
                logger.info("canLogInAsBuiltinUser: Couldn't run `check` because no BuiltinUser found with username " + username);
            }
        }
        return null;
    } catch (EJBException ex) {
        // Unwrap the cause chain into a single log line for diagnosis.
        Throwable cause = ex;
        StringBuilder sb = new StringBuilder();
        sb.append(ex).append(" ");
        while (cause.getCause() != null) {
            cause = cause.getCause();
            sb.append(cause.getClass().getCanonicalName()).append(" ");
            sb.append(cause.getMessage()).append(" ");
            /**
             * @todo Investigate why authSvc.authenticate is throwing
             * NullPointerException. If you convert a Shib user or an OAuth
             * user to a Builtin user, the password will be null.
             */
            if (cause instanceof NullPointerException) {
                // Include the top (up to two) stack frames of the NPE for context.
                // BUG FIX: bounded by the actual trace length (previously an
                // ArrayIndexOutOfBoundsException if the trace had < 2 frames),
                // and dropped the unused classCanonicalName local that mistakenly
                // called stacktrace.getClass() (always StackTraceElement).
                StackTraceElement[] frames = cause.getStackTrace();
                for (int i = 0; i < Math.min(2, frames.length); i++) {
                    StackTraceElement stacktrace = frames[i];
                    if (stacktrace != null) {
                        String error = "at " + stacktrace.getClassName() + "." + stacktrace.getMethodName() + "(" + stacktrace.getFileName() + ":" + stacktrace.getLineNumber() + ") ";
                        sb.append(error);
                    }
                }
            }
        }
        logger.info("When trying to validate password, exception calling authSvc.authenticate: " + sb.toString());
        return null;
    }
}
/**
 * Returns the known provider ids in their fixed display order.
 *
 * @todo Consider making the sort order configurable by making it a colum on
 * AuthenticationProviderRow
 */
public List<String> getAuthenticationProviderIdsSorted() {
    // Instantiate throw-away provider objects solely to read their ids.
    String githubId = new GitHubOAuth2AP(null, null).getId();
    String googleId = new GoogleOAuth2AP(null, null).getId();
    return Arrays.asList(
            BuiltinAuthenticationProvider.PROVIDER_ID,
            ShibAuthenticationProvider.PROVIDER_ID,
            OrcidOAuth2AP.PROVIDER_ID_PRODUCTION,
            OrcidOAuth2AP.PROVIDER_ID_SANDBOX,
            githubId,
            googleId
    );
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.rmnode;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceOption;
import org.apache.hadoop.yarn.api.records.ResourceUtilization;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.server.api.protocolrecords.LogAggregationReport;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.records.OpportunisticContainersStatus;
import org.apache.hadoop.yarn.server.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.NodesListManagerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.NodesListManagerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.NodesListManager;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRunningOnNodeEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.AllocationExpirationInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeResourceUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils.ContainerIdComparator;
import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.resource.Resources;
import com.google.common.annotations.VisibleForTesting;
/**
* This class is used to keep track of all the applications/containers
* running on a node.
*
*/
@Private
@Unstable
@SuppressWarnings("unchecked")
public class RMNodeImpl implements RMNode, EventHandler<RMNodeEvent> {
private static final Log LOG = LogFactory.getLog(RMNodeImpl.class);
private static final RecordFactory recordFactory = RecordFactoryProvider
    .getRecordFactory(null);
// Lock pair guarding this node's mutable state; presumably both sides of one
// ReentrantReadWriteLock created in the constructor - not visible here.
private final ReadLock readLock;
private final WriteLock writeLock;
// Queue of container status updates accumulated between heartbeats.
// NOTE(review): producer/consumer pairing is outside this view - confirm.
private final ConcurrentLinkedQueue<UpdatedContainerInfo> nodeUpdateQueue;
private volatile boolean nextHeartBeat = true;
// Identity and addressing of the node manager this object tracks.
private final NodeId nodeId;
private final RMContext context;
private final String hostName;
private final int commandPort;
private int httpPort;
private final String nodeAddress; // The containerManager address
private String httpAddress;
/* Snapshot of total resources before receiving decommissioning command */
private volatile Resource originalTotalCapability;
private volatile Resource totalCapability;
// Topology (rack) placement of this node.
private final Node node;
// Latest health report and when it was received.
private String healthReport;
private long lastHealthReportTime;
private String nodeManagerVersion;
// Timeout used while gracefully decommissioning; null otherwise.
// NOTE(review): unit not established in this view - confirm before relying on it.
private Integer decommissioningTimeout;
private long timeStamp;
/* Aggregated resource utilization for the containers. */
private ResourceUtilization containersUtilization;
/* Resource utilization for the node. */
private ResourceUtilization nodeUtilization;
/** Physical resources in the node. */
private volatile Resource physicalResource;
/* Container Queue Information for the node.. Used by Distributed Scheduler */
private OpportunisticContainersStatus opportunisticContainersStatus;
private final ContainerAllocationExpirer containerAllocationExpirer;
/* set of containers that have just launched */
private final Set<ContainerId> launchedContainers =
    new HashSet<ContainerId>();
/* track completed container globally */
private final Set<ContainerId> completedContainers =
    new HashSet<ContainerId>();
/* set of containers that need to be cleaned */
private final Set<ContainerId> containersToClean = new TreeSet<ContainerId>(
    new ContainerIdComparator());
/* set of containers that need to be signaled */
private final List<SignalContainerRequest> containersToSignal =
    new ArrayList<SignalContainerRequest>();
/*
 * set of containers to notify NM to remove them from its context. Currently,
 * this includes containers that were notified to AM about their completion
 */
private final Set<ContainerId> containersToBeRemovedFromNM =
    new HashSet<ContainerId>();
/* the list of applications that have finished and need to be purged */
private final List<ApplicationId> finishedApplications =
    new ArrayList<ApplicationId>();
/* the list of applications that are running on this node */
private final List<ApplicationId> runningApplications =
    new ArrayList<ApplicationId>();
// Container resize bookkeeping reported by / destined for the NM.
private final Map<ContainerId, Container> toBeDecreasedContainers =
    new HashMap<>();
private final Map<ContainerId, Container> nmReportedIncreasedContainers =
    new HashMap<>();
// Most recent heartbeat response handed back to the node manager.
private NodeHeartbeatResponse latestNodeHeartBeatResponse = recordFactory
    .newRecordInstance(NodeHeartbeatResponse.class);
private static final StateMachineFactory<RMNodeImpl,
NodeState,
RMNodeEventType,
RMNodeEvent> stateMachineFactory
= new StateMachineFactory<RMNodeImpl,
NodeState,
RMNodeEventType,
RMNodeEvent>(NodeState.NEW)
//Transitions from NEW state
.addTransition(NodeState.NEW, NodeState.RUNNING,
RMNodeEventType.STARTED, new AddNodeTransition())
.addTransition(NodeState.NEW, NodeState.NEW,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenUnusableTransition())
.addTransition(NodeState.NEW, NodeState.DECOMMISSIONED,
RMNodeEventType.DECOMMISSION,
new DeactivateNodeTransition(NodeState.DECOMMISSIONED))
//Transitions from RUNNING state
.addTransition(NodeState.RUNNING,
EnumSet.of(NodeState.RUNNING, NodeState.UNHEALTHY),
RMNodeEventType.STATUS_UPDATE,
new StatusUpdateWhenHealthyTransition())
.addTransition(NodeState.RUNNING, NodeState.DECOMMISSIONED,
RMNodeEventType.DECOMMISSION,
new DeactivateNodeTransition(NodeState.DECOMMISSIONED))
.addTransition(NodeState.RUNNING, NodeState.DECOMMISSIONING,
RMNodeEventType.GRACEFUL_DECOMMISSION,
new DecommissioningNodeTransition(NodeState.RUNNING,
NodeState.DECOMMISSIONING))
.addTransition(NodeState.RUNNING, NodeState.LOST,
RMNodeEventType.EXPIRE,
new DeactivateNodeTransition(NodeState.LOST))
.addTransition(NodeState.RUNNING, NodeState.REBOOTED,
RMNodeEventType.REBOOTING,
new DeactivateNodeTransition(NodeState.REBOOTED))
.addTransition(NodeState.RUNNING, NodeState.RUNNING,
RMNodeEventType.CLEANUP_APP, new CleanUpAppTransition())
.addTransition(NodeState.RUNNING, NodeState.RUNNING,
RMNodeEventType.CLEANUP_CONTAINER, new CleanUpContainerTransition())
.addTransition(NodeState.RUNNING, NodeState.RUNNING,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
.addTransition(NodeState.RUNNING, EnumSet.of(NodeState.RUNNING),
RMNodeEventType.RECONNECTED, new ReconnectNodeTransition())
.addTransition(NodeState.RUNNING, NodeState.RUNNING,
RMNodeEventType.RESOURCE_UPDATE, new UpdateNodeResourceWhenRunningTransition())
.addTransition(NodeState.RUNNING, NodeState.RUNNING,
RMNodeEventType.DECREASE_CONTAINER,
new DecreaseContainersTransition())
.addTransition(NodeState.RUNNING, NodeState.RUNNING,
RMNodeEventType.SIGNAL_CONTAINER, new SignalContainerTransition())
.addTransition(NodeState.RUNNING, NodeState.SHUTDOWN,
RMNodeEventType.SHUTDOWN,
new DeactivateNodeTransition(NodeState.SHUTDOWN))
//Transitions from REBOOTED state
.addTransition(NodeState.REBOOTED, NodeState.REBOOTED,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenUnusableTransition())
//Transitions from DECOMMISSIONED state
.addTransition(NodeState.DECOMMISSIONED, NodeState.DECOMMISSIONED,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenUnusableTransition())
.addTransition(NodeState.DECOMMISSIONED, NodeState.DECOMMISSIONED,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
//Transitions from DECOMMISSIONING state
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONED,
RMNodeEventType.DECOMMISSION,
new DeactivateNodeTransition(NodeState.DECOMMISSIONED))
.addTransition(NodeState.DECOMMISSIONING, NodeState.RUNNING,
RMNodeEventType.RECOMMISSION,
new RecommissionNodeTransition(NodeState.RUNNING))
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenRunningTransition())
.addTransition(NodeState.DECOMMISSIONING,
EnumSet.of(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONED),
RMNodeEventType.STATUS_UPDATE,
new StatusUpdateWhenHealthyTransition())
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.GRACEFUL_DECOMMISSION,
new DecommissioningNodeTransition(NodeState.DECOMMISSIONING,
NodeState.DECOMMISSIONING))
.addTransition(NodeState.DECOMMISSIONING, NodeState.LOST,
RMNodeEventType.EXPIRE,
new DeactivateNodeTransition(NodeState.LOST))
.addTransition(NodeState.DECOMMISSIONING, NodeState.REBOOTED,
RMNodeEventType.REBOOTING,
new DeactivateNodeTransition(NodeState.REBOOTED))
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.CLEANUP_APP, new CleanUpAppTransition())
.addTransition(NodeState.DECOMMISSIONING, NodeState.SHUTDOWN,
RMNodeEventType.SHUTDOWN,
new DeactivateNodeTransition(NodeState.SHUTDOWN))
// TODO (in YARN-3223) update resource when container finished.
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.CLEANUP_CONTAINER, new CleanUpContainerTransition())
// TODO (in YARN-3223) update resource when container finished.
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
.addTransition(NodeState.DECOMMISSIONING, EnumSet.of(
NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONED),
RMNodeEventType.RECONNECTED, new ReconnectNodeTransition())
.addTransition(NodeState.DECOMMISSIONING, NodeState.DECOMMISSIONING,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenRunningTransition())
//Transitions from LOST state
.addTransition(NodeState.LOST, NodeState.LOST,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenUnusableTransition())
.addTransition(NodeState.LOST, NodeState.LOST,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
//Transitions from UNHEALTHY state
.addTransition(NodeState.UNHEALTHY,
EnumSet.of(NodeState.UNHEALTHY, NodeState.RUNNING),
RMNodeEventType.STATUS_UPDATE,
new StatusUpdateWhenUnHealthyTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.DECOMMISSIONED,
RMNodeEventType.DECOMMISSION,
new DeactivateNodeTransition(NodeState.DECOMMISSIONED))
.addTransition(NodeState.UNHEALTHY, NodeState.DECOMMISSIONING,
RMNodeEventType.GRACEFUL_DECOMMISSION,
new DecommissioningNodeTransition(NodeState.UNHEALTHY,
NodeState.DECOMMISSIONING))
.addTransition(NodeState.UNHEALTHY, NodeState.LOST,
RMNodeEventType.EXPIRE,
new DeactivateNodeTransition(NodeState.LOST))
.addTransition(NodeState.UNHEALTHY, NodeState.REBOOTED,
RMNodeEventType.REBOOTING,
new DeactivateNodeTransition(NodeState.REBOOTED))
.addTransition(NodeState.UNHEALTHY, EnumSet.of(NodeState.UNHEALTHY),
RMNodeEventType.RECONNECTED, new ReconnectNodeTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.UNHEALTHY,
RMNodeEventType.CLEANUP_APP, new CleanUpAppTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.UNHEALTHY,
RMNodeEventType.CLEANUP_CONTAINER, new CleanUpContainerTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.UNHEALTHY,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenUnusableTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.UNHEALTHY,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.UNHEALTHY,
RMNodeEventType.SIGNAL_CONTAINER, new SignalContainerTransition())
.addTransition(NodeState.UNHEALTHY, NodeState.SHUTDOWN,
RMNodeEventType.SHUTDOWN,
new DeactivateNodeTransition(NodeState.SHUTDOWN))
//Transitions from SHUTDOWN state
.addTransition(NodeState.SHUTDOWN, NodeState.SHUTDOWN,
RMNodeEventType.RESOURCE_UPDATE,
new UpdateNodeResourceWhenUnusableTransition())
.addTransition(NodeState.SHUTDOWN, NodeState.SHUTDOWN,
RMNodeEventType.FINISHED_CONTAINERS_PULLED_BY_AM,
new AddContainersToBeRemovedFromNMTransition())
// create the topology tables
.installTopology();
// Per-node state machine instance, created from the shared
// stateMachineFactory in the constructor (stateMachineFactory.make(this)).
private final StateMachine<NodeState, RMNodeEventType,
RMNodeEvent> stateMachine;
/**
 * Convenience constructor delegating to the full constructor with a
 * {@code null} physical resource.
 */
public RMNodeImpl(NodeId nodeId, RMContext context, String hostName,
int cmPort, int httpPort, Node node, Resource capability,
String nodeManagerVersion) {
this(nodeId, context, hostName, cmPort, httpPort, node, capability,
nodeManagerVersion, null);
}
/**
 * Creates an RMNodeImpl for a newly registered NodeManager.
 *
 * @param nodeId             unique node identifier (host:port)
 * @param context            RM context used to reach dispatcher, expirers etc.
 * @param hostName           host the NM registered with
 * @param cmPort             ContainerManager (command) port
 * @param httpPort           NM web UI port
 * @param node               topology entry (rack resolution)
 * @param capability         total schedulable resource reported by the NM
 * @param nodeManagerVersion NM software version string
 * @param physResource       physical resource of the node, may be null
 */
public RMNodeImpl(NodeId nodeId, RMContext context, String hostName,
int cmPort, int httpPort, Node node, Resource capability,
String nodeManagerVersion, Resource physResource) {
this.nodeId = nodeId;
this.context = context;
this.hostName = hostName;
this.commandPort = cmPort;
this.httpPort = httpPort;
this.totalCapability = capability;
this.nodeAddress = hostName + ":" + cmPort;
this.httpAddress = hostName + ":" + httpPort;
this.node = node;
// A node starts out healthy until a status report says otherwise.
this.healthReport = "Healthy";
this.lastHealthReportTime = System.currentTimeMillis();
this.nodeManagerVersion = nodeManagerVersion;
this.timeStamp = 0;
this.physicalResource = physResource;
this.latestNodeHeartBeatResponse.setResponseId(0);
// One read/write lock pair guards all mutable node state below.
ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
this.readLock = lock.readLock();
this.writeLock = lock.writeLock();
this.stateMachine = stateMachineFactory.make(this);
this.nodeUpdateQueue = new ConcurrentLinkedQueue<UpdatedContainerInfo>();
this.containerAllocationExpirer = context.getContainerAllocationExpirer();
}
/** A node is identified in logs solely by its NodeId (host:port). */
@Override
public String toString() {
return this.nodeId.toString();
}
/** Host name the NodeManager registered with (immutable after construction). */
@Override
public String getHostName() {
return hostName;
}
/** ContainerManager (command) port of the NodeManager. */
@Override
public int getCommandPort() {
return commandPort;
}
/** NodeManager web UI port; may be replaced on reconnect. */
@Override
public int getHttpPort() {
return httpPort;
}
// Test only: overrides the reported HTTP port directly, bypassing locking.
public void setHttpPort(int port) {
this.httpPort = port;
}
/** Unique node identifier (host:port) assigned at registration. */
@Override
public NodeId getNodeID() {
return this.nodeId;
}
/** "host:commandPort" address string, built once in the constructor. */
@Override
public String getNodeAddress() {
return this.nodeAddress;
}
/** "host:httpPort" address string; may be replaced on reconnect. */
@Override
public String getHttpAddress() {
return this.httpAddress;
}
/** Total schedulable resource currently attributed to this node. */
@Override
public Resource getTotalCapability() {
return this.totalCapability;
}
/** Rack name, i.e. the network location of the topology Node entry. */
@Override
public String getRackName() {
return node.getNetworkLocation();
}
/** Topology entry for this node (used for rack resolution). */
@Override
public Node getNode() {
return this.node;
}
/** Latest health report string from the NM, read under the read lock. */
@Override
public String getHealthReport() {
this.readLock.lock();
try {
return this.healthReport;
} finally {
this.readLock.unlock();
}
}
/** Replaces the stored health report string under the write lock. */
public void setHealthReport(String healthReport) {
this.writeLock.lock();
try {
this.healthReport = healthReport;
} finally {
this.writeLock.unlock();
}
}
/** Records the timestamp of the latest health report under the write lock. */
public void setLastHealthReportTime(long lastHealthReportTime) {
this.writeLock.lock();
try {
this.lastHealthReportTime = lastHealthReportTime;
} finally {
this.writeLock.unlock();
}
}
/** Timestamp of the latest health report, read under the read lock. */
@Override
public long getLastHealthReportTime() {
this.readLock.lock();
try {
return this.lastHealthReportTime;
} finally {
this.readLock.unlock();
}
}
/** NM software version reported at registration (updated on reconnect). */
@Override
public String getNodeManagerVersion() {
return nodeManagerVersion;
}
/** Aggregated container resource utilization from the latest status update. */
@Override
public ResourceUtilization getAggregatedContainersUtilization() {
this.readLock.lock();
try {
return this.containersUtilization;
} finally {
this.readLock.unlock();
}
}
/** Stores the aggregated containers utilization under the write lock. */
public void setAggregatedContainersUtilization(
ResourceUtilization containersUtilization) {
this.writeLock.lock();
try {
this.containersUtilization = containersUtilization;
} finally {
this.writeLock.unlock();
}
}
/** Whole-node resource utilization from the latest status update. */
@Override
public ResourceUtilization getNodeUtilization() {
this.readLock.lock();
try {
return this.nodeUtilization;
} finally {
this.readLock.unlock();
}
}
/** Stores the whole-node utilization under the write lock. */
public void setNodeUtilization(ResourceUtilization nodeUtilization) {
this.writeLock.lock();
try {
this.nodeUtilization = nodeUtilization;
} finally {
this.writeLock.unlock();
}
}
/** Physical resource of the node as supplied at construction; may be null. */
@Override
public Resource getPhysicalResource() {
return this.physicalResource;
}
// NOTE(review): unlike the other setters this is not guarded by writeLock —
// presumably only called before the node is shared; confirm with callers.
public void setPhysicalResource(Resource physicalResource) {
this.physicalResource = physicalResource;
}
/** Current lifecycle state of the node per the state machine. */
@Override
public NodeState getState() {
this.readLock.lock();
try {
return this.stateMachine.getCurrentState();
} finally {
this.readLock.unlock();
}
}
/** Defensive copy (under the read lock) of applications awaiting NM cleanup. */
@Override
public List<ApplicationId> getAppsToCleanup() {
this.readLock.lock();
try {
return new ArrayList<ApplicationId>(this.finishedApplications);
} finally {
this.readLock.unlock();
}
}
/** Defensive copy (under the read lock) of applications running on this node. */
@Override
public List<ApplicationId> getRunningApps() {
this.readLock.lock();
try {
return new ArrayList<ApplicationId>(this.runningApplications);
} finally {
this.readLock.unlock();
}
}
/**
 * Returns a defensive copy of the containers scheduled for cleanup on the NM.
 * Taken under the read lock so heartbeat processing can mutate
 * {@code containersToClean} concurrently without affecting the snapshot.
 */
@Override
public List<ContainerId> getContainersToCleanUp() {
  this.readLock.lock();
  try {
    return new ArrayList<ContainerId>(this.containersToClean);
  } finally {
    this.readLock.unlock();
  }
}
/**
 * Copies all pending cleanup work (containers/apps to clean, containers to
 * remove from the NM, containers to signal) into the heartbeat response,
 * then clears the pending sets so the same work is not re-sent. Completed
 * containers already pulled by the AM are dropped from bookkeeping.
 * All mutation happens under the write lock.
 */
@Override
public void updateNodeHeartbeatResponseForCleanup(NodeHeartbeatResponse response) {
  this.writeLock.lock();
  try {
    response.addAllContainersToCleanup(
        new ArrayList<ContainerId>(this.containersToClean));
    response.addAllApplicationsToCleanup(this.finishedApplications);
    response.addContainersToBeRemovedFromNM(
        new ArrayList<ContainerId>(this.containersToBeRemovedFromNM));
    response.addAllContainersToSignal(this.containersToSignal);
    // Once the NM is told to remove them, stop tracking them as completed.
    this.completedContainers.removeAll(this.containersToBeRemovedFromNM);
    this.containersToClean.clear();
    this.finishedApplications.clear();
    this.containersToSignal.clear();
    this.containersToBeRemovedFromNM.clear();
  } finally {
    this.writeLock.unlock();
  }
}
// Test hook: live view of containers queued for a resource decrease.
@VisibleForTesting
public Collection<Container> getToBeDecreasedContainers() {
return toBeDecreasedContainers.values();
}
/**
 * Moves all pending container-decrease requests into the heartbeat response
 * and clears the pending map, under the write lock.
 */
@Override
public void updateNodeHeartbeatResponseForContainersDecreasing(
NodeHeartbeatResponse response) {
this.writeLock.lock();
try {
response.addAllContainersToDecrease(toBeDecreasedContainers.values());
toBeDecreasedContainers.clear();
} finally {
this.writeLock.unlock();
}
}
/** Latest heartbeat response sent to this NM, read under the read lock. */
@Override
public NodeHeartbeatResponse getLastNodeHeartBeatResponse() {
this.readLock.lock();
try {
return this.latestNodeHeartBeatResponse;
} finally {
this.readLock.unlock();
}
}
/** Resets the heartbeat response id to 0 (forces a resync), under the write lock. */
@Override
public void resetLastNodeHeartBeatResponse() {
this.writeLock.lock();
try {
latestNodeHeartBeatResponse.setResponseId(0);
} finally {
this.writeLock.unlock();
}
}
/**
 * Dispatches an RMNodeEvent through the node state machine under the write
 * lock. Invalid transitions are logged (with the node and prior state) but
 * not rethrown; a state change is logged at INFO.
 */
public void handle(RMNodeEvent event) {
  LOG.debug("Processing " + event.getNodeId() + " of type " + event.getType());
  // Acquire BEFORE try: if lock() itself failed, the finally block must not
  // attempt to unlock a lock this thread never held.
  writeLock.lock();
  try {
    NodeState oldState = getState();
    try {
      stateMachine.doTransition(event.getType(), event);
    } catch (InvalidStateTransitionException e) {
      LOG.error("Can't handle this event at current state", e);
      LOG.error("Invalid event " + event.getType() +
          " on Node  " + this.nodeId + " oldState " + oldState);
    }
    if (oldState != getState()) {
      LOG.info(nodeId + " Node Transitioned from " + oldState + " to "
          + getState());
    }
  } finally {
    writeLock.unlock();
  }
}
/**
 * Adjusts cluster metrics when a previously inactive node becomes active
 * again: increments the active-node count and decrements the counter of
 * whichever inactive state the node was in.
 */
private void updateMetricsForRejoinedNode(NodeState previousNodeState) {
ClusterMetrics metrics = ClusterMetrics.getMetrics();
metrics.incrNumActiveNodes();
switch (previousNodeState) {
case LOST:
metrics.decrNumLostNMs();
break;
case REBOOTED:
metrics.decrNumRebootedNMs();
break;
case DECOMMISSIONED:
metrics.decrDecommisionedNMs();
break;
case UNHEALTHY:
metrics.decrNumUnhealthyNMs();
break;
case SHUTDOWN:
metrics.decrNumShutdownNMs();
break;
case DECOMMISSIONING:
metrics.decrDecommissioningNMs();
break;
default:
LOG.debug("Unexpected previous node state");
}
}
// Update metrics when moving to/from the Decommissioning state:
// decrement the counter of the initial state, increment the final one.
private void updateMetricsForGracefulDecommission(NodeState initialState,
NodeState finalState) {
ClusterMetrics metrics = ClusterMetrics.getMetrics();
switch (initialState) {
case UNHEALTHY :
metrics.decrNumUnhealthyNMs();
break;
case RUNNING :
metrics.decrNumActiveNodes();
break;
case DECOMMISSIONING :
metrics.decrDecommissioningNMs();
break;
default :
LOG.warn("Unexpected initial state");
}
switch (finalState) {
case DECOMMISSIONING :
metrics.incrDecommissioningNMs();
break;
case RUNNING :
metrics.incrNumActiveNodes();
break;
default :
LOG.warn("Unexpected final state");
}
}
/**
 * Adjusts cluster metrics when a node is deactivated: decrements the
 * counter of the state it left and increments the counter of the terminal
 * state it entered. NEW needs no decrement since it was never counted.
 */
private void updateMetricsForDeactivatedNode(NodeState initialState,
NodeState finalState) {
ClusterMetrics metrics = ClusterMetrics.getMetrics();
switch (initialState) {
case RUNNING:
metrics.decrNumActiveNodes();
break;
case DECOMMISSIONING:
metrics.decrDecommissioningNMs();
break;
case DECOMMISSIONED:
metrics.decrDecommisionedNMs();
break;
case UNHEALTHY:
metrics.decrNumUnhealthyNMs();
break;
case NEW:
break;
default:
LOG.warn("Unexpected initial state");
}
switch (finalState) {
case DECOMMISSIONED:
metrics.incrDecommisionedNMs();
break;
case LOST:
metrics.incrNumLostNMs();
break;
case REBOOTED:
metrics.incrNumRebootedNMs();
break;
case UNHEALTHY:
metrics.incrNumUnhealthyNMs();
break;
case SHUTDOWN:
metrics.incrNumShutdownNMs();
break;
default:
LOG.warn("Unexpected final state");
}
}
/**
 * Registers a running application on the node. If the RMApp cannot be
 * found (e.g. already removed), the app is instead queued for cleanup on
 * the NM; otherwise an RMAppRunningOnNodeEvent is dispatched.
 */
private static void handleRunningAppOnNode(RMNodeImpl rmNode,
RMContext context, ApplicationId appId, NodeId nodeId) {
RMApp app = context.getRMApps().get(appId);
// if we failed getting app by appId, maybe something wrong happened, just
// add the app to the finishedApplications list so that the app can be
// cleaned up on the NM
if (null == app) {
LOG.warn("Cannot get RMApp by appId=" + appId
+ ", just added it to finishedApplications list for cleanup");
rmNode.finishedApplications.add(appId);
rmNode.runningApplications.remove(appId);
return;
}
// Add running applications back due to Node add or Node reconnection.
rmNode.runningApplications.add(appId);
context.getDispatcher().getEventHandler()
.handle(new RMAppRunningOnNodeEvent(appId, nodeId));
}
/** Applies the resource option carried by a resource-update event to the node. */
private static void updateNodeResourceFromEvent(RMNodeImpl rmNode,
RMNodeResourceUpdateEvent event){
ResourceOption resourceOption = event.getResourceOption();
// Set resource on RMNode
rmNode.totalCapability = resourceOption.getResource();
}
/**
 * Copies the fields of a node status event into the RMNode (latest
 * heartbeat response, health report/time, utilization figures) and returns
 * the remote node's health status for further evaluation by the caller.
 */
private static NodeHealthStatus updateRMNodeFromStatusEvents(
RMNodeImpl rmNode, RMNodeStatusEvent statusEvent) {
// Switch the last heartbeatresponse.
rmNode.latestNodeHeartBeatResponse = statusEvent.getLatestResponse();
NodeHealthStatus remoteNodeHealthStatus = statusEvent.getNodeHealthStatus();
rmNode.setHealthReport(remoteNodeHealthStatus.getHealthReport());
rmNode.setLastHealthReportTime(remoteNodeHealthStatus
.getLastHealthReportTime());
rmNode.setAggregatedContainersUtilization(statusEvent
.getAggregatedContainersUtilization());
rmNode.setNodeUtilization(statusEvent.getNodeUtilization());
return remoteNodeHealthStatus;
}
/**
 * Handles a node STARTED event: removes the node (or its "unknown" alias)
 * from the inactive map, fixes up cluster metrics for rejoin vs. fresh add,
 * records already-running containers, re-registers running apps, and
 * notifies the scheduler and the NodesListManager that the node is usable.
 */
public static class AddNodeTransition implements
SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
// Inform the scheduler
RMNodeStartedEvent startEvent = (RMNodeStartedEvent) event;
List<NMContainerStatus> containers = null;
NodeId nodeId = rmNode.nodeId;
RMNode previousRMNode =
rmNode.context.getInactiveRMNodes().remove(nodeId);
if (previousRMNode != null) {
// Rejoin of a known node: undo the inactive-state metric.
rmNode.updateMetricsForRejoinedNode(previousRMNode.getState());
} else {
// The node may have been decommissioned under a placeholder id
// (host with unknown port); clear that entry too.
NodeId unknownNodeId =
NodesListManager.createUnknownNodeId(nodeId.getHost());
previousRMNode =
rmNode.context.getInactiveRMNodes().remove(unknownNodeId);
if (previousRMNode != null) {
ClusterMetrics.getMetrics().decrDecommisionedNMs();
}
// Increment activeNodes explicitly because this is a new node.
ClusterMetrics.getMetrics().incrNumActiveNodes();
containers = startEvent.getNMContainerStatuses();
if (containers != null && !containers.isEmpty()) {
for (NMContainerStatus container : containers) {
if (container.getContainerState() == ContainerState.RUNNING) {
rmNode.launchedContainers.add(container.getContainerId());
}
}
}
}
if (null != startEvent.getRunningApplications()) {
for (ApplicationId appId : startEvent.getRunningApplications()) {
handleRunningAppOnNode(rmNode, rmNode.context, appId, rmNode.nodeId);
}
}
rmNode.context.getDispatcher().getEventHandler()
.handle(new NodeAddedSchedulerEvent(rmNode, containers));
rmNode.context.getDispatcher().getEventHandler().handle(
new NodesListManagerEvent(
NodesListManagerEventType.NODE_USABLE, rmNode));
}
}
/**
 * Handles a node RECONNECTED event. Behavior depends on whether apps are
 * running on the reconnecting node:
 * - no running apps: old container info is dropped; a DECOMMISSIONING node
 *   is finalized as DECOMMISSIONED; a changed HTTP port means the node is
 *   treated as new (old entry replaced, STARTED event fired);
 * - running apps: addresses/capability are refreshed in place, container
 *   statuses replayed, and the scheduler told about capability changes.
 * Returns the resulting NodeState.
 */
public static class ReconnectNodeTransition implements
MultipleArcTransition<RMNodeImpl, RMNodeEvent, NodeState> {
@Override
public NodeState transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeReconnectEvent reconnectEvent = (RMNodeReconnectEvent) event;
RMNode newNode = reconnectEvent.getReconnectedNode();
rmNode.nodeManagerVersion = newNode.getNodeManagerVersion();
List<ApplicationId> runningApps = reconnectEvent.getRunningApplications();
boolean noRunningApps =
(runningApps == null) || (runningApps.size() == 0);
// No application running on the node, so send node-removal event with
// cleaning up old container info.
if (noRunningApps) {
if (rmNode.getState() == NodeState.DECOMMISSIONING) {
// When node in decommissioning, and no running apps on this node,
// it will return as decommissioned state.
deactivateNode(rmNode, NodeState.DECOMMISSIONED);
return NodeState.DECOMMISSIONED;
}
rmNode.nodeUpdateQueue.clear();
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeRemovedSchedulerEvent(rmNode));
if (rmNode.getHttpPort() == newNode.getHttpPort()) {
if (!rmNode.getTotalCapability().equals(
newNode.getTotalCapability())) {
rmNode.totalCapability = newNode.getTotalCapability();
}
if (rmNode.getState().equals(NodeState.RUNNING)) {
// Only add old node if old state is RUNNING
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeAddedSchedulerEvent(rmNode));
}
} else {
// Reconnected node differs, so replace old node and start new node
switch (rmNode.getState()) {
case RUNNING:
ClusterMetrics.getMetrics().decrNumActiveNodes();
break;
case UNHEALTHY:
ClusterMetrics.getMetrics().decrNumUnhealthyNMs();
break;
default:
LOG.debug("Unexpected Rmnode state");
}
rmNode.context.getRMNodes().put(newNode.getNodeID(), newNode);
rmNode.context.getDispatcher().getEventHandler().handle(
new RMNodeStartedEvent(newNode.getNodeID(), null, null));
}
} else {
// Apps are still running: refresh the existing RMNode in place.
rmNode.httpPort = newNode.getHttpPort();
rmNode.httpAddress = newNode.getHttpAddress();
boolean isCapabilityChanged = false;
if (!rmNode.getTotalCapability().equals(
newNode.getTotalCapability())) {
rmNode.totalCapability = newNode.getTotalCapability();
isCapabilityChanged = true;
}
handleNMContainerStatus(reconnectEvent.getNMContainerStatuses(), rmNode);
for (ApplicationId appId : reconnectEvent.getRunningApplications()) {
handleRunningAppOnNode(rmNode, rmNode.context, appId, rmNode.nodeId);
}
if (isCapabilityChanged
&& rmNode.getState().equals(NodeState.RUNNING)) {
// Update scheduler node's capacity for reconnect node.
rmNode.context
.getDispatcher()
.getEventHandler()
.handle(
new NodeResourceUpdateSchedulerEvent(rmNode, ResourceOption
.newInstance(newNode.getTotalCapability(), -1)));
}
}
return rmNode.getState();
}
// Replays NM-reported container statuses through the node's normal
// container-status handling.
private void handleNMContainerStatus(
List<NMContainerStatus> nmContainerStatuses, RMNodeImpl rmnode) {
if (nmContainerStatuses != null) {
List<ContainerStatus> containerStatuses =
new ArrayList<ContainerStatus>();
for (NMContainerStatus nmContainerStatus : nmContainerStatuses) {
containerStatuses.add(createContainerStatus(nmContainerStatus));
}
rmnode.handleContainerStatus(containerStatuses);
}
}
// Converts an NMContainerStatus into a plain ContainerStatus.
private ContainerStatus createContainerStatus(
NMContainerStatus remoteContainer) {
ContainerStatus cStatus =
ContainerStatus.newInstance(remoteContainer.getContainerId(),
remoteContainer.getContainerState(),
remoteContainer.getDiagnostics(),
remoteContainer.getContainerExitStatus());
return cStatus;
}
}
/**
 * Applies a resource update to a usable node and notifies the scheduler of
 * the new resource option.
 */
public static class UpdateNodeResourceWhenRunningTransition
implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeResourceUpdateEvent updateEvent = (RMNodeResourceUpdateEvent)event;
updateNodeResourceFromEvent(rmNode, updateEvent);
// Notify new resourceOption to scheduler
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeResourceUpdateSchedulerEvent(rmNode, updateEvent.getResourceOption()));
}
}
/**
 * Applies a resource update to an unusable node without telling the
 * scheduler (there is no active SchedulerNode to update; it will sync
 * from the RMNode later).
 */
public static class UpdateNodeResourceWhenUnusableTransition
implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
// The node is not usable, only log a warn message
LOG.warn("Try to update resource on a "+ rmNode.getState().toString() +
" node: "+rmNode.toString());
updateNodeResourceFromEvent(rmNode, (RMNodeResourceUpdateEvent)event);
// No need to notify scheduler as schedulerNode is not function now
// and can sync later from RMnode.
}
}
/**
 * Moves an application from the running set to the finished set so the NM
 * is asked to clean it up on the next heartbeat.
 */
public static class CleanUpAppTransition
implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
ApplicationId appId = ((RMNodeCleanAppEvent) event).getAppId();
rmNode.finishedApplications.add(appId);
rmNode.runningApplications.remove(appId);
}
}
/** Queues a container for cleanup on the NM at the next heartbeat. */
public static class CleanUpContainerTransition implements
SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
rmNode.containersToClean.add(((
RMNodeCleanContainerEvent) event).getContainerId());
}
}
/**
 * Records containers whose completion the AM has acknowledged, so the NM
 * can be told to drop them from its context.
 */
public static class AddContainersToBeRemovedFromNMTransition implements
SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
rmNode.containersToBeRemovedFromNM.addAll(((
RMNodeFinishedContainersPulledByAMEvent) event).getContainers());
}
}
/** Queues container resource-decrease requests keyed by container id. */
public static class DecreaseContainersTransition
implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeDecreaseContainerEvent de = (RMNodeDecreaseContainerEvent) event;
for (Container c : de.getToBeDecreasedContainers()) {
// Later requests for the same container overwrite earlier ones.
rmNode.toBeDecreasedContainers.put(c.getId(), c);
}
}
}
/**
 * Deactivates the node into the terminal state supplied at construction
 * (LOST, REBOOTED, DECOMMISSIONED, SHUTDOWN, ...).
 */
public static class DeactivateNodeTransition
implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
private final NodeState finalState;
public DeactivateNodeTransition(NodeState finalState) {
this.finalState = finalState;
}
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeImpl.deactivateNode(rmNode, finalState);
}
}
/**
 * Puts a node into a deactivated (decommissioned or shutdown) status:
 * reports it unusable, moves it from the active to the inactive node map,
 * and stamps untracked nodes with the current monotonic time.
 *
 * @param rmNode node to deactivate
 * @param finalState terminal state the node is entering
 */
public static void deactivateNode(RMNodeImpl rmNode, NodeState finalState) {
// Port -1 marks a placeholder/unknown node: only fix up metrics.
if (rmNode.getNodeID().getPort() == -1) {
rmNode.updateMetricsForDeactivatedNode(rmNode.getState(), finalState);
return;
}
reportNodeUnusable(rmNode, finalState);
// Deactivate the node
rmNode.context.getRMNodes().remove(rmNode.nodeId);
LOG.info("Deactivating Node " + rmNode.nodeId + " as it is now "
+ finalState);
rmNode.context.getInactiveRMNodes().put(rmNode.nodeId, rmNode);
if (rmNode.context.getNodesListManager().isUntrackedNode(rmNode.hostName)) {
rmNode.setUntrackedTimeStamp(Time.monotonicNow());
}
}
/**
 * Reports the node as UNUSABLE: drops queued container updates, removes it
 * from the scheduler (unless it was already removed while UNHEALTHY),
 * notifies the NodesListManager, and updates deactivation metrics.
 *
 * @param rmNode node being reported
 * @param finalState terminal state used for the metrics update
 */
public static void reportNodeUnusable(RMNodeImpl rmNode,
NodeState finalState) {
// Inform the scheduler
rmNode.nodeUpdateQueue.clear();
// If the current state is NodeState.UNHEALTHY
// Then node is already been removed from the
// Scheduler
NodeState initialState = rmNode.getState();
if (!initialState.equals(NodeState.UNHEALTHY)) {
rmNode.context.getDispatcher().getEventHandler()
.handle(new NodeRemovedSchedulerEvent(rmNode));
}
rmNode.context.getDispatcher().getEventHandler().handle(
new NodesListManagerEvent(
NodesListManagerEventType.NODE_UNUSABLE, rmNode));
//Update the metrics
rmNode.updateMetricsForDeactivatedNode(initialState, finalState);
}
/**
 * The transition to put a node in DECOMMISSIONING state. If the node is
 * already decommissioning, only the decommissioning timeout is refreshed;
 * otherwise metrics are updated and the original total capability is
 * preserved so it can be restored on recommission.
 */
public static class DecommissioningNodeTransition
    implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
  private final NodeState initState;
  private final NodeState finalState;

  public DecommissioningNodeTransition(NodeState initState,
      NodeState finalState) {
    this.initState = initState;
    this.finalState = finalState;
  }

  @Override
  public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
    Integer timeout = null;
    // Idiomatic instanceof instead of Class.isInstance for a compile-time
    // known type.
    if (event instanceof RMNodeDecommissioningEvent) {
      RMNodeDecommissioningEvent e = (RMNodeDecommissioningEvent) event;
      timeout = e.getDecommissioningTimeout();
    }
    // Pick up possible updates on decommissioningTimeout.
    if (rmNode.getState() == NodeState.DECOMMISSIONING) {
      if (!Objects.equals(rmNode.getDecommissioningTimeout(), timeout)) {
        LOG.info("Update " + rmNode.getNodeID() +
            " DecommissioningTimeout to be " + timeout);
        rmNode.decommissioningTimeout = timeout;
      } else {
        LOG.info(rmNode.getNodeID() + " is already DECOMMISSIONING");
      }
      return;
    }
    LOG.info("Put Node " + rmNode.nodeId + " in DECOMMISSIONING.");
    // Update NM metrics during graceful decommissioning.
    rmNode.updateMetricsForGracefulDecommission(initState, finalState);
    rmNode.decommissioningTimeout = timeout;
    // Preserve the pre-decommissioning capability exactly once, so
    // RecommissionNodeTransition can restore it.
    if (rmNode.originalTotalCapability == null){
      rmNode.originalTotalCapability =
          Resources.clone(rmNode.totalCapability);
      LOG.info("Preserve original total capability: "
          + rmNode.originalTotalCapability);
    }
  }
}
/**
 * Returns a DECOMMISSIONING node to service: restores the preserved
 * original capability, fixes metrics, and pushes the restored capability
 * to the scheduler.
 */
public static class RecommissionNodeTransition
implements SingleArcTransition<RMNodeImpl, RMNodeEvent> {
private final NodeState finalState;
public RecommissionNodeTransition(NodeState finalState) {
this.finalState = finalState;
}
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
// Restore the original total capability
if (rmNode.originalTotalCapability != null) {
rmNode.totalCapability = rmNode.originalTotalCapability;
rmNode.originalTotalCapability = null;
}
LOG.info("Node " + rmNode.nodeId + " in DECOMMISSIONING is " +
"recommissioned back to RUNNING.");
rmNode
.updateMetricsForGracefulDecommission(rmNode.getState(), finalState);
//update the scheduler with the restored original total capability
rmNode.context
.getDispatcher()
.getEventHandler()
.handle(
new NodeResourceUpdateSchedulerEvent(rmNode, ResourceOption
.newInstance(rmNode.totalCapability, 0)));
}
}
/**
 * Status update transition when node is healthy. Processes a heartbeat
 * from a RUNNING or DECOMMISSIONING node: finalizes decommissioning when
 * no apps remain, demotes to UNHEALTHY on a bad health report, otherwise
 * processes container statuses and schedules a NodeUpdate for the
 * scheduler. Returns the resulting NodeState.
 */
public static class StatusUpdateWhenHealthyTransition implements
MultipleArcTransition<RMNodeImpl, RMNodeEvent, NodeState> {
@Override
public NodeState transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeStatusEvent statusEvent = (RMNodeStatusEvent) event;
rmNode.setOpportunisticContainersStatus(
statusEvent.getOpportunisticContainersStatus());
NodeHealthStatus remoteNodeHealthStatus = updateRMNodeFromStatusEvents(
rmNode, statusEvent);
NodeState initialState = rmNode.getState();
boolean isNodeDecommissioning =
initialState.equals(NodeState.DECOMMISSIONING);
if (isNodeDecommissioning) {
List<ApplicationId> keepAliveApps = statusEvent.getKeepAliveAppIds();
// Decommissioning completes once no running or keep-alive apps remain.
if (rmNode.runningApplications.isEmpty() &&
(keepAliveApps == null || keepAliveApps.isEmpty())) {
RMNodeImpl.deactivateNode(rmNode, NodeState.DECOMMISSIONED);
return NodeState.DECOMMISSIONED;
}
}
if (!remoteNodeHealthStatus.getIsNodeHealthy()) {
LOG.info("Node " + rmNode.nodeId +
" reported UNHEALTHY with details: " +
remoteNodeHealthStatus.getHealthReport());
// if a node in decommissioning receives an unhealthy report,
// it will stay in decommissioning.
if (isNodeDecommissioning) {
return NodeState.DECOMMISSIONING;
} else {
reportNodeUnusable(rmNode, NodeState.UNHEALTHY);
return NodeState.UNHEALTHY;
}
}
rmNode.handleContainerStatus(statusEvent.getContainers());
rmNode.handleReportedIncreasedContainers(
statusEvent.getNMReportedIncreasedContainers());
List<LogAggregationReport> logAggregationReportsForApps =
statusEvent.getLogAggregationReportsForApps();
if (logAggregationReportsForApps != null
&& !logAggregationReportsForApps.isEmpty()) {
rmNode.handleLogAggregationStatus(logAggregationReportsForApps);
}
// nextHeartBeat throttles scheduler NodeUpdate events to one per
// pullContainerUpdates() cycle.
if(rmNode.nextHeartBeat) {
rmNode.nextHeartBeat = false;
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeUpdateSchedulerEvent(rmNode));
}
// Update DTRenewer in secure mode to keep these apps alive. Today this is
// needed for log-aggregation to finish long after the apps are gone.
if (UserGroupInformation.isSecurityEnabled()) {
rmNode.context.getDelegationTokenRenewer().updateKeepAliveApplications(
statusEvent.getKeepAliveAppIds());
}
return initialState;
}
}
/**
 * Heartbeat handling for an UNHEALTHY node: if the health report turned
 * good, re-adds the node to the scheduler, marks it usable, updates
 * metrics and returns RUNNING; otherwise stays UNHEALTHY.
 */
public static class StatusUpdateWhenUnHealthyTransition implements
MultipleArcTransition<RMNodeImpl, RMNodeEvent, NodeState> {
@Override
public NodeState transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeStatusEvent statusEvent = (RMNodeStatusEvent)event;
// Switch the last heartbeatresponse.
NodeHealthStatus remoteNodeHealthStatus = updateRMNodeFromStatusEvents(
rmNode, statusEvent);
if (remoteNodeHealthStatus.getIsNodeHealthy()) {
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeAddedSchedulerEvent(rmNode));
rmNode.context.getDispatcher().getEventHandler().handle(
new NodesListManagerEvent(
NodesListManagerEventType.NODE_USABLE, rmNode));
// ??? how about updating metrics before notifying to ensure that
// notifiers get update metadata because they will very likely query it
// upon notification
// Update metrics
rmNode.updateMetricsForRejoinedNode(NodeState.UNHEALTHY);
return NodeState.RUNNING;
}
return NodeState.UNHEALTHY;
}
}
/** Queues a signal-container request for delivery on the next heartbeat. */
public static class SignalContainerTransition implements
SingleArcTransition<RMNodeImpl, RMNodeEvent> {
@Override
public void transition(RMNodeImpl rmNode, RMNodeEvent event) {
rmNode.containersToSignal.add(((
RMNodeSignalContainerEvent) event).getSignalRequest());
}
}
/**
 * Drains all queued container updates for the scheduler and re-arms
 * nextHeartBeat so the next healthy status update triggers a NodeUpdate.
 */
@Override
public List<UpdatedContainerInfo> pullContainerUpdates() {
List<UpdatedContainerInfo> latestContainerInfoList =
new ArrayList<UpdatedContainerInfo>();
UpdatedContainerInfo containerInfo;
while ((containerInfo = nodeUpdateQueue.poll()) != null) {
latestContainerInfoList.add(containerInfo);
}
this.nextHeartBeat = true;
return latestContainerInfoList;
}
// Test hook: forces the heartbeat-throttle flag.
@VisibleForTesting
public void setNextHeartBeat(boolean nextHeartBeat) {
this.nextHeartBeat = nextHeartBeat;
}
// Test hook: number of queued container updates awaiting pull.
@VisibleForTesting
public int getQueueSize() {
return nodeUpdateQueue.size();
}
// For test only: live set of containers the RM believes are launched here.
@VisibleForTesting
public Set<ContainerId> getLaunchedContainers() {
return this.launchedContainers;
}
// Test hook: live set of containers recorded as completed on this node.
@VisibleForTesting
public Set<ContainerId> getCompletedContainers() {
return this.completedContainers;
}
/**
 * Returns the labels attached to this node, or the shared empty set when
 * no label manager exists or no labels are assigned. The label lookup is
 * performed once and cached locally: the original double call could race
 * with a concurrent label change and return null after a non-null check.
 */
@Override
public Set<String> getNodeLabels() {
  RMNodeLabelsManager nlm = context.getNodeLabelManager();
  if (nlm == null) {
    return CommonNodeLabelsManager.EMPTY_STRING_SET;
  }
  Set<String> nodeLabels = nlm.getLabelsOnNode(nodeId);
  if (nodeLabels == null) {
    return CommonNodeLabelsManager.EMPTY_STRING_SET;
  }
  return nodeLabels;
}
/**
 * Records containers the NM reports with increased resources, skipping any
 * already scheduled for cleanup or belonging to finished applications.
 * Recorded entries are drained by pullNewlyIncreasedContainers().
 */
private void handleReportedIncreasedContainers(
List<Container> reportedIncreasedContainers) {
for (Container container : reportedIncreasedContainers) {
ContainerId containerId = container.getId();
// Don't bother with containers already scheduled for cleanup, or for
// applications already killed. The scheduler doens't need to know any
// more about this container
if (containersToClean.contains(containerId)) {
LOG.info("Container " + containerId + " already scheduled for "
+ "cleanup, no further processing");
continue;
}
ApplicationId containerAppId =
containerId.getApplicationAttemptId().getApplicationId();
if (finishedApplications.contains(containerAppId)) {
LOG.info("Container " + containerId
+ " belongs to an application that is already killed,"
+ " no further processing");
continue;
}
this.nmReportedIncreasedContainers.put(containerId, container);
}
}
/**
 * Processes container statuses from a heartbeat: classifies each reported
 * container as newly launched or newly completed (skipping those queued
 * for cleanup or owned by finished apps), detects lost containers that
 * stopped being reported, and queues the resulting UpdatedContainerInfo
 * for the scheduler. Also (un)registers containers with the allocation
 * expirer as their state changes.
 */
private void handleContainerStatus(List<ContainerStatus> containerStatuses) {
// Filter the map to only obtain just launched containers and finished
// containers.
List<ContainerStatus> newlyLaunchedContainers =
new ArrayList<ContainerStatus>();
List<ContainerStatus> newlyCompletedContainers =
new ArrayList<ContainerStatus>();
int numRemoteRunningContainers = 0;
for (ContainerStatus remoteContainer : containerStatuses) {
ContainerId containerId = remoteContainer.getContainerId();
// Don't bother with containers already scheduled for cleanup, or for
// applications already killed. The scheduler doens't need to know any
// more about this container
if (containersToClean.contains(containerId)) {
LOG.info("Container " + containerId + " already scheduled for "
+ "cleanup, no further processing");
continue;
}
ApplicationId containerAppId =
containerId.getApplicationAttemptId().getApplicationId();
if (finishedApplications.contains(containerAppId)) {
LOG.info("Container " + containerId
+ " belongs to an application that is already killed,"
+ " no further processing");
continue;
} else if (!runningApplications.contains(containerAppId)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Container " + containerId
+ " is the first container get launched for application "
+ containerAppId);
}
handleRunningAppOnNode(this, context, containerAppId, nodeId);
}
// Process running containers
if (remoteContainer.getState() == ContainerState.RUNNING ||
remoteContainer.getState() == ContainerState.SCHEDULED) {
++numRemoteRunningContainers;
if (!launchedContainers.contains(containerId)) {
// Just launched container. RM knows about it the first time.
launchedContainers.add(containerId);
newlyLaunchedContainers.add(remoteContainer);
// Unregister from containerAllocationExpirer.
containerAllocationExpirer
.unregister(new AllocationExpirationInfo(containerId));
}
} else {
// A finished container
launchedContainers.remove(containerId);
// add() returning true guards against double-reporting a completion.
if (completedContainers.add(containerId)) {
newlyCompletedContainers.add(remoteContainer);
}
// Unregister from containerAllocationExpirer.
containerAllocationExpirer
.unregister(new AllocationExpirationInfo(containerId));
}
}
List<ContainerStatus> lostContainers =
findLostContainers(numRemoteRunningContainers, containerStatuses);
for (ContainerStatus remoteContainer : lostContainers) {
ContainerId containerId = remoteContainer.getContainerId();
if (completedContainers.add(containerId)) {
newlyCompletedContainers.add(remoteContainer);
}
}
// Only queue an update when there is something new to tell the scheduler.
if (newlyLaunchedContainers.size() != 0
|| newlyCompletedContainers.size() != 0) {
nodeUpdateQueue.add(new UpdatedContainerInfo(newlyLaunchedContainers,
newlyCompletedContainers));
}
}
/**
 * Determines which previously-launched containers the node no longer
 * reports as running. Each such container is removed from
 * {@code launchedContainers} and returned as an abnormal container status.
 *
 * @param numRemoteRunning  number of containers the node reported running
 * @param containerStatuses statuses reported in the node heartbeat
 * @return statuses for containers considered lost; empty if the node
 *         reports at least as many running containers as we track
 */
private List<ContainerStatus> findLostContainers(int numRemoteRunning,
    List<ContainerStatus> containerStatuses) {
  if (numRemoteRunning >= launchedContainers.size()) {
    return Collections.emptyList();
  }
  // Ids of GUARANTEED containers the node itself reports as RUNNING.
  Set<ContainerId> reportedRunning =
      new HashSet<ContainerId>(numRemoteRunning);
  for (ContainerStatus status : containerStatuses) {
    boolean isRunning = status.getState() == ContainerState.RUNNING;
    boolean isGuaranteed =
        status.getExecutionType() == ExecutionType.GUARANTEED;
    if (isRunning && isGuaranteed) {
      reportedRunning.add(status.getContainerId());
    }
  }
  List<ContainerStatus> lost = new ArrayList<ContainerStatus>(
      launchedContainers.size() - numRemoteRunning);
  // Anything we believe is launched but the node did not report is lost;
  // drop it from our bookkeeping while building the result.
  for (Iterator<ContainerId> it = launchedContainers.iterator();
      it.hasNext();) {
    ContainerId id = it.next();
    if (reportedRunning.contains(id)) {
      continue;
    }
    String diag = "Container " + id
        + " was running but not reported from " + nodeId;
    LOG.warn(diag);
    lost.add(SchedulerUtils.createAbnormalContainerStatus(id, diag));
    it.remove();
  }
  return lost;
}
/**
 * Forwards each per-application log-aggregation report from this node to
 * the corresponding RMApp, skipping applications the RM no longer tracks.
 */
private void handleLogAggregationStatus(
    List<LogAggregationReport> logAggregationReportsForApps) {
  for (LogAggregationReport report : logAggregationReportsForApps) {
    RMApp app = this.context.getRMApps().get(report.getApplicationId());
    if (app == null) {
      // Application already removed from the RM; nothing to update.
      continue;
    }
    ((RMAppImpl) app).aggregateLogReport(this.nodeId, report);
  }
}
/**
 * Atomically drains the buffer of containers whose resource increase was
 * reported by the NM since the last pull.
 *
 * @return a snapshot of the newly-increased containers (the internal
 *         buffer is cleared), or an empty list if there are none
 */
@Override
public List<Container> pullNewlyIncreasedContainers() {
  // Acquire the lock BEFORE the try block: if lock() ever threw inside
  // the try, the finally block would attempt to unlock a lock this thread
  // does not hold.
  writeLock.lock();
  try {
    if (nmReportedIncreasedContainers.isEmpty()) {
      // Type-safe empty list instead of the raw Collections.EMPTY_LIST,
      // which generates an unchecked-conversion warning.
      return Collections.emptyList();
    }
    List<Container> increasedContainers =
        new ArrayList<Container>(nmReportedIncreasedContainers.values());
    nmReportedIncreasedContainers.clear();
    return increasedContainers;
  } finally {
    writeLock.unlock();
  }
}
/**
 * @return the node's original total capability — presumably the capability
 *         recorded before any dynamic resource update; confirm against the
 *         writer of {@code originalTotalCapability}.
 */
public Resource getOriginalTotalCapability() {
  return this.originalTotalCapability;
}
/**
 * Returns the latest opportunistic-containers status for this node.
 * Guarded by the read lock so it pairs safely with the setter below.
 */
public OpportunisticContainersStatus getOpportunisticContainersStatus() {
  this.readLock.lock();
  try {
    return this.opportunisticContainersStatus;
  } finally {
    this.readLock.unlock();
  }
}
/**
 * Replaces the node's opportunistic-containers status. Guarded by the
 * write lock so readers never observe a torn update.
 *
 * @param opportunisticContainersStatus the new status to store
 */
public void setOpportunisticContainersStatus(
    OpportunisticContainersStatus opportunisticContainersStatus) {
  this.writeLock.lock();
  try {
    this.opportunisticContainersStatus = opportunisticContainersStatus;
  } finally {
    this.writeLock.unlock();
  }
}
/**
 * @return the timestamp stored via {@link #setUntrackedTimeStamp(long)};
 *         semantics of "untracked" are defined by the caller — not visible
 *         from this class alone.
 */
@Override
public long getUntrackedTimeStamp() {
  return this.timeStamp;
}
/** Records the "untracked" timestamp for this node. @param ts epoch millis, presumably — confirm with callers. */
@Override
public void setUntrackedTimeStamp(long ts) {
  this.timeStamp = ts;
}
/**
 * @return the decommissioning timeout for this node; may be null when no
 *         timeout has been set (boxed Integer is used for that purpose).
 */
@Override
public Integer getDecommissioningTimeout() {
  return decommissioningTimeout;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.HiveStatsUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.io.HdfsUtils;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask;
import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.merge.MergeFileTask;
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.SortCol;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
import org.apache.hadoop.hive.ql.plan.LoadMultiFilesDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* MoveTask implementation.
**/
public class MoveTask extends Task<MoveWork> implements Serializable {
private static final long serialVersionUID = 1L;
private static transient final Logger LOG = LoggerFactory.getLogger(MoveTask.class);
/** Default constructor; all configuration arrives later via the MoveWork. */
public MoveTask() {
  super();
}
/**
 * Moves data from {@code sourcePath} into {@code targetPath}, dispatching
 * to a DFS-to-DFS rename or a DFS-to-local copy based on {@code isDfsDir}.
 * Any failure is wrapped in a HiveException naming both paths.
 */
private void moveFile(Path sourcePath, Path targetPath, boolean isDfsDir)
    throws HiveException {
  try {
    String message = "Moving data to " + (isDfsDir ? "" : "local ") + "directory "
        + targetPath.toString();
    String detail = " from " + sourcePath.toString();
    console.printInfo(message, detail);
    FileSystem srcFs = sourcePath.getFileSystem(conf);
    if (!isDfsDir) {
      // Destination lives on the local file system.
      FileSystem localFs = FileSystem.getLocal(conf);
      moveFileFromDfsToLocal(sourcePath, targetPath, srcFs, localFs);
    } else {
      moveFileInDfs(sourcePath, targetPath, srcFs);
    }
  } catch (Exception e) {
    throw new HiveException("Unable to move source " + sourcePath + " to destination "
        + targetPath, e);
  }
}
/**
 * Moves {@code sourcePath} onto {@code targetPath} within DFS. If the
 * source exists it is renamed onto the target; otherwise an empty target
 * directory is created so downstream stages still see a valid location.
 *
 * @throws HiveException if the rename or directory creation fails
 */
private void moveFileInDfs(Path sourcePath, Path targetPath, FileSystem fs)
    throws HiveException, IOException {
  // if source exists, rename. Otherwise, create a empty directory
  if (fs.exists(sourcePath)) {
    Path deletePath = null;
    // If multiple levels of folder are missing, fs.rename fails, so first
    // create targetPath.getParent() if it does not exist.
    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_INSERT_INTO_MULTILEVEL_DIRS)) {
      deletePath = createTargetPath(targetPath, fs);
    }
    Hive.clearDestForSubDirSrc(conf, targetPath, sourcePath, false);
    if (!Hive.moveFile(conf, sourcePath, targetPath, true, false)) {
      // Rename failed: best-effort cleanup of directories we created above,
      // then surface the failure.
      try {
        if (deletePath != null) {
          fs.delete(deletePath, true);
        }
      } catch (IOException e) {
        // Fix: added the missing space before the path and log the caught
        // exception instead of silently dropping it.
        LOG.info("Unable to delete the path created for facilitating rename "
            + deletePath, e);
      }
      throw new HiveException("Unable to rename: " + sourcePath
          + " to: " + targetPath);
    }
  } else if (!fs.mkdirs(targetPath)) {
    throw new HiveException("Unable to make directory: " + targetPath);
  }
}
/**
 * Copies every visible file under {@code sourcePath} (on {@code fs}) into
 * {@code targetPath} on the local file system, first clearing the contents
 * of an existing destination directory.
 */
private void moveFileFromDfsToLocal(Path sourcePath, Path targetPath, FileSystem fs,
    FileSystem dstFs) throws HiveException, IOException {
  // RawLocalFileSystem seems not able to get the right permissions for a local file, it
  // always returns hdfs default permission (00666). So we can not overwrite a directory
  // by deleting and recreating the directory and restoring its permissions. We should
  // delete all its files and subdirectories instead.
  if (!dstFs.exists(targetPath)) {
    if (!FileUtils.mkdir(dstFs, targetPath, false, conf)) {
      throw new HiveException("Failed to create local target directory " + targetPath);
    }
  } else if (!dstFs.isDirectory(targetPath)) {
    throw new HiveException("Target " + targetPath + " is not a local directory.");
  } else {
    // Empty out the existing directory rather than recreating it.
    for (FileStatus existing : dstFs.listStatus(targetPath)) {
      if (!dstFs.delete(existing.getPath(), true)) {
        throw new IOException("Unable to clean the destination directory: " + targetPath);
      }
    }
  }
  if (fs.exists(sourcePath)) {
    // Copy each non-hidden file to the (now clean) local target.
    for (FileStatus src : fs.listStatus(sourcePath, FileUtils.HIDDEN_FILES_PATH_FILTER)) {
      fs.copyToLocalFile(src.getPath(), targetPath);
    }
  }
}
/**
 * Ensures {@code targetPath.getParent()} exists so a subsequent rename can
 * succeed for multi-level target directories.
 *
 * @return the topmost directory this call created (suitable for deletion if
 *         the later rename fails), or null if nothing needed creating
 */
private Path createTargetPath(Path targetPath, FileSystem fs) throws IOException {
  Path deletePath = null;
  Path mkDirPath = targetPath.getParent();
  if (mkDirPath != null && !fs.exists(mkDirPath)) {
    Path actualPath = mkDirPath;
    // targetPath path is /x/y/z/1/2/3 here /x/y/z is present in the file system
    // create the structure till /x/y/z/1/2 to work rename for multilevel directory
    // and if rename fails delete the path /x/y/z/1
    // If targetPath have multilevel directories like /x/y/z/1/2/3 , /x/y/z/1/2/4
    // the renaming of the directories are not atomic the execution will happen one
    // by one
    // Walk up until the deepest ancestor that already exists; deletePath
    // ends up as the topmost directory about to be created.
    while (actualPath != null && !fs.exists(actualPath)) {
      deletePath = actualPath;
      actualPath = actualPath.getParent();
    }
    fs.mkdirs(mkDirPath);
    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS)) {
      try {
        // Propagate the existing ancestor's permissions/group to the new
        // directories. NOTE(review): actualPath is assumed non-null here
        // (the filesystem root always exists) — confirm.
        HdfsUtils.setFullFileStatus(conf, new HdfsUtils.HadoopFileStatus(conf, fs, actualPath), fs, mkDirPath, true);
      } catch (Exception e) {
        LOG.warn("Error setting permissions or group of " + actualPath, e);
      }
    }
  }
  return deletePath;
}
/**
 * Releases all the locks acquired for this object.
 * This becomes important for multi-table inserts when one branch may take
 * much more time than the others. It is better to release the lock for this
 * particular insert. The other option is to wait for all the branches to
 * finish, or set hive.multi.insert.move.tasks.share.dependencies to true,
 * which will mean that the first multi-insert results will be available
 * when all of the branches of multi-table inserts are done.
 */
private void releaseLocks(LoadTableDesc ltd) throws HiveException {
  // nothing needs to be done when concurrency support is off
  if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY)) {
    return;
  }
  Context ctx = driverContext.getCtx();
  HiveLockManager lockMgr = ctx.getHiveTxnManager().getLockManager();
  WriteEntity output = ctx.getLoadTableOutputMap().get(ltd);
  List<HiveLockObj> lockObjects = ctx.getOutputLockObjects().get(output);
  if (lockObjects == null) {
    return;
  }
  for (HiveLockObj lockObj : lockObjects) {
    List<HiveLock> locks = lockMgr.getLocks(lockObj.getObj(), false, true);
    for (HiveLock lock : locks) {
      // Only release locks of the mode we acquired, and only if we still
      // own them (remove() returning false means another branch did).
      if (lock.getHiveLockMode() == lockObj.getMode()) {
        if (ctx.getHiveLocks().remove(lock)) {
          // SLF4J parameterized logging instead of eager concatenation.
          LOG.info("about to release lock for output: {} lock: {}",
              output.toString(), lock.getHiveLockObject().getName());
          try {
            lockMgr.unlock(lock);
          } catch (LockException le) {
            // should be OK since the lock is ephemeral and will eventually be deleted
            // when the query finishes and zookeeper session is closed.
            // Fix: log the exception instead of discarding it.
            LOG.warn("Could not release lock "
                + lock.getHiveLockObject().getName(), le);
          }
        }
      }
    }
  }
}
/**
 * @return true iff this task has exactly one immediate child and that
 *         child is a StatsTask
 */
public boolean hasFollowingStatsTask() {
  return this.getNumChild() == 1
      && this.getChildTasks().get(0) instanceof StatsTask;
}
/**
 * Runs the move: relocates the output of the preceding stage(s) to its
 * final location. Three kinds of work may be present on the MoveWork, in
 * any combination: a single file/dir load, a multi-file load (dynamic
 * partitions that needed no merge), and a table/partition load.
 *
 * @param driverContext driver-scoped state (query context, lock objects)
 * @return 0 on success; 1 on any failure (the exception is recorded via
 *         setException and printed to the console)
 */
@Override
public int execute(DriverContext driverContext) {
  try {
    Hive db = getHive();
    // Do any hive related operations like moving tables and files
    // to appropriate locations
    LoadFileDesc lfd = work.getLoadFileWork();
    if (lfd != null) {
      Path targetPath = lfd.getTargetDir();
      Path sourcePath = lfd.getSourcePath();
      moveFile(sourcePath, targetPath, lfd.getIsDfsDir());
    }
    // Multi-file load is for dynamic partitions when some partitions do not
    // need to merge and they can simply be moved to the target directory.
    LoadMultiFilesDesc lmfd = work.getLoadMultiFilesWork();
    if (lmfd != null) {
      boolean isDfsDir = lmfd.getIsDfsDir();
      int i = 0;
      while (i < lmfd.getSourceDirs().size()) {
        Path srcPath = lmfd.getSourceDirs().get(i);
        Path destPath = lmfd.getTargetDirs().get(i);
        FileSystem fs = destPath.getFileSystem(conf);
        // Ensure the parent of each target exists before moving into it.
        if (!fs.exists(destPath.getParent())) {
          fs.mkdirs(destPath.getParent());
        }
        moveFile(srcPath, destPath, isDfsDir);
        i++;
      }
    }
    // Next we do this for tables and partitions
    LoadTableDesc tbd = work.getLoadTableWork();
    if (tbd != null) {
      // Build the "Loading data to table X partition (k=v, ...)" message.
      StringBuilder mesg = new StringBuilder("Loading data to table ")
          .append( tbd.getTable().getTableName());
      if (tbd.getPartitionSpec().size() > 0) {
        mesg.append(" partition (");
        Map<String, String> partSpec = tbd.getPartitionSpec();
        for (String key: partSpec.keySet()) {
          mesg.append(key).append('=').append(partSpec.get(key)).append(", ");
        }
        // Drop the trailing ", " before closing the parenthesis.
        mesg.setLength(mesg.length()-2);
        mesg.append(')');
      }
      String mesg_detail = " from " + tbd.getSourcePath();
      console.printInfo(mesg.toString(), mesg_detail);
      Table table = db.getTable(tbd.getTable().getTableName());
      if (work.getCheckFileFormat()) {
        // Get all files from the src directory
        FileStatus[] dirs;
        ArrayList<FileStatus> files;
        FileSystem srcFs; // source filesystem
        try {
          srcFs = tbd.getSourcePath().getFileSystem(conf);
          dirs = srcFs.globStatus(tbd.getSourcePath());
          files = new ArrayList<FileStatus>();
          for (int i = 0; (dirs != null && i < dirs.length); i++) {
            files.addAll(Arrays.asList(srcFs.listStatus(dirs[i].getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER)));
            // We only check one file, so exit the loop when we have at least
            // one.
            if (files.size() > 0) {
              break;
            }
          }
        } catch (IOException e) {
          throw new HiveException(
              "addFiles: filesystem error in check phase", e);
        }
        // handle file format check for table level
        if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVECHECKFILEFORMAT)) {
          boolean flag = true;
          // work.checkFileFormat is set to true only for Load Task, so assumption here is
          // dynamic partition context is null
          if (tbd.getDPCtx() == null) {
            if (tbd.getPartitionSpec() == null || tbd.getPartitionSpec().isEmpty()) {
              // Check if the file format of the file matches that of the table.
              flag = HiveFileFormatUtils.checkInputFormat(
                  srcFs, conf, tbd.getTable().getInputFileFormatClass(), files);
            } else {
              // Check if the file format of the file matches that of the partition
              Partition oldPart = db.getPartition(table, tbd.getPartitionSpec(), false);
              if (oldPart == null) {
                // this means we have just created a table and are specifying partition in the
                // load statement (without pre-creating the partition), in which case lets use
                // table input format class. inheritTableSpecs defaults to true so when a new
                // partition is created later it will automatically inherit input format
                // from table object
                flag = HiveFileFormatUtils.checkInputFormat(
                    srcFs, conf, tbd.getTable().getInputFileFormatClass(), files);
              } else {
                flag = HiveFileFormatUtils.checkInputFormat(
                    srcFs, conf, oldPart.getInputFormatClass(), files);
              }
            }
            if (!flag) {
              throw new HiveException(
                  "Wrong file format. Please check the file's format.");
            }
          } else {
            LOG.warn("Skipping file format check as dpCtx is not null");
          }
        }
      }
      // Create a data container
      DataContainer dc = null;
      if (tbd.getPartitionSpec().size() == 0) {
        // Non-partitioned table: a single loadTable call does the move.
        dc = new DataContainer(table.getTTable());
        db.loadTable(tbd.getSourcePath(), tbd.getTable().getTableName(), tbd.getReplace(),
            work.isSrcLocal(), isSkewedStoredAsDirs(tbd),
            work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID,
            hasFollowingStatsTask());
        if (work.getOutputs() != null) {
          work.getOutputs().add(new WriteEntity(table,
              (tbd.getReplace() ? WriteEntity.WriteType.INSERT_OVERWRITE :
                  WriteEntity.WriteType.INSERT)));
        }
      } else {
        LOG.info("Partition is: " + tbd.getPartitionSpec().toString());
        // Check if the bucketing and/or sorting columns were inferred
        List<BucketCol> bucketCols = null;
        List<SortCol> sortCols = null;
        int numBuckets = -1;
        // NOTE(review): raw Task type; ancestors may carry different work
        // types so no single type parameter fits here.
        Task task = this;
        String path = tbd.getSourcePath().toUri().toString();
        // Find the first ancestor of this MoveTask which is some form of map reduce task
        // (Either standard, local, or a merge)
        while (task.getParentTasks() != null && task.getParentTasks().size() == 1) {
          task = (Task)task.getParentTasks().get(0);
          // If it was a merge task or a local map reduce task, nothing can be inferred
          if (task instanceof MergeFileTask || task instanceof MapredLocalTask) {
            break;
          }
          // If it's a standard map reduce task, check what, if anything, it inferred about
          // the directory this move task is moving
          if (task instanceof MapRedTask) {
            MapredWork work = (MapredWork)task.getWork();
            MapWork mapWork = work.getMapWork();
            bucketCols = mapWork.getBucketedColsByDirectory().get(path);
            sortCols = mapWork.getSortedColsByDirectory().get(path);
            if (work.getReduceWork() != null) {
              numBuckets = work.getReduceWork().getNumReduceTasks();
            }
            if (bucketCols != null || sortCols != null) {
              // This must be a final map reduce task (the task containing the file sink
              // operator that writes the final output)
              assert work.isFinalMapRed();
            }
            break;
          }
          // If it's a move task, get the path the files were moved from, this is what any
          // preceding map reduce task inferred information about, and moving does not invalidate
          // those assumptions
          // This can happen when a conditional merge is added before the final MoveTask, but the
          // condition for merging is not met, see GenMRFileSink1.
          if (task instanceof MoveTask) {
            if (((MoveTask)task).getWork().getLoadFileWork() != null) {
              path = ((MoveTask)task).getWork().getLoadFileWork().getSourcePath().toUri().toString();
            }
          }
        }
        // deal with dynamic partitions
        DynamicPartitionCtx dpCtx = tbd.getDPCtx();
        if (dpCtx != null && dpCtx.getNumDPCols() > 0) { // dynamic partitions
          List<LinkedHashMap<String, String>> dps = Utilities.getFullDPSpecs(conf, dpCtx);
          // publish DP columns to its subscribers
          if (dps != null && dps.size() > 0) {
            pushFeed(FeedType.DYNAMIC_PARTITIONS, dps);
          }
          console.printInfo(System.getProperty("line.separator"));
          long startTime = System.currentTimeMillis();
          // load the list of DP partitions and return the list of partition specs
          // TODO: In a follow-up to HIVE-1361, we should refactor loadDynamicPartitions
          // to use Utilities.getFullDPSpecs() to get the list of full partSpecs.
          // After that check the number of DPs created to not exceed the limit and
          // iterate over it and call loadPartition() here.
          // The reason we don't do inside HIVE-1361 is the latter is large and we
          // want to isolate any potential issue it may introduce.
          Map<Map<String, String>, Partition> dp =
              db.loadDynamicPartitions(
                  tbd.getSourcePath(),
                  tbd.getTable().getTableName(),
                  tbd.getPartitionSpec(),
                  tbd.getReplace(),
                  dpCtx.getNumDPCols(),
                  isSkewedStoredAsDirs(tbd),
                  work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID,
                  SessionState.get().getTxnMgr().getCurrentTxnId(), hasFollowingStatsTask(),
                  work.getLoadTableWork().getWriteType());
          console.printInfo("\t Time taken to load dynamic partitions: " +
              (System.currentTimeMillis() - startTime)/1000.0 + " seconds");
          if (dp.size() == 0 && conf.getBoolVar(HiveConf.ConfVars.HIVE_ERROR_ON_EMPTY_PARTITION)) {
            throw new HiveException("This query creates no partitions." +
                " To turn off this error, set hive.error.on.empty.partition=false.");
          }
          startTime = System.currentTimeMillis();
          // for each partition spec, get the partition
          // and put it to WriteEntity for post-exec hook
          for(Map.Entry<Map<String, String>, Partition> entry : dp.entrySet()) {
            Partition partn = entry.getValue();
            if (bucketCols != null || sortCols != null) {
              updatePartitionBucketSortColumns(
                  db, table, partn, bucketCols, numBuckets, sortCols);
            }
            WriteEntity enty = new WriteEntity(partn,
                (tbd.getReplace() ? WriteEntity.WriteType.INSERT_OVERWRITE :
                    WriteEntity.WriteType.INSERT));
            if (work.getOutputs() != null) {
              work.getOutputs().add(enty);
            }
            // Need to update the queryPlan's output as well so that post-exec hook get executed.
            // This is only needed for dynamic partitioning since for SP the the WriteEntity is
            // constructed at compile time and the queryPlan already contains that.
            // For DP, WriteEntity creation is deferred at this stage so we need to update
            // queryPlan here.
            if (queryPlan.getOutputs() == null) {
              queryPlan.setOutputs(new LinkedHashSet<WriteEntity>());
            }
            queryPlan.getOutputs().add(enty);
            // update columnar lineage for each partition
            dc = new DataContainer(table.getTTable(), partn.getTPartition());
            // Don't set lineage on delete as we don't have all the columns
            if (SessionState.get() != null &&
                work.getLoadTableWork().getWriteType() != AcidUtils.Operation.DELETE &&
                work.getLoadTableWork().getWriteType() != AcidUtils.Operation.UPDATE) {
              SessionState.get().getLineageState().setLineage(tbd.getSourcePath(), dc,
                  table.getCols());
            }
            LOG.info("\tLoading partition " + entry.getKey());
          }
          console.printInfo("\t Time taken for adding to write entity : " +
              (System.currentTimeMillis() - startTime)/1000.0 + " seconds");
          dc = null; // reset data container to prevent it being added again.
        } else { // static partitions
          List<String> partVals = MetaStoreUtils.getPvals(table.getPartCols(),
              tbd.getPartitionSpec());
          db.validatePartitionNameCharacters(partVals);
          db.loadPartition(tbd.getSourcePath(), tbd.getTable().getTableName(),
              tbd.getPartitionSpec(), tbd.getReplace(),
              tbd.getInheritTableSpecs(), isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
              work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID, hasFollowingStatsTask());
          Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false);
          if (bucketCols != null || sortCols != null) {
            updatePartitionBucketSortColumns(db, table, partn, bucketCols,
                numBuckets, sortCols);
          }
          dc = new DataContainer(table.getTTable(), partn.getTPartition());
          // add this partition to post-execution hook
          if (work.getOutputs() != null) {
            work.getOutputs().add(new WriteEntity(partn,
                (tbd.getReplace() ? WriteEntity.WriteType.INSERT_OVERWRITE
                    : WriteEntity.WriteType.INSERT)));
          }
        }
      }
      if (SessionState.get() != null && dc != null) {
        // If we are doing an update or a delete the number of columns in the table will not
        // match the number of columns in the file sink. For update there will be one too many
        // (because of the ROW__ID), and in the case of the delete there will be just the
        // ROW__ID, which we don't need to worry about from a lineage perspective.
        List<FieldSchema> tableCols = null;
        switch (work.getLoadTableWork().getWriteType()) {
          case DELETE:
          case UPDATE:
            // Pass an empty list as no columns will be written to the file.
            // TODO I should be able to make this work for update
            tableCols = new ArrayList<FieldSchema>();
            break;
          default:
            tableCols = table.getCols();
            break;
        }
        SessionState.get().getLineageState().setLineage(tbd.getSourcePath(), dc, tableCols);
      }
      releaseLocks(tbd);
    }
    return 0;
  } catch (Exception e) {
    console.printError("Failed with exception " + e.getMessage(), "\n"
        + StringUtils.stringifyException(e));
    setException(e);
    return (1);
  }
}
/** @return true iff the load target uses list bucketing stored as directories. */
private boolean isSkewedStoredAsDirs(LoadTableDesc tbd) {
  return tbd.getLbCtx() != null && tbd.getLbCtx().isSkewedStoredAsDir();
}
/**
 * Alters the bucketing and/or sorting columns of the partition provided they meet some
 * validation criteria, e.g. the number of buckets match the number of files, and the
 * columns are not partition columns.
 *
 * @param db         handle used to persist the altered partition metadata
 * @param table      table owning the partition
 * @param partn      partition whose metadata may be updated in place
 * @param bucketCols inferred bucket columns (as column indexes), or null
 * @param numBuckets reduce-task count of the producing job; -1 if unknown
 * @param sortCols   inferred sort columns with sort order, or null
 * @throws IOException
 * @throws InvalidOperationException
 * @throws HiveException
 */
private void updatePartitionBucketSortColumns(Hive db, Table table, Partition partn,
    List<BucketCol> bucketCols, int numBuckets, List<SortCol> sortCols)
    throws IOException, InvalidOperationException, HiveException {
  boolean updateBucketCols = false;
  if (bucketCols != null) {
    FileSystem fileSys = partn.getDataLocation().getFileSystem(conf);
    FileStatus[] fileStatus = HiveStatsUtils.getFileStatusRecurse(
        partn.getDataLocation(), 1, fileSys);
    // Verify the number of buckets equals the number of files
    // This will not hold for dynamic partitions where not every reducer produced a file for
    // those partitions. In this case the table is not bucketed as Hive requires a files for
    // each bucket.
    if (fileStatus.length == numBuckets) {
      List<String> newBucketCols = new ArrayList<String>();
      updateBucketCols = true;
      for (BucketCol bucketCol : bucketCols) {
        // Map each inferred column index back to the partition's column name.
        if (bucketCol.getIndexes().get(0) < partn.getCols().size()) {
          newBucketCols.add(partn.getCols().get(
              bucketCol.getIndexes().get(0)).getName());
        } else {
          // If the table is bucketed on a partition column, not valid for bucketing
          updateBucketCols = false;
          break;
        }
      }
      if (updateBucketCols) {
        partn.getBucketCols().clear();
        partn.getBucketCols().addAll(newBucketCols);
        partn.getTPartition().getSd().setNumBuckets(numBuckets);
      }
    }
  }
  boolean updateSortCols = false;
  if (sortCols != null) {
    List<Order> newSortCols = new ArrayList<Order>();
    updateSortCols = true;
    for (SortCol sortCol : sortCols) {
      if (sortCol.getIndexes().get(0) < partn.getCols().size()) {
        // '+' means ascending; anything else is treated as descending.
        newSortCols.add(new Order(
            partn.getCols().get(sortCol.getIndexes().get(0)).getName(),
            sortCol.getSortOrder() == '+' ? BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC :
                BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC));
      } else {
        // If the table is sorted on a partition column, not valid for sorting
        updateSortCols = false;
        break;
      }
    }
    if (updateSortCols) {
      partn.getSortCols().clear();
      partn.getSortCols().addAll(newSortCols);
    }
  }
  // Persist only if something actually changed.
  if (updateBucketCols || updateSortCols) {
    db.alterPartition(table.getDbName(), table.getTableName(), partn, null);
  }
}
/*
 * Does the move task involve moving to a local file system
 */
public boolean isLocal() {
  if (work.getLoadTableWork() != null) {
    // Table loads always target the warehouse file system, never local.
    return false;
  }
  // A file load is local exactly when its target is not a DFS directory;
  // the redundant if/else-return-boolean branch was collapsed.
  LoadFileDesc lfd = work.getLoadFileWork();
  return lfd != null && !lfd.getIsDfsDir();
}
/** @return StageType.MOVE — identifies this stage in the query plan. */
@Override
public StageType getType() {
  return StageType.MOVE;
}
/** @return the display name of this task, "MOVE". */
@Override
public String getName() {
  return "MOVE";
}
}
| |
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableJobInfo;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.flow.Edge;
import azkaban.flow.Flow;
import azkaban.flow.FlowProps;
import azkaban.flow.Node;
import azkaban.project.Project;
import azkaban.project.ProjectLogEvent;
import azkaban.project.ProjectManager;
import azkaban.project.ProjectManagerException;
import azkaban.project.validator.ValidationReport;
import azkaban.project.validator.ValidatorConfigs;
import azkaban.scheduler.Schedule;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.ScheduleManagerException;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.Role;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.utils.JSONUtils;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Utils;
import azkaban.webapp.AzkabanWebServer;
public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
private static final long serialVersionUID = 1;
private static final Logger logger = Logger
.getLogger(ProjectManagerServlet.class);
private static final NodeLevelComparator NODE_LEVEL_COMPARATOR =
new NodeLevelComparator();
private static final String LOCKDOWN_CREATE_PROJECTS_KEY =
"lockdown.create.projects";
private ProjectManager projectManager;
private ExecutorManagerAdapter executorManager;
private ScheduleManager scheduleManager;
private UserManager userManager;
private boolean lockdownCreateProjects = false;
/**
 * Orders flows lexicographically by flow id for stable listings.
 * Declared {@code final}: it is a shared constant and must never be
 * reassigned (the original was mutable {@code private static}).
 */
private static final Comparator<Flow> FLOW_ID_COMPARATOR =
    new Comparator<Flow>() {
      @Override
      public int compare(Flow f1, Flow f2) {
        return f1.getId().compareTo(f2.getId());
      }
    };
/**
 * Wires this servlet to the shared managers owned by the enclosing
 * AzkabanWebServer and reads the project-creation lockdown flag from the
 * server properties.
 */
@Override
public void init(ServletConfig config) throws ServletException {
  super.init(config);
  AzkabanWebServer server = (AzkabanWebServer) getApplication();
  projectManager = server.getProjectManager();
  executorManager = server.getExecutorManager();
  scheduleManager = server.getScheduleManager();
  userManager = server.getUserManager();
  lockdownCreateProjects =
      server.getServerProps().getBoolean(LOCKDOWN_CREATE_PROJECTS_KEY, false);
  if (lockdownCreateProjects) {
    logger.info("Creation of projects is locked down");
  }
}
/**
 * Routes GET requests. When a "project" parameter is present the request
 * is dispatched, in fixed precedence order, to the ajax handler or one of
 * the project sub-pages; otherwise the bare project page is rendered with
 * an error message.
 */
@Override
protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
    Session session) throws ServletException, IOException {
  if (hasParam(req, "project")) {
    // Parameter precedence: ajax > logs > permissions > prop > history
    // > job > flow > delete > default project page.
    if (hasParam(req, "ajax")) {
      handleAJAXAction(req, resp, session);
    } else if (hasParam(req, "logs")) {
      handleProjectLogsPage(req, resp, session);
    } else if (hasParam(req, "permissions")) {
      handlePermissionPage(req, resp, session);
    } else if (hasParam(req, "prop")) {
      handlePropertyPage(req, resp, session);
    } else if (hasParam(req, "history")) {
      handleJobHistoryPage(req, resp, session);
    } else if (hasParam(req, "job")) {
      handleJobPage(req, resp, session);
    } else if (hasParam(req, "flow")) {
      handleFlowPage(req, resp, session);
    } else if (hasParam(req, "delete")) {
      handleRemoveProject(req, resp, session);
    } else {
      handleProjectPage(req, resp, session);
    }
    return;
  }
  // No project parameter: render the project page shell with an error.
  Page page =
      newPage(req, resp, session,
          "azkaban/webapp/servlet/velocity/projectpage.vm");
  page.add("errorMsg", "No project set.");
  page.render();
}
/**
 * Handles multipart POSTs. An "ajax" parameter takes precedence over the
 * legacy "action" parameter; both currently support only "upload".
 */
@Override
protected void handleMultiformPost(HttpServletRequest req,
    HttpServletResponse resp, Map<String, Object> params, Session session)
    throws ServletException, IOException {
  // Looks like a duplicate, but this is a move away from the regular
  // multiform post + redirect to a more ajax like command.
  if (params.containsKey("ajax")) {
    String ajaxCommand = (String) params.get("ajax");
    HashMap<String, String> response = new HashMap<String, String>();
    if (ajaxCommand.equals("upload")) {
      ajaxHandleUpload(req, response, params, session);
    }
    // JSON response is written even when the command is unrecognized.
    this.writeJSON(resp, response);
    return;
  }
  if (params.containsKey("action")) {
    String legacyAction = (String) params.get("action");
    if (legacyAction.equals("upload")) {
      handleUpload(req, resp, params, session);
    }
  }
}
/**
 * Handles non-multipart POSTs. Only one action is supported: "create",
 * which creates a new project.
 */
@Override
protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
    Session session) throws ServletException, IOException {
  if (!hasParam(req, "action")) {
    return;
  }
  String action = getParam(req, "action");
  if (action.equals("create")) {
    handleCreate(req, resp, session);
  }
}
private void handleAJAXAction(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException,
IOException {
String projectName = getParam(req, "project");
User user = session.getUser();
HashMap<String, Object> ret = new HashMap<String, Object>();
ret.put("project", projectName);
Project project = projectManager.getProject(projectName);
if (project == null) {
ret.put("error", "Project " + projectName + " doesn't exist.");
return;
}
ret.put("projectId", project.getId());
String ajaxName = getParam(req, "ajax");
if (ajaxName.equals("fetchProjectLogs")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchProjectLogEvents(project, req, resp, ret, user);
}
} else if (ajaxName.equals("fetchflowjobs")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlow(project, ret, req, resp);
}
} else if (ajaxName.equals("fetchflowdetails")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowDetails(project, ret, req);
}
} else if (ajaxName.equals("fetchflowgraph")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowGraph(project, ret, req);
}
} else if (ajaxName.equals("fetchflownodedata")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowNodeData(project, ret, req);
}
} else if (ajaxName.equals("fetchprojectflows")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchProjectFlows(project, ret, req);
}
} else if (ajaxName.equals("changeDescription")) {
if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
ajaxChangeDescription(project, ret, req, user);
}
} else if (ajaxName.equals("getPermissions")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxGetPermissions(project, ret);
}
} else if (ajaxName.equals("changePermission")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxChangePermissions(project, ret, req, user);
}
} else if (ajaxName.equals("addPermission")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxAddPermission(project, ret, req, user);
}
} else if (ajaxName.equals("addProxyUser")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxAddProxyUser(project, ret, req, user);
}
} else if (ajaxName.equals("removeProxyUser")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxRemoveProxyUser(project, ret, req, user);
}
} else if (ajaxName.equals("fetchFlowExecutions")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowExecutions(project, ret, req);
}
} else if (ajaxName.equals("fetchLastSuccessfulFlowExecution")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchLastSuccessfulFlowExecution(project, ret, req);
}
} else if (ajaxName.equals("fetchJobInfo")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchJobInfo(project, ret, req);
}
} else if (ajaxName.equals("setJobOverrideProperty")) {
if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
ajaxSetJobOverrideProperty(project, ret, req);
}
} else {
ret.put("error", "Cannot execute command " + ajaxName);
}
if (ret != null) {
this.writeJSON(resp, ret);
}
}
private boolean handleAjaxPermission(Project project, User user, Type type,
Map<String, Object> ret) {
if (hasPermission(project, user, type)) {
return true;
}
ret.put("error", "Permission denied. Need " + type.toString() + " access.");
return false;
}
private void ajaxFetchProjectLogEvents(Project project,
HttpServletRequest req, HttpServletResponse resp,
HashMap<String, Object> ret, User user) throws ServletException {
if (!hasPermission(project, user, Type.READ)) {
ret.put("error", "Permission denied. Need READ access.");
return;
}
int num = this.getIntParam(req, "size", 1000);
int skip = this.getIntParam(req, "skip", 0);
List<ProjectLogEvent> logEvents = null;
try {
logEvents = projectManager.getProjectEventLogs(project, num, skip);
} catch (ProjectManagerException e) {
throw new ServletException(e);
}
String[] columns = new String[] { "user", "time", "type", "message" };
ret.put("columns", columns);
List<Object[]> eventData = new ArrayList<Object[]>();
for (ProjectLogEvent events : logEvents) {
Object[] entry = new Object[4];
entry[0] = events.getUser();
entry[1] = events.getTime();
entry[2] = events.getType();
entry[3] = events.getMessage();
eventData.add(entry);
}
ret.put("logData", eventData);
}
private List<String> getFlowJobTypes(Flow flow) {
Set<String> jobTypeSet = new HashSet<String>();
for (Node node : flow.getNodes()) {
jobTypeSet.add(node.getType());
}
List<String> jobTypes = new ArrayList<String>();
jobTypes.addAll(jobTypeSet);
return jobTypes;
}
private void ajaxFetchFlowDetails(Project project,
HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
String flowName = getParam(req, "flow");
Flow flow = null;
try {
flow = project.getFlow(flowName);
if (flow == null) {
ret.put("error", "Flow " + flowName + " not found.");
return;
}
ret.put("jobTypes", getFlowJobTypes(flow));
} catch (AccessControlException e) {
ret.put("error", e.getMessage());
}
}
private void ajaxFetchLastSuccessfulFlowExecution(Project project,
HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
String flowId = getParam(req, "flow");
List<ExecutableFlow> exFlows = null;
try {
exFlows =
executorManager.getExecutableFlows(project.getId(), flowId, 0, 1,
Status.SUCCEEDED);
} catch (ExecutorManagerException e) {
ret.put("error", "Error retrieving executable flows");
return;
}
if (exFlows.size() == 0) {
ret.put("success", "false");
ret.put("message", "This flow has no successful run.");
return;
}
ret.put("success", "true");
ret.put("message", "");
ret.put("execId", exFlows.get(0).getExecutionId());
}
private void ajaxFetchFlowExecutions(Project project,
HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
String flowId = getParam(req, "flow");
int from = Integer.valueOf(getParam(req, "start"));
int length = Integer.valueOf(getParam(req, "length"));
ArrayList<ExecutableFlow> exFlows = new ArrayList<ExecutableFlow>();
int total = 0;
try {
total =
executorManager.getExecutableFlows(project.getId(), flowId, from,
length, exFlows);
} catch (ExecutorManagerException e) {
ret.put("error", "Error retrieving executable flows");
}
ret.put("flow", flowId);
ret.put("total", total);
ret.put("from", from);
ret.put("length", length);
ArrayList<Object> history = new ArrayList<Object>();
for (ExecutableFlow flow : exFlows) {
HashMap<String, Object> flowInfo = new HashMap<String, Object>();
flowInfo.put("execId", flow.getExecutionId());
flowInfo.put("flowId", flow.getFlowId());
flowInfo.put("projectId", flow.getProjectId());
flowInfo.put("status", flow.getStatus().toString());
flowInfo.put("submitTime", flow.getSubmitTime());
flowInfo.put("startTime", flow.getStartTime());
flowInfo.put("endTime", flow.getEndTime());
flowInfo.put("submitUser", flow.getSubmitUser());
history.add(flowInfo);
}
ret.put("executions", history);
}
  /**
   * Deletes a project in response to the "delete" GET parameter.
   *
   * The project may only be removed when (1) it exists, (2) the requesting
   * user has ADMIN permission, (3) no schedule references it, and (4) none of
   * its flows is currently executing. Every failure path sets a message
   * cookie and redirects (back to the project page or the context root)
   * instead of rendering a page.
   */
  private void handleRemoveProject(HttpServletRequest req,
      HttpServletResponse resp, Session session) throws ServletException,
      IOException {
    User user = session.getUser();
    String projectName = getParam(req, "project");
    Project project = projectManager.getProject(projectName);
    if (project == null) {
      this.setErrorMessageInCookie(resp, "Project " + projectName
          + " doesn't exist.");
      resp.sendRedirect(req.getContextPath());
      return;
    }
    if (!hasPermission(project, user, Type.ADMIN)) {
      this.setErrorMessageInCookie(resp,
          "Cannot delete. User '" + user.getUserId() + "' is not an ADMIN.");
      resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
      return;
    }
    // Check if scheduled
    Schedule sflow = null;
    try {
      // Linear scan over all schedules for one referencing this project.
      for (Schedule flow : scheduleManager.getSchedules()) {
        if (flow.getProjectId() == project.getId()) {
          sflow = flow;
          break;
        }
      }
    } catch (ScheduleManagerException e) {
      throw new ServletException(e);
    }
    if (sflow != null) {
      this.setErrorMessageInCookie(resp, "Cannot delete. Please unschedule "
          + sflow.getScheduleName() + ".");
      resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
      return;
    }
    // Check if executing
    ExecutableFlow exflow = null;
    for (ExecutableFlow flow : executorManager.getRunningFlows()) {
      if (flow.getProjectId() == project.getId()) {
        exflow = flow;
        break;
      }
    }
    if (exflow != null) {
      this.setErrorMessageInCookie(resp, "Cannot delete. Executable flow "
          + exflow.getExecutionId() + " is still running.");
      resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
      return;
    }
    try {
      projectManager.removeProject(project, user);
    } catch (ProjectManagerException e) {
      this.setErrorMessageInCookie(resp, e.getMessage());
      resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
      return;
    }
    this.setSuccessMessageInCookie(resp, "Project '" + projectName
        + "' was successfully deleted.");
    resp.sendRedirect(req.getContextPath());
  }
private void ajaxChangeDescription(Project project,
HashMap<String, Object> ret, HttpServletRequest req, User user)
throws ServletException {
String description = getParam(req, "description");
project.setDescription(description);
try {
projectManager.updateProjectDescription(project, description, user);
} catch (ProjectManagerException e) {
ret.put("error", e.getMessage());
}
}
  /**
   * Returns the properties of a single job: "generalParams" from the job's
   * own property file and "overrideParams" from the stored override props
   * (falling back to a copy of the general ones when no override exists),
   * plus "jobName" and "jobType".
   */
  private void ajaxFetchJobInfo(Project project, HashMap<String, Object> ret,
      HttpServletRequest req) throws ServletException {
    String flowName = getParam(req, "flowName");
    String jobName = getParam(req, "jobName");
    Flow flow = project.getFlow(flowName);
    if (flow == null) {
      ret.put("error",
          "Flow " + flowName + " not found in project " + project.getName());
      return;
    }
    Node node = flow.getNode(jobName);
    if (node == null) {
      ret.put("error", "Job " + jobName + " not found in flow " + flowName);
      return;
    }
    Props prop;
    try {
      prop = projectManager.getProperties(project, node.getJobSource());
    } catch (ProjectManagerException e) {
      ret.put("error", "Failed to retrieve job properties!");
      return;
    }
    Props overrideProp;
    try {
      overrideProp = projectManager.getJobOverrideProperty(project, jobName);
    } catch (ProjectManagerException e) {
      ret.put("error", "Failed to retrieve job override properties!");
      return;
    }
    ret.put("jobName", node.getId());
    ret.put("jobType", prop.get("type"));
    // No stored override: present the job's own properties as the override
    // set so the client always receives both maps.
    if (overrideProp == null) {
      overrideProp = new Props(prop);
    }
    Map<String, String> generalParams = new HashMap<String, String>();
    Map<String, String> overrideParams = new HashMap<String, String>();
    for (String ps : prop.getKeySet()) {
      generalParams.put(ps, prop.getString(ps));
    }
    for (String ops : overrideProp.getKeySet()) {
      overrideParams.put(ops, overrideProp.getString(ops));
    }
    ret.put("generalParams", generalParams);
    ret.put("overrideParams", overrideParams);
  }
private void ajaxSetJobOverrideProperty(Project project,
HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
String flowName = getParam(req, "flowName");
String jobName = getParam(req, "jobName");
Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put("error",
"Flow " + flowName + " not found in project " + project.getName());
return;
}
Node node = flow.getNode(jobName);
if (node == null) {
ret.put("error", "Job " + jobName + " not found in flow " + flowName);
return;
}
Map<String, String> jobParamGroup = this.getParamGroup(req, "jobOverride");
@SuppressWarnings("unchecked")
Props overrideParams = new Props(null, jobParamGroup);
try {
projectManager.setJobOverrideProperty(project, overrideParams, jobName);
} catch (ProjectManagerException e) {
ret.put("error", "Failed to upload job override property");
}
}
private void ajaxFetchProjectFlows(Project project,
HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
ArrayList<Map<String, Object>> flowList =
new ArrayList<Map<String, Object>>();
for (Flow flow : project.getFlows()) {
HashMap<String, Object> flowObj = new HashMap<String, Object>();
flowObj.put("flowId", flow.getId());
flowList.add(flowObj);
}
ret.put("flows", flowList);
}
private void ajaxFetchFlowGraph(Project project, HashMap<String, Object> ret,
HttpServletRequest req) throws ServletException {
String flowId = getParam(req, "flow");
fillFlowInfo(project, flowId, ret);
}
  /**
   * Recursively serializes the graph of {@code flowId} into {@code ret}:
   * "flow" is the flow id and "nodes" an id-sorted list of node objects, each
   * with "id", "type", optionally an embedded "flowId" whose sub-graph is
   * nested into the same node object, and a sorted "in" list of incoming edge
   * sources.
   */
  private void fillFlowInfo(Project project, String flowId,
      HashMap<String, Object> ret) {
    Flow flow = project.getFlow(flowId);
    ArrayList<Map<String, Object>> nodeList =
        new ArrayList<Map<String, Object>>();
    for (Node node : flow.getNodes()) {
      HashMap<String, Object> nodeObj = new HashMap<String, Object>();
      nodeObj.put("id", node.getId());
      nodeObj.put("type", node.getType());
      if (node.getEmbeddedFlowId() != null) {
        nodeObj.put("flowId", node.getEmbeddedFlowId());
        // Recurse: the embedded flow's nodes become part of this node object.
        fillFlowInfo(project, node.getEmbeddedFlowId(), nodeObj);
      }
      nodeList.add(nodeObj);
      Set<Edge> inEdges = flow.getInEdges(node.getId());
      if (inEdges != null && !inEdges.isEmpty()) {
        ArrayList<String> inEdgesList = new ArrayList<String>();
        for (Edge edge : inEdges) {
          inEdgesList.add(edge.getSourceId());
        }
        Collections.sort(inEdgesList);
        nodeObj.put("in", inEdgesList);
      }
    }
    // Deterministic output: order nodes by id.
    Collections.sort(nodeList, new Comparator<Map<String, Object>>() {
      @Override
      public int compare(Map<String, Object> o1, Map<String, Object> o2) {
        String id = (String) o1.get("id");
        return id.compareTo((String) o2.get("id"));
      }
    });
    ret.put("flow", flowId);
    ret.put("nodes", nodeList);
  }
private void ajaxFetchFlowNodeData(Project project,
HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
String flowId = getParam(req, "flow");
Flow flow = project.getFlow(flowId);
String nodeId = getParam(req, "node");
Node node = flow.getNode(nodeId);
if (node == null) {
ret.put("error", "Job " + nodeId + " doesn't exist.");
return;
}
ret.put("id", nodeId);
ret.put("flow", flowId);
ret.put("type", node.getType());
Props props;
try {
props = projectManager.getProperties(project, node.getJobSource());
} catch (ProjectManagerException e) {
ret.put("error", "Failed to upload job override property for " + nodeId);
return;
}
if (props == null) {
ret.put("error", "Properties for " + nodeId + " isn't found.");
return;
}
Map<String, String> properties = PropsUtils.toStringMap(props, true);
ret.put("props", properties);
if (node.getType().equals("flow")) {
if (node.getEmbeddedFlowId() != null) {
fillFlowInfo(project, node.getEmbeddedFlowId(), ret);
}
}
}
private void ajaxFetchFlow(Project project, HashMap<String, Object> ret,
HttpServletRequest req, HttpServletResponse resp) throws ServletException {
String flowId = getParam(req, "flow");
Flow flow = project.getFlow(flowId);
ArrayList<Node> flowNodes = new ArrayList<Node>(flow.getNodes());
Collections.sort(flowNodes, NODE_LEVEL_COMPARATOR);
ArrayList<Object> nodeList = new ArrayList<Object>();
for (Node node : flowNodes) {
HashMap<String, Object> nodeObj = new HashMap<String, Object>();
nodeObj.put("id", node.getId());
ArrayList<String> dependencies = new ArrayList<String>();
Collection<Edge> collection = flow.getInEdges(node.getId());
if (collection != null) {
for (Edge edge : collection) {
dependencies.add(edge.getSourceId());
}
}
ArrayList<String> dependents = new ArrayList<String>();
collection = flow.getOutEdges(node.getId());
if (collection != null) {
for (Edge edge : collection) {
dependents.add(edge.getTargetId());
}
}
nodeObj.put("dependencies", dependencies);
nodeObj.put("dependents", dependents);
nodeObj.put("level", node.getLevel());
nodeList.add(nodeObj);
}
ret.put("flowId", flowId);
ret.put("nodes", nodeList);
}
private void ajaxAddProxyUser(Project project, HashMap<String, Object> ret,
HttpServletRequest req, User user) throws ServletException {
String name = getParam(req, "name");
logger.info("Adding proxy user " + name + " by " + user.getUserId());
if (userManager.validateProxyUser(name, user)) {
try {
projectManager.addProjectProxyUser(project, name, user);
} catch (ProjectManagerException e) {
ret.put("error", e.getMessage());
}
} else {
ret.put("error", "User " + user.getUserId()
+ " has no permission to add " + name + " as proxy user.");
return;
}
}
private void ajaxRemoveProxyUser(Project project,
HashMap<String, Object> ret, HttpServletRequest req, User user)
throws ServletException {
String name = getParam(req, "name");
logger.info("Removing proxy user " + name + " by " + user.getUserId());
try {
projectManager.removeProjectProxyUser(project, name, user);
} catch (ProjectManagerException e) {
ret.put("error", e.getMessage());
}
}
private void ajaxAddPermission(Project project, HashMap<String, Object> ret,
HttpServletRequest req, User user) throws ServletException {
String name = getParam(req, "name");
boolean group = Boolean.parseBoolean(getParam(req, "group"));
if (group) {
if (project.getGroupPermission(name) != null) {
ret.put("error", "Group permission already exists.");
return;
}
if (!userManager.validateGroup(name)) {
ret.put("error", "Group is invalid.");
return;
}
} else {
if (project.getUserPermission(name) != null) {
ret.put("error", "User permission already exists.");
return;
}
if (!userManager.validateUser(name)) {
ret.put("error", "User is invalid.");
return;
}
}
boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
boolean execute =
Boolean.parseBoolean(getParam(req, "permissions[execute]"));
boolean schedule =
Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
Permission perm = new Permission();
if (admin) {
perm.setPermission(Type.ADMIN, true);
} else {
perm.setPermission(Type.READ, read);
perm.setPermission(Type.WRITE, write);
perm.setPermission(Type.EXECUTE, execute);
perm.setPermission(Type.SCHEDULE, schedule);
}
try {
projectManager.updateProjectPermission(project, name, perm, group, user);
} catch (ProjectManagerException e) {
ret.put("error", e.getMessage());
}
}
  /**
   * Replaces the existing permission set of a user or group on the project.
   * When every flag is false the permission entry is removed entirely. ADMIN
   * is exclusive: granting it clears all other flags. Only entries that
   * already exist can be changed (new ones go through ajaxAddPermission).
   */
  private void ajaxChangePermissions(Project project,
      HashMap<String, Object> ret, HttpServletRequest req, User user)
      throws ServletException {
    boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
    boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
    boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
    boolean execute =
        Boolean.parseBoolean(getParam(req, "permissions[execute]"));
    boolean schedule =
        Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
    boolean group = Boolean.parseBoolean(getParam(req, "group"));
    String name = getParam(req, "name");
    Permission perm;
    if (group) {
      perm = project.getGroupPermission(name);
    } else {
      perm = project.getUserPermission(name);
    }
    if (perm == null) {
      ret.put("error", "Permissions for " + name + " cannot be found.");
      return;
    }
    if (admin || read || write || execute || schedule) {
      // Mutates the Permission object in place before persisting it.
      if (admin) {
        perm.setPermission(Type.ADMIN, true);
        perm.setPermission(Type.READ, false);
        perm.setPermission(Type.WRITE, false);
        perm.setPermission(Type.EXECUTE, false);
        perm.setPermission(Type.SCHEDULE, false);
      } else {
        perm.setPermission(Type.ADMIN, false);
        perm.setPermission(Type.READ, read);
        perm.setPermission(Type.WRITE, write);
        perm.setPermission(Type.EXECUTE, execute);
        perm.setPermission(Type.SCHEDULE, schedule);
      }
      try {
        projectManager
            .updateProjectPermission(project, name, perm, group, user);
      } catch (ProjectManagerException e) {
        ret.put("error", e.getMessage());
      }
    } else {
      // All flags false: interpret as a removal request.
      try {
        projectManager.removeProjectPermission(project, name, group, user);
      } catch (ProjectManagerException e) {
        ret.put("error", e.getMessage());
      }
    }
  }
private void ajaxGetPermissions(Project project, HashMap<String, Object> ret) {
ArrayList<HashMap<String, Object>> permissions =
new ArrayList<HashMap<String, Object>>();
for (Pair<String, Permission> perm : project.getUserPermissions()) {
HashMap<String, Object> permObj = new HashMap<String, Object>();
String userId = perm.getFirst();
permObj.put("username", userId);
permObj.put("permission", perm.getSecond().toStringArray());
permissions.add(permObj);
}
ret.put("permissions", permissions);
}
private void handleProjectLogsPage(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException,
IOException {
Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/projectlogpage.vm");
String projectName = getParam(req, "project");
User user = session.getUser();
Project project = null;
try {
project = projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " doesn't exist.");
} else {
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
page.add("admins", Utils.flattenToString(
project.getUsersWithPermission(Type.ADMIN), ","));
Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
page.add("userpermission", perm);
boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
if (adminPerm) {
page.add("admin", true);
}
// Set this so we can display execute buttons only to those who have
// access.
if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
page.add("exec", true);
} else {
page.add("exec", false);
}
}
} catch (AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
int numBytes = 1024;
// Really sucks if we do a lot of these because it'll eat up memory fast.
// But it's expected that this won't be a heavily used thing. If it is,
// then we'll revisit it to make it more stream friendly.
StringBuffer buffer = new StringBuffer(numBytes);
page.add("log", buffer.toString());
page.render();
}
private void handleJobHistoryPage(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException,
IOException {
Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/jobhistorypage.vm");
String projectName = getParam(req, "project");
User user = session.getUser();
Project project = projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " doesn't exist.");
page.render();
return;
}
if (!hasPermission(project, user, Type.READ)) {
page.add("errorMsg", "No permission to view project " + projectName + ".");
page.render();
return;
}
String jobId = getParam(req, "job");
int pageNum = getIntParam(req, "page", 1);
int pageSize = getIntParam(req, "size", 25);
page.add("projectId", project.getId());
page.add("projectName", project.getName());
page.add("jobid", jobId);
page.add("page", pageNum);
int skipPage = (pageNum - 1) * pageSize;
int numResults = 0;
try {
numResults = executorManager.getNumberOfJobExecutions(project, jobId);
int maxPage = (numResults / pageSize) + 1;
List<ExecutableJobInfo> jobInfo =
executorManager.getExecutableJobs(project, jobId, skipPage, pageSize);
if (jobInfo == null || jobInfo.isEmpty()) {
jobInfo = null;
}
page.add("history", jobInfo);
page.add("previous", new PageSelection("Previous", pageSize, true, false,
Math.max(pageNum - 1, 1)));
page.add(
"next",
new PageSelection("Next", pageSize, false, false, Math.min(
pageNum + 1, maxPage)));
if (jobInfo != null) {
ArrayList<Object> dataSeries = new ArrayList<Object>();
for (ExecutableJobInfo info : jobInfo) {
Map<String, Object> map = info.toObject();
dataSeries.add(map);
}
page.add("dataSeries", JSONUtils.toJSON(dataSeries));
} else {
page.add("dataSeries", "[]");
}
} catch (ExecutorManagerException e) {
page.add("errorMsg", e.getMessage());
}
// Now for the 5 other values.
int pageStartValue = 1;
if (pageNum > 3) {
pageStartValue = pageNum - 2;
}
int maxPage = (numResults / pageSize) + 1;
page.add(
"page1",
new PageSelection(String.valueOf(pageStartValue), pageSize,
pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
pageStartValue, maxPage)));
pageStartValue++;
page.add(
"page2",
new PageSelection(String.valueOf(pageStartValue), pageSize,
pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
pageStartValue, maxPage)));
pageStartValue++;
page.add(
"page3",
new PageSelection(String.valueOf(pageStartValue), pageSize,
pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
pageStartValue, maxPage)));
pageStartValue++;
page.add(
"page4",
new PageSelection(String.valueOf(pageStartValue), pageSize,
pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
pageStartValue, maxPage)));
pageStartValue++;
page.add(
"page5",
new PageSelection(String.valueOf(pageStartValue), pageSize,
pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
pageStartValue, maxPage)));
page.render();
}
  /**
   * Renders the permissions page for a project: user permissions, group
   * permissions and proxy users. READ access is required to view; an extra
   * "isAdmin" flag enables editing controls for ADMIN users.
   */
  private void handlePermissionPage(HttpServletRequest req,
      HttpServletResponse resp, Session session) throws ServletException {
    Page page =
        newPage(req, resp, session,
            "azkaban/webapp/servlet/velocity/permissionspage.vm");
    String projectName = getParam(req, "project");
    User user = session.getUser();
    Project project = null;
    try {
      project = projectManager.getProject(projectName);
      if (project == null) {
        page.add("errorMsg", "Project " + projectName + " not found.");
      } else {
        if (!hasPermission(project, user, Type.READ)) {
          throw new AccessControlException("No permission to view project "
              + projectName + ".");
        }
        page.add("project", project);
        page.add("username", user.getUserId());
        page.add("admins", Utils.flattenToString(
            project.getUsersWithPermission(Type.ADMIN), ","));
        Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
        page.add("userpermission", perm);
        if (perm.isPermissionSet(Type.ADMIN)) {
          page.add("admin", true);
        }
        // Template keys are only set when the corresponding list is non-empty.
        List<Pair<String, Permission>> userPermission =
            project.getUserPermissions();
        if (userPermission != null && !userPermission.isEmpty()) {
          page.add("permissions", userPermission);
        }
        List<Pair<String, Permission>> groupPermission =
            project.getGroupPermissions();
        if (groupPermission != null && !groupPermission.isEmpty()) {
          page.add("groupPermissions", groupPermission);
        }
        Set<String> proxyUsers = project.getProxyUsers();
        if (proxyUsers != null && !proxyUsers.isEmpty()) {
          page.add("proxyUsers", proxyUsers);
        }
        if (hasPermission(project, user, Type.ADMIN)) {
          page.add("isAdmin", true);
        }
      }
    } catch (AccessControlException e) {
      page.add("errorMsg", e.getMessage());
    }
    page.render();
  }
  /**
   * Renders the page for a single job: its type, combined (base + override)
   * parameters, dependency/dependent edges and the chain of inherited
   * property sources. Missing project/flow/job each render an error page and
   * return early.
   */
  private void handleJobPage(HttpServletRequest req, HttpServletResponse resp,
      Session session) throws ServletException {
    Page page =
        newPage(req, resp, session,
            "azkaban/webapp/servlet/velocity/jobpage.vm");
    String projectName = getParam(req, "project");
    String flowName = getParam(req, "flow");
    String jobName = getParam(req, "job");
    User user = session.getUser();
    Project project = null;
    Flow flow = null;
    try {
      project = projectManager.getProject(projectName);
      if (project == null) {
        page.add("errorMsg", "Project " + projectName + " not found.");
        page.render();
        return;
      }
      if (!hasPermission(project, user, Type.READ)) {
        throw new AccessControlException("No permission to view project "
            + projectName + ".");
      }
      page.add("project", project);
      flow = project.getFlow(flowName);
      if (flow == null) {
        page.add("errorMsg", "Flow " + flowName + " not found.");
        page.render();
        return;
      }
      page.add("flowid", flow.getId());
      Node node = flow.getNode(jobName);
      if (node == null) {
        page.add("errorMsg", "Job " + jobName + " not found.");
        page.render();
        return;
      }
      // Overlay the stored override properties on top of the job's own
      // properties; overrides win on key collisions.
      Props prop = projectManager.getProperties(project, node.getJobSource());
      Props overrideProp =
          projectManager.getJobOverrideProperty(project, jobName);
      if (overrideProp == null) {
        overrideProp = new Props();
      }
      Props comboProp = new Props(prop);
      for (String key : overrideProp.getKeySet()) {
        comboProp.put(key, overrideProp.get(key));
      }
      page.add("jobid", node.getId());
      page.add("jobtype", node.getType());
      ArrayList<String> dependencies = new ArrayList<String>();
      Set<Edge> inEdges = flow.getInEdges(node.getId());
      if (inEdges != null) {
        for (Edge dependency : inEdges) {
          dependencies.add(dependency.getSourceId());
        }
      }
      if (!dependencies.isEmpty()) {
        page.add("dependencies", dependencies);
      }
      ArrayList<String> dependents = new ArrayList<String>();
      Set<Edge> outEdges = flow.getOutEdges(node.getId());
      if (outEdges != null) {
        for (Edge dependent : outEdges) {
          dependents.add(dependent.getTargetId());
        }
      }
      if (!dependents.isEmpty()) {
        page.add("dependents", dependents);
      }
      // Resolve property dependencies
      // Walk the inherited-source chain from the node's own props source.
      // NOTE(review): assumes getFlowProps never returns null for a source in
      // the chain — a missing source would NPE here; confirm upstream.
      ArrayList<String> source = new ArrayList<String>();
      String nodeSource = node.getPropsSource();
      if (nodeSource != null) {
        source.add(nodeSource);
        FlowProps parent = flow.getFlowProps(nodeSource);
        while (parent.getInheritedSource() != null) {
          source.add(parent.getInheritedSource());
          parent = flow.getFlowProps(parent.getInheritedSource());
        }
      }
      if (!source.isEmpty()) {
        page.add("properties", source);
      }
      ArrayList<Pair<String, String>> parameters =
          new ArrayList<Pair<String, String>>();
      // Parameter
      for (String key : comboProp.getKeySet()) {
        String value = comboProp.get(key);
        parameters.add(new Pair<String, String>(key, value));
      }
      page.add("parameters", parameters);
    } catch (AccessControlException e) {
      page.add("errorMsg", e.getMessage());
    } catch (ProjectManagerException e) {
      page.add("errorMsg", e.getMessage());
    }
    page.render();
  }
  /**
   * Renders the page for a single property file ("prop" parameter) of a job:
   * its parameters, the chain of properties it inherits from, and the chain
   * of properties depending on it down to the job's own source.
   */
  private void handlePropertyPage(HttpServletRequest req,
      HttpServletResponse resp, Session session) throws ServletException {
    Page page =
        newPage(req, resp, session,
            "azkaban/webapp/servlet/velocity/propertypage.vm");
    String projectName = getParam(req, "project");
    String flowName = getParam(req, "flow");
    String jobName = getParam(req, "job");
    String propSource = getParam(req, "prop");
    User user = session.getUser();
    Project project = null;
    Flow flow = null;
    try {
      project = projectManager.getProject(projectName);
      if (project == null) {
        page.add("errorMsg", "Project " + projectName + " not found.");
        page.render();
        return;
      }
      if (!hasPermission(project, user, Type.READ)) {
        throw new AccessControlException("No permission to view project "
            + projectName + ".");
      }
      page.add("project", project);
      flow = project.getFlow(flowName);
      if (flow == null) {
        page.add("errorMsg", "Flow " + flowName + " not found.");
        page.render();
        return;
      }
      page.add("flowid", flow.getId());
      Node node = flow.getNode(jobName);
      if (node == null) {
        page.add("errorMsg", "Job " + jobName + " not found.");
        page.render();
        return;
      }
      Props prop = projectManager.getProperties(project, propSource);
      page.add("property", propSource);
      page.add("jobid", node.getId());
      // Resolve property dependencies
      // Ancestors: everything propSource inherits from, in order.
      // NOTE(review): assumes propSource is a known flow-props source —
      // getFlowProps returning null would NPE below; confirm callers.
      ArrayList<String> inheritProps = new ArrayList<String>();
      FlowProps parent = flow.getFlowProps(propSource);
      while (parent.getInheritedSource() != null) {
        inheritProps.add(parent.getInheritedSource());
        parent = flow.getFlowProps(parent.getInheritedSource());
      }
      if (!inheritProps.isEmpty()) {
        page.add("inheritedproperties", inheritProps);
      }
      // Descendants: walk from the job's own source back up until we reach
      // propSource.
      ArrayList<String> dependingProps = new ArrayList<String>();
      FlowProps child =
          flow.getFlowProps(flow.getNode(jobName).getPropsSource());
      while (!child.getSource().equals(propSource)) {
        dependingProps.add(child.getSource());
        child = flow.getFlowProps(child.getInheritedSource());
      }
      if (!dependingProps.isEmpty()) {
        page.add("dependingproperties", dependingProps);
      }
      ArrayList<Pair<String, String>> parameters =
          new ArrayList<Pair<String, String>>();
      // Parameter
      for (String key : prop.getKeySet()) {
        String value = prop.get(key);
        parameters.add(new Pair<String, String>(key, value));
      }
      page.add("parameters", parameters);
    } catch (AccessControlException e) {
      page.add("errorMsg", e.getMessage());
    } catch (ProjectManagerException e) {
      page.add("errorMsg", e.getMessage());
    }
    page.render();
  }
private void handleFlowPage(HttpServletRequest req, HttpServletResponse resp,
Session session) throws ServletException {
Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/flowpage.vm");
String projectName = getParam(req, "project");
String flowName = getParam(req, "flow");
User user = session.getUser();
Project project = null;
Flow flow = null;
try {
project = projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " not found.");
page.render();
return;
}
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission Project " + projectName
+ ".");
}
page.add("project", project);
flow = project.getFlow(flowName);
if (flow == null) {
page.add("errorMsg", "Flow " + flowName + " not found.");
} else {
page.add("flowid", flow.getId());
}
} catch (AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleProjectPage(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException {
Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/projectpage.vm");
String projectName = getParam(req, "project");
User user = session.getUser();
Project project = null;
try {
project = projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " not found.");
} else {
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
page.add("admins", Utils.flattenToString(
project.getUsersWithPermission(Type.ADMIN), ","));
Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
page.add("userpermission", perm);
page.add("validatorFixPrompt", projectManager.getProps()
.getBoolean(ValidatorConfigs.VALIDATOR_AUTO_FIX_PROMPT_FLAG_PARAM,
ValidatorConfigs.DEFAULT_VALIDATOR_AUTO_FIX_PROMPT_FLAG));
page.add("validatorFixLabel", projectManager.getProps()
.get(ValidatorConfigs.VALIDATOR_AUTO_FIX_PROMPT_LABEL_PARAM));
page.add("validatorFixLink", projectManager.getProps()
.get(ValidatorConfigs.VALIDATOR_AUTO_FIX_PROMPT_LINK_PARAM));
boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
if (adminPerm) {
page.add("admin", true);
}
// Set this so we can display execute buttons only to those who have
// access.
if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
page.add("exec", true);
} else {
page.add("exec", false);
}
List<Flow> flows = project.getFlows();
if (!flows.isEmpty()) {
Collections.sort(flows, FLOW_ID_COMPARATOR);
page.add("flows", flows);
}
}
} catch (AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleCreate(HttpServletRequest req, HttpServletResponse resp,
Session session) throws ServletException {
String projectName = hasParam(req, "name") ? getParam(req, "name") : null;
String projectDescription =
hasParam(req, "description") ? getParam(req, "description") : null;
logger.info("Create project " + projectName);
User user = session.getUser();
String status = null;
String action = null;
String message = null;
HashMap<String, Object> params = null;
if (lockdownCreateProjects && !hasPermissionToCreateProject(user)) {
message =
"User " + user.getUserId()
+ " doesn't have permission to create projects.";
logger.info(message);
status = "error";
} else {
try {
projectManager.createProject(projectName, projectDescription, user);
status = "success";
action = "redirect";
String redirect = "manager?project=" + projectName;
params = new HashMap<String, Object>();
params.put("path", redirect);
} catch (ProjectManagerException e) {
message = e.getMessage();
status = "error";
}
}
String response = createJsonResponse(status, message, action, params);
try {
Writer write = resp.getWriter();
write.append(response);
write.flush();
} catch (IOException e) {
e.printStackTrace();
}
}
private void ajaxHandleUpload(HttpServletRequest req,
Map<String, String> ret, Map<String, Object> multipart, Session session)
throws ServletException, IOException {
User user = session.getUser();
String projectName = (String) multipart.get("project");
Project project = projectManager.getProject(projectName);
String autoFix = (String) multipart.get("fix");
Props props = new Props();
if (autoFix != null && autoFix.equals("on")) {
props.put(ValidatorConfigs.CUSTOM_AUTO_FIX_FLAG_PARAM, "true");
} else {
props.put(ValidatorConfigs.CUSTOM_AUTO_FIX_FLAG_PARAM, "false");
}
if (projectName == null || projectName.isEmpty()) {
ret.put("error", "No project name found.");
} else if (project == null) {
ret.put("error", "Installation Failed. Project '" + projectName
+ "' doesn't exist.");
} else if (!hasPermission(project, user, Type.WRITE)) {
ret.put("error", "Installation Failed. User '" + user.getUserId()
+ "' does not have write access.");
} else {
ret.put("projectId", String.valueOf(project.getId()));
FileItem item = (FileItem) multipart.get("file");
String name = item.getName();
String type = null;
final String contentType = item.getContentType();
if (contentType != null
&& (contentType.startsWith("application/zip")
|| contentType.startsWith("application/x-zip-compressed") || contentType
.startsWith("application/octet-stream"))) {
type = "zip";
} else {
item.delete();
ret.put("error", "File type " + contentType + " unrecognized.");
return;
}
File tempDir = Utils.createTempDir();
OutputStream out = null;
try {
logger.info("Uploading file " + name);
File archiveFile = new File(tempDir, name);
out = new BufferedOutputStream(new FileOutputStream(archiveFile));
IOUtils.copy(item.getInputStream(), out);
out.close();
Map<String, ValidationReport> reports = projectManager.uploadProject(
project, archiveFile, type, user, props);
StringBuffer message = new StringBuffer();
for (Entry<String, ValidationReport> reportEntry : reports.entrySet()) {
ValidationReport report = reportEntry.getValue();
if (!report.getPassMsgs().isEmpty()) {
for (String msg : report.getPassMsgs()) {
message.append(msg + "<br/>");
}
message.append("<br/>");
}
if (!report.getErrorMsgs().isEmpty()) {
message.append("Validator " + reportEntry.getKey() + " reports errors:<ul>");
for (String msg : report.getErrorMsgs()) {
message.append("<li>" + msg + "</li>");
}
message.append("</ul>");
}
if (!report.getWarningMsgs().isEmpty()) {
message.append("Validator " + reportEntry.getKey() + " reports warnings:<ul>");
for (String msg : report.getWarningMsgs()) {
message.append("<li>" + msg + "</li>");
}
message.append("</ul>");
}
}
if (message.length() > 0) {
ret.put("error", message.toString());
}
} catch (Exception e) {
logger.info("Installation Failed.", e);
String error = e.getMessage();
if (error.length() > 512) {
error = error.substring(0, 512) + "<br>Too many errors to display.<br>";
}
ret.put("error", "Installation Failed.<br>" + error);
} finally {
if (tempDir.exists()) {
FileUtils.deleteDirectory(tempDir);
}
if (out != null) {
out.close();
}
}
ret.put("version", String.valueOf(project.getVersion()));
}
}
private void handleUpload(HttpServletRequest req, HttpServletResponse resp,
Map<String, Object> multipart, Session session) throws ServletException,
IOException {
HashMap<String, String> ret = new HashMap<String, String>();
String projectName = (String) multipart.get("project");
ajaxHandleUpload(req, ret, multipart, session);
if (ret.containsKey("error")) {
setErrorMessageInCookie(resp, ret.get("error"));
}
resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
}
private static class NodeLevelComparator implements Comparator<Node> {
@Override
public int compare(Node node1, Node node2) {
return node1.getLevel() - node2.getLevel();
}
}
public class PageSelection {
private String page;
private int size;
private boolean disabled;
private boolean selected;
private int nextPage;
public PageSelection(String pageName, int size, boolean disabled,
boolean selected, int nextPage) {
this.page = pageName;
this.size = size;
this.disabled = disabled;
this.setSelected(selected);
this.nextPage = nextPage;
}
public String getPage() {
return page;
}
public int getSize() {
return size;
}
public boolean getDisabled() {
return disabled;
}
public boolean isSelected() {
return selected;
}
public int getNextPage() {
return nextPage;
}
public void setSelected(boolean selected) {
this.selected = selected;
}
}
private Permission getPermissionObject(Project project, User user,
Permission.Type type) {
Permission perm = project.getCollectivePermission(user);
for (String roleName : user.getRoles()) {
Role role = userManager.getRole(roleName);
perm.addPermissions(role.getPermission());
}
return perm;
}
private boolean hasPermissionToCreateProject(User user) {
for (String roleName : user.getRoles()) {
Role role = userManager.getRole(roleName);
Permission perm = role.getPermission();
if (perm.isPermissionSet(Permission.Type.ADMIN)
|| perm.isPermissionSet(Permission.Type.CREATEPROJECTS)) {
return true;
}
}
return false;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.monitoring;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.utils.ApproximateTime;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static org.apache.cassandra.utils.MonotonicClock.approxTime;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests for {@link MonitoringTask}: operations created via
 * {@link MonitorableImpl} must be aborted once their timeout elapses, be
 * flagged as slow past the slow threshold, and be aggregated correctly in the
 * task's failed/slow queues, including under concurrent creation.
 */
public class MonitoringTaskTest
{
    // Hard abort timeout and "slow" threshold applied to most test operations.
    private static final long timeout = MILLISECONDS.toNanos(100);
    private static final long slowTimeout = MILLISECONDS.toNanos(10);
    // Upper bound for the busy-wait loops in the waitFor* helpers below.
    private static final long MAX_SPIN_TIME_NANOS = TimeUnit.SECONDS.toNanos(5);
    private static final int REPORT_INTERVAL_MS = 600000; // long enough so that it won't check unless told to do so
    private static final int MAX_TIMEDOUT_OPERATIONS = -1; // unlimited
    @BeforeClass
    public static void setup()
    {
        MonitoringTask.instance = MonitoringTask.make(REPORT_INTERVAL_MS, MAX_TIMEDOUT_OPERATIONS);
    }
    @After
    public void cleanUp()
    {
        // these clear the queues of the monitoring task
        MonitoringTask.instance.getSlowOperations();
        MonitoringTask.instance.getFailedOperations();
    }
    // Minimal Monitorable whose monitoring clock starts at construction.
    private static final class TestMonitor extends MonitorableImpl
    {
        private final String name;
        TestMonitor(String name, long timestamp, boolean isCrossNode, long timeout, long slow)
        {
            this.name = name;
            setMonitoringTime(timestamp, isCrossNode, timeout, slow);
        }
        public String name()
        {
            return name;
        }
        @Override
        public String toString()
        {
            return name();
        }
    }
    private static void waitForOperationsToComplete(Monitorable... operations) throws InterruptedException
    {
        waitForOperationsToComplete(Arrays.asList(operations));
    }
    // Sleeps past twice the largest operation timeout (plus clock error), then
    // spins until no operation is still in progress or MAX_SPIN_TIME_NANOS
    // elapses; returns without failing if the spin budget runs out.
    private static void waitForOperationsToComplete(List<Monitorable> operations) throws InterruptedException
    {
        long timeout = operations.stream().map(Monitorable::timeoutNanos).reduce(0L, Long::max);
        Thread.sleep(NANOSECONDS.toMillis(timeout * 2 + approxTime.error()));
        long start = System.nanoTime();
        while(System.nanoTime() - start <= MAX_SPIN_TIME_NANOS)
        {
            long numInProgress = operations.stream().filter(Monitorable::isInProgress).count();
            if (numInProgress == 0)
                return;
        }
    }
    private static void waitForOperationsToBeReportedAsSlow(Monitorable... operations) throws InterruptedException
    {
        waitForOperationsToBeReportedAsSlow(Arrays.asList(operations));
    }
    // Sleeps past twice the largest slow threshold (plus clock error), then
    // spins until every operation is flagged slow or the spin budget runs out.
    private static void waitForOperationsToBeReportedAsSlow(List<Monitorable> operations) throws InterruptedException
    {
        long timeout = operations.stream().map(Monitorable::slowTimeoutNanos).reduce(0L, Long::max);
        Thread.sleep(NANOSECONDS.toMillis(timeout * 2 + approxTime.error()));
        long start = System.nanoTime();
        while(System.nanoTime() - start <= MAX_SPIN_TIME_NANOS)
        {
            long numSlow = operations.stream().filter(Monitorable::isSlow).count();
            if (numSlow == operations.size())
                return;
        }
    }
    @Test
    public void testAbort() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test abort", System.nanoTime(), false, timeout, slowTimeout);
        waitForOperationsToComplete(operation);
        assertTrue(operation.isAborted());
        assertFalse(operation.isCompleted());
        assertEquals(1, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testAbortIdemPotent() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test abort", System.nanoTime(), false, timeout, slowTimeout);
        waitForOperationsToComplete(operation);
        // aborting an already-aborted operation must still report success
        assertTrue(operation.abort());
        assertTrue(operation.isAborted());
        assertFalse(operation.isCompleted());
        assertEquals(1, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testAbortCrossNode() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test for cross node", System.nanoTime(), true, timeout, slowTimeout);
        waitForOperationsToComplete(operation);
        assertTrue(operation.isAborted());
        assertFalse(operation.isCompleted());
        assertEquals(1, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testComplete() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test complete", System.nanoTime(), false, timeout, slowTimeout);
        operation.complete();
        waitForOperationsToComplete(operation);
        assertFalse(operation.isAborted());
        assertTrue(operation.isCompleted());
        assertEquals(0, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testCompleteIdemPotent() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test complete", System.nanoTime(), false, timeout, slowTimeout);
        operation.complete();
        waitForOperationsToComplete(operation);
        // completing an already-completed operation must still report success
        assertTrue(operation.complete());
        assertFalse(operation.isAborted());
        assertTrue(operation.isCompleted());
        assertEquals(0, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testReportSlow() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test report slow", System.nanoTime(), false, timeout, slowTimeout);
        waitForOperationsToBeReportedAsSlow(operation);
        assertTrue(operation.isSlow());
        operation.complete();
        assertFalse(operation.isAborted());
        assertTrue(operation.isCompleted());
        assertEquals(1, MonitoringTask.instance.getSlowOperations().size());
    }
    @Test
    public void testNoReportSlowIfZeroSlowTimeout() throws InterruptedException
    {
        // when the slow timeout is set to zero then operation won't be reported as slow
        Monitorable operation = new TestMonitor("Test report slow disabled", System.nanoTime(), false, timeout, 0);
        waitForOperationsToBeReportedAsSlow(operation);
        assertTrue(operation.isSlow());
        operation.complete();
        assertFalse(operation.isAborted());
        assertTrue(operation.isCompleted());
        assertEquals(0, MonitoringTask.instance.getSlowOperations().size());
    }
    @Test
    public void testReport() throws InterruptedException
    {
        Monitorable operation = new TestMonitor("Test report", System.nanoTime(), false, timeout, slowTimeout);
        waitForOperationsToComplete(operation);
        assertTrue(operation.isSlow());
        assertTrue(operation.isAborted());
        assertFalse(operation.isCompleted());
        // aborted operations are not logged as slow
        assertFalse(MonitoringTask.instance.logSlowOperations(approxTime.now()));
        assertEquals(0, MonitoringTask.instance.getSlowOperations().size());
        assertTrue(MonitoringTask.instance.logFailedOperations(approxTime.now()));
        assertEquals(0, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testRealScheduling() throws InterruptedException
    {
        // use a short (10ms) report interval so the task logs on its own;
        // restore the inert long-interval instance in the finally block
        MonitoringTask.instance = MonitoringTask.make(10, -1);
        try
        {
            Monitorable operation1 = new TestMonitor("Test report 1", System.nanoTime(), false, timeout, slowTimeout);
            waitForOperationsToComplete(operation1);
            assertTrue(operation1.isAborted());
            assertFalse(operation1.isCompleted());
            Monitorable operation2 = new TestMonitor("Test report 2", System.nanoTime(), false, timeout, slowTimeout);
            waitForOperationsToBeReportedAsSlow(operation2);
            operation2.complete();
            assertFalse(operation2.isAborted());
            assertTrue(operation2.isCompleted());
            Thread.sleep(2 * NANOSECONDS.toMillis(approxTime.error()) + 500);
            assertEquals(0, MonitoringTask.instance.getFailedOperations().size());
            assertEquals(0, MonitoringTask.instance.getSlowOperations().size());
        }
        finally
        {
            MonitoringTask.instance = MonitoringTask.make(REPORT_INTERVAL_MS, MAX_TIMEDOUT_OPERATIONS);
        }
    }
    @Test
    public void testMultipleThreads() throws InterruptedException
    {
        final int opCount = 50;
        final ExecutorService executorService = Executors.newFixedThreadPool(20);
        final List<Monitorable> operations = Collections.synchronizedList(new ArrayList<>(opCount));
        for (int i = 0; i < opCount; i++)
        {
            executorService.submit(() ->
                operations.add(new TestMonitor(UUID.randomUUID().toString(), System.nanoTime(), false, timeout, slowTimeout))
            );
        }
        executorService.shutdown();
        assertTrue(executorService.awaitTermination(1, TimeUnit.MINUTES));
        assertEquals(opCount, operations.size());
        waitForOperationsToComplete(operations);
        assertEquals(opCount, MonitoringTask.instance.getFailedOperations().size());
        assertEquals(0, MonitoringTask.instance.getSlowOperations().size());
    }
    @Test
    public void testZeroMaxTimedoutOperations() throws InterruptedException
    {
        doTestMaxTimedoutOperations(0, 1, 0);
    }
    @Test
    public void testMaxTimedoutOperationsExceeded() throws InterruptedException
    {
        doTestMaxTimedoutOperations(5, 10, 6);
    }
    // Runs numThreads workers, each timing out i+1 operations of the same
    // name, against an instance capped at maxTimedoutOperations; verifies
    // the failed-operations list is truncated to numExpectedOperations with
    // a trailing "..." entry when truncation occurred.
    private static void doTestMaxTimedoutOperations(int maxTimedoutOperations,
                                                    int numThreads,
                                                    int numExpectedOperations) throws InterruptedException
    {
        MonitoringTask.instance = MonitoringTask.make(REPORT_INTERVAL_MS, maxTimedoutOperations);
        try
        {
            ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
            final CountDownLatch finished = new CountDownLatch(numThreads);
            for (int i = 0; i < numThreads; i++)
            {
                final String operationName = "Operation " + Integer.toString(i+1);
                final int numTimes = i + 1;
                executorService.submit(() -> {
                    try
                    {
                        for (int j = 0; j < numTimes; j++)
                        {
                            Monitorable operation1 = new TestMonitor(operationName,
                                                                     System.nanoTime(),
                                                                     false,
                                                                     timeout,
                                                                     slowTimeout);
                            waitForOperationsToComplete(operation1);
                            Monitorable operation2 = new TestMonitor(operationName,
                                                                     System.nanoTime(),
                                                                     false,
                                                                     timeout,
                                                                     slowTimeout);
                            waitForOperationsToBeReportedAsSlow(operation2);
                            operation2.complete();
                        }
                    }
                    catch (InterruptedException e)
                    {
                        e.printStackTrace();
                        fail("Unexpected exception");
                    }
                    finally
                    {
                        finished.countDown();
                    }
                });
            }
            finished.await();
            assertEquals(0, executorService.shutdownNow().size());
            List<String> failedOperations = MonitoringTask.instance.getFailedOperations();
            assertEquals(numExpectedOperations, failedOperations.size());
            if (numExpectedOperations > 0)
                assertTrue(failedOperations.get(numExpectedOperations - 1).startsWith("..."));
        }
        finally
        {
            MonitoringTask.instance = MonitoringTask.make(REPORT_INTERVAL_MS, MAX_TIMEDOUT_OPERATIONS);
        }
    }
    @Test
    public void testMultipleThreadsSameNameFailed() throws InterruptedException
    {
        final int threadCount = 50;
        final List<Monitorable> operations = new ArrayList<>(threadCount);
        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        final CountDownLatch finished = new CountDownLatch(threadCount);
        for (int i = 0; i < threadCount; i++)
        {
            executorService.submit(() -> {
                try
                {
                    Monitorable operation = new TestMonitor("Test testMultipleThreadsSameName failed",
                                                            System.nanoTime(),
                                                            false,
                                                            timeout,
                                                            slowTimeout);
                    operations.add(operation);
                }
                finally
                {
                    finished.countDown();
                }
            });
        }
        finished.await();
        assertEquals(0, executorService.shutdownNow().size());
        waitForOperationsToComplete(operations);
        // operations sharing a name are aggregated into a single failed entry
        assertEquals(1, MonitoringTask.instance.getFailedOperations().size());
    }
    @Test
    public void testMultipleThreadsSameNameSlow() throws InterruptedException
    {
        final int threadCount = 50;
        final List<Monitorable> operations = new ArrayList<>(threadCount);
        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        final CountDownLatch finished = new CountDownLatch(threadCount);
        for (int i = 0; i < threadCount; i++)
        {
            executorService.submit(() -> {
                try
                {
                    Monitorable operation = new TestMonitor("Test testMultipleThreadsSameName slow",
                                                            System.nanoTime(),
                                                            false,
                                                            timeout,
                                                            slowTimeout);
                    operations.add(operation);
                }
                finally
                {
                    finished.countDown();
                }
            });
        }
        finished.await();
        assertEquals(0, executorService.shutdownNow().size());
        waitForOperationsToBeReportedAsSlow(operations);
        operations.forEach(o -> o.complete());
        // operations sharing a name are aggregated into a single slow entry
        assertEquals(1, MonitoringTask.instance.getSlowOperations().size());
    }
    @Test
    public void testMultipleThreadsNoFailedOps() throws InterruptedException
    {
        final int threadCount = 50;
        final List<Monitorable> operations = new ArrayList<>(threadCount);
        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        final CountDownLatch finished = new CountDownLatch(threadCount);
        for (int i = 0; i < threadCount; i++)
        {
            executorService.submit(() -> {
                try
                {
                    Monitorable operation = new TestMonitor("Test thread " + Thread.currentThread().getName(),
                                                            System.nanoTime(),
                                                            false,
                                                            timeout,
                                                            slowTimeout);
                    operations.add(operation);
                    operation.complete();
                }
                finally
                {
                    finished.countDown();
                }
            });
        }
        finished.await();
        assertEquals(0, executorService.shutdownNow().size());
        waitForOperationsToComplete(operations);
        assertEquals(0, MonitoringTask.instance.getFailedOperations().size());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core;
import java.security.Principal;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.jcr.AccessDeniedException;
import javax.jcr.Credentials;
import javax.jcr.NoSuchWorkspaceException;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import javax.jcr.security.AccessControlException;
import javax.security.auth.Subject;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.core.config.AccessManagerConfig;
import org.apache.jackrabbit.core.config.LoginModuleConfig;
import org.apache.jackrabbit.core.config.SecurityConfig;
import org.apache.jackrabbit.core.config.SecurityManagerConfig;
import org.apache.jackrabbit.core.config.WorkspaceConfig;
import org.apache.jackrabbit.core.config.WorkspaceSecurityConfig;
import org.apache.jackrabbit.core.config.UserManagerConfig;
import org.apache.jackrabbit.core.security.AMContext;
import org.apache.jackrabbit.core.security.AccessManager;
import org.apache.jackrabbit.core.security.DefaultAccessManager;
import org.apache.jackrabbit.core.security.JackrabbitSecurityManager;
import org.apache.jackrabbit.core.security.SecurityConstants;
import org.apache.jackrabbit.core.security.SystemPrincipal;
import org.apache.jackrabbit.core.security.authentication.AuthContext;
import org.apache.jackrabbit.core.security.authentication.AuthContextProvider;
import org.apache.jackrabbit.core.security.authorization.AccessControlProvider;
import org.apache.jackrabbit.core.security.authorization.AccessControlProviderFactory;
import org.apache.jackrabbit.core.security.authorization.AccessControlProviderFactoryImpl;
import org.apache.jackrabbit.core.security.authorization.WorkspaceAccessManager;
import org.apache.jackrabbit.core.security.principal.AbstractPrincipalProvider;
import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
import org.apache.jackrabbit.core.security.principal.DefaultPrincipalProvider;
import org.apache.jackrabbit.core.security.principal.GroupPrincipals;
import org.apache.jackrabbit.core.security.principal.PrincipalManagerImpl;
import org.apache.jackrabbit.core.security.principal.PrincipalProvider;
import org.apache.jackrabbit.core.security.principal.PrincipalProviderRegistry;
import org.apache.jackrabbit.core.security.principal.ProviderRegistryImpl;
import org.apache.jackrabbit.core.security.user.MembershipCache;
import org.apache.jackrabbit.core.security.user.UserManagerImpl;
import org.apache.jackrabbit.core.security.user.action.AuthorizableAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The security manager acts as central managing class for all security related
* operations on a low-level non-protected level. It manages the
* <ul>
* <li> {@link PrincipalProvider}s
* <li> {@link AccessControlProvider}s
* <li> {@link WorkspaceAccessManager}
* <li> {@link UserManager}
* </ul>
*/
public class DefaultSecurityManager implements JackrabbitSecurityManager {
/**
* the default logger
*/
private static final Logger log = LoggerFactory.getLogger(DefaultSecurityManager.class);
/**
* Flag indicating if the security manager was properly initialized.
*/
private boolean initialized;
/**
* the repository implementation
*/
private RepositoryImpl repository;
/**
* System session.
*/
private SystemSession systemSession;
/**
* System user manager. Implementation needed here for the DefaultPrincipalProvider.
*/
private UserManager systemUserManager;
/**
* The user id of the administrator. The value is retrieved from
* configuration. If the config entry is missing a default id is used (see
* {@link SecurityConstants#ADMIN_ID}).
*/
protected String adminId;
/**
* The user id of the anonymous user. The value is retrieved from
* configuration. If the config entry is missing a default id is used (see
* {@link SecurityConstants#ANONYMOUS_ID}).
*/
protected String anonymousId;
/**
* Contains the access control providers per workspace.
* key = name of the workspace,
* value = {@link AccessControlProvider}
*/
private final Map<String, AccessControlProvider> acProviders = new HashMap<String, AccessControlProvider>();
/**
* the AccessControlProviderFactory
*/
private AccessControlProviderFactory acProviderFactory;
/**
* the configured WorkspaceAccessManager
*/
private WorkspaceAccessManager workspaceAccessManager;
/**
* the principal provider registry
*/
private PrincipalProviderRegistry principalProviderRegistry;
/**
* factory for login-context {@see Repository#login())
*/
private AuthContextProvider authContextProvider;
//------------------------------------------< JackrabbitSecurityManager >---
/**
* @see JackrabbitSecurityManager#init(Repository, Session)
*/
    public synchronized void init(Repository repository, Session systemSession) throws RepositoryException {
        if (initialized) {
            throw new IllegalStateException("already initialized");
        }
        // Both arguments must be the core implementations; the manager relies
        // on internal API not exposed through the plain JCR interfaces.
        if (!(repository instanceof RepositoryImpl)) {
            throw new RepositoryException("RepositoryImpl expected");
        }
        if (!(systemSession instanceof SystemSession)) {
            throw new RepositoryException("SystemSession expected");
        }
        this.systemSession = (SystemSession) systemSession;
        this.repository = (RepositoryImpl) repository;
        SecurityConfig config = this.repository.getConfig().getSecurityConfig();
        LoginModuleConfig loginModConf = config.getLoginModuleConfig();
        // build AuthContextProvider based on appName + optional LoginModuleConfig
        authContextProvider = new AuthContextProvider(config.getAppName(), loginModConf);
        if (authContextProvider.isLocal()) {
            log.info("init: use Repository Login-Configuration for " + config.getAppName());
        } else if (authContextProvider.isJAAS()) {
            log.info("init: use JAAS login-configuration for " + config.getAppName());
        } else {
            // neither a repository-local nor a JAAS login configuration exists:
            // the repository cannot authenticate anyone, so fail initialization
            String msg = "Neither JAAS nor RepositoryConfig contained a valid configuration for " + config.getAppName();
            log.error(msg);
            throw new RepositoryException(msg);
        }
        Properties[] moduleConfig = authContextProvider.getModuleConfig();
        // retrieve default-ids (admin and anonymous) from login-module-configuration.
        // NOTE: if several modules define the same parameter, the last one wins.
        for (Properties props : moduleConfig) {
            if (props.containsKey(LoginModuleConfig.PARAM_ADMIN_ID)) {
                adminId = props.getProperty(LoginModuleConfig.PARAM_ADMIN_ID);
            }
            if (props.containsKey(LoginModuleConfig.PARAM_ANONYMOUS_ID)) {
                anonymousId = props.getProperty(LoginModuleConfig.PARAM_ANONYMOUS_ID);
            }
        }
        // fallback: use the built-in default ids when the config defines none
        if (adminId == null) {
            log.debug("No adminID defined in LoginModule/JAAS config -> using default.");
            adminId = SecurityConstants.ADMIN_ID;
        }
        if (anonymousId == null) {
            log.debug("No anonymousID defined in LoginModule/JAAS config -> using default.");
            anonymousId = SecurityConstants.ANONYMOUS_ID;
        }
        // create the system userManager and make sure the system-users exist.
        systemUserManager = createUserManager(this.systemSession);
        createSystemUsers(systemUserManager, this.systemSession, adminId, anonymousId);
        // init default ac-provider-factory
        acProviderFactory = new AccessControlProviderFactoryImpl();
        acProviderFactory.init(this.systemSession);
        // create the workspace access manager (configured class or default)
        SecurityManagerConfig smc = config.getSecurityManagerConfig();
        if (smc != null && smc.getWorkspaceAccessConfig() != null) {
            workspaceAccessManager =
                smc.getWorkspaceAccessConfig().newInstance(WorkspaceAccessManager.class);
        } else {
            // fallback -> the default implementation
            log.debug("No WorkspaceAccessManager configured; using default.");
            workspaceAccessManager = createDefaultWorkspaceAccessManager();
        }
        workspaceAccessManager.init(this.systemSession);
        // initialize principal-provider registry
        // 1) create default
        PrincipalProvider defaultPP = createDefaultPrincipalProvider(moduleConfig);
        // 2) create registry instance
        principalProviderRegistry = new ProviderRegistryImpl(defaultPP);
        // 3) register all configured principal providers.
        for (Properties props : moduleConfig) {
            principalProviderRegistry.registerProvider(props);
        }
        // only mark initialized once every component above has been set up
        initialized = true;
    }
/**
* @see JackrabbitSecurityManager#dispose(String)
*/
public void dispose(String workspaceName) {
checkInitialized();
synchronized (acProviders) {
AccessControlProvider prov = acProviders.remove(workspaceName);
if (prov != null) {
prov.close();
}
}
}
/**
* @see JackrabbitSecurityManager#close()
*/
public void close() {
checkInitialized();
synchronized (acProviders) {
for (AccessControlProvider accessControlProvider : acProviders.values()) {
accessControlProvider.close();
}
acProviders.clear();
}
}
/**
* @see JackrabbitSecurityManager#getAccessManager(Session,AMContext)
*/
    public AccessManager getAccessManager(Session session, AMContext amContext) throws RepositoryException {
        checkInitialized();
        AccessManagerConfig amConfig = repository.getConfig().getSecurityConfig().getAccessManagerConfig();
        try {
            // look up the access control provider for the session's workspace
            String wspName = session.getWorkspace().getName();
            AccessControlProvider pp = getAccessControlProvider(wspName);
            AccessManager accessMgr;
            if (amConfig == null) {
                // no access manager configured -> fall back to the default
                log.debug("No configuration entry for AccessManager. Using org.apache.jackrabbit.core.security.DefaultAccessManager");
                accessMgr = new DefaultAccessManager();
            } else {
                accessMgr = amConfig.newInstance(AccessManager.class);
            }
            accessMgr.init(amContext, pp, workspaceAccessManager);
            return accessMgr;
        } catch (AccessDeniedException e) {
            // re-throw
            throw e;
        } catch (Exception e) {
            // wrap in RepositoryException, preserving the cause and naming the
            // access manager class that failed to instantiate
            String clsName = (amConfig == null) ? "-- missing access manager configuration --" : amConfig.getClassName();
            String msg = "Failed to instantiate AccessManager (" + clsName + ")";
            log.error(msg, e);
            throw new RepositoryException(msg, e);
        }
    }
/**
 * Returns a principal manager bound to the given session.
 *
 * @param session the session; must be a {@link SessionImpl}
 * @return a new principal manager for the session
 * @throws RepositoryException if the session is not a {@code SessionImpl}
 * @see JackrabbitSecurityManager#getPrincipalManager(Session)
 */
public PrincipalManager getPrincipalManager(Session session) throws RepositoryException {
    checkInitialized();
    // guard clause: only SessionImpl instances are supported
    if (!(session instanceof SessionImpl)) {
        throw new RepositoryException("Internal error: SessionImpl expected.");
    }
    return createPrincipalManager((SessionImpl) session);
}
/**
 * Returns a user manager for the given session.
 * <p>
 * Users live in a dedicated workspace: if the session already targets that
 * workspace the user manager is built on it directly; otherwise an extra
 * session to the user workspace is created and registered as a listener so
 * it is released together with the calling session.
 *
 * @param session the session; must be a {@link SessionImpl}
 * @return a user manager
 * @throws RepositoryException if the session is not a {@code SessionImpl}
 *         or the user workspace cannot be accessed
 * @see JackrabbitSecurityManager#getUserManager(Session)
 */
public UserManager getUserManager(Session session) throws RepositoryException {
    checkInitialized();
    if (session == systemSession) {
        return systemUserManager;
    } else if (session instanceof SessionImpl) {
        String workspaceName = systemSession.getWorkspace().getName();
        try {
            SessionImpl sImpl = (SessionImpl) session;
            UserManagerImpl uMgr;
            if (workspaceName.equals(sImpl.getWorkspace().getName())) {
                uMgr = createUserManager(sImpl);
            } else {
                // build the manager on a separate session to the user workspace
                SessionImpl s = (SessionImpl) sImpl.createSession(workspaceName);
                uMgr = createUserManager(s);
                // ensure the extra session is cleaned up when sImpl is closed
                sImpl.addListener(uMgr);
            }
            return uMgr;
        } catch (NoSuchWorkspaceException e) {
            throw new AccessControlException("Cannot build UserManager for " + session.getUserID(), e);
        }
    } else {
        throw new RepositoryException("Internal error: SessionImpl expected.");
    }
}
/**
 * Resolves the user ID for the given subject.
 * <p>
 * Resolution order: (1) admin/system principal shortcut, (2) configured
 * user-id principal class, (3) lookup of an authorizable matching a
 * non-group principal, (4) userID carried in {@code SimpleCredentials}.
 * Returns {@code null} for the system subject or when nothing matches.
 *
 * @see JackrabbitSecurityManager#getUserID(javax.security.auth.Subject, String)
 */
public String getUserID(Subject subject, String workspaceName) throws RepositoryException {
    checkInitialized();
    // shortcut if the subject contains the AdminPrincipal or
    // SystemPrincipal in which cases the userID is already known.
    if (!subject.getPrincipals(AdminPrincipal.class).isEmpty()) {
        return adminId;
    } else if (!subject.getPrincipals(SystemPrincipal.class).isEmpty()) {
        // system session does not have a userId
        return null;
    }
    /* if there is a configured principal class that should be used to
       determine the UserID -> try this one. */
    Class cl = getConfig().getUserIdClass();
    if (cl != null) {
        Set<Principal> s = subject.getPrincipals(cl);
        if (!s.isEmpty()) {
            // the first non-group principal of the configured class wins
            for (Principal p : s) {
                if (!GroupPrincipals.isGroup(p)) {
                    return p.getName();
                }
            }
            // all principals found with the given p-Class were Group principals
            log.debug("Only Group principals found with class '" + cl.getName() + "' -> Not used for UserID.");
        } else {
            log.debug("No principal found with class '" + cl.getName() + "'.");
        }
    }
    /*
       Fallback scenario to retrieve userID from the subject:
       Since the subject may contain multiple principals and the principal
       name may not be equals to the UserID, the id is retrieved by
       searching for the corresponding authorizable and if this doesn't
       succeed an attempt is made to obtained it from the login-credentials.
     */
    String uid = null;
    // first try to retrieve an authorizable corresponding to
    // a non-group principal. the first one present is used
    // to determine the userID.
    // NOTE(review): this loop filters with 'instanceof Group' while the loop
    // above uses GroupPrincipals.isGroup(p) — confirm whether the two group
    // checks are intentionally different.
    try {
        UserManager umgr = getSystemUserManager(workspaceName);
        for (Principal p : subject.getPrincipals()) {
            if (!(p instanceof Group)) {
                Authorizable authorz = umgr.getAuthorizable(p);
                if (authorz != null && !authorz.isGroup()) {
                    uid = authorz.getID();
                    break;
                }
            }
        }
    } catch (RepositoryException e) {
        // failed to access userid via user manager -> use fallback 2.
        log.error("Unexpected error while retrieving UserID.", e);
    }
    // 2. if no matching user is found try simple access to userID over
    // SimpleCredentials.
    if (uid == null) {
        Iterator<SimpleCredentials> creds = subject.getPublicCredentials(
                SimpleCredentials.class).iterator();
        if (creds.hasNext()) {
            SimpleCredentials sc = creds.next();
            uid = sc.getUserID();
        }
    }
    return uid;
}
/**
 * Creates an {@link AuthContext} for the given {@link Credentials} and
 * {@link Subject}. The workspace name is ignored: users are stored in and
 * retrieved from a dedicated (separate) workspace. This includes selection
 * of application-specific LoginModules and initialization with the
 * credentials and the system-workspace session.
 *
 * @return an {@link AuthContext} for the given Credentials, Subject
 * @throws RepositoryException in other exceptional repository states
 */
public AuthContext getAuthContext(Credentials creds, Subject subject, String workspaceName)
        throws RepositoryException {
    checkInitialized();
    AuthContextProvider provider = getAuthContextProvider();
    return provider.getAuthContext(creds, subject, systemSession,
            getPrincipalProviderRegistry(), adminId, anonymousId);
}
//----------------------------------------------------------< protected >---
/**
 * @return The <code>SecurityManagerConfig</code> configured for the
 * repository this manager has been created for.
 */
protected SecurityManagerConfig getConfig() {
    // read the config freshly on every call instead of caching it
    return repository.getConfig().getSecurityConfig().getSecurityManagerConfig();
}
/**
 * @param workspaceName The name of the target workspace.
 * @return The system user manager. Since this implementation stores users
 * in a dedicated workspace the system user manager is the same for all
 * sessions irrespective of the workspace.
 * @throws javax.jcr.RepositoryException If an error occurs.
 */
protected UserManager getSystemUserManager(String workspaceName) throws RepositoryException {
    // workspaceName is deliberately ignored: users live in one dedicated
    // workspace; subclasses may override for workspace-specific managers
    return systemUserManager;
}
/**
 * @param session The session for which to retrieve the membership cache.
 * @return The membership cache, or {@code null} for system sessions (which
 * forces the corresponding user manager to create its own cache).
 * @throws RepositoryException If an error occurs.
 */
protected MembershipCache getMembershipCache(SessionImpl session) throws RepositoryException {
    boolean isSystem = session == systemSession || session instanceof SystemSession;
    if (isSystem) {
        // force creation of the membership cache within the corresponding uMgr
        return null;
    }
    String wspName = session.getWorkspace().getName();
    return ((UserManagerImpl) getSystemUserManager(wspName)).getMembershipCache();
}
/**
 * Creates a {@link UserManagerImpl} for the given session. May be overridden
 * to return a custom implementation.
 *
 * @param session session
 * @return user manager
 * @throws RepositoryException if an error occurs
 */
protected UserManagerImpl createUserManager(SessionImpl session) throws RepositoryException {
    UserManagerConfig umc = getConfig().getUserManagerConfig();
    if (umc == null) {
        // no user-manager configuration -> plain default implementation
        return new UserManagerImpl(session, adminId, null, getMembershipCache(session));
    }
    // instantiate the configured implementation via reflection
    Class<?>[] paramTypes = new Class[] {
            SessionImpl.class,
            String.class,
            Properties.class,
            MembershipCache.class};
    UserManagerImpl um = (UserManagerImpl) umc.getUserManager(UserManagerImpl.class,
            paramTypes, session, adminId, umc.getParameters(), getMembershipCache(session));
    // authorizable actions are not applied to system sessions
    if (!(session instanceof SystemSession)) {
        AuthorizableAction[] actions = umc.getAuthorizableActions();
        um.setAuthorizableActions(actions);
    }
    return um;
}
/**
 * @param session The session used to create the principal manager.
 * @return A new instance of PrincipalManagerImpl
 * @throws javax.jcr.RepositoryException If an error occurs.
 */
protected PrincipalManager createPrincipalManager(SessionImpl session) throws RepositoryException {
    PrincipalProvider[] providers = getPrincipalProviderRegistry().getProviders();
    return new PrincipalManagerImpl(session, providers);
}
/**
 * @return A new instance of WorkspaceAccessManagerImpl to be used as
 * default workspace access manager if the configuration doesn't specify one.
 */
protected WorkspaceAccessManager createDefaultWorkspaceAccessManager() {
    return new WorkspaceAccessManagerImpl();
}
/**
 * Creates the default principal provider used to create the
 * {@link PrincipalProviderRegistry}.
 *
 * @return A new instance of <code>DefaultPrincipalProvider</code>.
 * @throws RepositoryException If an error occurs.
 */
protected PrincipalProvider createDefaultPrincipalProvider(Properties[] moduleConfig) throws RepositoryException {
    PrincipalProvider defaultPP = new DefaultPrincipalProvider(this.systemSession, (UserManagerImpl) systemUserManager);
    //GRANITE-4470: apply config to DefaultPrincipalProvider if there is no explicit PrincipalProvider configured
    Properties initProps = null;
    for (Properties props : moduleConfig) {
        boolean explicitProvider = props.containsKey(LoginModuleConfig.PARAM_PRINCIPAL_PROVIDER_CLASS);
        if (!explicitProvider && props.containsKey(AbstractPrincipalProvider.MAXSIZE_KEY)) {
            initProps = props;
            break;
        }
    }
    // fall back to empty properties when no suitable module config was found
    defaultPP.init(initProps == null ? new Properties() : initProps);
    return defaultPP;
}
/**
 * @return The PrincipalProviderRegistry created during initialization.
 */
protected PrincipalProviderRegistry getPrincipalProviderRegistry() {
    // simple accessor; registry is populated once during init
    return principalProviderRegistry;
}
/**
 * @return The AuthContextProvider created during initialization.
 */
protected AuthContextProvider getAuthContextProvider() {
    return authContextProvider;
}
/**
 * Throws <code>IllegalStateException</code> if this manager hasn't been
 * initialized.
 */
protected void checkInitialized() {
    if (initialized) {
        return;
    }
    throw new IllegalStateException("Not initialized");
}
/**
 * @return The system session used to initialize this SecurityManager.
 */
protected Session getSystemSession() {
    return systemSession;
}
/**
 * @return The repository used to initialize this SecurityManager.
 */
protected Repository getRepository() {
    return repository;
}
//--------------------------------------------------------------------------
/**
 * Returns the access control provider for the specified
 * <code>workspaceName</code>, creating and caching it on first use (or when
 * the cached provider is no longer live).
 *
 * @param workspaceName Name of the workspace.
 * @return access control provider
 * @throws NoSuchWorkspaceException If no workspace with 'workspaceName' exists.
 * @throws RepositoryException If an error occurs.
 */
private AccessControlProvider getAccessControlProvider(String workspaceName)
        throws NoSuchWorkspaceException, RepositoryException {
    checkInitialized();
    AccessControlProvider provider = acProviders.get(workspaceName);
    if (provider == null || !provider.isLive()) {
        // mark this workspace as 'active' so the workspace does not
        // get disposed by the workspace-janitor
        // TODO: There should be a cleaner way to do this.
        repository.markWorkspaceActive(workspaceName);
        WorkspaceSecurityConfig secConf = null;
        WorkspaceConfig conf =
                repository.getConfig().getWorkspaceConfig(workspaceName);
        if (conf != null) {
            secConf = conf.getSecurityConfig();
        }
        AccessControlProvider created = acProviderFactory.createProvider(
                repository.getSystemSession(workspaceName), secConf);
        synchronized (acProviders) {
            // re-check under the lock: another thread may have created and
            // cached a provider for this workspace in the meantime. Without
            // this check the later put() would overwrite a live provider,
            // which would then never be close()d (resource leak).
            AccessControlProvider existing = acProviders.get(workspaceName);
            if (existing != null && existing.isLive()) {
                created.close();
                provider = existing;
            } else {
                acProviders.put(workspaceName, created);
                provider = created;
            }
        }
    }
    return provider;
}
/**
 * Make sure the system users (admin and anonymous) exist.
 *
 * @param userManager Manager to create users/groups.
 * @param session The editing session.
 * @param adminId UserID of the administrator.
 * @param anonymousId UserID of the anonymous user.
 * @throws RepositoryException If an error occurs.
 */
static void createSystemUsers(UserManager userManager,
                              SystemSession session,
                              String adminId,
                              String anonymousId) throws RepositoryException {
    // admin: create if configured and missing; failures here abort start-up
    if (adminId != null && userManager.getAuthorizable(adminId) == null) {
        userManager.createUser(adminId, adminId);
        if (!userManager.isAutoSave()) {
            session.save();
        }
        log.info("... created admin-user with id \'" + adminId + "\' ...");
    }
    // anonymous: create if configured and missing
    if (anonymousId != null && userManager.getAuthorizable(anonymousId) == null) {
        try {
            userManager.createUser(anonymousId, "");
            if (!userManager.isAutoSave()) {
                session.save();
            }
            log.info("... created anonymous user with id \'" + anonymousId + "\' ...");
        } catch (RepositoryException e) {
            // exception while creating the anonymous user.
            // log an error but don't abort the repository start-up
            log.error("Failed to create anonymous user.", e);
        }
    }
}
//------------------------------------------------------< inner classes >---
/**
 * <code>WorkspaceAccessManager</code> that upon {@link #grants(Set, String)}
 * evaluates if access to the root node of a workspace with the specified
 * name is granted.
 */
private final class WorkspaceAccessManagerImpl implements SecurityConstants, WorkspaceAccessManager {

    //-----------------------------------------< WorkspaceAccessManager >---
    /**
     * {@inheritDoc}
     */
    public void init(Session systemSession) throws RepositoryException {
        // nothing to do here.
    }

    /**
     * {@inheritDoc}
     */
    public void close() throws RepositoryException {
        // nothing to do here.
    }

    /**
     * {@inheritDoc}
     */
    public boolean grants(Set<Principal> principals, String workspaceName) throws RepositoryException {
        // workspace access == permission to read the workspace's root node,
        // as determined by the workspace's access control provider
        AccessControlProvider prov = getAccessControlProvider(workspaceName);
        return prov.canAccessRoot(principals);
    }
}
}
| |
/*
* Copyright 2020-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.kubevirtnode.codec;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableList;
import org.hamcrest.MatcherAssert;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.IpAddress;
import org.onosproject.codec.CodecContext;
import org.onosproject.codec.JsonCodec;
import org.onosproject.codec.impl.CodecManager;
import org.onosproject.core.CoreService;
import org.onosproject.kubevirtnode.api.DefaultKubevirtNode;
import org.onosproject.kubevirtnode.api.DefaultKubevirtPhyInterface;
import org.onosproject.kubevirtnode.api.KubevirtNode;
import org.onosproject.kubevirtnode.api.KubevirtNodeState;
import org.onosproject.kubevirtnode.api.KubevirtPhyInterface;
import org.onosproject.net.DeviceId;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.onosproject.kubevirtnode.codec.KubevirtNodeJsonMatcher.matchesKubevirtNode;
import static org.onosproject.net.NetTestTools.APP_ID;
/**
 * Unit tests for KubevirtNode codec.
 */
public class KubevirtNodeCodecTest {

    MockCodecContext context;

    JsonCodec<KubevirtNode> kubevirtNodeCodec;
    JsonCodec<KubevirtPhyInterface> kubevirtPhyInterfaceJsonCodec;

    final CoreService mockCoreService = createMock(CoreService.class);
    private static final String REST_APP_ID = "org.onosproject.rest";

    /**
     * Sets up the codec context, the codecs under test, and a mocked core
     * service required for application registration.
     */
    @Before
    public void setUp() {
        context = new MockCodecContext();
        kubevirtNodeCodec = new KubevirtNodeCodec();
        kubevirtPhyInterfaceJsonCodec = new KubevirtPhyInterfaceCodec();

        assertThat(kubevirtNodeCodec, notNullValue());
        assertThat(kubevirtPhyInterfaceJsonCodec, notNullValue());

        expect(mockCoreService.registerApplication(REST_APP_ID))
                .andReturn(APP_ID).anyTimes();
        replay(mockCoreService);
        context.registerService(CoreService.class, mockCoreService);
    }

    /**
     * Tests the kubevirt compute node encoding.
     */
    @Test
    public void testKubevirtComputeNodeEncode() {
        KubevirtPhyInterface phyIntf1 = DefaultKubevirtPhyInterface.builder()
                .network("mgmtnetwork")
                .intf("eth3")
                .build();

        KubevirtPhyInterface phyIntf2 = DefaultKubevirtPhyInterface.builder()
                .network("oamnetwork")
                .intf("eth4")
                .build();

        KubevirtNode node = DefaultKubevirtNode.builder()
                .hostname("worker")
                .type(KubevirtNode.Type.WORKER)
                .state(KubevirtNodeState.INIT)
                .managementIp(IpAddress.valueOf("10.10.10.1"))
                .intgBridge(DeviceId.deviceId("br-int"))
                .tunBridge(DeviceId.deviceId("br-tun"))
                .dataIp(IpAddress.valueOf("20.20.20.2"))
                .phyIntfs(ImmutableList.of(phyIntf1, phyIntf2))
                .build();

        ObjectNode nodeJson = kubevirtNodeCodec.encode(node, context);
        assertThat(nodeJson, matchesKubevirtNode(node));
    }

    /**
     * Tests the kubevirt compute node decoding.
     *
     * @throws IOException io exception
     */
    @Test
    public void testKubevirtComputeNodeDecode() throws IOException {
        KubevirtNode node = getKubevirtNode("KubevirtWorkerNode.json");

        assertThat(node.hostname(), is("worker-01"));
        assertThat(node.type().name(), is("WORKER"));
        assertThat(node.managementIp().toString(), is("172.16.130.4"));
        assertThat(node.dataIp().toString(), is("172.16.130.4"));
        assertThat(node.intgBridge().toString(), is("of:00000000000000a1"));
        assertThat(node.tunBridge().toString(), is("of:00000000000000a2"));
        assertThat(node.phyIntfs().size(), is(2));

        node.phyIntfs().forEach(intf -> {
            if (intf.network().equals("mgmtnetwork")) {
                assertThat(intf.intf(), is("eth3"));
            }
            if (intf.network().equals("oamnetwork")) {
                assertThat(intf.intf(), is("eth4"));
            }
        });
    }

    /**
     * Loads a kubevirt node from the given JSON resource and decodes it.
     *
     * @param resourceName JSON resource file name
     * @return decoded kubevirt node
     * @throws IOException if the resource cannot be read
     */
    private KubevirtNode getKubevirtNode(String resourceName) throws IOException {
        // try-with-resources: the original leaked the InputStream
        try (InputStream jsonStream =
                     KubevirtNodeCodecTest.class.getResourceAsStream(resourceName)) {
            JsonNode json = context.mapper().readTree(jsonStream);
            assertThat(json, notNullValue());
            KubevirtNode node = kubevirtNodeCodec.decode((ObjectNode) json, context);
            assertThat(node, notNullValue());
            return node;
        }
    }

    /**
     * Tests the kubevirt gateway node encoding.
     */
    @Test
    public void testKubevirtGatewayNodeEncode() {
        KubevirtNode node = DefaultKubevirtNode.builder()
                .hostname("gateway")
                .type(KubevirtNode.Type.GATEWAY)
                .state(KubevirtNodeState.INIT)
                .managementIp(IpAddress.valueOf("10.10.10.1"))
                .intgBridge(DeviceId.deviceId("br-int"))
                .tunBridge(DeviceId.deviceId("br-tun"))
                .dataIp(IpAddress.valueOf("20.20.20.2"))
                .gatewayBridgeName("gateway")
                .build();

        ObjectNode nodeJson = kubevirtNodeCodec.encode(node, context);
        assertThat(nodeJson, matchesKubevirtNode(node));
    }

    /**
     * Tests the kubevirt gateway node decoding.
     *
     * @throws IOException io exception
     */
    @Test
    public void testKubevirtGatewayNodeDecode() throws IOException {
        KubevirtNode node = getKubevirtNode("KubevirtGatewayNode.json");

        assertThat(node.hostname(), is("gateway-01"));
        assertThat(node.type().name(), is("GATEWAY"));
        assertThat(node.managementIp().toString(), is("172.16.130.4"));
        assertThat(node.dataIp().toString(), is("172.16.130.4"));
        assertThat(node.intgBridge().toString(), is("of:00000000000000a1"));
        assertThat(node.tunBridge().toString(), is("of:00000000000000a2"));
        assertThat(node.gatewayBridgeName(), is("gateway"));
    }

    /**
     * Mock codec context for use in codec unit tests.
     */
    private class MockCodecContext implements CodecContext {

        private final ObjectMapper mapper = new ObjectMapper();
        private final CodecManager manager = new CodecManager();
        private final Map<Class<?>, Object> services = new HashMap<>();

        /**
         * Constructs a new mock codec context.
         */
        public MockCodecContext() {
            manager.activate();
        }

        @Override
        public ObjectMapper mapper() {
            return mapper;
        }

        @SuppressWarnings("unchecked")
        @Override
        public <T> JsonCodec<T> codec(Class<T> entityClass) {
            // route phy-interface lookups to the codec under test
            if (entityClass == KubevirtPhyInterface.class) {
                return (JsonCodec<T>) kubevirtPhyInterfaceJsonCodec;
            }
            return manager.getCodec(entityClass);
        }

        @SuppressWarnings("unchecked")
        @Override
        public <T> T getService(Class<T> serviceClass) {
            return (T) services.get(serviceClass);
        }

        // for registering mock services
        public <T> void registerService(Class<T> serviceClass, T impl) {
            services.put(serviceClass, impl);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.tests.e2e.hive;
import org.apache.sentry.provider.file.PolicyFile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileOutputStream;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.io.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/* Tests privileges at table scope with cross database access */
public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
private static final Logger LOGGER = LoggerFactory
    .getLogger(TestCrossDbOps.class);
// local copy of the single-type test data file (populated in setup())
private File dataFile;
// per-test Sentry policy file (obtained from super.setupPolicy())
private PolicyFile policyFile;
// URI privilege string granting load access to dataFile (built in setup())
private String loadData;
@BeforeClass
public static void setupTestStaticConfiguration() throws Exception {
    LOGGER.info("TestCrossDbOps setupTestStaticConfiguration");
    // store the policy file on HDFS for this test class before the shared
    // static configuration is set up
    policyOnHdfs = true;
    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
}
/**
 * Per-test setup: creates the policy file and copies the single-type data
 * file into the test directory, then builds the URI privilege string for it.
 */
@Before
public void setup() throws Exception {
    LOGGER.info("TestCrossDbOps setup");
    policyFile = super.setupPolicy();
    super.setup();
    File dataDir = context.getDataDir();
    // copy data file to test dir; try-with-resources guarantees the stream
    // is closed even if the copy fails (the original leaked it on exception)
    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
    try (FileOutputStream to = new FileOutputStream(dataFile)) {
        Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
    }
    loadData = "server=server1->uri=file://" + dataFile.getPath();
}
/*
 * Admin creates DB_1, DB_2, table tab_1 in DB_1 and tables tab_2, tab_3 in
 * DB_2. User user1 has select on DB_1.tab_1 and insert on DB_2.tab_2;
 * user2 has select on DB_2.tab_3. Verifies SHOW DATABASES and SHOW TABLES
 * output for both user1 and user2.
 */
@Test
public void testShowDatabasesAndShowTables() throws Exception {
    // admin create two databases
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);
    statement.execute("CREATE DATABASE " + DB1);
    statement.execute("CREATE DATABASE " + DB2);
    statement.execute("USE " + DB1);
    statement.execute("CREATE TABLE TAB1(id int)");
    statement.executeQuery("SHOW TABLES");
    statement.execute("USE " + DB2);
    statement.execute("CREATE TABLE TAB2(id int)");
    statement.execute("CREATE TABLE TAB3(id int)");
    // load policy file and grant role with privileges
    policyFile
        .addRolesToGroup(USERGROUP1, "select_tab1", "insert_tab2")
        .addRolesToGroup(USERGROUP2, "select_tab3")
        .addPermissionsToRole("select_tab1", "server=server1->db=" + DB1 + "->table=tab1->action=select")
        .addPermissionsToRole("select_tab3", "server=server1->db=" + DB2 + "->table=tab3->action=select")
        .addPermissionsToRole("insert_tab2", "server=server1->db=" + DB2 + "->table=tab2->action=insert")
        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    writePolicyFile(policyFile);
    // show grant to validate roles and privileges (only meaningful when
    // the Sentry service backend is in use)
    if (useSentryService) {
        PrivilegeResultSet pRset = new PrivilegeResultSet(statement, "SHOW GRANT ROLE select_tab1 ON DATABASE " + DB1);
        LOGGER.info("SHOW GRANT ROLE select_tab1 ON DATABASE " + DB1 + " : " + pRset.toString());
        pRset.verifyResultSetColumn("database", DB1);
        pRset.verifyResultSetColumn("table", "tab1");

        pRset = new PrivilegeResultSet(statement, "SHOW GRANT ROLE insert_tab2 ON DATABASE " + DB2);
        LOGGER.info("SHOW GRANT ROLE insert_tab2 ON DATABASE " + DB2 + " : " + pRset.toString());
        pRset.verifyResultSetColumn("database", DB2);
        pRset.verifyResultSetColumn("table", "tab2");

        pRset = new PrivilegeResultSet(statement, "SHOW GRANT ROLE select_tab3 ON DATABASE " + DB2);
        LOGGER.info("SHOW GRANT ROLE select_tab3 ON DATABASE " + DB2 + " : " + pRset.toString());
        pRset.verifyResultSetColumn("database", DB2);
        pRset.verifyResultSetColumn("table", "tab3");
    }
    // test show databases
    // show databases shouldn't filter any of the dbs from the resultset
    Connection conn = context.createConnection(USER1_1);
    Statement stmt = context.createStatement(conn);
    PrivilegeResultSet pRset = new PrivilegeResultSet(stmt, "SHOW DATABASES");
    LOGGER.info("found databases :" + pRset.toString());
    pRset.verifyResultSetColumn("database_name", DB1);
    pRset.verifyResultSetColumn("database_name", DB2);
    // test show tables: user1 should only see the tables it has privileges on
    stmt.execute("USE " + DB1);
    pRset = new PrivilegeResultSet(stmt, "SHOW TABLES");
    LOGGER.info("found tables :" + pRset.toString());
    pRset.verifyResultSetColumn("tab_name", "tab1");
    stmt.execute("USE " + DB2);
    pRset = new PrivilegeResultSet(stmt, "SHOW TABLES");
    LOGGER.info("found tables :" + pRset.toString());
    pRset.verifyResultSetColumn("tab_name", "tab2");
    try {
        stmt.close();
        conn.close();
    } catch (Exception ex) {
        // nothing to do
    }
    // test show databases and show tables for user2_1
    conn = context.createConnection(USER2_1);
    stmt = context.createStatement(conn);
    pRset = new PrivilegeResultSet(stmt, "SHOW DATABASES");
    pRset.verifyResultSetColumn("database_name", DB2);
    // test show tables
    stmt.execute("USE " + DB2);
    pRset = new PrivilegeResultSet(stmt, "SHOW TABLES");
    pRset.verifyResultSetColumn("tab_name", "tab3");
    // user2 has no privileges on DB1, so USE DB1 must be rejected
    try {
        stmt.execute("USE " + DB1);
        Assert.fail("Expected SQL exception");
    } catch (SQLException e) {
        context.verifyAuthzException(e);
    }
    context.close();
}
/*
 * Same setup as testShowDatabasesAndShowTables (admin creates DB_1 with
 * tab_1, DB_2 with tab_2 and tab_3; user1: select DB_1.tab_1 + insert
 * DB_2.tab_2; user2: select DB_2.tab_3), but validates the privilege
 * filtering through the JDBC metadata API (getSchemas/getTables/getColumns)
 * instead of SHOW statements.
 */
@Test
public void testJDBCGetSchemasAndGetTables() throws Exception {
    // admin create two databases
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);
    statement.execute("CREATE DATABASE " + DB1);
    statement.execute("CREATE DATABASE " + DB2);
    statement.execute("USE " + DB1);
    statement.execute("CREATE TABLE TAB1(id int)");
    statement.executeQuery("SHOW TABLES");
    statement.execute("USE " + DB2);
    statement.execute("CREATE TABLE TAB2(id int)");
    statement.execute("CREATE TABLE TAB3(id int)");
    // edit policy file
    policyFile.addRolesToGroup(USERGROUP1, "select_tab1", "insert_tab2")
        .addRolesToGroup(USERGROUP2, "select_tab3")
        .addPermissionsToRole("select_tab1", "server=server1->db=" + DB1 + "->table=tab1->action=select")
        .addPermissionsToRole("select_tab3", "server=server1->db=" + DB2 + "->table=tab3->action=select")
        .addPermissionsToRole("insert_tab2", "server=server1->db=" + DB2 + "->table=tab2->action=insert")
        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    writePolicyFile(policyFile);
    // test show databases
    // show databases shouldn't filter any of the dbs from the resultset
    Connection conn = context.createConnection(USER1_1);
    Statement stmt = context.createStatement(conn);
    // test direct JDBC metadata API
    // NOTE(review): the SHOW DATABASES ResultSet below is immediately
    // overwritten (dead store) and never closed — confirm the extra query
    // is intentional (e.g. to exercise the statement path)
    ResultSet res = stmt.executeQuery("SHOW DATABASES");
    res = conn.getMetaData().getSchemas();
    ResultSetMetaData resMeta = res.getMetaData();
    assertEquals(2, resMeta.getColumnCount());
    assertEquals("TABLE_SCHEM", resMeta.getColumnName(1));
    assertEquals("TABLE_CATALOG", resMeta.getColumnName(2));

    List<String> expectedResult = new ArrayList<String>();
    List<String> returnedResult = new ArrayList<String>();
    expectedResult.add(DB1);
    expectedResult.add(DB2);
    expectedResult.add("default");
    while (res.next()) {
        returnedResult.add(res.getString(1).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    // test direct JDBC metadata API: user1 sees only tab1 in DB1
    res = conn.getMetaData().getTables(null, DB1, "tab%", null);
    expectedResult.add("tab1");
    while (res.next()) {
        returnedResult.add(res.getString(3).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    // test direct JDBC metadata API: user1 sees only tab2 in DB2
    res = conn.getMetaData().getTables(null, DB2, "tab%", null);
    expectedResult.add("tab2");
    while (res.next()) {
        returnedResult.add(res.getString(3).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    // wildcard schema pattern: union of the accessible tables above
    res = conn.getMetaData().getTables(null, "DB%", "tab%", null);
    expectedResult.add("tab2");
    expectedResult.add("tab1");
    while (res.next()) {
        returnedResult.add(res.getString(3).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    //test show columns
    res = conn.getMetaData().getColumns(null, "DB%", "tab%", "i%");
    expectedResult.add("id");
    while (res.next()) {
        returnedResult.add(res.getString(4).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();
    conn.close();

    // test show databases and show tables for user2
    conn = context.createConnection(USER2_1);
    // test direct JDBC metadata API
    res = conn.getMetaData().getSchemas();
    resMeta = res.getMetaData();
    assertEquals(2, resMeta.getColumnCount());
    assertEquals("TABLE_SCHEM", resMeta.getColumnName(1));
    assertEquals("TABLE_CATALOG", resMeta.getColumnName(2));
    expectedResult.add(DB2);
    expectedResult.add("default");
    while (res.next()) {
        returnedResult.add(res.getString(1).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    // test JDBC direct API: user2 sees only tab3
    res = conn.getMetaData().getTables(null, "DB%", "tab%", null);
    expectedResult.add("tab3");
    while (res.next()) {
        returnedResult.add(res.getString(3).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    //test show columns
    res = conn.getMetaData().getColumns(null, "DB%", "tab%", "i%");
    expectedResult.add("id");
    while (res.next()) {
        returnedResult.add(res.getString(4).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    //test show columns on DB1: user2 has no privilege there, expect no rows
    res = conn.getMetaData().getColumns(null, DB1, "tab%", "i%");
    while (res.next()) {
        returnedResult.add(res.getString(4).trim());
    }
    validateReturnedResult(expectedResult, returnedResult);
    returnedResult.clear();
    expectedResult.clear();
    res.close();

    context.close();
}
/**
 * 2.8 admin user create two database, DB_1, DB_2 admin grant all to USER1_1,
 * USER1_2 on DB_1, admin grant all to user1's group, user2's group on DB_2
 * positive test case: user1, user2 has ALL privilege on both DB_1 and DB_2
 * negative test case: user1, user2 don't have ALL privilege on SERVER
 */
@Test
public void testDbPrivileges() throws Exception {
    createDb(ADMIN1, DB1, DB2);
    // edit policy file: group1 gets ALL on both DBs plus URI access to the
    // data file for LOAD DATA
    policyFile.addRolesToGroup(USERGROUP1, "db1_all,db2_all, load_data")
        .addPermissionsToRole("db1_all", "server=server1->db=" + DB1)
        .addPermissionsToRole("db2_all", "server=server1->db=" + DB2)
        .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath())
        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    writePolicyFile(policyFile);
    for (String user : new String[]{USER1_1, USER1_2}) {
        for (String dbName : new String[]{DB1, DB2}) {
            Connection userConn = context.createConnection(user);
            String tabName = user + "_tab1";
            Statement userStmt = context.createStatement(userConn);
            // Positive case: test user1 and user2 has permissions to access
            // db1 and
            // db2
            userStmt.execute("Use " + dbName);
            userStmt
                .execute("create table " + dbName + "." + tabName + " (id int)");
            userStmt.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
                + "' INTO TABLE " + tabName);
            userStmt.execute("select * from " + dbName + "." + tabName);
            context.close();
        }
    }
}
/**
 * Test Case 2.11 admin user create a new database DB_1 and grant ALL to
 * himself on DB_1 should work
 */
@Test
public void testAdminDbPrivileges() throws Exception {
    createDb(ADMIN1, DB1);
    policyFile
        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    writePolicyFile(policyFile);
    Connection adminCon = context.createConnection(ADMIN1);
    Statement adminStmt = context.createStatement(adminCon);
    String tabName = DB1 + "." + "admin_tab1";
    // admin should be able to create, load and read a table in DB1
    adminStmt.execute("create table " + tabName + "(c1 string)");
    adminStmt.execute("load data local inpath '" + dataFile.getPath() + "' into table "
        + tabName);
    assertTrue(adminStmt.executeQuery("select * from " + tabName).next());
    adminStmt.close();
    adminCon.close();
}
/**
* Test Case 2.14 admin user create a new database DB_1 create TABLE_1 in DB_1
* admin user grant INSERT to user1's group on TABLE_1 negative test case:
* user1 try to do following on TABLE_1 will fail: --explain --analyze
* --describe --describe function --show columns --show table status --show
* table properties --show create table --show partitions --show indexes
* --select * from TABLE_1.
*/
@Test
public void testNegativeUserPrivileges() throws Exception {
  // Admin sets up DB1 with two tables.
  Connection adminCon = context.createConnection(ADMIN1);
  Statement adminStmt = context.createStatement(adminCon);
  adminStmt.execute("use default");
  adminStmt.execute("CREATE DATABASE " + DB1);
  adminStmt.execute("create table " + DB1 + ".table_1 (id int)");
  adminStmt.execute("create table " + DB1 + ".table_2 (id int)");
  adminStmt.close();
  adminCon.close();
  // edit policy file: USERGROUP1 gets only INSERT on table_1 (ALL on table_2
  // must not leak SELECT access to table_1).
  policyFile.addRolesToGroup(USERGROUP1, "db1_tab1_insert", "db1_tab2_all")
      .addPermissionsToRole("db1_tab2_all", "server=server1->db=" + DB1 + "->table=table_2")
      .addPermissionsToRole("db1_tab1_insert", "server=server1->db=" + DB1 + "->table=table_1->action=insert")
      .setUserGroupMapping(StaticUserGroup.getStaticMapping());
  writePolicyFile(policyFile);
  Connection userConn = context.createConnection(USER1_1);
  Statement userStmt = context.createStatement(userConn);
  // Negative case: INSERT alone must not allow reading table_1.
  context.assertAuthzException(userStmt, "select * from " + DB1 + ".table_1");
  // Fix: close the Statement before its Connection.  The original closed the
  // connection first, which invalidates the still-open statement handle.
  userStmt.close();
  userConn.close();
}
/**
* Test Case 2.16 admin user create a new database DB_1 create TABLE_1 and
* TABLE_2 (same schema) in DB_1 admin user grant SELECT, INSERT to user1's
* group on TABLE_2 negative test case: user1 try to do following on TABLE_1
* will fail: --insert overwrite TABLE_2 select * from TABLE_1
*/
@Test
public void testNegativeUserDMLPrivileges() throws Exception {
createDb(ADMIN1, DB1);
// Admin creates two tables with the same schema in DB1.
Connection adminCon = context.createConnection(ADMIN1);
Statement adminStmt = context.createStatement(adminCon);
adminStmt.execute("create table " + DB1 + ".table_1 (id int)");
adminStmt.execute("create table " + DB1 + ".table_2 (id int)");
adminStmt.close();
adminCon.close();
// USERGROUP1 is granted ALL on table_2 only; table_1 remains inaccessible.
policyFile
.addPermissionsToRole("db1_tab2_all", "server=server1->db=" + DB1 + "->table=table_2")
.addRolesToGroup(USERGROUP1, "db1_tab2_all")
.setUserGroupMapping(StaticUserGroup.getStaticMapping());
writePolicyFile(policyFile);
Connection userConn = context.createConnection(USER1_1);
Statement userStmt = context.createStatement(userConn);
// Negative cases: both statements read table_1, for which the user has no
// privilege, so authorization must fail even though table_2 / the target
// directory are writable.
context.assertAuthzException(userStmt, "insert overwrite table " + DB1
+ ".table_2 select * from " + DB1 + ".table_1");
context.assertAuthzException(userStmt, "insert overwrite directory '" + dataDir.getPath()
+ "' select * from " + DB1 + ".table_1");
userStmt.close();
userConn.close();
}
/**
* Test Case 2.17 Execution steps
* a) Admin user creates a new database DB_1,
* b) Admin user grants ALL on DB_1 to group GROUP_1
* c) User from GROUP_1 creates table TAB_1, TAB_2 in DB_1
* d) Admin user grants SELECT on TAB_1 to group GROUP_2
*
* 1) verify users from GROUP_2 have only SELECT privileges on TAB_1. They
* shouldn't be able to perform any operation other than those listed as
* requiring SELECT in the privilege model.
*
* 2) verify users from GROUP_2 can't perform queries involving join between
* TAB_1 and TAB_2.
*
* 3) verify users from GROUP_1 can't perform operations requiring ALL @
* SERVER scope. Refer to list
*/
@Test
public void testNegUserPrivilegesAll() throws Exception {
// create dbs: admin prepares a table in the default db plus DB1 with two
// tables, a view and a partitioned table, all loaded from dataFile.
Connection adminCon = context.createConnection(ADMIN1);
Statement adminStmt = context.createStatement(adminCon);
adminStmt.execute("use default");
adminStmt.execute("drop table if exists table_def");
adminStmt.execute("create table table_def (name string)");
adminStmt
.execute("load data local inpath '" + dataFile.getPath() + "' into table table_def");
adminStmt.execute("CREATE DATABASE " + DB1);
adminStmt.execute("use " + DB1);
adminStmt.execute("create table table_1 (name string)");
adminStmt
.execute("load data local inpath '" + dataFile.getPath() + "' into table table_1");
adminStmt.execute("create table table_2 (name string)");
adminStmt
.execute("load data local inpath '" + dataFile.getPath() + "' into table table_2");
adminStmt.execute("create view v1 AS select * from table_1");
adminStmt
.execute("create table table_part_1 (name string) PARTITIONED BY (year INT)");
adminStmt.execute("ALTER TABLE table_part_1 ADD PARTITION (year = 2012)");
adminStmt.close();
adminCon.close();
// GROUP1 gets ALL on DB1; GROUP2 gets only SELECT on DB1.table_1.
policyFile
.addRolesToGroup(USERGROUP1, "db1_all")
.addRolesToGroup(USERGROUP2, "db1_tab1_select")
.addPermissionsToRole("db1_all", "server=server1->db=" + DB1)
.addPermissionsToRole("db1_tab1_select", "server=server1->db=" + DB1 + "->table=table_1->action=select")
.setUserGroupMapping(StaticUserGroup.getStaticMapping());
writePolicyFile(policyFile);
// All statements below run as a GROUP2 member, whose only privilege is
// SELECT on table_1, so every DDL/DML operation must be rejected.
Connection userConn = context.createConnection(USER2_1);
Statement userStmt = context.createStatement(userConn);
context.assertAuthzException(userStmt, "drop database " + DB1);
// Hive currently doesn't support cross db index DDL, so no index statement
// is exercised here; instead the next statement checks that creating a
// temporary function is denied (presumably a server-scope operation -- TODO
// confirm against the Sentry privilege model).
context.assertAuthzException(userStmt, "CREATE TEMPORARY FUNCTION strip AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'");
context.assertAuthzException(userStmt, "create table " + DB1
+ ".c_tab_2 as select * from " + DB1 + ".table_2");
context.assertAuthzException(userStmt, "select * from " + DB1 + ".table_2");
context.assertAuthzException(userStmt, "ALTER DATABASE " + DB1
+ " SET DBPROPERTIES ('foo' = 'bar')");
context.assertAuthzException(userStmt, "drop table " + DB1 + ".table_1");
context.assertAuthzException(userStmt, "DROP VIEW IF EXISTS " + DB1 + ".v1");
context.assertAuthzException(userStmt, "create table " + DB1 + ".table_5 (name string)");
context.assertAuthzException(userStmt, "ALTER TABLE " + DB1 + ".table_1 RENAME TO "
+ DB1 + ".table_99");
context.assertAuthzException(userStmt, "insert overwrite table " + DB1
+ ".table_2 select * from " + DB1 + ".table_1");
context.assertAuthzException(userStmt, "insert overwrite table " + DB1
+ ".table_2 select * from " + "table_def");
context.assertAuthzException(userStmt, "ALTER TABLE " + DB1
+ ".table_part_1 ADD IF NOT EXISTS PARTITION (year = 2012)");
context.assertAuthzException(userStmt, "ALTER TABLE " + DB1
+ ".table_part_1 PARTITION (year = 2012) SET LOCATION '/etc'");
userStmt.close();
userConn.close();
}
/**
* Steps: 1. admin user create databases, DB_1 and DB_2, no table or other
* object in database
* 2. admin grant all to user1's group on DB_1 and DB_2
* positive test case:
* a)user1 has the privilege to create table, load data,
* drop table, create view, insert more data on both databases
* b) user1 can switch between DB_1 and DB_2 without exception
* negative test case:
* c) user1 cannot drop database
*/
@Test
public void testSandboxOpt9() throws Exception {
createDb(ADMIN1, DB1, DB2);
// GROUP1_ROLE initially carries ALL on both databases plus the load-data URI
// privilege; ALL_DB2 is revoked later in this test.
policyFile
.addPermissionsToRole(GROUP1_ROLE, ALL_DB1, ALL_DB2, loadData)
.addRolesToGroup(USERGROUP1, GROUP1_ROLE)
.setUserGroupMapping(StaticUserGroup.getStaticMapping());
writePolicyFile(policyFile);
Connection connection = context.createConnection(USER1_1);
Statement statement = context.createStatement(connection);
// a: user1 can create/load tables and create views in DB1, and create a
// table in DB2 from a DB1 select.
statement.execute("DROP TABLE IF EXISTS " + DB1 + "." + TBL1);
statement.execute("create table " + DB1 + "." + TBL1
+ " (under_col int comment 'the under column', value string)");
statement.execute("load data local inpath '" + dataFile.getPath()
+ "' into table " + DB1 + "." + TBL1);
statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW1);
statement.execute("CREATE VIEW " + DB1 + "." + VIEW1
+ " (value) AS SELECT value from " + DB1 + "." + TBL1
+ " LIMIT 10");
statement.execute("DROP TABLE IF EXISTS " + DB2 + "." + TBL1);
statement.execute("CREATE TABLE " + DB2 + "." + TBL1
+ " AS SELECT value from " + DB1 + "." + TBL1
+ " LIMIT 10");
// b: user1 can switch to DB2 and work there while the privilege is held.
statement.execute("DROP TABLE IF EXISTS " + DB2 + "." + TBL2);
statement.execute("create table " + DB2 + "." + TBL2
+ " (under_col int comment 'the under column', value string)");
statement.execute("load data local inpath '" + dataFile.getPath()
+ "' into table " + DB2 + "." + TBL2);
statement.execute("DROP TABLE IF EXISTS " + DB2 + "." + TBL3);
statement.execute("create table " + DB2 + "." + TBL3
+ " (under_col int comment 'the under column', value string)");
statement.execute("load data local inpath '" + dataFile.getPath()
+ "' into table " + DB2 + "." + TBL3);
// Revoke ALL on DB2; subsequent statements reading DB2 must now fail.
policyFile.removePermissionsFromRole(GROUP1_ROLE, ALL_DB2);
writePolicyFile(policyFile);
// create db1.view1 as select from db2.tbl2
statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW2);
context.assertAuthzException(statement, "CREATE VIEW " + DB1 + "." + VIEW2 +
" (value) AS SELECT value from " + DB2 + "." + TBL2 + " LIMIT 10");
// create db1.tbl2 as select from db2.tbl2
statement.execute("DROP TABLE IF EXISTS " + DB1 + "." + TBL2);
context.assertAuthzException(statement, "CREATE TABLE " + DB1 + "." + TBL2 +
" AS SELECT value from " + DB2 + "." + TBL2 + " LIMIT 10");
statement.close();
connection.close();
}
/**
 * Steps: 1. The admin user creates databases DB_1 and DB_2, with no table or
 * other object in either database.
 * Positive test case:
 * d) user1 has the privilege to create views on tables in DB_1.
 * Negative test case:
 * e) user1 cannot create a view in DB_1 that selects from tables in DB_2
 * without the SELECT privilege.
 * 2. Positive test case:
 * f) user1 has the privilege to create a view selecting from DB_1.tb_1 and DB_2.tb_2.
 * Negative test case:
 * g) user1 cannot create a view selecting from DB_1.tb_1 and DB_2.tb_3.
 */
@Test
public void testCrossDbViewOperations() throws Exception {
  // admin create two databases
  createDb(ADMIN1, DB1, DB2);
  Connection connection = context.createConnection(ADMIN1);
  Statement statement = context.createStatement(connection);
  statement
      .execute("CREATE TABLE " + DB1 + "." + TBL1 + "(id int)");
  statement
      .execute("CREATE TABLE " + DB2 + "." + TBL1 + "(id int)");
  statement
      .execute("CREATE TABLE " + DB2 + "." + TBL2 + "(id int)");
  context.close();
  // edit policy file: user1 gets ALL on DB1, the load-data URI privilege and
  // SELECT on one DB2 table.
  // NOTE(review): the "select_tb2" role grants SELECT on table tb_1, not
  // tb_2; looks inconsistent with the role name -- confirm intended scope.
  policyFile
      .addRolesToGroup(USERGROUP1, "all_db1", "load_data", "select_tb2")
      .addPermissionsToRole("all_db1", "server=server1->db=" + DB1)
      .addPermissionsToRole("all_db2", "server=server1->db=" + DB2)
      .addPermissionsToRole("select_tb2", "server=server1->db=" + DB2 + "->table=tb_1->action=select")
      .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath())
      .setUserGroupMapping(StaticUserGroup.getStaticMapping());
  writePolicyFile(policyFile);
  connection = context.createConnection(USER1_1);
  statement = context.createStatement(connection);
  // d: user1 may create tables (and hence views) inside DB1.
  statement.execute("DROP TABLE IF EXISTS " + DB1 + "." + TBL1);
  statement.execute("create table " + DB1 + "." + TBL1
      + " (under_col int comment 'the under column', value string)");
  // e: a DB1 view selecting from a DB2 table without SELECT must fail.
  statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW1);
  context.assertAuthzException(statement, "CREATE VIEW " + DB1 + "." + VIEW1
      + " (value) AS SELECT value from " + DB2 + "." + TBL2
      + " LIMIT 10");
  // f: a view over the user's own DB1 table is allowed.
  statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW2);
  statement.execute("CREATE VIEW " + DB1 + "." + VIEW2
      + " (value) AS SELECT value from " + DB1 + "." + TBL1
      + " LIMIT 10");
  // g: another cross-db view over an unauthorized table must fail.
  statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW3);
  context.assertAuthzException(statement, "CREATE VIEW " + DB1 + "." + VIEW3
      + " (value) AS SELECT value from " + DB2 + "." + TBL2
      + " LIMIT 10");
  // Fix: the original leaked the user's statement and connection; release
  // them explicitly (statement first, then connection).
  statement.close();
  connection.close();
}
}
| |
/**
*
*/
package org.treetank.bench;
import java.io.File;
import java.nio.file.FileSystems;
import java.util.HashSet;
import java.util.Set;
import org.perfidix.AbstractConfig;
import org.perfidix.Benchmark;
import org.perfidix.annotation.AfterEachRun;
import org.perfidix.annotation.BeforeEachRun;
import org.perfidix.annotation.Bench;
import org.perfidix.element.KindOfArrangement;
import org.perfidix.meter.AbstractMeter;
import org.perfidix.meter.Time;
import org.perfidix.meter.TimeMeter;
import org.perfidix.ouput.AbstractOutput;
import org.perfidix.ouput.CSVOutput;
import org.perfidix.ouput.TabularSummaryOutput;
import org.perfidix.result.BenchmarkResult;
import org.treetank.access.Storage;
import org.treetank.access.conf.ModuleSetter;
import org.treetank.access.conf.ResourceConfiguration;
import org.treetank.access.conf.ResourceConfiguration.IResourceConfigurationFactory;
import org.treetank.access.conf.SessionConfiguration;
import org.treetank.access.conf.StandardSettings;
import org.treetank.access.conf.StorageConfiguration;
import org.treetank.api.IBucketReadTrx;
import org.treetank.api.IBucketWriteTrx;
import org.treetank.api.ISession;
import org.treetank.api.IStorage;
import org.treetank.bucket.DumbDataFactory;
import org.treetank.bucket.DumbDataFactory.DumbData;
import org.treetank.bucket.DumbMetaEntryFactory;
import org.treetank.exception.TTException;
import org.treetank.exception.TTIOException;
import org.treetank.io.IOUtils;
import org.treetank.io.jclouds.JCloudsStorage;
import com.google.inject.Guice;
import com.google.inject.Injector;
/**
* Benchmarking the getting of data. ELEMENTS nodes are inserted within FACTOR revisions. Afterwards within
* FACTOR revisions, ELEMENTS nodes are modified. The aim are highly distributed and scattered buckets.
* Then powers of 2 nodes are retrieved sequentially and random-access-like.
*
* @author Sebastian Graf, University of Konstanz
*
*/
public class GetBench {

    /** Name of the benchmarked resource inside the storage. */
    private final String RESOURCENAME = "benchResourcegrave9283123";

    /** Location of the pre-generated storage the benchmark reads from. */
    private static final File benchedFile = FileSystems.getDefault().getPath("tmp", "bench").toFile();

    // private static final File benchedFile =
    // FileSystems.getDefault().getPath("/Volumes/ramdisk/tt").toFile();

    /** Number of revisions used when the storage was generated. */
    private static final int FACTOR = 8;

    /** Total number of data elements in the benchmarked resource. */
    private final int ELEMENTS = 262144;
    // private final int ELEMENTS = 32768;

    private IStorage mStorage;
    private ISession mSession;

    // Pre-generated data; only used by the (commented-out) setup code below.
    private DumbData[] mNodesToInsert = BenchUtils.createDatas(new int[] {
        ELEMENTS
    })[0];

    /** Read transaction the bench methods pull data through. */
    private IBucketReadTrx mTrx;

    /**
     * Constructor. The storage-generation code is kept (commented out) as
     * documentation of how the benchmarked resource was originally produced:
     * ELEMENTS nodes inserted over FACTOR revisions, then modified over
     * another FACTOR revisions to scatter the buckets.
     */
    public GetBench() throws TTException {
        // final Injector inj =
        // Guice.createInjector(new ModuleSetter().setDataFacClass(DumbDataFactory.class).setMetaFacClass(
        // DumbMetaEntryFactory.class).setBackendClass(JCloudsStorage.class).createModule());
        //
        // final ResourceConfiguration resConfig =
        // inj.getInstance(IResourceConfigurationFactory.class).create(
        // StandardSettings.getProps(benchedFile.getAbsolutePath(), RESOURCENAME));
        // IOUtils.recursiveDelete(benchedFile);
        //
        // // Creating Storage and inserting ELEMENTS nodes in FACTOR revisions
        // final StorageConfiguration storConfig = new StorageConfiguration(benchedFile);
        // Storage.createStorage(storConfig);
        // final IStorage storage = Storage.openStorage(benchedFile);
        // storage.createResource(resConfig);
        // final ISession session =
        // storage.getSession(new SessionConfiguration(RESOURCENAME, StandardSettings.KEY));
        // IBucketWriteTrx trx = session.beginBucketWtx();
        // long time = System.currentTimeMillis();
        // // Creating FACTOR versions with ELEMENTS\FACTOR elements
        // for (int j = 0; j < FACTOR; j++) {
        // System.out.println("Inserting revision " + j + " and " + (ELEMENTS / FACTOR) + " elements");
        // for (int i = 0; i < ELEMENTS / FACTOR; i++) {
        // final long nodeKey = trx.incrementDataKey();
        // mNodesToInsert[i].setDataKey(nodeKey);
        // trx.setData(mNodesToInsert[i]);
        // }
        // trx.commit();
        // }
        // trx.close();
        // trx = session.beginBucketWtx();
        // long endtime = System.currentTimeMillis();
        // System.out.println("Generating nodes in " + FACTOR + " versions took " + (endtime - time) + "ms");
        //
        // // Modifying ELEMENT nodes in FACTOR revisions.
        // for (int i = 0; i < FACTOR; i++) {
        // System.out.println("Modifying revision " + i + " and " + (ELEMENTS / FACTOR) + " elements");
        // boolean continueFlag = true;
        // for (int j = 0; j < ELEMENTS / FACTOR && continueFlag; j++) {
        // try {
        // final long keyToAdapt = Math.abs(BenchUtils.random.nextLong()) % ELEMENTS;
        // final DumbData data= BenchUtils.generateOne();
        // data.setDataKey(keyToAdapt);
        // trx.setData(data);
        // } catch (Exception e) {
        // System.err.println("Exception " + e + " thrown in factor " + i + " and Elements " + j);
        // continueFlag = false;
        // }
        // }
        // if (continueFlag) {
        // long commitstart = System.currentTimeMillis();
        // System.out.println("Revision " + i + " before commit");
        // trx.commit();
        // System.out.println("Commit of revision " + i + " finished in "
        // + (System.currentTimeMillis() - commitstart) + "ms");
        // } else {
        // System.out.println("Revision " + i + " skipped");
        // i--;
        // trx.close();
        // trx = session.beginBucketWtx();
        // }
        // }
        // long endtimeMod = System.currentTimeMillis();
        // System.out
        // .println("Modifying nodes in " + FACTOR + " versions took " + (endtimeMod - endtime) + "ms");
        //
        // trx.close();
        // session.close();
        // storage.close();
    }

    /**
     * Reads {@code numbersToGet} elements through the open read transaction.
     *
     * @param numbersToGet
     *            how many get-operations to perform
     * @param random
     *            true for random keys in [0, ELEMENTS), false for sequential
     *            access (i modulo ELEMENTS)
     * @throws TTIOException
     *             if the underlying storage access fails
     */
    private void get(int numbersToGet, boolean random) throws TTIOException {
        for (int i = 0; i < numbersToGet; i++) {
            if (random) {
                // Fix: take the modulus before Math.abs. The original
                // Math.abs(nextLong()) % ELEMENTS produced a negative key when
                // nextLong() returned Long.MIN_VALUE (abs overflows); taking
                // the remainder first keeps the value safely within range.
                long nextKey = Math.abs(BenchUtils.random.nextLong() % ELEMENTS);
                mTrx.getData(nextKey);
            } else {
                mTrx.getData(i % ELEMENTS);
            }
            // Progress output every 1024 reads.
            if (i % 1024 == 0) {
                System.out.println(i + " elements read of " + numbersToGet + " with random=" + random);
            }
        }
    }

    /** Opens storage, session and a read transaction on the newest revision before each run. */
    @BeforeEachRun
    public void setUp() throws TTException {
        mStorage = Storage.openStorage(benchedFile);
        mSession = mStorage.getSession(new SessionConfiguration(RESOURCENAME, StandardSettings.KEY));
        mTrx = mSession.beginBucketRtx(mSession.getMostRecentVersion());
    }

    // @Bench
    // public void random016384() throws TTException {
    // get(16384, true);
    // System.out.println("163842");
    // }
    //
    // @Bench
    // public void random032768() throws TTException {
    // get(32768, true);
    // System.out.println("32768");
    // }

    /** Benchmarks 65536 random-access reads. */
    @Bench
    public void random065536() throws TTException {
        get(65536, true);
        System.out.println("65536");
    }

    // @Bench
    // public void random131072() throws TTException {
    // get(131072, true);
    // System.out.println("131072");
    // }
    //
    // @Bench
    // public void random262144() throws TTException {
    // get(262144, true);
    // System.out.println("262144");
    // }
    // @Bench
    // public void seq016384() throws TTException {
    // get(16384, false);
    // System.out.println("163842");
    // }
    //
    // @Bench
    // public void seq032768() throws TTException {
    // get(32768, false);
    // System.out.println("32768");
    // }
    //
    // @Bench
    // public void seq065536() throws TTException {
    // get(65536, false);
    // System.out.println("65536");
    // }
    //
    // @Bench
    // public void seq131072() throws TTException {
    // get(131072, false);
    // System.out.println("131072");
    // }
    //
    // @Bench
    // public void seq262144() throws TTException {
    // get(262144, false);
    // System.out.println("262144");
    // }

    /** Releases transaction, session and storage after each run. */
    @AfterEachRun
    public void tearDown() throws TTException {
        mTrx.close();
        mSession.close();
        mStorage.close();
    }

    /** Folder the CSV listener output is written to (hard-coded developer path). */
    final static File outputFold = new File("/Users/sebi/listenerBench");

    /**
     * Entry point: wipes the output/result folders, runs the benchmark and
     * emits tabular plus CSV results.
     *
     * @param args
     *            ignored
     */
    public static void main(String[] args) {
        final File resultFold = new File("/Users/sebi/resBench");
        IOUtils.recursiveDelete(outputFold);
        IOUtils.recursiveDelete(resultFold);
        outputFold.mkdirs();
        resultFold.mkdirs();
        Benchmark bench = new Benchmark(new Config());
        bench.add(GetBench.class);
        BenchmarkResult res = bench.run();
        new TabularSummaryOutput().visitBenchmark(res);
        new CSVOutput(resultFold).visitBenchmark(res);
    }

    /** Perfidix configuration: 2 runs, millisecond timing, CSV + tabular output. */
    static class Config extends AbstractConfig {
        private final static int RUNS = 2;
        private final static Set<AbstractMeter> METERS = new HashSet<AbstractMeter>();
        private final static Set<AbstractOutput> OUTPUT = new HashSet<AbstractOutput>();
        private final static KindOfArrangement ARRAN = KindOfArrangement.SequentialMethodArrangement;
        private final static double GCPROB = 1.0d;
        static {
            METERS.add(new TimeMeter(Time.MilliSeconds));
            // METERS.add(new MemMeter(Memory.Byte));
            OUTPUT.add(new CSVOutput(outputFold));
            OUTPUT.add(new TabularSummaryOutput());
        }

        /**
         * Public constructor.
         */
        public Config() {
            super(RUNS, METERS.toArray(new AbstractMeter[METERS.size()]), OUTPUT
                .toArray(new AbstractOutput[OUTPUT.size()]), ARRAN, GCPROB);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.policy.loadbalancing;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.location.Location;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;
/**
* Standard implementation of {@link BalanceablePoolModel}, providing essential arithmetic for item and container
* workrates and thresholds. See subclasses for specific requirements for migrating items.
*/
public class DefaultBalanceablePoolModel<ContainerType, ItemType> implements BalanceablePoolModel<ContainerType, ItemType> {
private static final Logger LOG = LoggerFactory.getLogger(DefaultBalanceablePoolModel.class);
/*
 * Performance comments.
 * - Used hprof with LoadBalancingPolicySoakTest.testLoadBalancingManyManyItemsTest (1000 items)
 * - Prior to adding containerToItems, it created a new set by iterating over all items.
 * This was the biggest percentage of any brooklyn code.
 * Hence it's worth duplicating the values, keyed by item and keyed by container.
 * - Unfortunately changing threading model (so have a "rebalancer" thread, and a thread that
 * processes events to update the model), get ConcurrentModificationException if don't take
 * copy of containerToItems.get(node)...
 */
// Concurrent maps cannot have null value; use this to represent when no container is supplied for an item.
// It is compared by IDENTITY (==/!=) throughout, not equals(), so it never collides with a real container.
private static final String NULL_CONTAINER = "null-container";
private final String name;
// All collections below are concurrent/synchronized so readers and the event-processing
// mutator can touch them from different threads.
private final Set<ContainerType> containers = Collections.newSetFromMap(new ConcurrentHashMap<ContainerType,Boolean>());
private final Map<ContainerType, Double> containerToLowThreshold = new ConcurrentHashMap<ContainerType, Double>();
private final Map<ContainerType, Double> containerToHighThreshold = new ConcurrentHashMap<ContainerType, Double>();
// item -> container (NULL_CONTAINER sentinel when unknown), duplicated as container -> items
// for fast per-container lookup; both must be kept in step by the mutator methods.
private final Map<ItemType, ContainerType> itemToContainer = new ConcurrentHashMap<ItemType, ContainerType>();
private final SetMultimap<ContainerType, ItemType> containerToItems = Multimaps.synchronizedSetMultimap(HashMultimap.<ContainerType, ItemType>create());
private final Map<ItemType, Double> itemToWorkrate = new ConcurrentHashMap<ItemType, Double>();
private final Set<ItemType> immovableItems = Collections.newSetFromMap(new ConcurrentHashMap<ItemType, Boolean>());
// NOTE(review): "+=" on these volatiles is not atomic; this appears to assume a single
// model-updating thread (see the threading comment above) -- confirm before adding writers.
private volatile double poolLowThreshold = 0;
private volatile double poolHighThreshold = 0;
private volatile double currentPoolWorkrate = 0;
// Creates an empty model identified by the given name.
public DefaultBalanceablePoolModel(String name) {
this.name = name;
}
// Returns the container currently holding the item, or null if unknown or
// recorded under the NULL_CONTAINER sentinel (identity comparison is intentional).
public ContainerType getParentContainer(ItemType item) {
ContainerType result = itemToContainer.get(item);
return (result != NULL_CONTAINER) ? result : null;
}
// Returns an immutable snapshot of the container's items; the copy is taken while
// holding the multimap's monitor, as required for iterating a synchronized multimap view.
public Set<ItemType> getItemsForContainer(ContainerType node) {
Set<ItemType> result = containerToItems.get(node);
synchronized (containerToItems) {
return (result != null) ? ImmutableSet.copyOf(result) : Collections.<ItemType>emptySet();
}
}
// Returns the item's last reported workrate, or null if none recorded.
public Double getItemWorkrate(ItemType item) {
return itemToWorkrate.get(item);
}
@Override public double getPoolLowThreshold() { return poolLowThreshold; }
@Override public double getPoolHighThreshold() { return poolHighThreshold; }
@Override public double getCurrentPoolWorkrate() { return currentPoolWorkrate; }
// Hot/cold: pool workrate outside the aggregate thresholds (empty pool is neither).
@Override public boolean isHot() { return !containers.isEmpty() && currentPoolWorkrate > poolHighThreshold; }
@Override public boolean isCold() { return !containers.isEmpty() && currentPoolWorkrate < poolLowThreshold; }
// Provider methods.
@Override public String getName() { return name; }
@Override public int getPoolSize() { return containers.size(); }
@Override public Set<ContainerType> getPoolContents() { return containers; }
@Override public String getName(ContainerType container) { return container.toString(); } // TODO: delete?
@Override public Location getLocation(ContainerType container) { return null; } // TODO?
// Per-container thresholds; -1 signals "unknown container".
@Override public double getLowThreshold(ContainerType container) {
Double result = containerToLowThreshold.get(container);
return (result != null) ? result : -1;
}
@Override public double getHighThreshold(ContainerType container) {
Double result = containerToHighThreshold.get(container);
return (result != null) ? result : -1;
}
// Sums the absolute workrates of the container's items (items with no
// recorded workrate contribute nothing).
@Override public double getTotalWorkrate(ContainerType container) {
double totalWorkrate = 0;
for (ItemType item : getItemsForContainer(container)) {
Double workrate = itemToWorkrate.get(item);
if (workrate != null)
totalWorkrate += Math.abs(workrate);
}
return totalWorkrate;
}
// Snapshot of total workrate per container, in container-iteration order.
@Override public Map<ContainerType, Double> getContainerWorkrates() {
Map<ContainerType, Double> result = new LinkedHashMap<ContainerType, Double>();
for (ContainerType node : containers)
result.put(node, getTotalWorkrate(node));
return result;
}
// Snapshot of per-item workrates for one container (values may be null).
@Override public Map<ItemType, Double> getItemWorkrates(ContainerType node) {
Map<ItemType, Double> result = new LinkedHashMap<ItemType, Double>();
for (ItemType item : getItemsForContainer(node))
result.put(item, itemToWorkrate.get(item));
return result;
}
@Override public boolean isItemMoveable(ItemType item) {
// If don't know about item, then assume not movable; otherwise has this item been explicitly flagged as immovable?
return itemToContainer.containsKey(item) && !immovableItems.contains(item);
}
@Override public boolean isItemAllowedIn(ItemType item, Location location) {
return true; // TODO?
}
// Mutators.
// Re-homes a known item onto newNode (null newNode records the sentinel),
// keeping itemToContainer and containerToItems in step.
@Override
public void onItemMoved(ItemType item, ContainerType newNode) {
if (!itemToContainer.containsKey(item)) {
// Item may have been deleted; order of events received from different sources
// (i.e. item itself and for itemGroup membership) is non-deterministic.
LOG.info("Balanceable pool model ignoring onItemMoved for unknown item {} to container {}; " +
"if onItemAdded subsequently received will get new container then", item, newNode);
return;
}
ContainerType newNodeNonNull = toNonNullContainer(newNode);
ContainerType oldNode = itemToContainer.put(item, newNodeNonNull);
if (oldNode != null && oldNode != NULL_CONTAINER) containerToItems.remove(oldNode, item);
if (newNode != null) containerToItems.put(newNode, item);
}
// Registers a container and folds its thresholds into the pool aggregates;
// duplicate events are ignored so the aggregates are not double-counted.
@Override
public void onContainerAdded(ContainerType newContainer, double lowThreshold, double highThreshold) {
boolean added = containers.add(newContainer);
if (!added) {
// See LoadBalancingPolicy.onContainerAdded for possible explanation of why can get duplicate calls
LOG.debug("Duplicate container-added event for {}; ignoring", newContainer);
return;
}
containerToLowThreshold.put(newContainer, lowThreshold);
containerToHighThreshold.put(newContainer, highThreshold);
poolLowThreshold += lowThreshold;
poolHighThreshold += highThreshold;
}
// Removes a container and subtracts its thresholds from the pool aggregates
// (missing thresholds count as 0, so an unknown container is a no-op).
@Override
public void onContainerRemoved(ContainerType oldContainer) {
containers.remove(oldContainer);
Double containerLowThreshold = containerToLowThreshold.remove(oldContainer);
Double containerHighThresold = containerToHighThreshold.remove(oldContainer);
poolLowThreshold -= (containerLowThreshold != null ? containerLowThreshold : 0);
poolHighThreshold -= (containerHighThresold != null ? containerHighThresold : 0);
// TODO: assert no orphaned items
}
// Convenience overload: item is movable by default.
@Override
public void onItemAdded(ItemType item, ContainerType parentContainer) {
onItemAdded(item, parentContainer, false);
}
// Registers (or re-registers) an item under parentContainer; immovable items
// are additionally recorded so the rebalancer will not migrate them.
@Override
public void onItemAdded(ItemType item, ContainerType parentContainer, boolean immovable) {
// Duplicate calls to onItemAdded do no harm, as long as most recent is most accurate!
// Important that it stays that way for now - See LoadBalancingPolicy.onContainerAdded for explanation.
if (immovable)
immovableItems.add(item);
ContainerType parentContainerNonNull = toNonNullContainer(parentContainer);
ContainerType oldNode = itemToContainer.put(item, parentContainerNonNull);
if (oldNode != null && oldNode != NULL_CONTAINER) containerToItems.remove(oldNode, item);
if (parentContainer != null) containerToItems.put(parentContainer, item);
}
// Forgets an item entirely, rolling its last workrate out of the pool total.
@Override
public void onItemRemoved(ItemType item) {
ContainerType oldNode = itemToContainer.remove(item);
if (oldNode != null && oldNode != NULL_CONTAINER) containerToItems.remove(oldNode, item);
Double workrate = itemToWorkrate.remove(item);
if (workrate != null)
currentPoolWorkrate -= workrate;
immovableItems.remove(item);
}
// Records a new workrate for a known item and applies the delta to the pool total.
@Override
public void onItemWorkrateUpdated(ItemType item, double newValue) {
if (hasItem(item)) {
Double oldValue = itemToWorkrate.put(item, newValue);
double delta = ( newValue - (oldValue != null ? oldValue : 0) );
currentPoolWorkrate += delta;
} else {
// Can happen when item removed - get notification of removal and workrate from group and item
// respectively, so can overtake each other
if (LOG.isDebugEnabled()) LOG.debug("Ignoring setting of workrate for unknown item {}, to {}", item, newValue);
}
}
// True iff the item has been registered (possibly under the null-container sentinel).
private boolean hasItem(ItemType item) {
return itemToContainer.containsKey(item);
}
// Additional methods for tests.
/**
 * Warning: this can be an expensive (time and memory) operation if there are a lot of items/containers.
 */
@VisibleForTesting
public String itemDistributionToString() {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
dumpItemDistribution(new PrintStream(baos));
return new String(baos.toByteArray());
}
// Prints the container/item distribution to stdout.
@VisibleForTesting
public void dumpItemDistribution() {
dumpItemDistribution(System.out);
}
// Prints each container followed by its items and their workrates.
@VisibleForTesting
public void dumpItemDistribution(PrintStream out) {
for (ContainerType container : getPoolContents()) {
out.println("Container '"+container+"': ");
for (ItemType item : getItemsForContainer(container)) {
Double workrate = getItemWorkrate(item);
out.println("\t"+"Item '"+item+"' ("+workrate+")");
}
}
out.flush();
}
@SuppressWarnings("unchecked")
private ContainerType nullContainer() {
return (ContainerType) NULL_CONTAINER; // relies on erasure
}
// Maps null to the sentinel so it can be stored in the null-hostile concurrent maps.
private ContainerType toNonNullContainer(ContainerType container) {
return (container != null) ? container : nullContainer();
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.databasemigrationservice.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Describes a quota for an AWS account, for example, the number of replication instances allowed.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/AccountQuota" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AccountQuota implements Serializable, Cloneable, StructuredPojo {

    /** The name of the AWS DMS quota for this AWS account. */
    private String accountQuotaName;

    /** The amount currently used toward the quota maximum. */
    private Long used;

    /** The maximum allowed value for the quota. */
    private Long max;

    /**
     * Sets the name of the AWS DMS quota for this AWS account.
     *
     * @param accountQuotaName
     *        The name of the AWS DMS quota for this AWS account.
     */
    public void setAccountQuotaName(String accountQuotaName) {
        this.accountQuotaName = accountQuotaName;
    }

    /**
     * Returns the name of the AWS DMS quota for this AWS account.
     *
     * @return The name of the AWS DMS quota for this AWS account.
     */
    public String getAccountQuotaName() {
        return this.accountQuotaName;
    }

    /**
     * Sets the name of the AWS DMS quota for this AWS account.
     *
     * @param accountQuotaName
     *        The name of the AWS DMS quota for this AWS account.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AccountQuota withAccountQuotaName(String accountQuotaName) {
        setAccountQuotaName(accountQuotaName);
        return this;
    }

    /**
     * Sets the amount currently used toward the quota maximum.
     *
     * @param used
     *        The amount currently used toward the quota maximum.
     */
    public void setUsed(Long used) {
        this.used = used;
    }

    /**
     * Returns the amount currently used toward the quota maximum.
     *
     * @return The amount currently used toward the quota maximum.
     */
    public Long getUsed() {
        return this.used;
    }

    /**
     * Sets the amount currently used toward the quota maximum.
     *
     * @param used
     *        The amount currently used toward the quota maximum.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AccountQuota withUsed(Long used) {
        setUsed(used);
        return this;
    }

    /**
     * Sets the maximum allowed value for the quota.
     *
     * @param max
     *        The maximum allowed value for the quota.
     */
    public void setMax(Long max) {
        this.max = max;
    }

    /**
     * Returns the maximum allowed value for the quota.
     *
     * @return The maximum allowed value for the quota.
     */
    public Long getMax() {
        return this.max;
    }

    /**
     * Sets the maximum allowed value for the quota.
     *
     * @param max
     *        The maximum allowed value for the quota.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AccountQuota withMax(Long max) {
        setMax(max);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null members are rendered; output format must stay stable for callers
        // that parse or log it, so the separators match the generated-SDK convention.
        StringBuilder buf = new StringBuilder("{");
        if (getAccountQuotaName() != null) {
            buf.append("AccountQuotaName: ").append(getAccountQuotaName()).append(",");
        }
        if (getUsed() != null) {
            buf.append("Used: ").append(getUsed()).append(",");
        }
        if (getMax() != null) {
            buf.append("Max: ").append(getMax());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof AccountQuota)) {
            // also covers obj == null
            return false;
        }
        AccountQuota other = (AccountQuota) obj;
        // Objects.equals gives the same null-safe semantics as the expanded
        // null-xor / equals checks it replaces.
        return java.util.Objects.equals(other.getAccountQuotaName(), this.getAccountQuotaName())
                && java.util.Objects.equals(other.getUsed(), this.getUsed())
                && java.util.Objects.equals(other.getMax(), this.getMax());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces exactly the 31-based rolling hash (seed 1, null -> 0)
        // that the hand-rolled loop computed, so hashes remain compatible.
        return java.util.Objects.hash(getAccountQuotaName(), getUsed(), getMax());
    }

    @Override
    public AccountQuota clone() {
        try {
            return (AccountQuota) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.databasemigrationservice.model.transform.AccountQuotaMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.shindig.social.opensocial.model;
import org.apache.shindig.protocol.model.Enum;
import org.apache.shindig.protocol.model.Exportablebean;
//import org.apache.shindig.social.core.model.DocumentImpl;
import com.google.common.base.Functions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.inject.ImplementedBy;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* see <a href="http://www.opensocial.org/Technical-Resources/opensocial-spec-v081/opensocial-reference#opensocial.Document.Field">
* http://www.opensocial.org/Technical-Resources/opensocial-spec-v081/opensocial-reference#opensocial.Document.Field</a>
* for all field meanings. All fields are represented in the js api at this time except for lastUpdated.
* This field is currently only in the RESTful spec.
*
*/
//@ImplementedBy(DocumentImpl.class)
@Exportablebean
public interface Document {
    /**
     * The type of a profile url when represented as a list field.
     */
    String PROFILE_URL_TYPE = "profile";
    /**
     * The type of thumbnail photo types when represented as list fields.
     */
    String THUMBNAIL_PHOTO_TYPE = "thumbnail";
    /**
     * Get the display name for the document.
     * @return the display name
     */
    String getDisplayName();
    /**
     * Set the display name.
     * @param displayName the new display name.
     */
    void setDisplayName(String displayName);
    /**
     * The fields that represent the document object in json form.
     */
    public static enum Field {
        /** the json field for appData. */
        APP_DATA("appData"),
        /** the json field for documentType. */
        DOCUMENT_TYPE("documentType"),
        /** the json field for documentEntity. */
        DOCUMENT_ENTITY("documentEntity"),
        /** the json field for author. */
        AUTHOR("author"),
        /** the json field for authorEmail. */
        AUTHOR_EMAIL("authorEmail"),
        /** the json field for parentId. */
        PARENT_ID("parentId"),
        /** the json field for parentType. */
        PARENT_TYPE("parentType"),
        /** the json field for description. */
        DESCRIPTION("description"),
        /** the json field for display name; needed to support the RESTful api. */
        DISPLAY_NAME("displayName"),
        /** the json field for height. */
        HEIGHT("height"),
        /** the json field for id. */
        ID("id"),
        /** the json field for IM accounts. */
        IMS("ims"),
        /** the json field for name. */
        NAME("name"),
        /** the json field for screenshotUrl. */
        SCREENSHOT_URL("screenshotUrl"),
        /** the json field for tags. */
        TAGS("tags"),
        /** the json field for thumbnailUrl. */
        THUMBNAIL_URL("thumbnailUrl"),
        /** the json field for utcOffset. */
        UTC_OFFSET("utcOffset");
        /**
         * a Map to convert json string to Field representations.
         */
        private static final Map<String,Field> LOOKUP = Maps.uniqueIndex(EnumSet.allOf(Field.class),
                Functions.toStringFunction());
        /**
         * The json field that the instance represents.
         */
        private final String urlString;
        /**
         * The set of all fields.
         */
        public static final Set<String> ALL_FIELDS = LOOKUP.keySet();
        /**
         * The set of default fields returned fields.
         */
        public static final Set<String> DEFAULT_FIELDS = ImmutableSet.of(
                ID.toString(),
                NAME.toString(),
                DOCUMENT_ENTITY.toString(),
                PARENT_ID.toString(),
                PARENT_TYPE.toString(),
                THUMBNAIL_URL.toString());
        /**
         * create a field base on the a json element.
         *
         * @param urlString the name of the element
         */
        private Field(String urlString) {
            this.urlString = urlString;
        }
        /**
         * emit the field as a json element.
         *
         * @return the field name
         */
        @Override
        public String toString() {
            return this.urlString;
        }
        /**
         * Looks up the Field corresponding to a json element name.
         *
         * @param jsonString the json element name
         * @return the matching Field, or null if none matches
         */
        public static Field getField(String jsonString) {
            return LOOKUP.get(jsonString);
        }
        /**
         * Converts from a url string (usually passed in the fields= parameter) into the
         * corresponding field enum.
         * @param urlString The string to translate.
         * @return The corresponding document field.
         */
        public static Document.Field fromUrlString(String urlString) {
            return LOOKUP.get(urlString);
        }
    }
    /**
     * Get app data for the document.
     *
     * @return the app data, possibly a subset.
     */
    Map<String, ?> getAppData();
    /**
     * Sets app data for the document.
     *
     * @param appData the app data, possibly a subset
     */
    void setAppData(Map<String, ?> appData);
    /**
     * Get the type of the document. Container support for this field is OPTIONAL.
     *
     * @return the document type
     */
    String getDocumentType();
    /**
     * Set the type of the document. Container support for this field is OPTIONAL.
     *
     * @param documentType the document type
     */
    void setDocumentType(String documentType);
    /**
     * Get the document entity. Container support for this field is OPTIONAL.
     * NOTE(review): presumably the document's serialized content/body — confirm against implementations.
     *
     * @return documentEntity
     */
    String getDocumentEntity();
    /**
     * Set the document entity. Container support for this field is OPTIONAL.
     *
     * @param documentEntity the document entity
     */
    void setDocumentEntity(String documentEntity);
    /**
     * Get the author of the document. Container support for this field is OPTIONAL.
     *
     * @return author
     */
    String getAuthor();
    /**
     * Set the author of the document. Container support for this field is OPTIONAL.
     *
     * @param author the document's author
     */
    void setAuthor(String author);
    /**
     * Get the email address of the document's author. Container support for this field is OPTIONAL.
     *
     * @return authorEmail
     */
    String getAuthorEmail();
    /**
     * Set the email address of the document's author. Container support for this field is OPTIONAL.
     *
     * @param authorEmail the author's email address
     */
    void setAuthorEmail(String authorEmail);
    /**
     * Get the parent ID of the document/document to which this document belongs. Container support for this
     * field is REQUIRED.
     * @return the parentId
     */
    String getParentId();
    /**
     * Set the parent ID of the document/document to which this document belongs. Container support for this
     * field is REQUIRED.
     *
     * @param parentId of element to which document belongs
     */
    void setParentId(String parentId);
    /**
     * Get the parent TYPE (@document or @document) where this document belongs. Container support for this
     * field is REQUIRED.
     * @return the parentType
     */
    String getParentType();
    /**
     * Set the parent TYPE (@document or @document) where this document belongs. Container support for this
     * field is REQUIRED.
     *
     * @param parentType of element to which document belongs
     */
    void setParentType(String parentType);
    /**
     * Get string description of a document, specified as a string. Container support for this field is
     * OPTIONAL.
     *
     * @return the document's description
     */
    String getDescription();
    /**
     * Set string description of a document, specified as a string. Container support for this field is
     * OPTIONAL.
     *
     * @param description the document's description
     */
    void setDescription(String description);
    /**
     * Get the height of the document. Container support for this field is OPTIONAL.
     *
     * @return the document's height
     */
    Integer getHeight();
    /**
     * Set the height of the document. Container support for this field is OPTIONAL.
     *
     * @param height the document's height
     */
    void setHeight(Integer height);
    /**
     * Get A string ID that can be permanently associated with this document. Container support for this
     * field is REQUIRED.
     *
     * @return the permanent ID of the document
     */
    String getId();
    /**
     * Set A string ID that can be permanently associated with this document. Container support for this
     * field is REQUIRED.
     *
     * @param id the permanent ID of the document
     */
    void setId(String id);
    /**
     * Get a list of Instant messaging address for this Document. No official canonicalization rules
     * exist for all instant messaging addresses, but Service Providers SHOULD remove all whitespace
     * and convert the address to lowercase, if this is appropriate for the service this IM address is
     * used for. Instead of the standard Canonical Values for type, this field defines the following
     * Canonical Values to represent currently popular IM services: aim, gtalk, icq, xmpp, msn, skype,
     * qq, and yahoo.
     *
     * @return A list of IM addresses
     */
    List<ListField> getIms();
    /**
     * Set a list of Instant messaging address for this Document. No official canonicalization rules
     * exist for all instant messaging addresses, but Service Providers SHOULD remove all whitespace
     * and convert the address to lowercase, if this is appropriate for the service this IM address is
     * used for. Instead of the standard Canonical Values for type, this field defines the following
     * Canonical Values to represent currently popular IM services: aim, gtalk, icq, xmpp, msn, skype,
     * qq, and yahoo.
     *
     * @param ims a list ListFields representing IM addresses.
     */
    void setIms(List<ListField> ims);
    /**
     * Get the document's name. Container support for this field is REQUIRED.
     *
     * @return the document's name
     */
    String getName();
    /**
     * Set the document's name. Container support for this field is REQUIRED.
     *
     * @param name the document's name
     */
    void setName(String name);
    /**
     * Get the URL of a screenshot of the document. Container support for this field is OPTIONAL.
     *
     * @return the document's screenshot URL
     */
    String getScreenshotUrl();
    /**
     * Set the URL of a screenshot of the document. Container support for this field is OPTIONAL.
     *
     * @param screenshotUrl the document's screenshot URL
     */
    void setScreenshotUrl(String screenshotUrl);
    /**
     * Get arbitrary tags about the person. Container support for this field is OPTIONAL.
     *
     * @return arbitrary tags about the person.
     */
    List<String> getTags();
    /**
     * Set arbitrary tags about the person. Container support for this field is OPTIONAL.
     *
     * @param tags arbitrary tags about the person.
     */
    void setTags(List<String> tags);
    /**
     * Get the document's profile url. Container support for this field is OPTIONAL.
     *
     * @return the document's profile url
     */
    String getProfileUrl();
    /**
     * Set the document's profile url. Container support for this field is OPTIONAL.
     *
     * @param profileUrl the document's profile url
     */
    void setProfileUrl(String profileUrl);
    /**
     * Get the document's thumbnail url. Container support for this field is OPTIONAL.
     *
     * @return the document's thumbnail url
     */
    String getThumbnailUrl();
    /**
     * Set the document's thumbnail url. Container support for this field is OPTIONAL.
     *
     * @param thumbnailUrl the document's thumbnail url
     */
    void setThumbnailUrl(String thumbnailUrl);
    /**
     * The time this document was last updated.
     *
     * @return the last update time
     */
    Date getUpdated();
    /**
     * Set the time this record was last updated.
     *
     * @param updated the last update time
     */
    void setUpdated(Date updated);
    /**
     * Get the Document's time zone, specified as the difference in minutes between Greenwich Mean Time
     * (GMT) and the document's local time. Container support for this field is OPTIONAL.
     *
     * @return the Document's time zone
     */
    Long getUtcOffset();
    /**
     * Set the Document's time zone, specified as the difference in minutes between Greenwich Mean Time
     * (GMT) and the document's local time. Container support for this field is OPTIONAL.
     *
     * @param utcOffset the Document's time zone
     */
    void setUtcOffset(Long utcOffset);
    /**
     * @return true if this document object represents the owner of the current page.
     */
    boolean getIsOwner();
    /**
     * Set the owner flag.
     * @param isOwner the isOwner flag
     */
    void setIsOwner(boolean isOwner);
}
| |
/*
* Copyright 2010 Henry Coles
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.pitest.mutationtest.commandline;
import static org.pitest.mutationtest.config.ConfigOption.AVOID_CALLS;
import static org.pitest.mutationtest.config.ConfigOption.CHILD_JVM;
import static org.pitest.mutationtest.config.ConfigOption.CLASSPATH;
import static org.pitest.mutationtest.config.ConfigOption.CODE_PATHS;
import static org.pitest.mutationtest.config.ConfigOption.COVERAGE_THRESHOLD;
import static org.pitest.mutationtest.config.ConfigOption.DEPENDENCY_DISTANCE;
import static org.pitest.mutationtest.config.ConfigOption.EXCLUDED_CLASSES;
import static org.pitest.mutationtest.config.ConfigOption.EXCLUDED_GROUPS;
import static org.pitest.mutationtest.config.ConfigOption.EXCLUDED_METHOD;
import static org.pitest.mutationtest.config.ConfigOption.EXPORT_LINE_COVERAGE;
import static org.pitest.mutationtest.config.ConfigOption.FAIL_WHEN_NOT_MUTATIONS;
import static org.pitest.mutationtest.config.ConfigOption.HISTORY_INPUT_LOCATION;
import static org.pitest.mutationtest.config.ConfigOption.HISTORY_OUTPUT_LOCATION;
import static org.pitest.mutationtest.config.ConfigOption.INCLUDED_GROUPS;
import static org.pitest.mutationtest.config.ConfigOption.INCLUDE_LAUNCH_CLASSPATH;
import static org.pitest.mutationtest.config.ConfigOption.JVM_PATH;
import static org.pitest.mutationtest.config.ConfigOption.MAX_MUTATIONS_PER_CLASS;
import static org.pitest.mutationtest.config.ConfigOption.MUTATE_STATIC_INITIALIZERS;
import static org.pitest.mutationtest.config.ConfigOption.MUTATIONS;
import static org.pitest.mutationtest.config.ConfigOption.MUTATION_ENGINE;
import static org.pitest.mutationtest.config.ConfigOption.MUTATION_THRESHOLD;
import static org.pitest.mutationtest.config.ConfigOption.MUTATION_UNIT_SIZE;
import static org.pitest.mutationtest.config.ConfigOption.OUTPUT_FORMATS;
import static org.pitest.mutationtest.config.ConfigOption.PROJECT_FILE;
import static org.pitest.mutationtest.config.ConfigOption.REPORT_DIR;
import static org.pitest.mutationtest.config.ConfigOption.SOURCE_DIR;
import static org.pitest.mutationtest.config.ConfigOption.TARGET_CLASSES;
import static org.pitest.mutationtest.config.ConfigOption.TEST_FILTER;
import static org.pitest.mutationtest.config.ConfigOption.THREADS;
import static org.pitest.mutationtest.config.ConfigOption.TIMEOUT_CONST;
import static org.pitest.mutationtest.config.ConfigOption.TIMEOUT_FACTOR;
import static org.pitest.mutationtest.config.ConfigOption.TIME_STAMPED_REPORTS;
import static org.pitest.mutationtest.config.ConfigOption.USE_INLINED_CODE_DETECTION;
import static org.pitest.mutationtest.config.ConfigOption.VERBOSE;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import joptsimple.ArgumentAcceptingOptionSpec;
import joptsimple.OptionException;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import joptsimple.OptionSpecBuilder;
import org.pitest.classpath.ClassPath;
import org.pitest.classpath.ClassPathByteArraySource;
import org.pitest.functional.FArray;
import org.pitest.functional.FCollection;
import org.pitest.functional.predicate.Predicate;
import org.pitest.mutationtest.config.ConfigOption;
import org.pitest.mutationtest.config.ConfigurationFactory;
import org.pitest.mutationtest.config.ReportOptions;
import org.pitest.testapi.TestGroupConfig;
import org.pitest.project.ProjectConfigurationException;
import org.pitest.project.ProjectConfigurationParser;
import org.pitest.project.ProjectConfigurationParserException;
import org.pitest.project.ProjectConfigurationParserFactory;
import org.pitest.util.Glob;
import org.pitest.util.Unchecked;
public class OptionsParser {
private final Predicate<String> dependencyFilter;
private final OptionParser parser;
private final ArgumentAcceptingOptionSpec<String> reportDirSpec;
private final OptionSpec<String> targetClassesSpec;
private final OptionSpec<String> targetTestsSpec;
private final OptionSpec<String> avoidCallsSpec;
private final OptionSpec<Integer> depth;
private final OptionSpec<Integer> threadsSpec;
private final OptionSpec<File> sourceDirSpec;
private final OptionSpec<File> historyOutputSpec;
private final OptionSpec<File> historyInputSpec;
private final OptionSpec<String> mutators;
private final OptionSpec<String> jvmArgs;
private final ArgumentAcceptingOptionSpec<Boolean> mutateStatics;
private final OptionSpec<Float> timeoutFactorSpec;
private final OptionSpec<Long> timeoutConstSpec;
private final OptionSpec<String> excludedMethodsSpec;
private final OptionSpec<Integer> maxMutationsPerClassSpec;
private final ArgumentAcceptingOptionSpec<Boolean> verboseSpec;
private final OptionSpec<String> excludedClassesSpec;
private final OptionSpec<String> outputFormatSpec;
private final OptionSpec<String> projectFileSpec;
private final OptionSpec<String> additionalClassPathSpec;
private final ArgumentAcceptingOptionSpec<Boolean> failWhenNoMutations;
private final ArgumentAcceptingOptionSpec<String> codePaths;
private final OptionSpec<String> excludedGroupsSpec;
private final OptionSpec<String> includedGroupsSpec;
private final OptionSpec<Integer> mutationUnitSizeSpec;
private final ArgumentAcceptingOptionSpec<Boolean> timestampedReportsSpec;
private final ArgumentAcceptingOptionSpec<Boolean> detectInlinedCode;
private final ArgumentAcceptingOptionSpec<Integer> mutationThreshHoldSpec;
private final ArgumentAcceptingOptionSpec<Integer> coverageThreshHoldSpec;
private final OptionSpec<String> mutationEngine;
private final ArgumentAcceptingOptionSpec<Boolean> exportLineCoverageSpec;
private final OptionSpec<String> javaExecutable;
private final ArgumentAcceptingOptionSpec<Boolean> includeLaunchClasspathSpec;
public OptionsParser(Predicate<String> dependencyFilter) {
this.dependencyFilter = dependencyFilter;
this.parser = new OptionParser();
this.parser.acceptsAll(Arrays.asList("h", "?"), "show help");
this.reportDirSpec = parserAccepts(REPORT_DIR).withRequiredArg()
.describedAs("directory to create report folder in").required();
this.projectFileSpec = parserAccepts(PROJECT_FILE).withRequiredArg()
.ofType(String.class)
.describedAs("The name of the config file to use.");
this.targetClassesSpec = parserAccepts(TARGET_CLASSES)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"comma separated list of filters to match against classes to test")
.required();
this.avoidCallsSpec = parserAccepts(AVOID_CALLS)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"comma separated list of packages to consider as untouchable logging calls");
this.targetTestsSpec = parserAccepts(TEST_FILTER)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"comma separated list of filters to match against tests to run");
this.depth = parserAccepts(DEPENDENCY_DISTANCE).withRequiredArg()
.ofType(Integer.class)
.defaultsTo(DEPENDENCY_DISTANCE.getDefault(Integer.class))
.describedAs("maximum distance to look from test for covered classes");
this.threadsSpec = parserAccepts(THREADS).withRequiredArg()
.ofType(Integer.class).defaultsTo(THREADS.getDefault(Integer.class))
.describedAs("number of threads to use for testing");
this.maxMutationsPerClassSpec = parserAccepts(MAX_MUTATIONS_PER_CLASS)
.withRequiredArg().ofType(Integer.class)
.defaultsTo(MAX_MUTATIONS_PER_CLASS.getDefault(Integer.class))
.describedAs("max number of mutations to allow for each class");
this.sourceDirSpec = parserAccepts(SOURCE_DIR).withRequiredArg()
.ofType(File.class).withValuesSeparatedBy(',')
.describedAs("comma separated list of source directories").required();
this.mutators = parserAccepts(MUTATIONS).withRequiredArg()
.ofType(String.class).withValuesSeparatedBy(',')
.describedAs("comma separated list of mutation operators");
this.jvmArgs = parserAccepts(CHILD_JVM).withRequiredArg()
.withValuesSeparatedBy(',')
.describedAs("comma separated list of child JVM args");
this.mutateStatics = parserAccepts(MUTATE_STATIC_INITIALIZERS)
.withOptionalArg()
.ofType(Boolean.class)
.defaultsTo(true)
.describedAs(
"whether or not to generate mutations in static initializers");
this.detectInlinedCode = parserAccepts(USE_INLINED_CODE_DETECTION)
.withOptionalArg()
.ofType(Boolean.class)
.defaultsTo(true)
.describedAs(
"whether or not to try and detect code inlined from finally blocks");
this.timestampedReportsSpec = parserAccepts(TIME_STAMPED_REPORTS)
.withOptionalArg().ofType(Boolean.class).defaultsTo(true)
.describedAs("whether or not to generated timestamped directories");
this.timeoutFactorSpec = parserAccepts(TIMEOUT_FACTOR).withOptionalArg()
.ofType(Float.class)
.describedAs("factor to apply to calculate maximum test duration")
.defaultsTo(TIMEOUT_FACTOR.getDefault(Float.class));
this.timeoutConstSpec = parserAccepts(TIMEOUT_CONST).withOptionalArg()
.ofType(Long.class)
.describedAs("constant to apply to calculate maximum test duration")
.defaultsTo(TIMEOUT_CONST.getDefault(Long.class));
this.excludedMethodsSpec = parserAccepts(EXCLUDED_METHOD)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"comma separated list of filters to match against methods to exclude from mutation analysis");
this.excludedClassesSpec = parserAccepts(EXCLUDED_CLASSES)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"comma separated list of globs for classes to exclude when looking for both mutation target and tests");
this.verboseSpec = parserAccepts(VERBOSE).withOptionalArg()
.ofType(Boolean.class).defaultsTo(true)
.describedAs("whether or not to generate verbose output");
this.exportLineCoverageSpec = parserAccepts(EXPORT_LINE_COVERAGE)
.withOptionalArg()
.ofType(Boolean.class)
.defaultsTo(true)
.describedAs(
"whether or not to dump per test line coverage data to disk");
this.includeLaunchClasspathSpec = parserAccepts(INCLUDE_LAUNCH_CLASSPATH)
.withOptionalArg().ofType(Boolean.class).defaultsTo(true)
.describedAs("whether or not to analyse launch classpath");
this.outputFormatSpec = parserAccepts(OUTPUT_FORMATS)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"comma separated list of listeners to receive mutation results")
.defaultsTo("HTML");
this.additionalClassPathSpec = parserAccepts(CLASSPATH).withRequiredArg()
.ofType(String.class).withValuesSeparatedBy(',')
.describedAs("coma separated list of additional classpath elements");
this.failWhenNoMutations = parserAccepts(FAIL_WHEN_NOT_MUTATIONS)
.withOptionalArg().ofType(Boolean.class).defaultsTo(true)
.describedAs("whether to throw error if no mutations found");
this.codePaths = parserAccepts(CODE_PATHS)
.withRequiredArg()
.ofType(String.class)
.withValuesSeparatedBy(',')
.describedAs(
"Globs identifying classpath roots containing mutable code");
this.includedGroupsSpec = parserAccepts(INCLUDED_GROUPS).withRequiredArg()
.ofType(String.class).withValuesSeparatedBy(',')
.describedAs("TestNG groups/JUnit categories to include");
this.excludedGroupsSpec = parserAccepts(EXCLUDED_GROUPS).withRequiredArg()
.ofType(String.class).withValuesSeparatedBy(',')
.describedAs("TestNG groups/JUnit categories to include");
this.mutationUnitSizeSpec = parserAccepts(MUTATION_UNIT_SIZE)
.withRequiredArg()
.ofType(Integer.class)
.describedAs(
"Maximum number of mutations to include within a single unit of analysis")
.defaultsTo(MUTATION_UNIT_SIZE.getDefault(Integer.class));
this.historyInputSpec = parserAccepts(HISTORY_INPUT_LOCATION)
.withRequiredArg().ofType(File.class)
.describedAs("File to read history from for incremental analysis");
this.historyOutputSpec = parserAccepts(HISTORY_OUTPUT_LOCATION)
.withRequiredArg().ofType(File.class)
.describedAs("File to write history to for incremental analysis");
this.mutationThreshHoldSpec = parserAccepts(MUTATION_THRESHOLD)
.withRequiredArg().ofType(Integer.class)
.describedAs("Mutation score below which to throw an error")
.defaultsTo(MUTATION_THRESHOLD.getDefault(Integer.class));
this.coverageThreshHoldSpec = parserAccepts(COVERAGE_THRESHOLD)
.withRequiredArg().ofType(Integer.class)
.describedAs("Line coverage below which to throw an error")
.defaultsTo(COVERAGE_THRESHOLD.getDefault(Integer.class));
this.mutationEngine = parserAccepts(MUTATION_ENGINE).withRequiredArg()
.ofType(String.class).describedAs("mutation engine to use")
.defaultsTo(MUTATION_ENGINE.getDefault(String.class));
this.javaExecutable = parserAccepts(JVM_PATH).withRequiredArg()
.ofType(String.class).describedAs("path to java executable");
}
// Registers the given config option's parameter name with the underlying
// joptsimple parser and returns the spec builder for further configuration.
private OptionSpecBuilder parserAccepts(final ConfigOption option) {
    return this.parser.accepts(option.getParamName());
}
/**
 * Parses the given command-line arguments into a {@link ParseResult}.
 * If a project file option is present the configuration is loaded from that
 * file; otherwise options are read directly from the command line. Any
 * joptsimple parse failure is returned as an error message on the result
 * rather than thrown.
 *
 * @param args the raw command-line arguments
 * @return the parse result, carrying either populated options or an error message
 */
public ParseResult parse(final String[] args) {
    final ReportOptions data = new ReportOptions();
    try {
        final OptionSet userArgs = this.parser.parse(args);
        return userArgs.has(this.projectFileSpec)
            ? loadProjectFile(userArgs)
            : parseCommandLine(data, userArgs);
    } catch (final OptionException uoe) {
        return new ParseResult(data, uoe.getLocalizedMessage());
    }
}
/**
 * Creates a new ParseResult object using the command line arguments.
 *
 * NOTE: statement order matters near the end of this method —
 * setClassPath() reads the include-launch-classpath flag assigned earlier,
 * and setTestConfiguration() consumes the classpath that setClassPath()
 * populates on {@code data}.
 *
 * @param data
 *          the ReportOptions to populate.
 * @param userArgs
 *          the OptionSet which contains the command line arguments.
 * @return a new ParseResult, correctly configured using the command line
 *         arguments.
 */
private ParseResult parseCommandLine(final ReportOptions data,
final OptionSet userArgs) {
data.setReportDir(userArgs.valueOf(this.reportDirSpec));
// Class/test filters are converted from raw glob strings into predicates.
data.setTargetClasses(FCollection.map(
this.targetClassesSpec.values(userArgs), Glob.toGlobPredicate()));
data.setTargetTests(FCollection.map(this.targetTestsSpec.values(userArgs),
Glob.toGlobPredicate()));
data.setSourceDirs(this.sourceDirSpec.values(userArgs));
data.setMutators(this.mutators.values(userArgs));
data.setDependencyAnalysisMaxDistance(this.depth.value(userArgs));
data.addChildJVMArgs(this.jvmArgs.values(userArgs));
// Boolean flags: has(...) guards valueOf(...) so an absent flag reads as false.
data.setMutateStaticInitializers(userArgs.has(this.mutateStatics)
&& userArgs.valueOf(this.mutateStatics));
data.setDetectInlinedCode(userArgs.has(this.detectInlinedCode)
&& userArgs.valueOf(this.detectInlinedCode));
data.setIncludeLaunchClasspath(userArgs
.valueOf(this.includeLaunchClasspathSpec));
data.setShouldCreateTimestampedReports(userArgs
.valueOf(this.timestampedReportsSpec));
data.setNumberOfThreads(this.threadsSpec.value(userArgs));
data.setTimeoutFactor(this.timeoutFactorSpec.value(userArgs));
data.setTimeoutConstant(this.timeoutConstSpec.value(userArgs));
data.setLoggingClasses(this.avoidCallsSpec.values(userArgs));
// Exclusions are glob-based, like the target filters above.
data.setExcludedMethods(FCollection.map(
this.excludedMethodsSpec.values(userArgs), Glob.toGlobPredicate()));
data.setExcludedClasses(FCollection.map(
this.excludedClassesSpec.values(userArgs), Glob.toGlobPredicate()));
data.setMaxMutationsPerClass(this.maxMutationsPerClassSpec.value(userArgs));
data.setVerbose(userArgs.has(this.verboseSpec)
&& userArgs.valueOf(this.verboseSpec));
data.addOutputFormats(this.outputFormatSpec.values(userArgs));
data.setFailWhenNoMutations(this.failWhenNoMutations.value(userArgs));
data.setCodePaths(this.codePaths.values(userArgs));
data.setMutationUnitSize(this.mutationUnitSizeSpec.value(userArgs));
data.setHistoryInputLocation(this.historyInputSpec.value(userArgs));
data.setHistoryOutputLocation(this.historyOutputSpec.value(userArgs));
data.setMutationThreshold(this.mutationThreshHoldSpec.value(userArgs));
data.setCoverageThreshold(this.coverageThreshHoldSpec.value(userArgs));
data.setMutationEngine(this.mutationEngine.value(userArgs));
data.setExportLineCoverage(userArgs.has(this.exportLineCoverageSpec)
&& userArgs.valueOf(this.exportLineCoverageSpec));
// Derived settings: classpath must be assembled before the test
// configuration, which reads data.getClassPath().
setClassPath(userArgs, data);
setTestConfiguration(userArgs, data);
data.setJavaExecutable(this.javaExecutable.value(userArgs));
// "?" is the help flag; return a non-null message so the caller knows
// only the usage text is expected.
if (userArgs.has("?")) {
return new ParseResult(data, "See above for supported parameters.");
} else {
return new ParseResult(data, null);
}
}
// Assembles the analysis classpath on the ReportOptions: the launch
// classpath (optionally filtered through dependencyFilter) followed by any
// user-supplied additional entries.
private void setClassPath(final OptionSet userArgs, final ReportOptions data) {
    final List<String> classPath = new ArrayList<String>();
    if (!data.isIncludeLaunchClasspath()) {
        classPath.addAll(FArray.filter(ClassPath.getClassPathElements(),
                dependencyFilter));
    } else {
        classPath.addAll(Arrays.asList(ClassPath.getClassPathElements()));
    }
    classPath.addAll(userArgs.valuesOf(this.additionalClassPathSpec));
    data.setClassPathElements(classPath);
}
// Builds the test-group configuration from the include/exclude group options
// and installs both it and the derived Configuration on the ReportOptions.
// Relies on data.getClassPath() having been populated by setClassPath().
private void setTestConfiguration(final OptionSet userArgs,
        final ReportOptions data) {
    final TestGroupConfig groupConfig = new TestGroupConfig(
            this.excludedGroupsSpec.values(userArgs),
            this.includedGroupsSpec.values(userArgs));
    data.setGroupConfig(groupConfig);
    final ClassPathByteArraySource byteSource = new ClassPathByteArraySource(
            data.getClassPath());
    final ConfigurationFactory factory = new ConfigurationFactory(groupConfig,
            byteSource);
    data.setConfiguration(factory.createConfiguration());
}
/**
 * Creates a new ParseResult object, using the project file specified by the
 * user on the command line.
 *
 * Any failure to locate or parse the project file is converted into an
 * error-bearing ParseResult rather than propagated.
 *
 * @param userArgs
 *          the OptionSet that contains all of the command line arguments.
 * @return a correctly instantiated ParseResult using the project file to load
 *         arguments.
 */
private ParseResult loadProjectFile(final OptionSet userArgs) {
    try {
        final ProjectConfigurationParser configParser = ProjectConfigurationParserFactory
                .createParser();
        final ReportOptions loaded = configParser.loadProject(userArgs
                .valueOf(this.projectFileSpec));
        return new ParseResult(loaded, null);
    } catch (final ProjectConfigurationParserException
            | ProjectConfigurationException e) {
        // Both failure modes were handled with identical duplicated catch
        // blocks; merged into a single multi-catch.
        return new ParseResult(new ReportOptions(), "Project File ERROR: "
                + e.getMessage() + ".");
    }
}
/** Writes the parser's usage/help text to stdout. */
public void printHelp() {
    try {
        this.parser.printHelpOn(System.out);
    } catch (final IOException e) {
        // printHelpOn declares IOException; rethrow unchecked as elsewhere.
        throw Unchecked.translateCheckedException(e);
    }
}
}
| |
package com.ctrip.hermes.example.common;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.unidal.lookup.util.StringUtils;
import com.google.common.collect.ImmutableMap;
/**
 * Static, process-wide key/value configuration backed by properties files
 * on the classpath. Keys loaded later override earlier ones; values may
 * also be set programmatically and optionally written back to the file
 * they came from.
 */
public class Configuration {
    private static final Logger LOGGER = LoggerFactory.getLogger(Configuration.class);

    // Resource names (re)loaded once by the static initialiser.
    private static List<String> defaultResources = new CopyOnWriteArrayList<String>();

    // Loader used to resolve config resources; selected in the static block.
    private static ClassLoader classLoader;

    // NOTE(review): configMap and configFiles are mutated without
    // synchronisation while defaultResources is copy-on-write; confirm
    // callers are single-threaded or make these concurrent as well.
    private static Map<String, String> configMap = new HashMap<String, String>();

    private static List<String> configFiles = new ArrayList<String>();

    /** Returns an immutable snapshot of every loaded configuration entry. */
    public static Map<String, String> getAllConfig() {
        return ImmutableMap.copyOf(configMap);
    }

    /** Loads the named properties resource, overriding existing keys. */
    public static void addResource(String name) {
        loadConfig(name);
    }

    /** Registers a resource to be loaded by the static initialiser (no duplicates). */
    public static synchronized void addDefaultResource(String name) {
        if (!defaultResources.contains(name)) {
            defaultResources.add(name);
        }
    }

    private static void loadDefaultConfig() {
        for (String defaultResource : defaultResources) {
            loadConfig(defaultResource);
        }
    }

    /**
     * Reads a classpath properties file into configMap and records the file
     * name. A missing resource is silently skipped; an unreadable one is
     * logged and skipped.
     */
    private static void loadConfig(String confFile) {
        Properties props = new Properties();
        InputStream in = null;
        try {
            URL url = classLoader.getResource(confFile);
            if (url == null) {
                return; // not on the classpath — nothing to load
            }
            in = url.openStream();
            props.load(in);
            Enumeration<?> en = props.propertyNames();
            while (en.hasMoreElements()) {
                String key = (String) en.nextElement();
                configMap.put(key, props.getProperty(key));
            }
            configFiles.add(confFile);
        } catch (Exception e) {
            LOGGER.warn("Cannot load configuration from file <" + confFile + ">", e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }

    /** Returns the trimmed value for {@code name}, or null if absent. */
    public static String get(String name) {
        return getTrimmed(name);
    }

    /** Returns the trimmed value for {@code name}, or {@code defaultValue} if absent. */
    public static String get(String name, String defaultValue) {
        String result = get(name);
        if (result == null) {
            result = defaultValue;
        }
        return result;
    }

    private static String getTrimmed(String name) {
        // Map is now <String, String>; the old (String) cast was redundant.
        String value = configMap.get(name);
        if (value == null) {
            return null;
        }
        return value.trim();
    }

    /** Parses the value as an int, or returns {@code defaultValue} if absent. */
    public static int getInt(String name, int defaultValue) {
        String valueString = get(name);
        if (valueString == null) {
            return defaultValue;
        }
        return Integer.parseInt(valueString);
    }

    /** Parses the value as an int; throws NumberFormatException if absent or invalid. */
    public static int getInt(String name) {
        return Integer.parseInt(get(name));
    }

    /** Parses the value as a boolean, or returns {@code defaultValue} if absent. */
    public static boolean getBoolean(String name, boolean defaultValue) {
        String valueString = get(name);
        if (valueString == null) {
            return defaultValue;
        }
        return Boolean.parseBoolean(valueString);
    }

    /** Parses the value as a boolean; absent or non-"true" values yield false. */
    public static boolean getBoolean(String name) {
        return Boolean.parseBoolean(get(name));
    }

    /** Splits the comma-separated value into trimmed tokens; empty array if absent. */
    public static String[] getStrings(String name) {
        return getTrimmedStrings(get(name));
    }

    private static String[] getTrimmedStrings(String str) {
        if (str == null || "".equals(str.trim())) {
            return new String[0];
        }
        return str.trim().split("\\s*,\\s*");
    }

    /**
     * Loads the class named by the value of {@code name} via this class's
     * classLoader.
     *
     * @throws ClassNotFoundException if the key is absent or the class is missing
     */
    public static Class<?> getClass(String name) throws ClassNotFoundException {
        String valueString = getTrimmed(name);
        if (valueString == null) {
            throw new ClassNotFoundException("Class " + name + " not found");
        }
        return Class.forName(valueString, true, classLoader);
    }

    /** Loads every class named in the comma-separated value of {@code name}. */
    public static Class<?>[] getClasses(String name) throws ClassNotFoundException {
        String[] classNames = getStrings(name);
        if (classNames == null) {
            // getStrings never returns null today; retained for compatibility.
            return null;
        }
        Class<?>[] classes = new Class<?>[classNames.length];
        for (int i = 0; i < classNames.length; i++) {
            classes[i] = getClass(classNames[i]);
        }
        return classes;
    }

    /** Prints every key=value pair to stdout. */
    public static void dumpDeprecatedKeys() {
        for (Map.Entry<String, String> entry : configMap.entrySet()) {
            System.out.println(entry.getKey() + "=" + entry.getValue());
        }
    }

    /** Sets an in-memory value; both key and value must be non-blank. */
    public static void set(String key, String value) {
        if (StringUtils.isEmpty(key)) {
            throw new IllegalArgumentException("Key [" + key + "] is blank, invalid");
        }
        if (StringUtils.isEmpty(value)) {
            throw new IllegalArgumentException("Value [" + value + "] is blank, invalid");
        }
        configMap.put(key, value);
    }

    /**
     * Sets a value, optionally persisting it back to the properties file it
     * was loaded from before updating the in-memory map.
     */
    public static void set(String key, String value, boolean isWriterToFile) {
        if (isWriterToFile) {
            writeToFile(key, value);
        }
        set(key, value);
    }

    // Persists key=value into the loaded config file: the only file when one
    // is loaded, otherwise the last file whose name does not contain "default".
    private static void writeToFile(String key, String value) {
        String fileToWrite = null;
        if (configFiles.size() == 1) {
            fileToWrite = configFiles.get(0);
        } else {
            for (String config : configFiles) {
                if (!config.contains("default")) {
                    fileToWrite = config;
                }
            }
        }
        if (fileToWrite == null) {
            // Previously fell through to a NullPointerException in
            // getResource(null); report it instead.
            LOGGER.error("Cannot write configuration: no writable config file loaded");
            return;
        }
        Properties props = new Properties();
        try {
            URL url = classLoader.getResource(fileToWrite);
            InputStream in = url.openStream();
            try {
                props.load(in);
            } finally {
                in.close(); // was leaked when store() threw
            }
            props.put(key, value);
            FileOutputStream out = new FileOutputStream(new File(url.toURI()));
            try {
                props.store(out, null);
            } finally {
                out.close(); // original never closed the output stream
            }
        } catch (IOException ioe) {
            LOGGER.error("Cannot Write configuration TO <" + fileToWrite + ">", ioe);
        } catch (URISyntaxException e) {
            LOGGER.error("Cannot Write configuration TO <" + fileToWrite + ">: URISyntaxException", e);
        }
    }

    /** Returns a system property if set, else the config value, else the default. */
    public static String getPropOrConfig(String key, String defaultValue) {
        String value = System.getProperty(key);
        if (StringUtils.isEmpty(value)) {
            value = get(key);
            if (StringUtils.isEmpty(value)) {
                return defaultValue;
            }
        }
        return value;
    }

    static {
        // Prefer the context classloader; fall back to this class's own.
        classLoader = Thread.currentThread().getContextClassLoader();
        if (classLoader == null) {
            classLoader = Configuration.class.getClassLoader();
        }
        loadDefaultConfig();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.