gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* DistAckMapMethodsDUnitTest.java
*
* Created on August 4, 2005, 12:36 PM
*/
package org.apache.geode.cache30;
import static org.junit.Assert.*;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.CacheListener;
import org.apache.geode.cache.CacheWriter;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.cache.util.CacheWriterAdapter;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.DistributedTest;
@Category(DistributedTest.class)
public class DistAckMapMethodsDUnitTest extends JUnit4DistributedTestCase { // TODO: reformat
static Cache cache;
static Properties props = new Properties();
static DistributedSystem ds = null;
static Region region;
static Region mirroredRegion;
static Region remRegion;
static boolean afterDestroy = false;
// helper class referece objects
static Object afterDestroyObj;
@Override
public final void postSetUp() throws Exception {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
vm0.invoke(() -> DistAckMapMethodsDUnitTest.createCache());
vm1.invoke(() -> DistAckMapMethodsDUnitTest.createCache());
}
@Override
public final void preTearDown() throws Exception {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
vm0.invoke(() -> DistAckMapMethodsDUnitTest.closeCache());
vm1.invoke(() -> DistAckMapMethodsDUnitTest.closeCache());
cache = null;
Invoke.invokeInEveryVM(new SerializableRunnable() {
public void run() {
cache = null;
}
});
}
public static void createCache() {
try {
// props.setProperty(DistributionConfig.SystemConfigurationProperties.MCAST_PORT, "1234");
// ds = DistributedSystem.connect(props);
ds = (new DistAckMapMethodsDUnitTest()).getSystem(props);
cache = CacheFactory.create(ds);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
RegionAttributes attr = factory.create();
region = cache.createRegion("map", attr);
} catch (Exception ex) {
ex.printStackTrace();
}
}
public static void closeCache() {
try {
cache.close();
ds.disconnect();
} catch (Exception ex) {
ex.printStackTrace();
}
}
public static void createMirroredRegion() {
try {
AttributesFactory factory1 = new AttributesFactory();
factory1.setScope(Scope.DISTRIBUTED_ACK);
factory1.setDataPolicy(DataPolicy.REPLICATE);
RegionAttributes attr1 = factory1.create();
mirroredRegion = cache.createRegion("mirrored", attr1);
} catch (Exception ex) {
ex.printStackTrace();
}
}
public static void createRegionToTestRemove() {
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
CacheWriter cacheWriter = new RemoveCacheWriter();
CacheListener cacheListener = new RemoveCacheListener();
factory2.setCacheWriter(cacheWriter);
factory2.setCacheListener(cacheListener);
RegionAttributes attr2 = factory2.create();
remRegion = cache.createRegion("remove", attr2);
} catch (Exception ex) {
ex.printStackTrace();
}
}
// testMethods
@Test
public void testPutMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
Object obj1;
// put from one and get from other
int i = 1;
Object[] objArr = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);
if (obj1 == null) {
fail("region.put(key, value) from one vm does not match with region.get(key) from other vm");
}
// put from both vms for same key
i = 2;
objArr[0] = "" + i;
// in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
if (obj1 != null) {// here if some dummy object is returned on first time put then that should
// be checked
fail("failed while region.put from both vms for same key");
}
}
@Test
public void testRemoveMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
Object obj1, obj2;
boolean ret;
// put from one and get from other
int i = 1;
Object objArr[] = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
vm0.invoke(DistAckMapMethodsDUnitTest.class, "removeMethod", objArr);
// validate if vm0 has that key value entry
ret = vm0.invoke(() -> containsKeyMethod("" + i));
if (ret) {// if returned true means that the key is still there
fail("region.remove failed with distributed ack scope");
}
// test if the correct value is returned
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);// to make sure that
// vm1 region has the
// entry
obj2 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "removeMethod", objArr);
LogWriterUtils.getLogWriter().fine("111111111" + obj1);
LogWriterUtils.getLogWriter().fine("2222222222" + obj2);
if (obj1 == null)
fail("region1.getMethod returned null");
if (!(obj1.equals(obj2))) {
fail("region.remove failed with distributed ack scope");
}
}
@Test
public void testRemoveMethodDetails() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
vm0.invoke(() -> DistAckMapMethodsDUnitTest.createRegionToTestRemove());
vm1.invoke(() -> DistAckMapMethodsDUnitTest.createRegionToTestRemove());
vm0.invoke(() -> DistAckMapMethodsDUnitTest.removeMethodDetails());
vm1.invoke(new CacheSerializableRunnable("testRemoveMethodDetails") {
public void run2() throws CacheException {
Object ob1 = remRegion.get(new Integer(1));
assertEquals("beforeDestroy", ob1.toString());
// wait till listeber switches afterDestroy to true
// while(!afterDestroy){
// //wait
// }
assertEquals("afterDestroy", remRegion.get(new Integer(3)).toString());
}
});
}// end of testRemoveMethodDetails
@Test
public void testIsEmptyMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
// boolean ret;
// put from one and get from other
int i = 1;
Object objArr[] = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
boolean val = vm1.invoke(() -> DistAckMapMethodsDUnitTest.isEmptyMethod());
if (!val) {// val should be true
fail("Failed in region.isEmpty");
}
vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);
boolean val1 = vm1.invoke(() -> DistAckMapMethodsDUnitTest.isEmptyMethod());
if (val1) {
fail("Failed in region.isEmpty");
}
}
@Test
public void testContainsValueMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
// boolean ret;
// put from one and get from other
int i = 1;
Object objArr[] = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
boolean val = vm1.invoke(() -> containsValueMethod("first"));
if (val) {// val should be false.
fail("Failed in region.ContainsValue");
}
vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);
boolean val1 = vm1.invoke(() -> containsValueMethod("first"));
if (!val1) {// val1 should be true.
fail("Failed in region.ContainsValue");
}
}
@Test
public void testKeySetMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
int i = 1;
Object objArr[] = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
int temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.keySetMethod());
if (temp != 0) {
fail("failed in keySetMethodtest method");
}
vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);// to make sure that vm1
// region has the entry
temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.keySetMethod());
if (temp == 0) {
fail("failed in keySetMethodtest method");
}
// in the above scenarion we can test this for mirrorred region scenarion as well
temp = 0;
vm0.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
vm1.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.keySetMethod());
if (temp == 0) {
fail("failed in keySetMethodtest method");
}
}
@Test
public void testEntrySetMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
int i = 1;
Object objArr[] = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
int temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.entrySetMethod());
if (temp != 0) {
fail("failed in entrySetMethodtest method");
}
vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);// to make sure that vm1
// region has the entry
temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.entrySetMethod());
if (temp == 0) {
fail("failed in entrySetMethodtest method");
}
// in the above scenarion we can test this for mirrorred region scenarion as well
temp = 0;
vm0.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
vm1.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putOnMirroredRegion", objArr);
temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.entrySetMethod());
if (temp == 0) {
fail("failed in entrySetMethodtest method");
}
}
@Test
public void testSizeMethod() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
int i = 1, j = 0;
Object objArr[] = new Object[1];
objArr[0] = "" + i;
// Integer in = new Integer(i);
// objArr[0] = (Object) in;
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
j = vm1.invoke(() -> DistAckMapMethodsDUnitTest.sizeMethod());
if (j != 0) {
fail("failed in region.size method");
}
vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);// to make sure that vm1
// region has the entry
j = vm1.invoke(() -> DistAckMapMethodsDUnitTest.sizeMethod());
if (j == 0) {
fail("failed in region.size method");
}
}
@Test
public void testallMethodsArgs() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
vm0.invoke(() -> DistAckMapMethodsDUnitTest.allMethodsArgs());
}
// following is the implementation of the methods of Map to use in dunit test cases.
/*
*
*
*/
public static Object putMethod(Object ob) {
Object obj = null;
try {
if (ob != null) {
String str = "first";
obj = region.put(ob, str);
}
} catch (Exception ex) {
fail("Failed while region.put");
}
return obj;
}
public static Object getMethod(Object ob) {
Object obj = null;
try {
obj = region.get(ob);
} catch (Exception ex) {
fail("Failed while region.get");
}
return obj;
}
public static Object removeMethod(Object ob) {
Object obj = null;
try {
obj = region.remove(ob);
} catch (Exception ex) {
ex.printStackTrace();
fail("Failed while region.remove");
}
return obj;
}
public static boolean containsKeyMethod(Object ob) {
boolean flag = false;
try {
flag = region.containsKey(ob);
} catch (Exception ex) {
fail("Failed while region.containsKey");
}
return flag;
}
public static boolean isEmptyMethod() {
boolean flag = false;
try {
flag = region.isEmpty();
} catch (Exception ex) {
fail("Failed while region.isEmpty");
}
return flag;
}
public static boolean containsValueMethod(Object ob) {
boolean flag = false;
try {
flag = region.containsValue(ob);
} catch (Exception ex) {
fail("Failed while region.containsValueMethod");
}
return flag;
}
public static int keySetMethod() {
Set set = new HashSet();
int i = 0;
try {
set = region.keySet();
i = set.size();
} catch (Exception ex) {
ex.printStackTrace();
fail("Failed while region.keySet");
}
return i;
}
public static int entrySetMethod() {
Set set = new HashSet();
int i = 0;
try {
set = region.entrySet();
i = set.size();
} catch (Exception ex) {
ex.printStackTrace();
fail("Failed while region.entrySet");
}
return i;
}
public static int sizeMethod() {
int i = 0;
try {
i = region.size();
} catch (Exception ex) {
fail("Failed while region.size");
}
return i;
}
// following are methods for put on and get from mirrored regions
public static Object putOnMirroredRegion(Object ob) {
Object obj = null;
try {
String str = "mirror";
obj = mirroredRegion.put(ob, str);
} catch (Exception ex) {
ex.printStackTrace();
fail("Failed while mirroredRegion.put");
}
return obj;
}
public static Object getFromMirroredRegion(Object ob) {
Object obj = null;
try {
obj = mirroredRegion.get(ob);
} catch (Exception ex) {
fail("Failed while mirroredRegion.get");
}
return obj;
}
public static void removeMethodDetails() {
Object ob1;
// Object ob2;
Integer inOb1 = new Integer(1);
try {
region.put(inOb1, "first");
ob1 = region.remove(inOb1);
assertEquals("first", ob1.toString());
} catch (Exception ex) {
ex.printStackTrace();
}
// to test EntryNotFoundException
try {
region.remove(new Integer(2));
// fail("Should have thrown EntryNotFoundException");
} // catch (EntryNotFoundException e){
catch (Exception e) {
// pass
// e.printStackTrace();
}
// to test NullPointerException
try {
Integer inOb2 = new Integer(2);
region.put(inOb2, "second");
inOb2 = null;
region.remove(inOb2);
fail("Should have thrown NullPointerException ");
} catch (NullPointerException e) {
// pass
}
// to test the cache writers and listeners
try {
// createRegionToTestRemove();
Integer inOb2 = new Integer(2);
remRegion.put(inOb2, "second");
remRegion.remove(inOb2);
// to test cacheWriter
inOb2 = new Integer(1);
assertEquals("beforeDestroy", remRegion.get(inOb2).toString());
// wait till listeber switches afterDestroy to true
while (!afterDestroy) {
}
// to test cacheListener
inOb2 = new Integer(3);
assertEquals("afterDestroy", remRegion.get(inOb2).toString());
// verify that entryEventvalue is correct for listener
assertNotNull(afterDestroyObj);
} catch (Exception ex) {
ex.printStackTrace();
}
}// end of removeMethodDetail
public static void allMethodsArgs() {
// testing args for put method
try {
region.put(new Integer(1), new String("first"));
region.put(new Integer(2), new String("second"));
region.put(new Integer(3), new String("third"));
// test args for get method
Object ob1 = region.get(new Integer(1));
assertEquals("first", ob1.toString());
// test args for containsKey method
boolean val1 = region.containsKey(new Integer(2));
assertEquals(true, val1);
// test args for containsKey method
boolean val2 = region.containsValue(new String("second"));
// assertIndexDetailsEquals(true, val2);
// test args for remove method
try {
region.remove(new Integer(3));
} // catch (EntryNotFoundException ex){
catch (Exception ex) {
ex.printStackTrace();
fail("failed while region.remove(new Object())");
}
// verifying the correct exceptions are thrown by the methods
Object key = null, value = null;
// testing put method
try {
region.put(key, value);
fail("should have thrown NullPointerException");
} catch (NullPointerException iex) {
// pass
}
// testing containsValue method
try {
region.containsValue(value);
fail("should have thrown NullPointerException");
} catch (NullPointerException iex) {
// pass
}
// RegionDestroyedException
key = new Integer(5);
value = new String("fifth");
region.localDestroyRegion();
// test put method
try {
region.put(key, value);
fail("should have thrown RegionDestroyedException");
} catch (RegionDestroyedException iex) {
// pass
}
// test remove method
try {
region.remove(key);
fail("should have thrown RegionDestroyedException");
} catch (RegionDestroyedException iex) {
// pass
}
// test containsValue method
try {
region.containsValue(value);
fail("should have thrown RegionDestroyedException");
} catch (RegionDestroyedException iex) {
// pass
}
// test size method
try {
region.size();
fail("should have thrown RegionDestroyedException");
} catch (RegionDestroyedException iex) {
// pass
}
// test keySet method
try {
region.keySet();
fail("should have thrown RegionDestroyedException");
} catch (RegionDestroyedException iex) {
// pass
}
// test entrySet method
try {
region.entrySet();
fail("should have thrown RegionDestroyedException");
} catch (RegionDestroyedException iex) {
// pass
}
} catch (Exception ex) {
ex.printStackTrace();
}
}// end of allMethodsArgs
// helper classes
static class RemoveCacheWriter extends CacheWriterAdapter {
public void beforeDestroy(EntryEvent entryEvent)
throws org.apache.geode.cache.CacheWriterException {
Integer o1 = new Integer(1);
remRegion.put(o1, "beforeDestroy");
}
}// end of RemoveCacheWriter
static class RemoveCacheListener extends CacheListenerAdapter {
public void afterDestroy(EntryEvent entryEvent)
throws org.apache.geode.cache.CacheWriterException {
Integer o1 = new Integer(3);
remRegion.put(o1, "afterDestroy");
afterDestroyObj = entryEvent.getKey();
// to continue main thread where region.remove has actually occurred
afterDestroy = true;
}
}// end of RemoveCacheListener
}// end of class
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pcep.cli;
import org.apache.karaf.shell.commands.Argument;
import org.apache.karaf.shell.commands.Command;
import org.onosproject.cli.AbstractShellCommand;
import org.onosproject.pcep.controller.PcepClientController;
import org.onosproject.pcep.controller.PcepErrorDetail;
import org.onosproject.pcep.controller.PcepErrorType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.TreeMap;
@Command(scope = "onos", name = "pcep", description = "Pcep Session Info")
public class PcepSessionCommand extends AbstractShellCommand {
    private static final Logger log = LoggerFactory.getLogger(PcepSessionCommand.class);

    // sub-command names accepted as the first argument
    private static final String SESSION = "session";
    private static final String EXCEPTION = "exception";
    private static final String ERROR = "error";

    private PcepClientController pcepClientController;
    private byte sessionId;
    private Set<String> pcepSessionKeySet;
    private Set<String> pcepSessionIdKeySet;
    private Integer sessionIdValue = 0;
    private String sessionStatus;
    // failure reasons for one peer (a value of the controller's exception map)
    private List<String> pcepSessionExceptions = new ArrayList<>();
    private Set<String> pcepSessionFailurekeySet;
    // NOTE(review): pcepErrorDetail is never assigned anywhere in this class, so every
    // detail lookup in displayPcepErrorMsgs() throws NPE, which the broad catch there
    // swallows into a debug log. Confirm where this field is meant to be initialized.
    private PcepErrorDetail pcepErrorDetail;
    private PcepErrorType pcepErrorType;
    private Map<Integer, String> sessionEstablishmentFailureMap = new TreeMap<>();
    private Map<Integer, String> unknownObjectMap = new TreeMap<>();
    private Map<Integer, String> notSupportedObjectMap = new TreeMap<>();
    private Map<Integer, String> policyViolationMap = new TreeMap<>();
    private Map<Integer, String> mandatoryObjectMissingMap = new TreeMap<>();
    private Map<Integer, String> receptionOfInvalidObjectMap = new TreeMap<>();
    private Map<Integer, String> invalidOperationMap = new TreeMap<>();
    private Set<Integer> pcepErrorMsgKey;
    private Integer pcepErrorValue = 0;

    @Argument(index = 0, name = "name",
            description = "session" + "\n" + "exception" + "\n" + "error",
            required = true, multiValued = false)
    String name = null;

    @Argument(index = 1, name = "peer",
            description = "peerIp",
            required = false, multiValued = false)
    String peer = null;

    /** Dispatches to the chosen sub-command: session, exception, or error. */
    @Override
    protected void execute() {
        switch (name) {
            case SESSION:
                displayPcepSession();
                break;
            case EXCEPTION:
                displayPcepSessionFailureReason();
                break;
            case ERROR:
                displayPcepErrorMsgs();
                break;
            default:
                System.out.print("Unknown Command");
                break;
        }
    }

    /**
     * Converts the signed session-id byte to its unsigned 0-255 integer value
     * (equivalent to adding 256 when negative).
     */
    private static int toUnsignedSessionId(byte id) {
        return id & 0xFF;
    }

    /**
     * Prints session IP, status, and unsigned session id — for the given peer when one
     * is supplied, otherwise for every known session.
     */
    private void displayPcepSession() {
        try {
            this.pcepClientController = get(PcepClientController.class);
            Map<String, String> pcepSessionMap = pcepClientController.getPcepSessionMap();
            Map<String, Byte> pcepSessionIdMap = pcepClientController.getPcepSessionIdMap();
            pcepSessionKeySet = pcepSessionMap.keySet();
            pcepSessionIdKeySet = pcepSessionIdMap.keySet();
            if (peer != null) {
                if (!pcepSessionKeySet.isEmpty()) {
                    if (pcepSessionKeySet.contains(peer)) {
                        for (String pcepSessionPeer : pcepSessionKeySet) {
                            if (pcepSessionPeer.equals(peer)) {
                                for (String pcepSessionId : pcepSessionIdKeySet) {
                                    if (pcepSessionId.equals(peer)) {
                                        sessionId = pcepSessionIdMap.get(pcepSessionId);
                                        sessionStatus = pcepSessionMap.get(pcepSessionPeer);
                                        sessionIdValue = toUnsignedSessionId(sessionId);
                                    }
                                }
                                print("SessionIp = %s, Status = %s, sessionId = %s", pcepSessionPeer, sessionStatus, sessionIdValue);
                            }
                        }
                    } else {
                        System.out.print("Wrong Peer IP");
                    }
                }
            } else {
                if (!pcepSessionKeySet.isEmpty()) {
                    for (String pcepSessionPeer : pcepSessionKeySet) {
                        for (String pcepSessionId : pcepSessionIdKeySet) {
                            if (pcepSessionId.equals(pcepSessionPeer)) {
                                sessionId = pcepSessionIdMap.get(pcepSessionId);
                                sessionStatus = pcepSessionMap.get(pcepSessionPeer);
                                sessionIdValue = toUnsignedSessionId(sessionId);
                            }
                        }
                        print("SessionIp = %s, Status = %s, sessionId = %s", pcepSessionPeer, sessionStatus, sessionIdValue);
                    }
                }
            }
        } catch (Exception e) {
            log.debug("Error occurred while displaying PCEP session information: {}", e.getMessage());
        }
    }

    /**
     * Prints session-establishment failure reasons — for the given peer when one is
     * supplied, otherwise for every peer with recorded exceptions.
     */
    private void displayPcepSessionFailureReason() {
        try {
            this.pcepClientController = get(PcepClientController.class);
            Map<String, List<String>> pcepSessionFailureReasonMap = pcepClientController.getPcepExceptions();
            pcepSessionFailurekeySet = pcepSessionFailureReasonMap.keySet();
            if (!pcepSessionFailurekeySet.isEmpty()) {
                if (peer != null) {
                    if (pcepSessionFailurekeySet.contains(peer)) {
                        for (String pcepSessionPeerId : pcepSessionFailurekeySet) {
                            if (pcepSessionPeerId.equals(peer)) {
                                pcepSessionExceptions = pcepSessionFailureReasonMap.get(pcepSessionPeerId);
                                print("PeerId = %s, FailureReason = %s", pcepSessionPeerId, pcepSessionExceptions);
                            }
                        }
                    } else {
                        System.out.print("Wrong Peer IP");
                    }
                } else {
                    for (String pcepSessionPeerId : pcepSessionFailurekeySet) {
                        pcepSessionExceptions = pcepSessionFailureReasonMap.get(pcepSessionPeerId);
                        print("PeerId = %s, FailureReason = %s", pcepSessionPeerId, pcepSessionExceptions);
                    }
                }
            }
        } catch (Exception e) {
            log.debug("Error occurred while displaying PCEP session failure reasons: {}", e.getMessage());
        }
    }

    /**
     * Prints the detail string whose key equals the current error value. Uses equals()
     * rather than == because Integer identity comparison only works for values in the
     * -128..127 autobox cache.
     */
    private void printMatchingErrorDetail(Map<Integer, String> detailMap) {
        for (Integer detailKey : detailMap.keySet()) {
            if (detailKey.equals(pcepErrorValue)) {
                System.out.print(detailMap.get(detailKey));
            }
        }
    }

    /**
     * Prints a human-readable description for every PCEP error message the controller
     * has recorded, keyed by error type.
     *
     * Fixes two defects in the previous version: every switch case now ends with a
     * break (the old switch fell through and printed every subsequent case's output),
     * and boxed Integer keys are compared with equals() instead of ==.
     */
    private void displayPcepErrorMsgs() {
        try {
            this.pcepClientController = get(PcepClientController.class);
            Map<Integer, Integer> pcepErrorMsgMap = pcepClientController.getPcepErrorMsg();
            pcepErrorMsgKey = pcepErrorMsgMap.keySet();
            if (!pcepErrorMsgKey.isEmpty()) {
                for (Integer errorType : pcepErrorMsgKey) {
                    pcepErrorValue = pcepErrorMsgMap.get(errorType);
                    // NOTE(review): throws ArrayIndexOutOfBoundsException for an errorType
                    // outside the enum range; swallowed by the catch below — confirm inputs.
                    pcepErrorType = PcepErrorType.values()[errorType];
                    switch (pcepErrorType) {
                        case SESSIONESTABLISHMENTFAILURE:
                            sessionEstablishmentFailureMap = pcepErrorDetail.sessionEstablishmentFailure();
                            printMatchingErrorDetail(sessionEstablishmentFailureMap);
                            break;
                        case CAPABALITYNOTSUPPORTED:
                            System.out.print("Capability not supported");
                            break;
                        case UNKNOWNOBJECT:
                            unknownObjectMap = pcepErrorDetail.unknownObject();
                            printMatchingErrorDetail(unknownObjectMap);
                            break;
                        case NOTSUPPORTEDOBJECT:
                            notSupportedObjectMap = pcepErrorDetail.notSupportedObject();
                            printMatchingErrorDetail(notSupportedObjectMap);
                            break;
                        case POLICYVIOLATION:
                            policyViolationMap = pcepErrorDetail.policyViolation();
                            printMatchingErrorDetail(policyViolationMap);
                            break;
                        case MANDATORYOBJECTMISSING:
                            mandatoryObjectMissingMap = pcepErrorDetail.mandatoryObjectMissing();
                            printMatchingErrorDetail(mandatoryObjectMissingMap);
                            break;
                        case SYNCHRONIZEDPATHCOMPUTATIONREQUESTMISSING:
                            System.out.print("Synchronized path computation request missing");
                            break;
                        case UNKNOWNREQUESTREFERENCE:
                            System.out.print("Unknown request reference");
                            break;
                        case ESTABLISHINGSECONDPCEPSESSION:
                            System.out.print("Attempt to establish a second PCEP session");
                            break;
                        case RECEPTIONOFINVALIDOBJECT:
                            receptionOfInvalidObjectMap = pcepErrorDetail.receptionOfInvalidObject();
                            printMatchingErrorDetail(receptionOfInvalidObjectMap);
                            break;
                        case INVALIDOPERATION:
                            invalidOperationMap = pcepErrorDetail.invalidOperation();
                            printMatchingErrorDetail(invalidOperationMap);
                            break;
                        case VIRTUALNETWORKTLVMISSING:
                            System.out.print("VIRTUAL-NETWORK TLV missing");
                            break;
                        default:
                            System.out.print("Unknown error message");
                            break;
                    }
                }
            }
        } catch (Exception e) {
            log.debug("Error occurred while displaying PCEP error messages received: {}", e.getMessage());
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/credentials/v1/common.proto
package com.google.cloud.iam.credentials.v1;
/** Protobuf type {@code google.iam.credentials.v1.GenerateIdTokenResponse} */
public final class GenerateIdTokenResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.iam.credentials.v1.GenerateIdTokenResponse)
GenerateIdTokenResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GenerateIdTokenResponse.newBuilder() to construct.
// Builder constructor: invoked by Builder.build() to copy builder state into the message.
private GenerateIdTokenResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default constructor: initializes token to the proto3 string default (empty).
private GenerateIdTokenResponse() {
token_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
// Invoked reflectively by the protobuf runtime to allocate fresh instances.
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GenerateIdTokenResponse();
}
@java.lang.Override
// Returns fields present on the wire that this generated schema does not know about.
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag-delimited fields from the stream until
// end of message (tag 0), preserving unrecognized fields in unknownFields.
private GenerateIdTokenResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// tag 0 marks end of input
done = true;
break;
case 10:
{
// tag 10 = field 1 (token), wire type 2 (length-delimited UTF-8 string)
java.lang.String s = input.readStringRequireUtf8();
token_ = s;
break;
}
default:
{
// unknown field: retain it (or stop if the stream says we are past the group)
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// attach the partially-parsed message for diagnostics
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Returns the compile-time descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.iam.credentials.v1.IAMCredentialsCommonProto
.internal_static_google_iam_credentials_v1_GenerateIdTokenResponse_descriptor;
}
@java.lang.Override
// Wires the descriptor's fields to this class and its Builder for reflective access.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.iam.credentials.v1.IAMCredentialsCommonProto
.internal_static_google_iam_credentials_v1_GenerateIdTokenResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.class,
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.Builder.class);
}
public static final int TOKEN_FIELD_NUMBER = 1;
private volatile java.lang.Object token_;
/**
*
*
* <pre>
* The OpenId Connect ID token.
* </pre>
*
* <code>string token = 1;</code>
*
* @return The token.
*/
@java.lang.Override
public java.lang.String getToken() {
java.lang.Object ref = token_;
if (ref instanceof java.lang.String) {
// already decoded on a previous call
return (java.lang.String) ref;
} else {
// first access after parsing: decode the ByteString once and cache the String
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
token_ = s;
return s;
}
}
/**
*
*
* <pre>
* The OpenId Connect ID token.
* </pre>
*
* <code>string token = 1;</code>
*
* @return The bytes for token.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTokenBytes() {
java.lang.Object ref = token_;
if (ref instanceof java.lang.String) {
// first bytes access: encode the cached String once and cache the ByteString
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
token_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// This message declares no required fields, so it is always initialized (result cached as 1).
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes the message: token (field 1) is written only when non-empty (proto3 default
// elision), followed by any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(token_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, token_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the wire size; mirrors writeTo: the token field
// contributes only when non-empty, plus the size of preserved unknown fields.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(token_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, token_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Two responses are equal when their token values and their unknown field sets are equal.
 * Non-GenerateIdTokenResponse arguments are delegated to the superclass comparison.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj instanceof com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse) {
    com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse that =
        (com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse) obj;
    return getToken().equals(that.getToken())
        && unknownFields.equals(that.unknownFields);
  }
  return super.equals(obj);
}
// Hash over the descriptor, the token field (number and value) and unknown fields,
// memoized in memoizedHashCode. Consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getToken().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Static parse entry points ---
// All overloads delegate to PARSER (or to GeneratedMessageV3 helpers that wrap IOExceptions).
// In-memory inputs (ByteBuffer / ByteString / byte[]) throw InvalidProtocolBufferException;
// stream inputs throw IOException.
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// --- Builder factory methods ---
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a builder with all fields at their defaults.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated from an existing message.
public static Builder newBuilder(
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// For the default instance, skips the (no-op) mergeFrom to avoid mutating builder state.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code google.iam.credentials.v1.GenerateIdTokenResponse}
 *
 * <p>Generated mutable builder for GenerateIdTokenResponse. All setters return {@code this}
 * for chaining and call {@code onChanged()} so parent builders see nested modifications.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.iam.credentials.v1.GenerateIdTokenResponse)
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.iam.credentials.v1.IAMCredentialsCommonProto
.internal_static_google_iam_credentials_v1_GenerateIdTokenResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.iam.credentials.v1.IAMCredentialsCommonProto
.internal_static_google_iam_credentials_v1_GenerateIdTokenResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.class,
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.Builder.class);
}
// Construct using com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// This message has no nested-message fields, so there are no field builders to initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
// Resets every field to its proto3 default (empty token).
@java.lang.Override
public Builder clear() {
super.clear();
token_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.iam.credentials.v1.IAMCredentialsCommonProto
.internal_static_google_iam_credentials_v1_GenerateIdTokenResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse getDefaultInstanceForType() {
return com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse build() {
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the isInitialized() check; copies the current token value into the message.
@java.lang.Override
public com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse buildPartial() {
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse result =
new com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse(this);
result.token_ = token_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse) {
return mergeFrom((com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: a non-empty token in 'other' overwrites this builder's token,
// and unknown fields are accumulated.
public Builder mergeFrom(com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse other) {
if (other == com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.getDefaultInstance())
return this;
if (!other.getToken().isEmpty()) {
token_ = other.token_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from the stream; on failure, merges whatever was successfully read before rethrowing.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Either a String or a ByteString, lazily converted and memoized (see getToken/getTokenBytes).
private java.lang.Object token_ = "";
/**
 *
 *
 * <pre>
 * The OpenId Connect ID token.
 * </pre>
 *
 * <code>string token = 1;</code>
 *
 * @return The token.
 */
public java.lang.String getToken() {
java.lang.Object ref = token_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
token_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The OpenId Connect ID token.
 * </pre>
 *
 * <code>string token = 1;</code>
 *
 * @return The bytes for token.
 */
public com.google.protobuf.ByteString getTokenBytes() {
java.lang.Object ref = token_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
token_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * The OpenId Connect ID token.
 * </pre>
 *
 * <code>string token = 1;</code>
 *
 * @param value The token to set.
 * @return This builder for chaining.
 */
public Builder setToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
token_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The OpenId Connect ID token.
 * </pre>
 *
 * <code>string token = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearToken() {
token_ = getDefaultInstance().getToken();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The OpenId Connect ID token.
 * </pre>
 *
 * <code>string token = 1;</code>
 *
 * @param value The bytes for token to set.
 * @return This builder for chaining.
 */
public Builder setTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
token_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.iam.credentials.v1.GenerateIdTokenResponse)
}
// @@protoc_insertion_point(class_scope:google.iam.credentials.v1.GenerateIdTokenResponse)
// Singleton instance with all fields at their proto3 defaults (empty token).
private static final com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse();
}
public static com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser; delegates to the parsing constructor for each message read.
private static final com.google.protobuf.Parser<GenerateIdTokenResponse> PARSER =
new com.google.protobuf.AbstractParser<GenerateIdTokenResponse>() {
@java.lang.Override
public GenerateIdTokenResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GenerateIdTokenResponse(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<GenerateIdTokenResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GenerateIdTokenResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.core.index.reader.impl;
import com.linkedin.pinot.common.utils.MmapUtils;
import com.linkedin.pinot.core.index.reader.DataFileMetadata;
import com.linkedin.pinot.core.index.reader.SingleColumnMultiValueReader;
import com.linkedin.pinot.core.util.CustomBitSet;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
/**
 * Storage Layout
 * ==============
 * The file has three sections: a HEADER section, a BITMAP section and a RAW DATA section.
 * HEADER: one entry per chunk, each entry holding the start offset of that chunk's first value.
 * BITMAP: a sequence of bits, one per stored value (totalNumberOfValues bits in all). A bit is set
 * to 1 if the corresponding value is the start of a new docId, so the number of set bits equals the
 * number of docs.
 * RAW DATA: the multi-valued data stored as a sequence of ints; the number of ints equals
 * totalNumberOfValues.
 * All documents are divided into groups referred to as CHUNKs. Each CHUNK:
 * - has the same number of documents;
 * - has the start offset of its first value recorded in the HEADER section, to speed up lookups.
 * Overall, a lookup reads the row's chunk entry from the HEADER and then linearly scans at most one
 * chunk's worth of the bitmap to find the right offset into the raw data section.
 */
public class FixedByteSkipListSCMVReader implements SingleColumnMultiValueReader {
  private static final int SIZE_OF_INT = 4;
  private static final int NUM_COLS_IN_HEADER = 1;
  // THIS is HARDCODED in THE FixedByteSkipListSCMVWriter class as well.
  // If you are changing PREFERRED_NUM_VALUES_PER_CHUNK, make sure you change it in
  // FixedByteSkipListSCMVWriter as well.
  private static final int PREFERRED_NUM_VALUES_PER_CHUNK = 2048;

  private ByteBuffer chunkOffsetsBuffer;
  private ByteBuffer bitsetBuffer;
  private ByteBuffer rawDataBuffer;
  private RandomAccessFile raf;
  private FixedByteWidthRowColDataFileReader chunkOffsetsReader;
  private CustomBitSet customBitSet;
  private FixedByteWidthRowColDataFileReader rawDataReader;
  private int numChunks;
  // NOTE(review): these three fields are never read or written by this class; kept for
  // compatibility in case external code touches them (package-visible).
  int prevRowStartIndex = 0;
  int prevRowLength = 0;
  int prevRowId = -1;
  private int chunkOffsetHeaderSize;
  private int bitsetSize;
  private int rawDataSize;
  private int totalSize;
  private int totalNumValues;
  private int docsPerChunk;
  private boolean isMmap;

  /**
   * Opens a single-column multi-value reader over {@code file}.
   *
   * @param file index file written by FixedByteSkipListSCMVWriter
   * @param numDocs number of documents (rows) in the column; must be positive
   * @param totalNumValues total number of values across all documents
   * @param columnSizeInBytes width of a single value in bytes
   * @param isMmap true to memory-map the file; false to copy it into direct buffers and close it
   * @throws Exception if the file cannot be mapped or read
   */
  public FixedByteSkipListSCMVReader(File file, int numDocs, int totalNumValues, int columnSizeInBytes, boolean isMmap)
      throws Exception {
    if (numDocs <= 0) {
      throw new IllegalArgumentException("numDocs must be positive, got: " + numDocs);
    }
    this.totalNumValues = totalNumValues;
    // FIX: was integer division (totalNumValues / numDocs) assigned to a float, which truncates
    // the average and can make the reader's docsPerChunk disagree with the writer's layout.
    float averageValuesPerDoc = (float) totalNumValues / numDocs;
    this.docsPerChunk = (int) (Math.ceil(PREFERRED_NUM_VALUES_PER_CHUNK / averageValuesPerDoc));
    this.numChunks = (numDocs + docsPerChunk - 1) / docsPerChunk;
    this.isMmap = isMmap;
    chunkOffsetHeaderSize = numChunks * SIZE_OF_INT * NUM_COLS_IN_HEADER;
    bitsetSize = (totalNumValues + 7) / 8; // one bit per value, rounded up to whole bytes
    rawDataSize = totalNumValues * columnSizeInBytes;
    totalSize = chunkOffsetHeaderSize + bitsetSize + rawDataSize;
    raf = new RandomAccessFile(file, "rw");
    if (isMmap) {
      mapSections(file);
    } else {
      loadSections(file);
    }
    // FIX: the header buffer holds exactly numChunks ints (chunkOffsetHeaderSize bytes), so the
    // reader is sized with numChunks rows; the previous code passed numDocs, describing more rows
    // than the buffer contains.
    chunkOffsetsReader = new FixedByteWidthRowColDataFileReader(chunkOffsetsBuffer, numChunks, NUM_COLS_IN_HEADER,
        new int[] { SIZE_OF_INT });
    customBitSet = CustomBitSet.withByteBuffer(bitsetSize, bitsetBuffer);
    rawDataReader = new FixedByteWidthRowColDataFileReader(rawDataBuffer, totalNumValues, 1,
        new int[] { columnSizeInBytes });
  }

  /** Memory-maps the three file sections; the file stays open until {@link #close()}. */
  private void mapSections(File file) throws Exception {
    chunkOffsetsBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, 0, chunkOffsetHeaderSize, file,
        this.getClass().getSimpleName() + " chunkOffsetsBuffer");
    bitsetBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, chunkOffsetHeaderSize, bitsetSize, file,
        this.getClass().getSimpleName() + " bitsetBuffer");
    rawDataBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, chunkOffsetHeaderSize + bitsetSize,
        rawDataSize, file, this.getClass().getSimpleName() + " rawDataBuffer");
  }

  /**
   * Copies the three file sections into direct buffers (sequential channel reads advance the
   * file position, so the sections are read in on-disk order) and closes the file immediately.
   */
  private void loadSections(File file) throws Exception {
    chunkOffsetsBuffer = MmapUtils.allocateDirectByteBuffer(chunkOffsetHeaderSize, file,
        this.getClass().getSimpleName() + " chunkOffsetsBuffer");
    raf.getChannel().read(chunkOffsetsBuffer);
    bitsetBuffer = MmapUtils.allocateDirectByteBuffer(bitsetSize, file,
        this.getClass().getSimpleName() + " bitsetBuffer");
    raf.getChannel().read(bitsetBuffer);
    rawDataBuffer = MmapUtils.allocateDirectByteBuffer(rawDataSize, file,
        this.getClass().getSimpleName() + " rawDataBuffer");
    raf.getChannel().read(rawDataBuffer);
    raf.close();
  }

  public int getChunkOffsetHeaderSize() {
    return chunkOffsetHeaderSize;
  }

  public int getBitsetSize() {
    return bitsetSize;
  }

  public int getRawDataSize() {
    return rawDataSize;
  }

  public int getTotalSize() {
    return totalSize;
  }

  public ByteBuffer getChunkOffsetsBuffer() {
    return chunkOffsetsBuffer;
  }

  public ByteBuffer getBitsetBuffer() {
    return bitsetBuffer;
  }

  public ByteBuffer getRawDataBuffer() {
    return rawDataBuffer;
  }

  public int getNumChunks() {
    return numChunks;
  }

  /** Number of values belonging to the row whose values begin at {@code rowOffSetStart}. */
  private int computeLength(int rowOffSetStart) {
    long rowOffSetEnd = customBitSet.nextSetBitAfter(rowOffSetStart);
    if (rowOffSetEnd < 0) {
      // No later doc-start bit: this is the last row, extending to the end of the raw data.
      return totalNumValues - rowOffSetStart;
    }
    return (int) (rowOffSetEnd - rowOffSetStart);
  }

  /** Index into the raw data section where {@code row}'s values begin. */
  private int computeStartOffset(int row) {
    int chunkId = row / docsPerChunk;
    int chunkIdOffset = chunkOffsetsReader.getInt(chunkId, 0);
    if (row % docsPerChunk == 0) {
      // The header entry points directly at the chunk's first row.
      return chunkIdOffset;
    }
    // Scan forward within the chunk for the (row - chunkStartRow)-th doc-start bit.
    long rowOffSetStart = customBitSet.findNthBitSetAfter(chunkIdOffset, row - chunkId * docsPerChunk);
    return (int) rowOffSetStart;
  }

  /** Releases all buffers; closes the underlying file when it was memory-mapped. */
  public void close() throws IOException {
    MmapUtils.unloadByteBuffer(chunkOffsetsBuffer);
    chunkOffsetsBuffer = null;
    MmapUtils.unloadByteBuffer(bitsetBuffer);
    bitsetBuffer = null;
    MmapUtils.unloadByteBuffer(rawDataBuffer);
    rawDataBuffer = null;
    customBitSet.close();
    customBitSet = null;
    rawDataReader.close();
    rawDataReader = null;
    if (isMmap) {
      // In the heap-loaded case the file was already closed in the constructor.
      raf.close();
    }
  }

  public int getDocsPerChunk() {
    return docsPerChunk;
  }

  @Override
  public DataFileMetadata getMetadata() {
    // No per-file metadata is maintained by this reader.
    return null;
  }

  @Override
  public int getCharArray(int row, char[] charArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      charArray[i] = rawDataReader.getChar(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getShortArray(int row, short[] shortsArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      shortsArray[i] = rawDataReader.getShort(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getIntArray(int row, int[] intArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      intArray[i] = rawDataReader.getInt(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getLongArray(int row, long[] longArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      longArray[i] = rawDataReader.getLong(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getFloatArray(int row, float[] floatArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      floatArray[i] = rawDataReader.getFloat(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getDoubleArray(int row, double[] doubleArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      doubleArray[i] = rawDataReader.getDouble(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getStringArray(int row, String[] stringArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      stringArray[i] = rawDataReader.getString(startOffset + i, 0);
    }
    return length;
  }

  @Override
  public int getBytesArray(int row, byte[][] bytesArray) {
    int startOffset = computeStartOffset(row);
    int length = computeLength(startOffset);
    for (int i = 0; i < length; i++) {
      bytesArray[i] = rawDataReader.getBytes(startOffset + i, 0);
    }
    return length;
  }
}
| |
package cz.habarta.typescript.generator;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.databind.ObjectMapper;
import cz.habarta.typescript.generator.ext.ClassEnumExtension;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Test;
@SuppressWarnings("unused")
public class EnumTest {
/** With default settings an enum-typed property generates a named string-union type alias. */
@Test
public void testEnumAsUnion() {
final Settings settings = TestUtils.settings();
// settings.mapEnum = EnumMapping.asUnion;  // asUnion is the behavior under test; left unset to exercise the default
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(AClass.class));
final String expected = (
"\n" +
"interface AClass {\n" +
"    direction: Direction;\n" +
"}\n" +
"\n" +
"type Direction = 'North' | 'East' | 'South' | 'West';\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** Generating from an enum alone (no referencing class) still emits the union type alias. */
@Test
public void testSingleEnum() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(Direction.class));
final String expected = (
"\n" +
"type Direction = 'North' | 'East' | 'South' | 'West';\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** asInlineUnion inlines the union at the property's type position instead of emitting an alias. */
@Test
public void testEnumAsInlineUnion() {
final Settings settings = TestUtils.settings();
settings.quotes = "'";
settings.mapEnum = EnumMapping.asInlineUnion;
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(AClass.class));
final String expected =
"\n" +
"interface AClass {\n" +
"    direction: 'North' | 'East' | 'South' | 'West';\n" +
"}\n";
assertEquals(expected, output);
}
/** asNumberBasedEnum emits a const enum whose members get implicit numeric values. */
@Test
public void testEnumAsNumberBasedEnum() {
final Settings settings = TestUtils.settings();
settings.mapEnum = EnumMapping.asNumberBasedEnum;
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(AClass.class));
final String expected = (
"\n" +
"interface AClass {\n" +
"    direction: Direction;\n" +
"}\n" +
"\n" +
"declare const enum Direction {\n" +
"    North,\n" +
"    East,\n" +
"    South,\n" +
"    West,\n" +
"}\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** asEnum emits a const enum with explicit string initializers matching the constant names. */
@Test
public void testEnumAsEnum() {
final Settings settings = TestUtils.settings();
settings.mapEnum = EnumMapping.asEnum;
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(AClass.class));
final String expected = (
"interface AClass {\n" +
"    direction: Direction;\n" +
"}\n" +
"\n" +
"declare const enum Direction {\n" +
"    North = 'North',\n" +
"    East = 'East',\n" +
"    South = 'South',\n" +
"    West = 'West',\n" +
"}"
).replace("'", "\"");
assertEquals(expected.trim(), output.trim());
}
/**
 * ClassEnumExtension (configured with classEnumPattern "Enum") turns matching classes into
 * const enums alongside ordinary Java enums.
 */
@Test
public void testEnumsWithClassEnumPattern() {
final Settings settings = TestUtils.settings();
settings.mapEnum = EnumMapping.asEnum;
settings.jsonLibrary = JsonLibrary.jackson2;
final ClassEnumExtension classEnumExtension = new ClassEnumExtension();
classEnumExtension.setConfiguration(Collections.singletonMap("classEnumPattern", "Enum"));
settings.extensions.add(classEnumExtension);
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(DummyEnum.class, DummyClassEnum.class));
final String expected = (
"\ndeclare const enum DummyClassEnum {\n" +
"    ATYPE = 'ATYPE',\n" +
"    BTYPE = 'BTYPE',\n" +
"    CTYPE = 'CTYPE',\n" +
"}\n" +
"\ndeclare const enum DummyEnum {\n" +
"    Red = 'Red',\n" +
"    Green = 'Green',\n" +
"    Blue = 'Blue',\n" +
"}\n"
).replace("'", "\"");
assertEquals(expected.trim(), output.trim());
}
/**
 * enumMemberCasing = PascalCase renames member identifiers (e.g. ATYPE -> Atype,
 * camelCaseType -> CamelCaseType) while the string initializers keep the original values.
 */
@Test
public void testEnumsAsPascalCaseWithClassEnumPattern() {
final Settings settings = TestUtils.settings();
settings.mapEnum = EnumMapping.asEnum;
settings.enumMemberCasing = IdentifierCasing.PascalCase;
settings.jsonLibrary = JsonLibrary.jackson2;
final ClassEnumExtension classEnumExtension = new ClassEnumExtension();
classEnumExtension.setConfiguration(Collections.singletonMap("classEnumPattern", "Enum"));
settings.extensions.add(classEnumExtension);
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(DummyEnum.class, DummyClassEnum.class, DummyMixedCaseEnum.class));
final String expected = (
"\ndeclare const enum DummyClassEnum {\n" +
"    Atype = 'ATYPE',\n" +
"    Btype = 'BTYPE',\n" +
"    Ctype = 'CTYPE',\n" +
"}\n" +
"\ndeclare const enum DummyEnum {\n" +
"    Red = 'Red',\n" +
"    Green = 'Green',\n" +
"    Blue = 'Blue',\n" +
"}\n" +
"\ndeclare const enum DummyMixedCaseEnum {\n" +
"    CamelCaseType = 'camelCaseType',\n" +
"    PascalCaseType = 'PascalCaseType',\n" +
"    UpperCaseType = 'UPPER_CASE_TYPE',\n" +
"}\n"
).replace("'", "\"");
assertEquals(expected.trim(), output.trim());
}
/** enumMemberCasing = camelCase lower-cases the first letter of every member identifier. */
@Test
public void testEnumsAsCamelCase() {
final Settings settings = TestUtils.settings();
settings.mapEnum = EnumMapping.asNumberBasedEnum;
settings.enumMemberCasing = IdentifierCasing.camelCase;
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(DummyMixedCaseEnum.class));
assertTrue(output.contains("camelCaseType"));
assertTrue(output.contains("pascalCaseType"));
assertTrue(output.contains("upperCaseType"));
}
/** @JsonProperty on enum constants supplies the serialized names used in the union type. */
@Test
public void testEnumWithJsonPropertyAnnotations() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(SideWithJsonPropertyAnnotations.class));
final String expected = (
"\n" +
"type SideWithJsonPropertyAnnotations = 'left-side' | 'right-side';\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** A @JsonValue-annotated method overrides @JsonProperty when deriving the union members. */
@Test
public void testEnumWithJsonValueMethodAnnotation() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(SideWithJsonValueMethodAnnotation.class));
final String expected = (
"\n" +
"type SideWithJsonValueMethodAnnotation = 'left-side' | 'right-side';\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** A @JsonValue-annotated field overrides @JsonProperty when deriving the union members. */
@Test
public void testEnumWithJsonValueFieldAnnotation() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(SideWithJsonValueFieldAnnotation.class));
final String expected = (
"\n" +
"type SideWithJsonValueFieldAnnotation = 'left-side' | 'right-side';\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** With Jackson's enumsUsingToString enabled, toString() output supplies the union members. */
@Test
public void testEnumUsingToString() {
final Settings settings = TestUtils.settings();
settings.jackson2Configuration = new Jackson2ConfigurationResolved();
settings.jackson2Configuration.enumsUsingToString = true;
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(SideUsingToString.class));
final String expected = (
"\n" +
"type SideUsingToString = 'toString:left-side' | 'toString:right-side';\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** An enum with no constants maps to TypeScript's bottom type 'never'. */
@Test
public void testEmptyEnum() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(EmptyEnum.class));
final String expected = (
"\n" +
"type EmptyEnum = never;\n"
).replace("'", "\"");
assertEquals(expected, output);
}
/** An excluded type must not appear anywhere in the generated output, even when referenced. */
@Test
public void testExcludeObjectEnum() {
    final Settings settings = TestUtils.settings();
    settings.setExcludeFilter(Arrays.asList(StatusType.class.getName()), Arrays.<String>asList());
    final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(ClassWithObjectEnum.class, StatusType.class));
    // assertFalse states the intent directly instead of assertTrue(!...)
    assertFalse(output.contains("StatusType"));
}
/** A @JsonFormat(shape = OBJECT) enum is generated as an interface of its bean properties. */
@Test
public void testObjectEnum() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(StatusType.class));
final String expected = "" +
"interface StatusType {\n" +
"    code: number;\n" +
"    label: string;\n" +
"}";
assertEquals(expected.trim(), output.trim());
}
/** java.lang.Enum referenced through a generic bound is emitted as a generic interface. */
@Test
public void testJavaLangEnum1() {
final Settings settings = TestUtils.settings();
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(Child.NoEnumFactory.class));
assertTrue(output.contains("interface Enum<E>"));
}
// Fixture: nests a class implementing IBaseEnumFactory<Enum<?>> so the generator encounters
// java.lang.Enum through a generic type argument (used by testJavaLangEnum1).
private static @interface Child {
public static class NoEnumFactory implements IBaseEnumFactory<Enum<?>> {
}
}
// Marker generic interface used only to carry the Enum<?> type argument in the fixture above.
private static interface IBaseEnumFactory<T> {
}
/** When java.lang.Enum itself is excluded, enum-typed properties degrade to 'any'. */
@Test
public void testJavaLangEnum2() {
final Settings settings = TestUtils.settings();
settings.setExcludeFilter(Arrays.asList(Enum.class.getName()), null);
final String output = new TypeScriptGenerator(settings).generateTypeScript(Input.from(ClassWithEnum.class));
assertTrue(output.contains("enumA: any"));
}
private static class ClassWithEnum {
public Enum<?> enumA;
}
// Fixture: simple bean holding a single enum-typed property.
private static class AClass {
public Direction direction;
}
// Plain enum fixture without Jackson annotations; constants serialize by name.
enum Direction {
North,
East,
South,
West
}
// Fixture: @JsonProperty on each constant overrides the serialized value name.
enum SideWithJsonPropertyAnnotations {
@JsonProperty("left-side")
Left,
@JsonProperty("right-side")
Right
}
// Fixture: @JsonValue on a getter takes precedence over the per-constant
// @JsonProperty annotations (as the annotation strings themselves state).
enum SideWithJsonValueMethodAnnotation {
@JsonProperty("@JsonProperty ignored since @JsonValue has higher precedence")
Left("left-side"),
@JsonProperty("@JsonProperty ignored since @JsonValue has higher precedence")
Right("right-side");
// Serialized representation returned by the @JsonValue accessor below.
private final String jsonValue;
private SideWithJsonValueMethodAnnotation(String jsonValue) {
this.jsonValue = jsonValue;
}
@JsonValue
public Object getJsonValue() {
return jsonValue;
}
}
// Fixture: @JsonValue placed directly on the field; per-constant @JsonProperty
// is still ignored (per the annotation strings). NOTE(review): also overrides
// toString() — presumably to verify @JsonValue wins over toString(); confirm
// against the corresponding test.
enum SideWithJsonValueFieldAnnotation {
@JsonProperty("@JsonProperty ignored since @JsonValue has higher precedence")
Left("left-side"),
@JsonProperty("@JsonProperty ignored since @JsonValue has higher precedence")
Right("right-side");
@JsonValue
private final String jsonValue;
private SideWithJsonValueFieldAnnotation(String jsonValue) {
this.jsonValue = jsonValue;
}
@Override
public String toString() {
return "AAA " + name();
}
}
// Fixture: no @JsonValue member, so toString()-based serialization applies and
// wins over the per-constant @JsonProperty annotations; constants serialize as
// "toString:left-side" / "toString:right-side".
enum SideUsingToString {
@JsonProperty("@JsonProperty ignored since toString() has higher precedence")
Left("left-side"),
@JsonProperty("@JsonProperty ignored since toString() has higher precedence")
Right("right-side");
// Suffix emitted by toString() below.
private final String jsonValue;
private SideUsingToString(String jsonValue) {
this.jsonValue = jsonValue;
}
@Override
public String toString() {
return "toString:" + jsonValue;
}
}
// Fixture: enum with no constants; expected to map to the TypeScript `never` type.
enum EmptyEnum {
}
// Fixture: references StatusType both directly and nested inside generic collections.
static class ClassWithObjectEnum {
public StatusType status;
public List<Map<String, StatusType>> statuses;
}
// Fixture enum serialized as an object (code + label) rather than as a constant name.
@JsonFormat(shape = JsonFormat.Shape.OBJECT)
public enum StatusType {
    GOOD(0, "Good"),
    FULL(1, "Full");

    private final int code;
    private final String label;

    private StatusType(int code, String label) {
        this.code = code;
        this.label = label;
    }

    /** Numeric status code exposed as a bean property. */
    public int getCode() {
        return code;
    }

    /** Human-readable label exposed as a bean property. */
    public String getLabel() {
        return label;
    }
}
@Test
public void testEnumMapKeys_asUnion() {
    // asUnion mapping: map keys become a mapped type over the named union alias.
    final Settings settings = TestUtils.settings();
    settings.mapEnum = EnumMapping.asUnion;
    final String generated = new TypeScriptGenerator(settings)
            .generateTypeScript(Input.from(ClassWithMapWithEnumKeys.class));
    assertTrue(generated.contains("labels: { [P in Direction]?: string }"));
    assertTrue(generated.contains("type Direction ="));
}
@Test
public void testEnumMapKeys_asInlineUnion() {
    // asInlineUnion mapping: the key union is inlined and no named Direction type remains.
    final Settings settings = TestUtils.settings();
    settings.mapEnum = EnumMapping.asInlineUnion;
    final String generated = new TypeScriptGenerator(settings)
            .generateTypeScript(Input.from(ClassWithMapWithEnumKeys.class));
    final String expectedKeys =
            "labels: { [P in 'North' | 'East' | 'South' | 'West']?: string }".replace('\'', '"');
    assertTrue(generated.contains(expectedKeys));
    assertTrue(!generated.contains("Direction"));
}
@Test
public void testEnumMapKeys_asEnum() {
    // asEnum mapping: keys reference a generated TypeScript enum declaration.
    final Settings settings = TestUtils.settings();
    settings.mapEnum = EnumMapping.asEnum;
    final String generated = new TypeScriptGenerator(settings)
            .generateTypeScript(Input.from(ClassWithMapWithEnumKeys.class));
    assertTrue(generated.contains("labels: { [P in Direction]?: string }"));
    assertTrue(generated.contains("enum Direction {"));
}
@Test
public void testEnumMapKeys_asNumberBasedEnum() {
    // asNumberBasedEnum mapping: keys fall back to a string index signature.
    final Settings settings = TestUtils.settings();
    settings.mapEnum = EnumMapping.asNumberBasedEnum;
    final String generated = new TypeScriptGenerator(settings)
            .generateTypeScript(Input.from(ClassWithMapWithEnumKeys.class));
    assertTrue(generated.contains("labels: { [index: string]: string }"));
}
// Fixture: map keyed by an enum; drives the EnumMapping key-representation tests.
static class ClassWithMapWithEnumKeys {
public Map<Direction, String> labels;
}
@Test
public void testEnumMapKeys_MixedEnum() {
    // An enum whose @JsonValue yields mixed JSON types must still work as a map key.
    final Settings settings = TestUtils.settings();
    settings.mapEnum = EnumMapping.asUnion;
    final String generated = new TypeScriptGenerator(settings)
            .generateTypeScript(Input.from(ClassWithMixedEnum.class));
    assertTrue(generated.contains("mixedEnumMap: { [P in MixedEnum]?: string }"));
    assertTrue(generated.contains("MixedEnum"));
}
// Fixture: @JsonValue returns Object, so constants serialize to mixed JSON types
// (a number for NUMBER, a string for STRING).
public static enum MixedEnum {
NUMBER(42),
STRING("foo");
// Raw JSON value emitted by Jackson for this constant.
private final Object jsonValue;
private MixedEnum(Object jsonValue) {
this.jsonValue = jsonValue;
}
@JsonValue
public Object getJsonValue() {
return this.jsonValue;
}
}
// Fixture: MixedEnum used both as a plain field and as a map key.
static class ClassWithMixedEnum {
public MixedEnum mixedEnum;
public Map<MixedEnum, String> mixedEnumMap;
}
@Test
public void testEnumMapKeys_NumberEnum() {
    // Number-serialized enum keys cannot form a TS union, so an index signature is used.
    final Settings settings = TestUtils.settings();
    settings.mapEnum = EnumMapping.asNumberBasedEnum;
    final String generated = new TypeScriptGenerator(settings)
            .generateTypeScript(Input.from(ClassWithNumberEnum.class));
    assertTrue(generated.contains("numberEnumMap: { [index: string]: string }"));
    assertTrue(generated.contains("NumberEnum"));
}
// Fixture: @JsonFormat(NUMBER_INT) makes constants serialize as numbers, not names.
@JsonFormat(shape = JsonFormat.Shape.NUMBER_INT)
public static enum NumberEnum {
VALUE0,
VALUE1;
}
// Fixture: NumberEnum used both as a plain field and as a map key.
static class ClassWithNumberEnum {
public NumberEnum numberEnum;
public Map<NumberEnum, String> numberEnumMap;
}
/**
 * Ad-hoc runner: prints Jackson's actual JSON for the enum fixtures so the
 * expected serialized forms can be inspected manually.
 */
public static void main(String[] args) throws Exception {
    final ClassWithMixedEnum mixed = new ClassWithMixedEnum();
    mixed.mixedEnum = MixedEnum.NUMBER;
    mixed.mixedEnumMap = Collections.singletonMap(MixedEnum.NUMBER, "bar");
    System.out.println(new ObjectMapper().writeValueAsString(mixed));

    final ClassWithNumberEnum numbered = new ClassWithNumberEnum();
    numbered.numberEnum = NumberEnum.VALUE0;
    numbered.numberEnumMap = Collections.singletonMap(NumberEnum.VALUE0, "bar");
    System.out.println(new ObjectMapper().writeValueAsString(numbered));
}
}
| |
/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.emm.agent.services;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.json.JSONException;
import org.json.JSONArray;
import org.json.JSONObject;
import org.wso2.emm.agent.AndroidAgentException;
import org.wso2.emm.agent.R;
import org.wso2.emm.agent.AlertActivity;
import org.wso2.emm.agent.ServerDetails;
import org.wso2.emm.agent.api.ApplicationManager;
import org.wso2.emm.agent.api.DeviceInfo;
import org.wso2.emm.agent.api.GPSTracker;
import org.wso2.emm.agent.api.WiFiConfig;
import org.wso2.emm.agent.beans.ComplianceFeature;
import org.wso2.emm.agent.beans.DeviceAppInfo;
import org.wso2.emm.agent.beans.ServerConfig;
import org.wso2.emm.agent.proxy.interfaces.APIResultCallBack;
import org.wso2.emm.agent.utils.Constants;
import org.wso2.emm.agent.utils.Preference;
import org.wso2.emm.agent.utils.CommonUtils;
import android.app.admin.DevicePolicyManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.media.AudioManager;
import android.net.Uri;
import android.util.Log;
import android.widget.Toast;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
/**
* This class handles all the functionalities related to device management operations.
*/
public class Operation implements APIResultCallBack {
private Context context;
private DevicePolicyManager devicePolicyManager;
private ComponentName cdmDeviceAdmin;
private ApplicationManager appList;
private Resources resources;
private ResultPayload resultBuilder;
private DeviceInfo deviceInfo;
private GPSTracker gps;
private static final String TAG = "Operation Handler";
private static final String LOCATION_INFO_TAG_LONGITUDE = "longitude";
private static final String LOCATION_INFO_TAG_LATITUDE = "latitude";
private static final String APP_INFO_TAG_NAME = "name";
private static final String APP_INFO_TAG_PACKAGE = "package";
private static final String APP_INFO_TAG_ICON = "icon";
private static final int DEFAULT_PASSWORD_LENGTH = 0;
private static final int DEFAULT_VOLUME = 0;
private static final int DEFAULT_FLAG = 0;
private static final int DEFAULT_PASSWORD_MIN_LENGTH = 4;
private static final long DAY_MILLISECONDS_MULTIPLIER = 24 * 60 * 60 * 1000;
/**
 * Creates an operation handler bound to the given context.
 * Wires up the device-policy manager, the agent's device-admin component,
 * the application manager, result builder, device-info helper and GPS tracker.
 *
 * @param context - Android context used for system services and resources.
 */
public Operation(Context context) {
this.context = context;
this.resources = context.getResources();
this.devicePolicyManager =
(DevicePolicyManager) context.getSystemService(Context.DEVICE_POLICY_SERVICE);
this.cdmDeviceAdmin = new ComponentName(context, AgentDeviceAdminReceiver.class);
this.appList = new ApplicationManager(context.getApplicationContext());
this.resultBuilder = new ResultPayload();
deviceInfo = new DeviceInfo(context.getApplicationContext());
gps = new GPSTracker(context.getApplicationContext());
}
/**
 * Executes device management operations on the device.
 *
 * Dispatches on the operation code to the matching handler; unknown codes
 * are logged and ignored.
 *
 * @param operation - Operation object carrying the code and payload.
 * @throws AndroidAgentException if the dispatched handler fails.
 */
public void doTask(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    switch (operation.getCode()) {
        case Constants.Operation.DEVICE_INFO:
            getDeviceInfo(operation);
            break;
        case Constants.Operation.DEVICE_LOCATION:
            getLocationInfo(operation);
            break;
        case Constants.Operation.APPLICATION_LIST:
            getApplicationList(operation);
            break;
        case Constants.Operation.DEVICE_LOCK:
            lockDevice(operation);
            break;
        case Constants.Operation.WIPE_DATA:
            wipeDevice(operation);
            break;
        case Constants.Operation.CLEAR_PASSWORD:
            clearPassword(operation);
            break;
        case Constants.Operation.NOTIFICATION:
            displayNotification(operation);
            break;
        case Constants.Operation.WIFI:
            configureWifi(operation);
            break;
        case Constants.Operation.CAMERA:
            disableCamera(operation);
            break;
        // Single-app and bundle installs share the same bundle-aware handler,
        // so the two previously duplicated cases fall through to one call.
        case Constants.Operation.INSTALL_APPLICATION:
        case Constants.Operation.INSTALL_APPLICATION_BUNDLE:
            installAppBundle(operation);
            break;
        case Constants.Operation.UNINSTALL_APPLICATION:
            uninstallApplication(operation);
            break;
        case Constants.Operation.ENCRYPT_STORAGE:
            encryptStorage(operation);
            break;
        case Constants.Operation.DEVICE_RING:
            ringDevice(operation);
            break;
        case Constants.Operation.DEVICE_MUTE:
            muteDevice(operation);
            break;
        case Constants.Operation.WEBCLIP:
            manageWebClip(operation);
            break;
        case Constants.Operation.PASSWORD_POLICY:
            setPasswordPolicy(operation);
            break;
        case Constants.Operation.INSTALL_GOOGLE_APP:
            installGooglePlayApp(operation);
            break;
        case Constants.Operation.CHANGE_LOCK_CODE:
            changeLockCode(operation);
            break;
        case Constants.Operation.POLICY_BUNDLE:
            // Applying a policy bundle requires active device-admin rights.
            if (devicePolicyManager.isAdminActive(cdmDeviceAdmin)) {
                setPolicyBundle(operation);
            }
            break;
        case Constants.Operation.POLICY_MONITOR:
            monitorPolicy(operation);
            break;
        case Constants.Operation.POLICY_REVOKE:
            revokePolicy(operation);
            break;
        case Constants.Operation.ENTERPRISE_WIPE:
            enterpriseWipe(operation);
            break;
        case Constants.Operation.BLACKLIST_APPLICATIONS:
            blacklistApps(operation);
            break;
        case Constants.Operation.DISENROLL:
            disenrollDevice(operation);
            break;
        default:
            Log.e(TAG, "Invalid operation code received");
            break;
    }
}
/**
 * Retrieve device information.
 *
 * Builds the device-info payload, PUTs it to the server's device endpoint
 * (asynchronously; this class is passed as the APIResultCallBack) and then
 * marks the operation completed with the payload attached.
 *
 * @param operation - Operation object.
 */
public void getDeviceInfo(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
DeviceInfoPayload deviceInfoPayload = new DeviceInfoPayload(context);
deviceInfoPayload.build();
String replyPayload = deviceInfoPayload.getDeviceInfoPayload();
// Reconstruct the server base URL from the IP stored in preferences.
String ipSaved = Preference.getString(context.getApplicationContext(), Constants.IP);
ServerConfig utils = new ServerConfig();
utils.setServerIP(ipSaved);
String url = utils.getAPIServerURL(context) + Constants.DEVICE_ENDPOINT + deviceInfo.getDeviceId();
CommonUtils.callSecuredAPI(context, url,
org.wso2.emm.agent.proxy.utils.Constants.HTTP_METHODS.PUT, replyPayload,
Operation.this,
Constants.DEVICE_INFO_REQUEST_CODE);
// NOTE(review): status is set to completed before the async call returns —
// presumably intentional (fire-and-forget reporting); confirm.
operation.setPayLoad(replyPayload);
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
}
/**
 * Retrieve location device information.
 *
 * Reads the last known coordinates from the GPS tracker and reports them
 * as a JSON payload on the operation.
 *
 * @param operation - Operation object.
 * @throws AndroidAgentException if the JSON payload cannot be built.
 */
public void getLocationInfo(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    final JSONObject location = new JSONObject();
    try {
        final double lat = gps.getLatitude();
        final double lon = gps.getLongitude();
        location.put(LOCATION_INFO_TAG_LATITUDE, lat);
        location.put(LOCATION_INFO_TAG_LONGITUDE, lon);
        operation.setPayLoad(location.toString());
        operation.setStatus(resources.getString(R.string.operation_value_completed));
        resultBuilder.build(operation);
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Device location sent");
        }
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
}
/**
 * Retrieve device application information.
 *
 * Builds a JSON array with one entry (name, package, icon) per installed
 * application and reports it as the operation response.
 *
 * @param operation - Operation object.
 * @throws AndroidAgentException if an entry cannot be serialized.
 */
public void getApplicationList(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    final JSONArray appArray = new JSONArray();
    for (DeviceAppInfo appInfo : new ArrayList<>(appList.getInstalledApps().values())) {
        try {
            final JSONObject entry = new JSONObject();
            // Names may contain arbitrary characters; URI-encode them as before.
            entry.put(APP_INFO_TAG_NAME, Uri.encode(appInfo.getAppname()));
            entry.put(APP_INFO_TAG_PACKAGE, appInfo.getPackagename());
            entry.put(APP_INFO_TAG_ICON, appInfo.getIcon());
            appArray.put(entry);
        } catch (JSONException e) {
            operation.setStatus(resources.getString(R.string.operation_value_error));
            resultBuilder.build(operation);
            throw new AndroidAgentException("Invalid JSON format.", e);
        }
    }
    operation.setOperationResponse(appArray.toString());
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    resultBuilder.build(operation);
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Application list sent");
    }
}
/**
 * Lock the device.
 *
 * NOTE(review): the operation result is reported before lockNow() is invoked —
 * presumably so the result is built while the device is still interactive; confirm.
 *
 * @param operation - Operation object.
 */
public void lockDevice(org.wso2.emm.agent.beans.Operation operation) {
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
devicePolicyManager.lockNow();
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Device locked");
}
}
/**
 * Ring the device.
 *
 * Launches the alert activity in "ring" mode on a fresh task so the user can
 * stop the ringing from the shown screen.
 *
 * @param operation - Operation object.
 */
public void ringDevice(org.wso2.emm.agent.beans.Operation operation) {
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    resultBuilder.build(operation);
    final Intent ringIntent = new Intent(context, AlertActivity.class);
    ringIntent.putExtra(resources.getString(R.string.intent_extra_type),
            resources.getString(R.string.intent_extra_ring));
    ringIntent.putExtra(resources.getString(R.string.intent_extra_message),
            resources.getString(R.string.intent_extra_stop_ringing));
    ringIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP
            | Intent.FLAG_ACTIVITY_CLEAR_TASK);
    context.startActivity(ringIntent);
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Ringing is activated on the device");
    }
}
/**
 * Wipe the device.
 *
 * Validates the PIN supplied in the operation payload against the saved PIN;
 * COPE-owned devices are wiped without a PIN check. On success the wipe is
 * announced via a toast and the operation is reported completed.
 *
 * Fix: the original dereferenced {@code ownershipType.trim()} and
 * {@code savedPin.trim()} without null checks, which NPEs when the
 * DEVICE_TYPE or PIN preference has never been stored.
 *
 * @param operation - Operation object whose payload carries the wipe PIN.
 * @throws AndroidAgentException if the payload is malformed.
 */
public void wipeDevice(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    String savedPin = Preference.getString(context, resources.getString(R.string.shared_pref_pin));
    String ownershipType = Preference.getString(context, Constants.DEVICE_TYPE);
    JSONObject result = new JSONObject();
    try {
        JSONObject wipeKey = new JSONObject(operation.getPayLoad().toString());
        String inputPin = (String) wipeKey.get(resources.getString(R.string.shared_pref_pin));
        // Null-safe checks: both preferences may be unset on a freshly enrolled device.
        boolean isCopeDevice = ownershipType != null
                && Constants.OWNERSHIP_COPE.equals(ownershipType.trim());
        boolean pinMatches = inputPin != null && savedPin != null
                && inputPin.trim().equals(savedPin.trim());
        String status;
        if (isCopeDevice || pinMatches) {
            status = resources.getString(R.string.shared_pref_default_status);
        } else {
            status = resources.getString(R.string.shared_pref_false_status);
        }
        result.put(resources.getString(R.string.operation_status), status);
        operation.setPayLoad(result.toString());
        if (status.equals(resources.getString(R.string.shared_pref_default_status))) {
            Toast.makeText(context, resources.getString(R.string.toast_message_wipe),
                    Toast.LENGTH_LONG).show();
            operation.setStatus(resources.getString(R.string.operation_value_completed));
            resultBuilder.build(operation);
            if (Constants.DEBUG_MODE_ENABLED) {
                Log.d(TAG, "Started to wipe data");
            }
        } else {
            Toast.makeText(context, resources.getString(R.string.toast_message_wipe_failed),
                    Toast.LENGTH_LONG).show();
            operation.setStatus(resources.getString(R.string.operation_value_error));
            resultBuilder.build(operation);
        }
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
}
/**
 * Clear device password.
 *
 * Relaxes the quality and minimum-length constraints, resets the password to
 * the agent's default, locks the device, and leaves quality unspecified so
 * the user can set a new password.
 *
 * @param operation - Operation object.
 */
public void clearPassword(org.wso2.emm.agent.beans.Operation operation) {
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
// Drop constraints first so the reset below cannot be rejected by policy.
devicePolicyManager.setPasswordQuality(cdmDeviceAdmin,
DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED);
devicePolicyManager.setPasswordMinimumLength(cdmDeviceAdmin, DEFAULT_PASSWORD_LENGTH);
devicePolicyManager.resetPassword(resources.getString(R.string.shared_pref_default_string),
DevicePolicyManager.RESET_PASSWORD_REQUIRE_ENTRY);
devicePolicyManager.lockNow();
// Quality is set to UNSPECIFIED again after the lock; order appears deliberate.
devicePolicyManager.setPasswordQuality(cdmDeviceAdmin,
DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED);
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Password cleared");
}
}
/**
 * Display notification.
 *
 * Shows the message carried in the payload via the alert activity; blank
 * messages are silently ignored.
 *
 * @param operation - Operation object.
 * @throws AndroidAgentException if the payload is malformed.
 */
public void displayNotification(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    try {
        operation.setStatus(resources.getString(R.string.operation_value_completed));
        resultBuilder.build(operation);
        final JSONObject payload = new JSONObject(operation.getPayLoad().toString());
        final String message = payload.getString(resources.getString(R.string.intent_extra_message));
        if (message == null || message.isEmpty()) {
            return;
        }
        final Intent alertIntent = new Intent(context, AlertActivity.class);
        alertIntent.putExtra(resources.getString(R.string.intent_extra_message), message);
        alertIntent.putExtra(resources.getString(R.string.intent_extra_type),
                resources.getString(R.string.intent_extra_alert));
        alertIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP |
                Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(alertIntent);
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Notification received");
        }
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
}
/**
 * Configure device WIFI profile.
 *
 * Extracts SSID and password from the payload, saves a WEP configuration,
 * and reports the save result in the operation payload.
 *
 * @param operation - Operation object.
 * @throws AndroidAgentException if the payload or result JSON is malformed.
 */
public void configureWifi(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    String ssid = null;
    String password = null;
    final JSONObject result = new JSONObject();
    try {
        final JSONObject wifiPayload = new JSONObject(operation.getPayLoad().toString());
        if (!wifiPayload.isNull(resources.getString(R.string.intent_extra_ssid))) {
            ssid = (String) wifiPayload.get(resources.getString(R.string.intent_extra_ssid));
        }
        if (!wifiPayload.isNull(resources.getString(R.string.intent_extra_password))) {
            password = (String) wifiPayload.get(resources.getString(R.string.intent_extra_password));
        }
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
    final WiFiConfig config = new WiFiConfig(context.getApplicationContext());
    final boolean saved = config.saveWEPConfig(ssid, password);
    try {
        final String status = saved
                ? resources.getString(R.string.shared_pref_default_status)
                : resources.getString(R.string.shared_pref_false_status);
        result.put(resources.getString(R.string.operation_status), status);
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Wifi configured");
    }
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    operation.setPayLoad(result.toString());
    resultBuilder.build(operation);
}
/**
 * Disable/Enable device camera.
 *
 * @param operation - Operation object; isEnabled() carries the requested camera state.
 * @throws AndroidAgentException declared for interface consistency with other handlers.
 */
public void disableCamera(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    final boolean cameraEnabled = operation.isEnabled();
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    resultBuilder.build(operation);
    // The device-policy flag is "disabled", so invert the requested enabled state.
    devicePolicyManager.setCameraDisabled(cdmDeviceAdmin, !cameraEnabled);
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Camera enabled: " + cameraEnabled);
    }
}
/**
 * Install application/bundle.
 *
 * A single-install payload is one JSON object; a bundle payload is a JSON
 * array of such objects, each installed in turn.
 *
 * @param operation - Operation object.
 * @throws AndroidAgentException if the payload is malformed.
 */
public void installAppBundle(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    try {
        final String code = operation.getCode();
        if (Constants.Operation.INSTALL_APPLICATION.equals(code)) {
            installApplication(new JSONObject(operation.getPayLoad().toString()), operation);
        } else if (Constants.Operation.INSTALL_APPLICATION_BUNDLE.equals(code)) {
            final JSONArray bundle = new JSONArray(operation.getPayLoad().toString());
            for (int i = 0; i < bundle.length(); i++) {
                installApplication(bundle.getJSONObject(i), operation);
            }
        }
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Application bundle installation started");
        }
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
}
/**
 * Uninstall application.
 *
 * Web apps are removed by delegating to manageWebClip with an uninstall
 * payload; native apps are removed through the application manager.
 *
 * @param operation - Operation object.
 * @throws AndroidAgentException if the payload is malformed.
 */
public void uninstallApplication(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    try {
        final JSONObject appData = new JSONObject(operation.getPayLoad().toString());
        final String type = appData.getString(resources.getString(R.string.app_type));
        if (resources.getString(R.string.intent_extra_web).equalsIgnoreCase(type)) {
            final String appUrl = appData.getString(resources.getString(R.string.app_url));
            final String name = appData.getString(resources.getString(R.string.intent_extra_name));
            final JSONObject webClipPayload = new JSONObject();
            webClipPayload.put(resources.getString(R.string.intent_extra_identity), appUrl);
            webClipPayload.put(resources.getString(R.string.intent_extra_title), name);
            webClipPayload.put(resources.getString(R.string.operation_type),
                    resources.getString(R.string.operation_uninstall));
            operation.setPayLoad(webClipPayload.toString());
            manageWebClip(operation);
        } else {
            final String packageName = appData.getString(resources.getString(R.string.app_identifier));
            appList.uninstallApplication(packageName);
            operation.setStatus(resources.getString(R.string.operation_value_completed));
            resultBuilder.build(operation);
        }
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Application started to uninstall");
        }
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
}
/**
 * Encrypt/Decrypt device storage.
 *
 * Starts encryption when requested and currently inactive, or disables it
 * when requested and currently active/activating; then reports whether the
 * device supports storage encryption in the operation payload.
 *
 * Fix: the original queried getStorageEncryptionStatus() repeatedly inside a
 * single condition; the status is now snapshotted once so the paired checks
 * cannot observe different values.
 *
 * @param operation - Operation object; isEnabled() selects encrypt (true) or decrypt.
 * @throws AndroidAgentException if the result JSON cannot be built.
 */
public void encryptStorage(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    boolean doEncrypt = operation.isEnabled();
    JSONObject result = new JSONObject();
    final int encryptionStatus = devicePolicyManager.getStorageEncryptionStatus();
    final boolean supported = encryptionStatus != DevicePolicyManager.ENCRYPTION_STATUS_UNSUPPORTED;
    if (doEncrypt && supported
            && encryptionStatus == DevicePolicyManager.ENCRYPTION_STATUS_INACTIVE) {
        devicePolicyManager.setStorageEncryption(cdmDeviceAdmin, doEncrypt);
        Intent intent = new Intent(DevicePolicyManager.ACTION_START_ENCRYPTION);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);
    } else if (!doEncrypt && supported
            && (encryptionStatus == DevicePolicyManager.ENCRYPTION_STATUS_ACTIVE
            || encryptionStatus == DevicePolicyManager.ENCRYPTION_STATUS_ACTIVATING)) {
        devicePolicyManager.setStorageEncryption(cdmDeviceAdmin, doEncrypt);
    }
    try {
        // Re-query here: the actions above may have changed the status.
        String status;
        if (devicePolicyManager.getStorageEncryptionStatus()
                != DevicePolicyManager.ENCRYPTION_STATUS_UNSUPPORTED) {
            status = resources.getString(R.string.shared_pref_default_status);
        } else {
            status = resources.getString(R.string.shared_pref_false_status);
        }
        result.put(resources.getString(R.string.operation_status), status);
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Issue in parsing json", e);
    }
    operation.setPayLoad(result.toString());
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    resultBuilder.build(operation);
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Encryption process started");
    }
}
/**
 * Mute the device by dropping the ring stream volume to zero.
 *
 * @param operation - Operation object.
 */
private void muteDevice(org.wso2.emm.agent.beans.Operation operation) {
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    resultBuilder.build(operation);
    final AudioManager audio = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    audio.setStreamVolume(AudioManager.STREAM_RING, DEFAULT_VOLUME, DEFAULT_FLAG);
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Device muted");
    }
}
/**
 * Create web clip (Web app shortcut on device home screen).
 *
 * @param operation - Operation object; payload carries identity URL, title and
 *                    the operation type (install/uninstall).
 * @throws AndroidAgentException if the payload is malformed.
 */
public void manageWebClip(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    final String appUrl;
    final String title;
    final String operationType;
    try {
        final JSONObject webClipData = new JSONObject(operation.getPayLoad().toString());
        appUrl = webClipData.getString(resources.getString(R.string.intent_extra_identity));
        title = webClipData.getString(resources.getString(R.string.intent_extra_title));
        operationType = webClipData.getString(resources.getString(R.string.operation_type));
    } catch (JSONException e) {
        operation.setStatus(resources.getString(R.string.operation_value_error));
        resultBuilder.build(operation);
        throw new AndroidAgentException("Invalid JSON format.", e);
    }
    operation.setStatus(resources.getString(R.string.operation_value_completed));
    resultBuilder.build(operation);
    if (appUrl != null && title != null) {
        appList.manageWebAppBookmark(appUrl, title, operationType);
    }
    if (Constants.DEBUG_MODE_ENABLED) {
        Log.d(TAG, "Created webclip");
    }
}
/**
 * Set device password policy.
 *
 * Applies each policy attribute present in the payload to the device-policy
 * manager (failed-attempt limit, minimum length, PIN history, complex-character
 * minimum, alphanumeric/complex quality, PIN age). If the active password no
 * longer satisfies the policy, the alert activity is launched so the user can
 * update it.
 *
 * @param operation - Operation object whose payload is the policy JSON.
 * @throws AndroidAgentException if the payload is malformed.
 */
public void setPasswordPolicy(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
int attempts, length, history, specialChars;
String alphanumeric, complex;
boolean isAlphanumeric, isComplex;
long timout;
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
try {
JSONObject policyData = new JSONObject(operation.getPayLoad().toString());
// Maximum failed attempts before wipe; applied only when present and non-empty.
if (!policyData.isNull(resources.getString(R.string.policy_password_max_failed_attempts)) &&
policyData.get(resources.getString(R.string.policy_password_max_failed_attempts)) != null) {
if (!policyData.get(resources.getString(R.string.policy_password_max_failed_attempts)).toString().isEmpty()) {
attempts = policyData.getInt(resources.getString(R.string.policy_password_max_failed_attempts));
devicePolicyManager.setMaximumFailedPasswordsForWipe(cdmDeviceAdmin, attempts);
}
}
// Minimum password length; empty value restores the default minimum.
if (!policyData.isNull(resources.getString(R.string.policy_password_min_length)) &&
policyData.get(resources.getString(R.string.policy_password_min_length)) != null) {
if (!policyData.get(resources.getString(R.string.policy_password_min_length)).toString().isEmpty()) {
length = policyData.getInt(resources.getString(R.string.policy_password_min_length));
devicePolicyManager.setPasswordMinimumLength(cdmDeviceAdmin, length);
} else {
devicePolicyManager.setPasswordMinimumLength(cdmDeviceAdmin, DEFAULT_PASSWORD_MIN_LENGTH);
}
}
// PIN history length; empty value clears the restriction.
if (!policyData.isNull(resources.getString(R.string.policy_password_pin_history)) &&
policyData.get(resources.getString(R.string.policy_password_pin_history)) != null) {
if (!policyData.get(resources.getString(R.string.policy_password_pin_history)).toString().isEmpty()) {
history = policyData.getInt(resources.getString(R.string.policy_password_pin_history));
devicePolicyManager.setPasswordHistoryLength(cdmDeviceAdmin, history);
} else {
devicePolicyManager.setPasswordHistoryLength(cdmDeviceAdmin, DEFAULT_PASSWORD_LENGTH);
}
}
// Minimum number of symbol characters; empty value clears the restriction.
if (!policyData.isNull(resources.getString(R.string.policy_password_min_complex_chars)) &&
policyData.get(resources.getString(R.string.policy_password_min_complex_chars)) != null) {
if (!policyData.get(resources.getString(R.string.policy_password_min_complex_chars)).toString().isEmpty()) {
specialChars = policyData.getInt(resources.getString(R.string.policy_password_min_complex_chars));
devicePolicyManager.setPasswordMinimumSymbols(cdmDeviceAdmin, specialChars);
} else {
devicePolicyManager.setPasswordMinimumSymbols(cdmDeviceAdmin, DEFAULT_PASSWORD_LENGTH);
}
}
// Alphanumeric requirement: the server may send either a String or a Boolean.
if (!policyData.isNull(resources.getString(R.string.policy_password_require_alphanumeric)) &&
policyData.get(resources.getString(R.string.policy_password_require_alphanumeric)) != null) {
if (policyData.get(resources.getString(
R.string.policy_password_require_alphanumeric)) instanceof String) {
alphanumeric = (String) policyData.get(resources.getString(
R.string.policy_password_require_alphanumeric));
if (alphanumeric.equals(resources.getString(R.string.shared_pref_default_status))) {
devicePolicyManager.setPasswordQuality(cdmDeviceAdmin,
DevicePolicyManager.PASSWORD_QUALITY_ALPHANUMERIC);
}
} else if (policyData.get(resources.getString(
R.string.policy_password_require_alphanumeric)) instanceof Boolean) {
isAlphanumeric = policyData.getBoolean(resources.getString(
R.string.policy_password_require_alphanumeric));
if (isAlphanumeric) {
devicePolicyManager.setPasswordQuality(cdmDeviceAdmin,
DevicePolicyManager.PASSWORD_QUALITY_ALPHANUMERIC);
}
}
}
// "Allow simple" flag (String or Boolean); when simple passwords are NOT
// allowed, a complex password quality is enforced.
if (!policyData.isNull(resources.getString(R.string.policy_password_allow_simple)) &&
policyData.get(resources.getString(R.string.policy_password_allow_simple)) != null) {
if (policyData.get(resources.getString(
R.string.policy_password_allow_simple)) instanceof String) {
complex = (String) policyData.get(resources.getString(
R.string.policy_password_allow_simple));
if (!complex.equals(resources.getString(R.string.shared_pref_default_status))) {
devicePolicyManager.setPasswordQuality(cdmDeviceAdmin,
DevicePolicyManager.PASSWORD_QUALITY_COMPLEX);
}
} else if (policyData.get(resources.getString(
R.string.policy_password_allow_simple)) instanceof Boolean) {
isComplex = policyData.getBoolean(
resources.getString(R.string.policy_password_allow_simple));
if (!isComplex) {
devicePolicyManager.setPasswordQuality(cdmDeviceAdmin,
DevicePolicyManager.PASSWORD_QUALITY_COMPLEX);
}
}
}
// PIN age in days, converted to a millisecond expiration timeout.
if (!policyData.isNull(resources.getString(R.string.policy_password_pin_age_in_days)) &&
policyData.get(resources.getString(R.string.policy_password_pin_age_in_days)) != null) {
if (!policyData.get(resources.getString(R.string.policy_password_pin_age_in_days)).toString().isEmpty()) {
int daysOfExp = policyData.getInt(resources.getString(R.string.policy_password_pin_age_in_days));
timout = daysOfExp * DAY_MILLISECONDS_MULTIPLIER;
devicePolicyManager.setPasswordExpirationTimeout(cdmDeviceAdmin, timout);
}
}
// Prompt the user when the current password no longer satisfies the policy.
if (!devicePolicyManager.isActivePasswordSufficient()) {
Intent intent = new Intent(context, AlertActivity.class);
intent.putExtra(resources.getString(R.string.intent_extra_type),
resources.getString(R.string.intent_extra_password_setting));
intent.putExtra(resources.getString(R.string.intent_extra_message),
resources.getString(R.string.policy_violation_password_tail));
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP |
Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Password policy set");
}
} catch (JSONException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Invalid JSON format.", e);
}
}
/**
* Install google play applications.
*
* @param operation - Operation object.
*/
public void installGooglePlayApp(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
String packageName;
try {
JSONObject appData = new JSONObject(operation.getPayLoad().toString());
packageName = (String) appData.get(resources.getString(R.string.intent_extra_package));
} catch (JSONException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Invalid JSON format.", e);
}
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Started installing GoogleApp");
}
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
triggerGooglePlayApp(packageName);
}
/**
* Open Google Play store application with an application given.
*
* @param packageName - Application package name.
*/
public void triggerGooglePlayApp(String packageName) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.setData(Uri.parse(Constants.GOOGLE_PLAY_APP_URI + packageName));
context.startActivity(intent);
}
/**
* Change device lock code.
*
* @param operation - Operation object.
*/
public void changeLockCode(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
devicePolicyManager.setPasswordMinimumLength(cdmDeviceAdmin, DEFAULT_PASSWORD_MIN_LENGTH);
String password = null;
try {
JSONObject lockData = new JSONObject(operation.getPayLoad().toString());
if (!lockData.isNull(resources.getString(R.string.intent_extra_lock_code))) {
password =
(String) lockData.get(resources.getString(R.string.intent_extra_lock_code));
}
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
if (password != null && !password.isEmpty()) {
devicePolicyManager.resetPassword(password,
DevicePolicyManager.RESET_PASSWORD_REQUIRE_ENTRY);
devicePolicyManager.lockNow();
}
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Lock code changed");
}
} catch (JSONException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Invalid JSON format.", e);
}
}
/**
* Set policy bundle.
*
* @param operation - Operation object.
*/
public void setPolicyBundle(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
String payload = operation.getPayLoad().toString();
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Policy payload: " + payload);
}
PolicyOperationsMapper operationsMapper = new PolicyOperationsMapper();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
try {
if(payload != null){
Preference.putString(context, resources.getString(R.string.shared_pref_policy_applied), payload);
}
List<org.wso2.emm.agent.beans.Operation> operations = mapper.readValue(
payload,
mapper.getTypeFactory().constructCollectionType(List.class,
org.wso2.emm.agent.beans.Operation.class));
for (org.wso2.emm.agent.beans.Operation op : operations) {
op = operationsMapper.getOperation(op);
this.doTask(op);
}
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Policy applied");
}
} catch (IOException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Error occurred while parsing stream", e);
}
}
/**
* Monitor currently enforced policy for compliance.
*
* @param operation - Operation object.
*/
public void monitorPolicy(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
String payload = Preference.getString(context, resources.getString(R.string.shared_pref_policy_applied));
PolicyOperationsMapper operationsMapper = new PolicyOperationsMapper();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
PolicyComplianceChecker policyChecker = new PolicyComplianceChecker(context);
ArrayList<ComplianceFeature> result = new ArrayList<>();
try {
if(payload != null) {
List<org.wso2.emm.agent.beans.Operation> operations = mapper.readValue(
payload,
mapper.getTypeFactory().constructCollectionType(List.class,
org.wso2.emm.agent.beans.Operation.class));
for (org.wso2.emm.agent.beans.Operation op : operations) {
op = operationsMapper.getOperation(op);
result.add(policyChecker.checkPolicyState(op));
}
operation.setStatus(resources.getString(R.string.operation_value_completed));
operation.setPayLoad(mapper.writeValueAsString(result));
resultBuilder.build(operation);
}
} catch (IOException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Error occurred while parsing stream.", e);
}
}
    /**
     * Revoke currently enforced policy.
     *
     * Delegates the actual revocation to CommonUtils and then reports the
     * operation as completed.
     *
     * @param operation - Operation object.
     */
    public void revokePolicy(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
        CommonUtils.revokePolicy(context);
        operation.setStatus(resources.getString(R.string.operation_value_completed));
        resultBuilder.build(operation);
    }
/**
* Enterprise wipe the device.
*
* @param operation - Operation object.
*/
public void enterpriseWipe(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
operation.setStatus(resources.getString(R.string.operation_value_completed));
resultBuilder.build(operation);
CommonUtils.disableAdmin(context);
Intent intent = new Intent(context, ServerDetails.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Started enterprise wipe");
}
}
/**
* Blacklisting apps.
*
* @param operation - Operation object.
*/
public void blacklistApps(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
ArrayList<DeviceAppInfo> apps = new ArrayList<>(appList.getInstalledApps().values());
JSONArray appList = new JSONArray();
JSONArray blacklistApps = new JSONArray();
String identity;
try {
JSONObject resultApp = new JSONObject(operation.getPayLoad().toString());
if (!resultApp.isNull(resources.getString(R.string.app_identifier))) {
blacklistApps = resultApp.getJSONArray(resources.getString(R.string.app_identifier));
}
} catch (JSONException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Invalid JSON format.", e);
}
for (int i = 0; i < blacklistApps.length(); i++) {
try {
identity = blacklistApps.getString(i);
for (DeviceAppInfo app : apps) {
JSONObject result = new JSONObject();
result.put(resources.getString(R.string.intent_extra_name), app.getAppname());
result.put(resources.getString(R.string.intent_extra_package),
app.getPackagename());
if (identity.trim().equals(app.getPackagename())) {
result.put(resources.getString(R.string.intent_extra_not_violated), false);
result.put(resources.getString(R.string.intent_extra_package),
app.getPackagename());
} else {
result.put(resources.getString(R.string.intent_extra_not_violated), true);
}
appList.put(result);
}
} catch (JSONException e) {
operation.setStatus(resources.getString(R.string.operation_value_error));
resultBuilder.build(operation);
throw new AndroidAgentException("Invalid JSON format.", e);
}
}
operation.setStatus(resources.getString(R.string.operation_value_completed));
operation.setPayLoad(appList.toString());
resultBuilder.build(operation);
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "Marked blacklist app");
}
}
    /**
     * Install an Application.
     *
     * Dispatches on the app type found in the payload:
     * enterprise -> install an APK from the given URL,
     * public     -> open the Google Play page for the given package identifier,
     * web        -> repackage the payload and create a web clip via manageWebClip.
     *
     * @param data      - JSON payload describing the application.
     * @param operation - Operation object.
     * @throws AndroidAgentException if the payload is malformed or the type is unknown.
     */
    private void installApplication(JSONObject data, org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
        String appUrl;
        String type;
        String name;
        String operationType;
        try {
            // Silently does nothing when the payload carries no app type.
            if (!data.isNull(resources.getString(R.string.app_type))) {
                type = data.getString(resources.getString(R.string.app_type));
                if (type.equalsIgnoreCase(resources.getString(R.string.intent_extra_enterprise))) {
                    // Enterprise app: install directly from the download URL.
                    appUrl = data.getString(resources.getString(R.string.app_url));
                    operation.setStatus(resources.getString(R.string.operation_value_completed));
                    resultBuilder.build(operation);
                    appList.installApp(appUrl);
                } else if (type.equalsIgnoreCase(resources.getString(R.string.intent_extra_public))) {
                    // Public app: hand off to the Play store using the package identifier.
                    appUrl = data.getString(resources.getString(R.string.app_identifier));
                    operation.setStatus(resources.getString(R.string.operation_value_completed));
                    resultBuilder.build(operation);
                    triggerGooglePlayApp(appUrl);
                } else if (type.equalsIgnoreCase(resources.getString(R.string.intent_extra_web))) {
                    // Web clip: build a new payload (identity/title/operation type) and delegate.
                    name = data.getString(resources.getString(R.string.intent_extra_name));
                    appUrl = data.getString(resources.getString(R.string.app_url));
                    operationType = resources.getString(R.string.operation_install);
                    JSONObject payload = new JSONObject();
                    payload.put(resources.getString(R.string.intent_extra_identity), appUrl);
                    payload.put(resources.getString(R.string.intent_extra_title), name);
                    payload.put(resources.getString(R.string.operation_type), operationType);
                    operation.setPayLoad(payload.toString());
                    manageWebClip(operation);
                } else {
                    // Unknown type: report the error and abort.
                    operation.setStatus(resources.getString(R.string.operation_value_error));
                    resultBuilder.build(operation);
                    throw new AndroidAgentException("Invalid application details");
                }
                if (Constants.DEBUG_MODE_ENABLED) {
                    Log.d(TAG, "Application installation started");
                }
            }
        } catch (JSONException e) {
            operation.setStatus(resources.getString(R.string.operation_value_error));
            resultBuilder.build(operation);
            throw new AndroidAgentException("Invalid JSON format.", e);
        }
    }
public void disenrollDevice(org.wso2.emm.agent.beans.Operation operation) {
boolean status = operation.isEnabled();
if (status) {
CommonUtils.disableAdmin(context);
}
}
    /**
     * This method returns the completed operations list.
     *
     * @return operation list collected by the result builder.
     */
    public List<org.wso2.emm.agent.beans.Operation> getResultPayload() {
        return resultBuilder.getResultPayload();
    }
/**
* This method is being invoked when get info operation get executed.
*
* @param result response result
* @param requestCode code of the requested operation
*/
@Override
public void onReceiveAPIResult(Map<String, String> result, int requestCode) {
String responseStatus;
String response;
if (requestCode == Constants.DEVICE_INFO_REQUEST_CODE) {
if (result != null) {
responseStatus = result.get(Constants.STATUS_KEY);
if (Constants.Status.SUCCESSFUL.equals(responseStatus)) {
response = result.get(Constants.RESPONSE);
if (response != null && !response.isEmpty()) {
if (Constants.DEBUG_MODE_ENABLED) {
Log.d(TAG, "onReceiveAPIResult." + response);
Log.d(TAG, "Device information sent");
}
}
}
}
}
}
}
| |
package coza.opencollab.unipoole.service.util.impl;
import coza.opencollab.unipoole.UnipooleException;
import coza.opencollab.unipoole.service.Defaults;
import static coza.opencollab.unipoole.service.ErrorCodes.FILE_MANIPULATION;
import coza.opencollab.unipoole.service.util.StorageEntry;
import coza.opencollab.unipoole.service.util.StorageFileHandler;
import coza.opencollab.unipoole.service.util.StorageFileReader;
import coza.opencollab.unipoole.service.util.StorageFileWriter;
import coza.opencollab.unipoole.service.util.StorageMemoryWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import org.apache.log4j.Logger;
import org.springframework.util.FileCopyUtils;
/**
 * A compressed file handler for zip files.
 *
 * @author OpenCollab
 * @since 1.0.0
 */
public class ZipFileHandler implements StorageFileHandler{
    /**
     * The logger
     */
    private static final Logger LOG = Logger.getLogger(ZipFileHandler.class);
    /**
     * {@inheritDoc}
     */
    @Override
    public String getExtention(){
        return "zip";
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String getMimeType(){
        return "application/zip";
    }
    /**
     * {@inheritDoc}
     * <p>
     * A file can be handled if it exists, is a normal file and carries this
     * handler's extension.
     */
    @Override
    public boolean canHandle(File source) {
        if(source == null || !source.exists() || source.isDirectory()){
            return false;
        }
        String name = source.getName();
        int dotIndex = name.lastIndexOf('.');
        //require an actual extension: previously a file named just 'zip' (no dot) was accepted.
        return dotIndex >= 0 && getExtention().equals(name.substring(dotIndex + 1));
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String getDestinationName(String baseName) {
        return baseName + "." + getExtention();
    }
    /**
     * {@inheritDoc}
     *
     * @throws FileNotFoundException if the named entry does not exist in the zip.
     */
    @Override
    public String getFileContents(File root, String fileName) throws FileNotFoundException, IOException{
        ZipFile zip = null;
        try{
            zip = new ZipFile(root);
            ZipEntry entry = zip.getEntry(fileName);
            return FileCopyUtils.copyToString(new InputStreamReader(zip.getInputStream(entry), Defaults.UTF8));
        }catch(NullPointerException e){
            //zip.getInputStream(entry) throws a NullPointerException when the entry is missing.
            //Report the file that was actually asked for (the message used to hard-code 'about.json').
            throw new FileNotFoundException("The '" + fileName + "' file does not exist.");
        }finally{
            try{
                if(zip != null){
                    zip.close();
                }
            }catch(Exception e){
                LOG.warn("Could not close zip file.", e);
            }
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Returns the contents of the first of the given file names that exists in the zip.
     *
     * @throws FileNotFoundException if none of the given files exist, or no names were given.
     */
    @Override
    public String getFileContents(File root, String... fileNames) throws FileNotFoundException, IOException{
        FileNotFoundException ex = null;
        for(String fileName: fileNames){
            try{
                return getFileContents(root, fileName);
            }catch(FileNotFoundException e){
                //keep the last failure and try the next candidate.
                ex = e;
            }
        }
        if(ex == null){
            //guard: with an empty varargs array the original threw a NullPointerException here.
            throw new FileNotFoundException("No file names given.");
        }
        throw ex;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void writeFromDirectory(File source, File destination) throws IOException {
        FileOutputStream fos = null;
        CZipOutputStream zos = null;
        try{
            fos = new FileOutputStream(destination);
            zos = new CZipOutputStream(fos);
            //entry names are made relative to the source directory; +1 strips the separator.
            writeZip(zos, source, source.getAbsolutePath().length() + 1);
        }finally{
            try{
                if(zos != null){
                    zos.trueClose();
                }
            }catch(Exception e){
                LOG.warn("Could not close zip output stream.", e);
            }
            try{
                if(fos != null){
                    fos.close();
                }
            }catch(Exception e){
                LOG.warn("Could not close file output stream.", e);
            }
        }
    }
    /**
     * Write the source files to a zip stream, recursing into sub directories.
     *
     * @param zos the zip stream to write to.
     * @param source the directory whose children must be written.
     * @param pathLength the length of the base-path prefix to strip from entry names.
     * @throws IOException if the directory cannot be listed or an entry cannot be written.
     */
    private void writeZip(ZipOutputStream zos, File source, int pathLength) throws IOException{
        File[] children = source.listFiles();
        if(children == null){
            //File.listFiles returns null on I/O errors or when source is not a directory;
            //the original dereferenced it and threw a NullPointerException.
            throw new IOException("Could not list the directory (" + source.getAbsolutePath() + ").");
        }
        for(File f: children){
            if(f.isFile()){
                String zipFileName = f.getAbsolutePath().substring(pathLength);
                zos.putNextEntry(new ZipEntry(zipFileName));
                FileCopyUtils.copy(FileCopyUtils.copyToByteArray(f), zos);
            }else{
                writeZip(zos, f, pathLength);
            }
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void writeToDirectory(File source, File destination) throws IOException{
        ZipFile zip = null;
        try{
            zip = new ZipFile(source);
            Enumeration<? extends ZipEntry> e = zip.entries();
            while(e.hasMoreElements()){
                ZipEntry entry = e.nextElement();
                File entryFile = new File(destination, entry.getName());
                if(entry.isDirectory()){
                    entryFile.mkdirs();
                    continue;
                }
                //make sure the parent directories exist before extracting the file.
                entryFile.getParentFile().mkdirs();
                entryFile.createNewFile();
                FileCopyUtils.copy(FileCopyUtils.copyToByteArray(zip.getInputStream(entry)), entryFile);
            }
        }finally{
            try{
                if(zip != null){
                    zip.close();
                }
            }catch(Exception e){
                LOG.warn("Could not close zip file.", e);
            }
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public StorageFileReader getStorageFileReader(File source) {
        return new ZipFileReader(source);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public StorageFileWriter getStorageFileWriter(File destination){
        return new ZipFileWriter(destination);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public StorageMemoryWriter getMemoryWriter(){
        return new ZipMemoryWriter();
    }
    /**
     * Helper to get the name of a entry.
     * Static: it uses no instance state.
     */
    private static String getEntryName(ZipEntry zipEntry){
        String name = zipEntry.getName();
        if(zipEntry.isDirectory() && name.endsWith("/")){
            //the directories return a name ending on a '/'.
            name = name.substring(0, zipEntry.getName().length()-1);
        }
        int index = name.lastIndexOf('/');
        if(index < 0){
            return name;
        }else{
            return name.substring(index + 1);
        }
    }
    /**
     * Helper to get the directory of a entry.
     * Static: it uses no instance state.
     */
    private static String getEntryDir(ZipEntry zipEntry){
        String name = zipEntry.getName();
        if(zipEntry.isDirectory() && name.endsWith("/")){
            //the directories return a name ending on a '/'.
            name = name.substring(0, zipEntry.getName().length()-1);
        }
        int index = name.lastIndexOf('/');
        if(index < 0){
            return "";
        }else{
            return name.substring(0, index);
        }
    }
    // Inner Classes ----------------------------------------------------------------------------
    /**
     * The StorageMemoryWriter for zip.
     */
    private static class ZipMemoryWriter implements StorageMemoryWriter{
        /**
         * The actual content.
         */
        private byte[] content;
        /**
         * {@inheritDoc}
         */
        @Override
        public String getMimeType(){
            return "application/zip";
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void write(StorageEntry entry) {
            write(Collections.singletonList(entry));
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void write(List<StorageEntry> entries) {
            ByteArrayOutputStream bos = null;
            CZipOutputStream zos = null;
            try {
                bos = new ByteArrayOutputStream();
                zos = new CZipOutputStream(bos);
                for(StorageEntry entry: entries){
                    //directories need no explicit entries in the in-memory zip.
                    if(!entry.isDirectory()){
                        zos.putNextEntry(new ZipEntry(entry.getRelativePath()));
                        FileCopyUtils.copy(entry.getContents(), zos);
                    }
                }
            } catch (Exception ex) {
                throw new UnipooleException(FILE_MANIPULATION, "Could not write zip memory.", ex);
            } finally {
                try {
                    if (zos != null) {
                        zos.trueClose();
                    }
                } catch (Exception e) {
                    LOG.warn("Could not close zip output stream.", e);
                }
                try {
                    if (bos != null) {
                        bos.close();
                    }
                } catch (Exception e) {
                    LOG.warn("Could not close zip file output stream.", e);
                }
            }
            if(bos != null){
                content = bos.toByteArray();
            }
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public byte[] getContent() {
            return content;
        }
    }
    /**
     * The StorageFileWriter for zip.
     */
    private static class ZipFileWriter implements StorageFileWriter{
        /**
         * The root file object
         */
        private File root;
        /**
         * Constructor setting the root.
         */
        ZipFileWriter(File root){
            this.root = root;
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void write(StorageEntry entry) {
            write(Collections.singletonList(entry));
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void write(List<StorageEntry> entries) {
            FileOutputStream fos = null;
            CZipOutputStream zos = null;
            try {
                fos = new FileOutputStream(root);
                zos = new CZipOutputStream(fos);
                for(StorageEntry entry: entries){
                    if(!entry.isDirectory()){
                        zos.putNextEntry(new ZipEntry(entry.getRelativePath()));
                        FileCopyUtils.copy(entry.getContents(), zos);
                    }
                }
            } catch (Exception ex) {
                throw new UnipooleException(FILE_MANIPULATION, "Could not write zip file (" + root.getAbsolutePath() + ").", ex);
            } finally {
                try {
                    if (zos != null) {
                        zos.trueClose();
                    }
                } catch (Exception e) {
                    LOG.warn("Could not close zip output stream.", e);
                }
                try {
                    if (fos != null) {
                        fos.close();
                    }
                } catch (Exception e) {
                    LOG.warn("Could not close zip file output stream.", e);
                }
            }
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void close() {
            root = null;
        }
    }
    /**
     * The CodeFileReader for zip files.
     * Static nested class: it holds no reference to the enclosing handler.
     */
    private static class ZipFileReader implements StorageFileReader{
        /**
         * The zip file object
         */
        private ZipFile zip;
        /**
         * Constructor setting the zip file.
         */
        ZipFileReader(File zip){
            try {
                this.zip = new ZipFile(zip);
            } catch (Exception e) {
                throw new UnipooleException(FILE_MANIPULATION, "Cannot create the zip file (" + zip.getAbsolutePath() + ")", e);
            }
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public List<StorageEntry> getEntries(){
            List<StorageEntry> entries = new ArrayList<StorageEntry>();
            Enumeration<? extends ZipEntry> zipEntries = zip.entries();
            while(zipEntries.hasMoreElements()){
                entries.add(new ZipFileEntry(zip, zipEntries.nextElement()));
            }
            return entries;
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void close(){
            try{
                zip.close();
            } catch (Exception e) {
                throw new UnipooleException(FILE_MANIPULATION, "Cannot close the zip file (" + zip.getName() + ")", e);
            }
        }
    }
    /**
     * The zip entries, files and directories.
     * Static nested class: it holds no reference to the enclosing handler.
     */
    private static class ZipFileEntry extends StorageEntry{
        /**
         * The zip file object
         */
        private ZipFile zip;
        /**
         * The zip entry this represents.
         */
        private ZipEntry zipEntry;
        /**
         * Constructor setting the zip entry.
         */
        ZipFileEntry(ZipFile zip, ZipEntry zipEntry){
            super(getEntryName(zipEntry), getEntryDir(zipEntry), zipEntry.isDirectory());
            this.zip = zip;
            this.zipEntry = zipEntry;
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public byte[] getContents(){
            try {
                return FileCopyUtils.copyToByteArray(zip.getInputStream(zipEntry));
            } catch (IOException e) {
                throw new UnipooleException(FILE_MANIPULATION, "Could not read the zip file (" + zip.getName() + ").", e);
            }
        }
    }
    /**
     * A extention of ZipOutputStream to manage the close method
     */
    private static class CZipOutputStream extends ZipOutputStream {
        /**
         * Creates a new CZIP output stream.
         *
         * @param out the actual output stream
         */
        public CZipOutputStream(OutputStream out) {
            super(out);
        }
        /**
         * We don't close the stream here. This is because we use the Spring FileCopyUtil class that calls this method
         * when it should call closeEntry, so that is what we do.
         */
        @Override
        public void close() throws IOException {
            closeEntry();
        }
        /**
         * We call this method to close the stream.
         */
        public void trueClose() throws IOException {
            super.close();
        }
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.sftp;
import org.pentaho.di.job.entry.validator.AbstractFileValidator;
import org.pentaho.di.job.entry.validator.AndValidator;
import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a SFTP job entry.
*
* @author Matt
* @since 05-11-2003
*
*/
public class JobEntrySFTP extends JobEntryBase implements Cloneable, JobEntryInterface {
  private static Class<?> PKG = JobEntrySFTP.class; // for i18n purposes, needed by Translator2!!
  // Standard SFTP port, used when no port is configured.
  private static final int DEFAULT_PORT = 22;
  // Connection settings; all serialized via getXML/loadXML and the repository methods below.
  private String serverName;
  private String serverPort; // kept as a String (may hold a variable); defaults to "22"
  private String userName;
  private String password; // persisted encrypted via Encr.encryptPasswordIfNotUsingVariables
  private String sftpDirectory;
  private String targetDirectory;
  private String wildcard;
  private boolean remove;
  private boolean isaddresult; // defaults to true when absent from XML/repository
  private boolean createtargetfolder;
  private boolean copyprevious;
  // Key-file based authentication.
  private boolean usekeyfilename;
  private String keyfilename;
  private String keyfilepass; // persisted encrypted
  private String compression; // defaults to "none"
  // proxy
  private String proxyType;
  private String proxyHost;
  private String proxyPort;
  private String proxyUsername;
  private String proxyPassword; // persisted encrypted
public JobEntrySFTP( String n ) {
super( n, "" );
serverName = null;
serverPort = "22";
isaddresult = true;
createtargetfolder = false;
copyprevious = false;
usekeyfilename = false;
keyfilename = null;
keyfilepass = null;
compression = "none";
proxyType = null;
proxyHost = null;
proxyPort = null;
proxyUsername = null;
proxyPassword = null;
}
  /**
   * Creates the job entry with an empty name and default settings.
   */
  public JobEntrySFTP() {
    this( "" );
  }
public Object clone() {
JobEntrySFTP je = (JobEntrySFTP) super.clone();
return je;
}
  /**
   * Serializes this job entry's settings to an XML fragment.
   * Passwords are written encrypted unless they reference variables.
   *
   * @return the XML representation of this job entry.
   */
  public String getXML() {
    StringBuilder retval = new StringBuilder( 200 );
    retval.append( super.getXML() );
    retval.append( " " ).append( XMLHandler.addTagValue( "servername", serverName ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "serverport", serverPort ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "username", userName ) );
    retval.append( " " ).append(
      XMLHandler.addTagValue( "password", Encr.encryptPasswordIfNotUsingVariables( getPassword() ) ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "sftpdirectory", sftpDirectory ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "targetdirectory", targetDirectory ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildcard ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "remove", remove ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "isaddresult", isaddresult ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "createtargetfolder", createtargetfolder ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "copyprevious", copyprevious ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "usekeyfilename", usekeyfilename ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "keyfilename", keyfilename ) );
    retval.append( " " ).append(
      XMLHandler.addTagValue( "keyfilepass", Encr.encryptPasswordIfNotUsingVariables( keyfilepass ) ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "compression", compression ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "proxyType", proxyType ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "proxyHost", proxyHost ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "proxyPort", proxyPort ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "proxyUsername", proxyUsername ) );
    retval.append( " " ).append(
      XMLHandler.addTagValue( "proxyPassword", Encr.encryptPasswordIfNotUsingVariables( proxyPassword ) ) );
    return retval.toString();
  }
  /**
   * Restores this job entry's settings from the given XML node.
   * Mirrors the tags written by {@link #getXML()}; encrypted values are decrypted.
   *
   * @param entrynode the XML node to read from.
   * @param databases the available databases (required by the interface).
   * @param slaveServers the available slave servers.
   * @param rep the repository (may be null).
   * @param metaStore the metastore.
   * @throws KettleXMLException if the node cannot be parsed.
   */
  public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
    Repository rep, IMetaStore metaStore ) throws KettleXMLException {
    try {
      super.loadXML( entrynode, databases, slaveServers );
      serverName = XMLHandler.getTagValue( entrynode, "servername" );
      serverPort = XMLHandler.getTagValue( entrynode, "serverport" );
      userName = XMLHandler.getTagValue( entrynode, "username" );
      password = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "password" ) );
      sftpDirectory = XMLHandler.getTagValue( entrynode, "sftpdirectory" );
      targetDirectory = XMLHandler.getTagValue( entrynode, "targetdirectory" );
      wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
      remove = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "remove" ) );
      String addresult = XMLHandler.getTagValue( entrynode, "isaddresult" );
      // Default to true when the tag is absent or empty (backward compatibility).
      if ( Utils.isEmpty( addresult ) ) {
        isaddresult = true;
      } else {
        isaddresult = "Y".equalsIgnoreCase( addresult );
      }
      createtargetfolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createtargetfolder" ) );
      copyprevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "copyprevious" ) );
      usekeyfilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "usekeyfilename" ) );
      keyfilename = XMLHandler.getTagValue( entrynode, "keyfilename" );
      keyfilepass = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "keyfilepass" ) );
      compression = XMLHandler.getTagValue( entrynode, "compression" );
      proxyType = XMLHandler.getTagValue( entrynode, "proxyType" );
      proxyHost = XMLHandler.getTagValue( entrynode, "proxyHost" );
      proxyPort = XMLHandler.getTagValue( entrynode, "proxyPort" );
      proxyUsername = XMLHandler.getTagValue( entrynode, "proxyUsername" );
      proxyPassword =
        Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "proxyPassword" ) );
    } catch ( KettleXMLException xe ) {
      throw new KettleXMLException( "Unable to load job entry of type 'SFTP' from XML node", xe );
    }
  }
  /**
   * Restores this job entry's settings from the repository.
   * Mirrors the attributes written by {@link #saveRep}; encrypted values are decrypted.
   *
   * @param rep the repository to read from.
   * @param metaStore the metastore.
   * @param id_jobentry the id of this job entry in the repository.
   * @param databases the available databases (required by the interface).
   * @param slaveServers the available slave servers.
   * @throws KettleException if the attributes cannot be read.
   */
  public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    try {
      serverName = rep.getJobEntryAttributeString( id_jobentry, "servername" );
      serverPort = rep.getJobEntryAttributeString( id_jobentry, "serverport" );
      userName = rep.getJobEntryAttributeString( id_jobentry, "username" );
      password =
        Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );
      sftpDirectory = rep.getJobEntryAttributeString( id_jobentry, "sftpdirectory" );
      targetDirectory = rep.getJobEntryAttributeString( id_jobentry, "targetdirectory" );
      wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
      remove = rep.getJobEntryAttributeBoolean( id_jobentry, "remove" );
      // Read as a string first so an absent attribute can default to true;
      // only when present is it re-read as a boolean.
      String addToResult = rep.getJobEntryAttributeString( id_jobentry, "isaddresult" );
      if ( Utils.isEmpty( addToResult ) ) {
        isaddresult = true;
      } else {
        isaddresult = rep.getJobEntryAttributeBoolean( id_jobentry, "isaddresult" );
      }
      createtargetfolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createtargetfolder" );
      copyprevious = rep.getJobEntryAttributeBoolean( id_jobentry, "copyprevious" );
      usekeyfilename = rep.getJobEntryAttributeBoolean( id_jobentry, "usekeyfilename" );
      keyfilename = rep.getJobEntryAttributeString( id_jobentry, "keyfilename" );
      keyfilepass =
        Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "keyfilepass" ) );
      compression = rep.getJobEntryAttributeString( id_jobentry, "compression" );
      proxyType = rep.getJobEntryAttributeString( id_jobentry, "proxyType" );
      proxyHost = rep.getJobEntryAttributeString( id_jobentry, "proxyHost" );
      proxyPort = rep.getJobEntryAttributeString( id_jobentry, "proxyPort" );
      proxyUsername = rep.getJobEntryAttributeString( id_jobentry, "proxyUsername" );
      proxyPassword =
        Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "proxyPassword" ) );
    } catch ( KettleException dbe ) {
      throw new KettleException( "Unable to load job entry of type 'SFTP' from the repository for id_jobentry="
        + id_jobentry, dbe );
    }
  }
  /**
   * Persists this job entry's settings to the repository under the given job id.
   * Mirrors the attribute names read back by loadRep; all three password-like
   * fields (password, keyfilepass, proxyPassword) are encrypted before storage
   * unless they contain variables.
   *
   * @param rep the repository to write to
   * @param metaStore the metastore (part of the interface contract; not used here)
   * @param id_job object id of the owning job
   * @throws KettleException if any attribute cannot be saved
   */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
      rep.saveJobEntryAttribute( id_job, getObjectId(), "servername", serverName );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "serverport", serverPort );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "username", userName );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "password", Encr
        .encryptPasswordIfNotUsingVariables( password ) );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "sftpdirectory", sftpDirectory );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "targetdirectory", targetDirectory );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "remove", remove );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "isaddresult", isaddresult );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "createtargetfolder", createtargetfolder );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "copyprevious", copyprevious );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "usekeyfilename", usekeyfilename );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilename", keyfilename );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilepass", Encr
        .encryptPasswordIfNotUsingVariables( keyfilepass ) );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "compression", compression );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyType", proxyType );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyHost", proxyHost );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPort", proxyPort );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyUsername", proxyUsername );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPassword", Encr
        .encryptPasswordIfNotUsingVariables( proxyPassword ) );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException(
        "Unable to save job entry of type 'SFTP' to the repository for id_job=" + id_job, dbe );
    }
  }
  /**
   * @return Returns the remote directory. NOTE(review): despite the "Scp" in the
   *         method name, this accessor backs the {@code sftpDirectory} field used
   *         as the remote SFTP source directory.
   */
  public String getScpDirectory() {
    return sftpDirectory;
  }

  /**
   * @param directory
   *          The remote directory to set (stored in {@code sftpDirectory}).
   */
  public void setScpDirectory( String directory ) {
    this.sftpDirectory = directory;
  }

  /**
   * @return Returns the password (possibly still encrypted / containing variables).
   */
  public String getPassword() {
    return password;
  }

  /**
   * @param password
   *          The password to set.
   */
  public void setPassword( String password ) {
    this.password = password;
  }

  /**
   * @return Returns the compression setting passed to the SFTP client.
   */
  public String getCompression() {
    return compression;
  }

  /**
   * @param compression
   *          The compression to set.
   */
  public void setCompression( String compression ) {
    this.compression = compression;
  }

  /**
   * @return Returns the serverName.
   */
  public String getServerName() {
    return serverName;
  }

  /**
   * @param serverName
   *          The serverName to set.
   */
  public void setServerName( String serverName ) {
    this.serverName = serverName;
  }

  /**
   * @return Returns the userName.
   */
  public String getUserName() {
    return userName;
  }

  /**
   * @param userName
   *          The userName to set.
   */
  public void setUserName( String userName ) {
    this.userName = userName;
  }

  /**
   * @return Returns the wildcard (a regular expression matched against remote file names).
   */
  public String getWildcard() {
    return wildcard;
  }

  /**
   * @param wildcard
   *          The wildcard (regular expression) to set.
   */
  public void setWildcard( String wildcard ) {
    this.wildcard = wildcard;
  }

  /**
   * @param isaddresultin
   *          whether downloaded files should be added to the job result files.
   */
  public void setAddToResult( boolean isaddresultin ) {
    this.isaddresult = isaddresultin;
  }

  /**
   * @return whether downloaded files are added to the job result files.
   */
  public boolean isAddToResult() {
    return isaddresult;
  }

  /**
   * @return Returns the targetDirectory (local destination of the downloads).
   */
  public String getTargetDirectory() {
    return targetDirectory;
  }
  /**
   * @deprecated use {@link #setCreateTargetFolder(boolean)} instead
   */
  @Deprecated
  public void setcreateTargetFolder( boolean createtargetfolder ) {
    this.createtargetfolder = createtargetfolder;
  }

  /**
   * @deprecated use {@link #isCreateTargetFolder()} instead.
   * @return createTargetFolder
   */
  @Deprecated
  public boolean iscreateTargetFolder() {
    return createtargetfolder;
  }

  /**
   * @return whether the local target folder is created when it does not exist.
   */
  public boolean isCreateTargetFolder() {
    return createtargetfolder;
  }

  /**
   * @param createtargetfolder
   *          whether to create the local target folder when it does not exist.
   */
  public void setCreateTargetFolder( boolean createtargetfolder ) {
    this.createtargetfolder = createtargetfolder;
  }

  /**
   * @return whether the file names to download are taken from the previous job entry's result rows.
   */
  public boolean isCopyPrevious() {
    return copyprevious;
  }

  /**
   * @param copyprevious
   *          whether to take the file names to download from the previous result rows.
   */
  public void setCopyPrevious( boolean copyprevious ) {
    this.copyprevious = copyprevious;
  }

  /**
   * @param targetDirectory
   *          The targetDirectory to set.
   */
  public void setTargetDirectory( String targetDirectory ) {
    this.targetDirectory = targetDirectory;
  }

  /**
   * @param remove
   *          Whether to delete remote files after a successful download.
   */
  public void setRemove( boolean remove ) {
    this.remove = remove;
  }

  /**
   * @return Returns the remove flag (delete remote files after download).
   */
  public boolean getRemove() {
    return remove;
  }

  /**
   * @return the server port (as a string; may contain variables).
   */
  public String getServerPort() {
    return serverPort;
  }

  /**
   * @param serverPort
   *          the server port to set (string form, may contain variables).
   */
  public void setServerPort( String serverPort ) {
    this.serverPort = serverPort;
  }
  /**
   * @return whether a private key file is used for authentication.
   */
  public boolean isUseKeyFile() {
    return usekeyfilename;
  }

  /**
   * @param value
   *          whether to authenticate with a private key file.
   */
  public void setUseKeyFile( boolean value ) {
    this.usekeyfilename = value;
  }

  /**
   * @return the private key file name (may contain variables).
   */
  public String getKeyFilename() {
    return keyfilename;
  }

  /**
   * @param value
   *          the private key file name to set.
   */
  public void setKeyFilename( String value ) {
    this.keyfilename = value;
  }

  /**
   * @return the pass phrase protecting the private key file.
   */
  public String getKeyPassPhrase() {
    return keyfilepass;
  }

  /**
   * @param value
   *          the key pass phrase to set.
   */
  public void setKeyPassPhrase( String value ) {
    this.keyfilepass = value;
  }

  /**
   * @return the proxy type passed to the SFTP client.
   */
  public String getProxyType() {
    return proxyType;
  }

  /**
   * @param value
   *          the proxy type to set.
   */
  public void setProxyType( String value ) {
    this.proxyType = value;
  }

  /**
   * @return the proxy host name.
   */
  public String getProxyHost() {
    return proxyHost;
  }

  /**
   * @param value
   *          the proxy host to set.
   */
  public void setProxyHost( String value ) {
    this.proxyHost = value;
  }

  /**
   * @return the proxy port (string form, may contain variables).
   */
  public String getProxyPort() {
    return proxyPort;
  }

  /**
   * @param value
   *          the proxy port to set.
   */
  public void setProxyPort( String value ) {
    this.proxyPort = value;
  }

  /**
   * @return the proxy user name.
   */
  public String getProxyUsername() {
    return proxyUsername;
  }

  /**
   * @param value
   *          the proxy user name to set.
   */
  public void setProxyUsername( String value ) {
    this.proxyUsername = value;
  }

  /**
   * @return the proxy password (possibly encrypted / containing variables).
   */
  public String getProxyPassword() {
    return proxyPassword;
  }

  /**
   * @param value
   *          the proxy password to set.
   */
  public void setProxyPassword( String value ) {
    this.proxyPassword = value;
  }
  /**
   * Executes this job entry: connects to the SFTP server, optionally changes to the
   * configured remote directory, then downloads either the files listed in the
   * previous entry's result rows (copyprevious) or the remote files matching the
   * wildcard regex, into the local target directory. Downloaded files can be added
   * to the job's result files and/or deleted from the server afterwards.
   *
   * @param previousResult the result of the previous entry; mutated and returned
   * @param nr the entry number (not used here)
   * @return the (same) result object, with result flag, error count and number of
   *         retrieved files filled in
   */
  public Result execute( Result previousResult, int nr ) {
    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();
    RowMetaAndData resultRow = null;
    // Pessimistic default: only set to true once we get to the end successfully.
    result.setResult( false );
    long filesRetrieved = 0;
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.StartJobEntry" ) );
    }
    HashSet<String> list_previous_filenames = new HashSet<String>();
    if ( copyprevious ) {
      if ( rows.size() == 0 ) {
        // Nothing was passed in from the previous entry: nothing to do, but not an error.
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSFTP.ArgsFromPreviousNothing" ) );
        }
        result.setResult( true );
        return result;
      }
      try {
        // Copy the input row to the (command line) arguments
        for ( int iteration = 0; iteration < rows.size(); iteration++ ) {
          resultRow = rows.get( iteration );
          // Get file names (first field of each result row is assumed to be the file name
          // -- TODO confirm against the producing entry).
          String file_previous = resultRow.getString( 0, null );
          if ( !Utils.isEmpty( file_previous ) ) {
            list_previous_filenames.add( file_previous );
            if ( log.isDebug() ) {
              logDebug( BaseMessages.getString( PKG, "JobSFTP.Log.FilenameFromResult", file_previous ) );
            }
          }
        }
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobSFTP.Error.ArgFromPrevious" ) );
        result.setNrErrors( 1 );
        return result;
      }
    }
    SFTPClient sftpclient = null;
    // String substitution..
    String realServerName = environmentSubstitute( serverName );
    String realServerPort = environmentSubstitute( serverPort );
    String realUsername = environmentSubstitute( userName );
    String realPassword = Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( password ) );
    String realSftpDirString = environmentSubstitute( sftpDirectory );
    String realWildcard = environmentSubstitute( wildcard );
    String realTargetDirectory = environmentSubstitute( targetDirectory );
    String realKeyFilename = null;
    String realPassPhrase = null;
    FileObject TargetFolder = null;
    try {
      // Let's perform some checks before starting
      if ( isUseKeyFile() ) {
        // We must have here a private keyfilename
        realKeyFilename = environmentSubstitute( getKeyFilename() );
        if ( Utils.isEmpty( realKeyFilename ) ) {
          // Error..Missing keyfile
          logError( BaseMessages.getString( PKG, "JobSFTP.Error.KeyFileMissing" ) );
          result.setNrErrors( 1 );
          return result;
        }
        if ( !KettleVFS.fileExists( realKeyFilename ) ) {
          // Error.. can not reach keyfile
          logError( BaseMessages.getString( PKG, "JobSFTP.Error.KeyFileNotFound", realKeyFilename ) );
          result.setNrErrors( 1 );
          return result;
        }
        realPassPhrase = environmentSubstitute( getKeyPassPhrase() );
      }
      if ( !Utils.isEmpty( realTargetDirectory ) ) {
        TargetFolder = KettleVFS.getFileObject( realTargetDirectory, this );
        boolean TargetFolderExists = TargetFolder.exists();
        if ( TargetFolderExists ) {
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.TargetFolderExists", realTargetDirectory ) );
          }
        } else {
          if ( !createtargetfolder ) {
            // Error..Target folder can not be found !
            logError( BaseMessages.getString( PKG, "JobSFTP.Error.TargetFolderNotExists", realTargetDirectory ) );
            result.setNrErrors( 1 );
            return result;
          } else {
            // create target folder
            TargetFolder.createFolder();
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.TargetFolderCreated", realTargetDirectory ) );
            }
          }
        }
      }
      // Release the folder handle early; it was only needed for the existence check above.
      if ( TargetFolder != null ) {
        TargetFolder.close();
        TargetFolder = null;
      }
      // Create sftp client to host ...
      sftpclient =
        new SFTPClient(
          InetAddress.getByName( realServerName ), Const.toInt( realServerPort, DEFAULT_PORT ), realUsername,
          realKeyFilename, realPassPhrase );
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobSFTP.Log.OpenedConnection", realServerName, realServerPort, realUsername ) );
      }
      // Set compression
      sftpclient.setCompression( getCompression() );
      // Set proxy?
      String realProxyHost = environmentSubstitute( getProxyHost() );
      if ( !Utils.isEmpty( realProxyHost ) ) {
        // Set proxy
        // NOTE(review): this local 'password' shadows the instance field of the same
        // name; it holds the resolved PROXY password, not the login password.
        String password = getRealPassword( getProxyPassword() );
        sftpclient.setProxy(
          realProxyHost, environmentSubstitute( getProxyPort() ), environmentSubstitute( getProxyUsername() ),
          password, getProxyType() );
      }
      // login to ftp host ...
      sftpclient.login( realPassword );
      // Passwords should not appear in log files.
      // logDetailed("logged in using password "+realPassword); // Logging this seems a bad idea! Oh well.
      // move to spool dir ...
      if ( !Utils.isEmpty( realSftpDirString ) ) {
        try {
          sftpclient.chdir( realSftpDirString );
        } catch ( Exception e ) {
          logError( BaseMessages.getString( PKG, "JobSFTP.Error.CanNotFindRemoteFolder", realSftpDirString ) );
          throw new Exception( e );
        }
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.ChangedDirectory", realSftpDirString ) );
        }
      }
      Pattern pattern = null;
      // Get all the files in the current directory...
      String[] filelist = sftpclient.dir();
      if ( filelist == null ) {
        // Nothing was found !!! exit
        result.setResult( true );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.Found", "" + 0 ) );
        }
        return result;
      }
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.Found", "" + filelist.length ) );
      }
      // The wildcard only applies when file names do NOT come from the previous entry.
      if ( !copyprevious ) {
        if ( !Utils.isEmpty( realWildcard ) ) {
          pattern = Pattern.compile( realWildcard );
        }
      }
      // Get the files in the list...
      for ( int i = 0; i < filelist.length && !parentJob.isStopped(); i++ ) {
        boolean getIt = true;
        if ( copyprevious ) {
          // filenames list is send by previous job entry
          // download if the current file is in this list
          getIt = list_previous_filenames.contains( filelist[i] );
        } else {
          // download files
          // but before see if the file matches the regular expression!
          if ( pattern != null ) {
            Matcher matcher = pattern.matcher( filelist[i] );
            getIt = matcher.matches();
          }
        }
        if ( getIt ) {
          if ( log.isDebug() ) {
            logDebug( BaseMessages.getString( PKG, "JobSFTP.Log.GettingFiles", filelist[i], realTargetDirectory ) );
          }
          FileObject targetFile = KettleVFS.getFileObject(
            realTargetDirectory + Const.FILE_SEPARATOR + filelist[i], this );
          sftpclient.get( targetFile, filelist[i] );
          filesRetrieved++;
          if ( isaddresult ) {
            // Add to the result files...
            ResultFile resultFile =
              new ResultFile(
                ResultFile.FILE_TYPE_GENERAL, targetFile, parentJob
                  .getJobname(), toString() );
            result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.FilenameAddedToResultFilenames", filelist[i] ) );
            }
          }
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.TransferedFile", filelist[i] ) );
          }
          // Delete the file if this is needed!
          if ( remove ) {
            sftpclient.delete( filelist[i] );
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobSFTP.Log.DeletedFile", filelist[i] ) );
            }
          }
        }
      }
      result.setResult( true );
      result.setNrFilesRetrieved( filesRetrieved );
    } catch ( Exception e ) {
      result.setNrErrors( 1 );
      logError( BaseMessages.getString( PKG, "JobSFTP.Error.GettingFiles", e.getMessage() ) );
      logError( Const.getStackTracker( e ) );
    } finally {
      // close connection, if possible
      try {
        if ( sftpclient != null ) {
          sftpclient.disconnect();
        }
      } catch ( Exception e ) {
        // just ignore this, makes no big difference
      }
      try {
        if ( TargetFolder != null ) {
          TargetFolder.close();
          TargetFolder = null;
        }
        if ( list_previous_filenames != null ) {
          list_previous_filenames = null;
        }
      } catch ( Exception e ) {
        // Ignore errors
      }
    }
    return result;
  }
  /**
   * Resolves variables in the given password and decrypts it if necessary.
   *
   * @param password the raw password (may contain variables and/or be encrypted)
   * @return the resolved, usable password
   */
  public String getRealPassword( String password ) {
    return Utils.resolvePassword( variables, password );
  }

  /**
   * @return {@code true}: this job entry produces a success/failure outcome that
   *         downstream hops can evaluate.
   */
  public boolean evaluates() {
    return true;
  }
  /**
   * Declares the external resources this entry depends on: the (variable-resolved)
   * SFTP server host name, reported as a SERVER resource.
   *
   * @param jobMeta job metadata used for variable substitution
   * @return the accumulated resource references (including the parent's)
   */
  public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
    List<ResourceReference> references = super.getResourceDependencies( jobMeta );
    if ( !Utils.isEmpty( serverName ) ) {
      String realServerName = jobMeta.environmentSubstitute( serverName );
      ResourceReference reference = new ResourceReference( this );
      reference.getEntries().add( new ResourceEntry( realServerName, ResourceType.SERVER ) );
      references.add( reference );
    }
    return references;
  }
  /**
   * Validates this entry's configuration: server name / user name must be non-blank,
   * the target directory must be non-blank and exist, the password must be non-null
   * and the server port must be an integer. Problems are appended to {@code remarks}.
   */
  @Override
  public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    JobEntryValidatorUtils.andValidator().validate( this, "serverName", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
    // The target directory gets a context-aware check (variable space + file existence).
    ValidatorContext ctx = new ValidatorContext();
    AbstractFileValidator.putVariableSpace( ctx, getVariables() );
    AndValidator.putValidators( ctx, JobEntryValidatorUtils.notBlankValidator(), JobEntryValidatorUtils.fileExistsValidator() );
    JobEntryValidatorUtils.andValidator().validate( this, "targetDirectory", remarks, ctx );
    JobEntryValidatorUtils.andValidator().validate( this, "userName", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "password", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "serverPort", remarks, AndValidator.putValidators( JobEntryValidatorUtils.integerValidator() ) );
  }
public static void main( String[] args ) {
List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
new JobEntrySFTP().check( remarks, null, new Variables(), null, null );
System.out.printf( "Remarks: %s\n", remarks );
}
}
| |
package com.xjt.crazypic.edit.filters;
import java.util.Arrays;
public class ColorSpaceMatrix {

    /**
     * 4x4 transform stored row-major in a flat array: element (row, col) lives
     * at mMatrix[row * 4 + col]. Colors are transformed as row vectors
     * [r, g, b, 1] * M (see {@link #getRed(int, int, int)}), so row 3 carries
     * the additive per-channel offset.
     */
    private final float[] mMatrix = new float[16];

    // Luminance weights for the red, green and blue channels.
    private static final float RLUM = 0.3086f;
    private static final float GLUM = 0.6094f;
    private static final float BLUM = 0.0820f;

    /**
     * Creates an identity (pass-through) color transform.
     */
    public ColorSpaceMatrix() {
        identity();
    }

    /**
     * Copy constructor
     *
     * @param matrix the matrix to copy
     */
    public ColorSpaceMatrix(ColorSpaceMatrix matrix) {
        System.arraycopy(matrix.mMatrix, 0, mMatrix, 0, matrix.mMatrix.length);
    }

    /**
     * get the matrix
     *
     * @return the internal matrix (a live reference, not a defensive copy)
     */
    public float[] getMatrix() {
        return mMatrix;
    }

    /**
     * set matrix to identity
     */
    public void identity() {
        Arrays.fill(mMatrix, 0);
        mMatrix[0] = mMatrix[5] = mMatrix[10] = mMatrix[15] = 1;
    }

    /**
     * Overwrites the 3x3 color block so that every output channel becomes the
     * input's luminance. Only the first three entries of rows 0-2 are written;
     * call {@link #identity()} first if this instance may hold an earlier
     * transform, since row 3 and column 3 are left untouched.
     */
    public void convertToLuminance() {
        // Use the shared luminance constants instead of repeating the literals
        // (identical float values: 0.3086f / 0.6094f / 0.0820f).
        mMatrix[0] = mMatrix[1] = mMatrix[2] = RLUM;
        mMatrix[4] = mMatrix[5] = mMatrix[6] = GLUM;
        mMatrix[8] = mMatrix[9] = mMatrix[10] = BLUM;
    }

    /**
     * In-place multiplication: mMatrix = mMatrix * a.
     *
     * @param a flat row-major 4x4 matrix to multiply by
     */
    private void multiply(float[] a) {
        float[] temp = new float[16];
        for (int y = 0; y < 4; y++) {
            int y4 = y * 4;
            for (int x = 0; x < 4; x++) {
                temp[y4 + x] = mMatrix[y4 + 0] * a[x]
                        + mMatrix[y4 + 1] * a[4 + x]
                        + mMatrix[y4 + 2] * a[8 + x]
                        + mMatrix[y4 + 3] * a[12 + x];
            }
        }
        // Bulk copy back instead of an element-by-element loop.
        System.arraycopy(temp, 0, mMatrix, 0, temp.length);
    }

    /**
     * Right-multiplies by a rotation in the green/blue plane (about the "x"/red
     * axis); {@code rs}/{@code rc} are the sine/cosine of the angle.
     */
    private void xRotateMatrix(float rs, float rc) {
        ColorSpaceMatrix c = new ColorSpaceMatrix();
        float[] tmp = c.mMatrix;
        tmp[5] = rc;
        tmp[6] = rs;
        tmp[9] = -rs;
        tmp[10] = rc;
        multiply(tmp);
    }

    /**
     * Right-multiplies by a rotation in the red/blue plane (about the "y"/green
     * axis); {@code rs}/{@code rc} are the sine/cosine of the angle.
     */
    private void yRotateMatrix(float rs, float rc) {
        ColorSpaceMatrix c = new ColorSpaceMatrix();
        float[] tmp = c.mMatrix;
        tmp[0] = rc;
        tmp[2] = -rs;
        tmp[8] = rs;
        tmp[10] = rc;
        multiply(tmp);
    }

    /**
     * Right-multiplies by a rotation in the red/green plane (about the "z"/blue
     * axis); {@code rs}/{@code rc} are the sine/cosine of the angle.
     */
    private void zRotateMatrix(float rs, float rc) {
        ColorSpaceMatrix c = new ColorSpaceMatrix();
        float[] tmp = c.mMatrix;
        tmp[0] = rc;
        tmp[1] = rs;
        tmp[4] = -rs;
        tmp[5] = rc;
        multiply(tmp);
    }

    /**
     * Right-multiplies by a shear along the z axis: z' = z + dx*x + dy*y.
     */
    private void zShearMatrix(float dx, float dy) {
        ColorSpaceMatrix c = new ColorSpaceMatrix();
        float[] tmp = c.mMatrix;
        tmp[2] = dx;
        tmp[6] = dy;
        multiply(tmp);
    }

    /**
     * sets the transform to a shift in Hue
     *
     * @param rot rotation in degrees
     */
    public void setHue(float rot)
    {
        // Rotate the grey (r=g=b) axis onto the z axis...
        float mag = (float) Math.sqrt(2.0);
        float xrs = 1 / mag;
        float xrc = 1 / mag;
        xRotateMatrix(xrs, xrc);
        mag = (float) Math.sqrt(3.0);
        float yrs = -1 / mag;
        float yrc = (float) Math.sqrt(2.0) / mag;
        yRotateMatrix(yrs, yrc);
        // ...shear so the plane of constant luminance is flat...
        float lx = getRedf(RLUM, GLUM, BLUM);
        float ly = getGreenf(RLUM, GLUM, BLUM);
        float lz = getBluef(RLUM, GLUM, BLUM);
        float zsx = lx / lz;
        float zsy = ly / lz;
        zShearMatrix(zsx, zsy);
        // ...rotate the hue about z, then undo the shear and the axis rotations.
        float zrs = (float) Math.sin(rot * Math.PI / 180.0);
        float zrc = (float) Math.cos(rot * Math.PI / 180.0);
        zRotateMatrix(zrs, zrc);
        zShearMatrix(-zsx, -zsy);
        yRotateMatrix(-yrs, yrc);
        xRotateMatrix(-xrs, xrc);
    }

    /**
     * set it to a saturation matrix: s = 1 is identity, s = 0 collapses every
     * channel to luminance, s &gt; 1 boosts saturation.
     *
     * @param s saturation factor
     */
    public void changeSaturation(float s) {
        mMatrix[0] = (1 - s) * RLUM + s;
        mMatrix[1] = (1 - s) * RLUM;
        mMatrix[2] = (1 - s) * RLUM;
        mMatrix[4] = (1 - s) * GLUM;
        mMatrix[5] = (1 - s) * GLUM + s;
        mMatrix[6] = (1 - s) * GLUM;
        mMatrix[8] = (1 - s) * BLUM;
        mMatrix[9] = (1 - s) * BLUM;
        mMatrix[10] = (1 - s) * BLUM + s;
    }

    /**
     * Transform RGB value
     *
     * @param r red pixel value
     * @param g green pixel value
     * @param b blue pixel value
     * @return computed red pixel value
     */
    public float getRed(int r, int g, int b) {
        return r * mMatrix[0] + g * mMatrix[4] + b * mMatrix[8] + mMatrix[12];
    }

    /**
     * Transform RGB value
     *
     * @param r red pixel value
     * @param g green pixel value
     * @param b blue pixel value
     * @return computed green pixel value
     */
    public float getGreen(int r, int g, int b) {
        return r * mMatrix[1] + g * mMatrix[5] + b * mMatrix[9] + mMatrix[13];
    }

    /**
     * Transform RGB value
     *
     * @param r red pixel value
     * @param g green pixel value
     * @param b blue pixel value
     * @return computed blue pixel value
     */
    public float getBlue(int r, int g, int b) {
        return r * mMatrix[2] + g * mMatrix[6] + b * mMatrix[10] + mMatrix[14];
    }

    /** Float-input variant of {@link #getRed(int, int, int)} for internal use. */
    private float getRedf(float r, float g, float b) {
        return r * mMatrix[0] + g * mMatrix[4] + b * mMatrix[8] + mMatrix[12];
    }

    /** Float-input variant of {@link #getGreen(int, int, int)} for internal use. */
    private float getGreenf(float r, float g, float b) {
        return r * mMatrix[1] + g * mMatrix[5] + b * mMatrix[9] + mMatrix[13];
    }

    /** Float-input variant of {@link #getBlue(int, int, int)} for internal use. */
    private float getBluef(float r, float g, float b) {
        return r * mMatrix[2] + g * mMatrix[6] + b * mMatrix[10] + mMatrix[14];
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.util.ArrayList;
import java.util.Collection;
import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.cache.eviction.lru.LruEvictionPolicy;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxManager;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.PRIMARY_SYNC;
/**
* Tests explicit lock.
*/
public class IgniteCacheMultiTxLockSelfTest extends GridCommonAbstractTest {
    /** Name of the transactional partitioned cache every node operates on. */
    public static final String CACHE_NAME = "part_cache";

    /** IP finder. */
    private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Stop flag polled by the worker threads; cleared by {@link #stopThreads}. */
    private volatile boolean run = true;

    /** When {@code true}, the next grid started is configured as a client node. */
    private boolean client;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);
        TcpDiscoverySpi disco = new TcpDiscoverySpi();
        disco.setIpFinder(ipFinder);
        c.setDiscoverySpi(disco);
        // Transactional partitioned cache with 2 backups and primary-sync writes.
        CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME);
        ccfg.setName(CACHE_NAME);
        ccfg.setAtomicityMode(TRANSACTIONAL);
        ccfg.setWriteSynchronizationMode(PRIMARY_SYNC);
        ccfg.setBackups(2);
        ccfg.setCacheMode(PARTITIONED);
        LruEvictionPolicy plc = new LruEvictionPolicy();
        plc.setMaxSize(100000);
        ccfg.setEvictionPolicy(plc);
        ccfg.setOnheapCacheEnabled(true);
        c.setCacheConfiguration(ccfg);
        c.setClientMode(client);
        return c;
    }

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return 60_000;
    }

    /**
     * @throws Exception If failed.
     */
    public void testExplicitLockOneKey() throws Exception {
        checkExplicitLock(1, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testExplicitLockManyKeys() throws Exception {
        checkExplicitLock(4, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testExplicitLockManyKeysWithClient() throws Exception {
        checkExplicitLock(4, true);
    }

    /**
     * Starts four nodes one by one (node 2 optionally a client), each time adding a
     * worker thread that repeatedly locks/updates the cache, then stops the workers
     * and verifies that no transactions are left in any node's transaction map.
     *
     * @param keys Number of keys.
     * @param testClient If {@code true} uses one client node.
     * @throws Exception If failed.
     */
    public void checkExplicitLock(int keys, boolean testClient) throws Exception {
        Collection<Thread> threads = new ArrayList<>();
        try {
            // Start grid 1.
            IgniteEx grid1 = startGrid(1);
            assertFalse(grid1.configuration().isClientMode());
            threads.add(runCacheOperations(grid1.cachex(CACHE_NAME), keys));
            TimeUnit.SECONDS.sleep(3L);
            client = testClient; // Make the next node start in client mode if requested.
            // Start grid 2.
            IgniteEx grid2 = startGrid(2);
            assertEquals((Object)testClient, grid2.configuration().isClientMode());
            client = false;
            threads.add(runCacheOperations(grid2.cachex(CACHE_NAME), keys));
            TimeUnit.SECONDS.sleep(3L);
            // Start grid 3.
            IgniteEx grid3 = startGrid(3);
            assertFalse(grid3.configuration().isClientMode());
            if (testClient)
                log.info("Started client node: " + grid3.name());
            threads.add(runCacheOperations(grid3.cachex(CACHE_NAME), keys));
            TimeUnit.SECONDS.sleep(3L);
            // Start grid 4.
            IgniteEx grid4 = startGrid(4);
            assertFalse(grid4.configuration().isClientMode());
            threads.add(runCacheOperations(grid4.cachex(CACHE_NAME), keys));
            TimeUnit.SECONDS.sleep(3L);
            stopThreads(threads);
            // After all workers finished, every node's tx map must be empty.
            for (int i = 1; i <= 4; i++) {
                IgniteTxManager tm = ((IgniteKernal)grid(i)).internalCache(CACHE_NAME).context().tm();
                assertEquals("txMap is not empty:" + i, 0, tm.idMapSize());
            }
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Signals the workers to stop via the {@link #run} flag and joins them.
     *
     * @param threads Thread which will be stopped.
     */
    private void stopThreads(Iterable<Thread> threads) {
        try {
            run = false;
            for (Thread thread : threads)
                thread.join();
        }
        catch (Exception e) {
            U.error(log(), "Couldn't stop threads.", e);
        }
    }

    /**
     * Starts a thread that, while {@link #run} is set, explicitly locks the first
     * key of a random batch, then either puts all entries (~65% of iterations) or
     * removes them, unlocking in a finally block.
     *
     * @param cache Cache.
     * @param keys Number of keys.
     * @return Running thread.
     */
    @SuppressWarnings("TypeMayBeWeakened")
    private Thread runCacheOperations(final IgniteInternalCache<Object,Object> cache, final int keys) {
        Thread t = new Thread() {
            @Override public void run() {
                while (run) {
                    TreeMap<Integer, String> vals = generateValues(keys);
                    try {
                        // Explicit lock.
                        cache.lock(vals.firstKey(), 0);
                        try {
                            // Put or remove.
                            if (ThreadLocalRandom.current().nextDouble(1) < 0.65)
                                cache.putAll(vals);
                            else
                                cache.removeAll(vals.keySet());
                        }
                        catch (Exception e) {
                            U.error(log(), "Failed cache operation.", e);
                        }
                        finally {
                            cache.unlock(vals.firstKey());
                        }
                        U.sleep(100);
                    }
                    catch (Exception e){
                        U.error(log(), "Failed unlock.", e);
                    }
                }
            }
        };
        t.start();
        return t;
    }

    /**
     * Generates {@code cnt} distinct random keys in [0, 100) mapped to their
     * string form. Note: loops until the map holds {@code cnt} entries, so
     * {@code cnt} must be &le; 100.
     *
     * @param cnt Number of keys to generate.
     * @return Map.
     */
    private TreeMap<Integer, String> generateValues(int cnt) {
        TreeMap<Integer, String> res = new TreeMap<>();
        ThreadLocalRandom rnd = ThreadLocalRandom.current();
        while (res.size() < cnt) {
            int key = rnd.nextInt(0, 100);
            res.put(key, String.valueOf(key));
        }
        return res;
    }
}
| |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.evaluation.value;
import proguard.classfile.ClassConstants;
/**
* This class represents a partially evaluated double value.
*
* @author Eric Lafortune
*/
public abstract class DoubleValue extends Category2Value
{
    /**
     * Returns the specific double value, if applicable. This base
     * implementation returns 0.0; subclasses that know their value are
     * expected to provide it — TODO confirm against the concrete subclasses.
     */
    public double value()
    {
        return 0.0;
    }

    // Basic unary methods.

    /**
     * Returns the negated value of this DoubleValue.
     */
    public abstract DoubleValue negate();

    /**
     * Converts this DoubleValue to an IntegerValue.
     */
    public abstract IntegerValue convertToInteger();

    /**
     * Converts this DoubleValue to a LongValue.
     */
    public abstract LongValue convertToLong();

    /**
     * Converts this DoubleValue to a FloatValue.
     */
    public abstract FloatValue convertToFloat();

    // Basic binary methods.

    /**
     * Returns the generalization of this DoubleValue and the given other
     * DoubleValue.
     */
    public abstract DoubleValue generalize(DoubleValue other);

    /**
     * Returns the sum of this DoubleValue and the given DoubleValue.
     */
    public abstract DoubleValue add(DoubleValue other);

    /**
     * Returns the difference of this DoubleValue and the given DoubleValue.
     */
    public abstract DoubleValue subtract(DoubleValue other);

    /**
     * Returns the difference of the given DoubleValue and this DoubleValue
     * (i.e. the operands of {@link #subtract} swapped).
     */
    public abstract DoubleValue subtractFrom(DoubleValue other);

    /**
     * Returns the product of this DoubleValue and the given DoubleValue.
     */
    public abstract DoubleValue multiply(DoubleValue other);

    /**
     * Returns the quotient of this DoubleValue and the given DoubleValue.
     */
    public abstract DoubleValue divide(DoubleValue other);

    /**
     * Returns the quotient of the given DoubleValue and this DoubleValue
     * (i.e. the operands of {@link #divide} swapped).
     */
    public abstract DoubleValue divideOf(DoubleValue other);

    /**
     * Returns the remainder of this DoubleValue divided by the given DoubleValue.
     */
    public abstract DoubleValue remainder(DoubleValue other);

    /**
     * Returns the remainder of the given DoubleValue divided by this DoubleValue.
     */
    public abstract DoubleValue remainderOf(DoubleValue other);

    /**
     * Returns an IntegerValue with value -1, 0, or 1, if this DoubleValue is
     * less than, equal to, or greater than the given DoubleValue, respectively.
     */
    public abstract IntegerValue compare(DoubleValue other);

    // Derived binary methods.

    /**
     * Returns an IntegerValue with value 1, 0, or -1, if this DoubleValue is
     * less than, equal to, or greater than the given DoubleValue, respectively.
     * Implemented as the negation of {@link #compare(DoubleValue)}.
     */
    public final IntegerValue compareReverse(DoubleValue other)
    {
        return compare(other).negate();
    }
    // Similar binary methods, but this time with more specific arguments.
    // By default each overload simply delegates to the plain DoubleValue
    // variant; more precise subclasses can override them for the refined type.

    /**
     * Returns the generalization of this DoubleValue and the given other
     * SpecificDoubleValue.
     */
    public DoubleValue generalize(SpecificDoubleValue other)
    {
        return generalize((DoubleValue)other);
    }

    /**
     * Returns the sum of this DoubleValue and the given SpecificDoubleValue.
     */
    public DoubleValue add(SpecificDoubleValue other)
    {
        return add((DoubleValue)other);
    }

    /**
     * Returns the difference of this DoubleValue and the given SpecificDoubleValue.
     */
    public DoubleValue subtract(SpecificDoubleValue other)
    {
        return subtract((DoubleValue)other);
    }

    /**
     * Returns the difference of the given SpecificDoubleValue and this DoubleValue.
     */
    public DoubleValue subtractFrom(SpecificDoubleValue other)
    {
        return subtractFrom((DoubleValue)other);
    }

    /**
     * Returns the product of this DoubleValue and the given SpecificDoubleValue.
     */
    public DoubleValue multiply(SpecificDoubleValue other)
    {
        return multiply((DoubleValue)other);
    }

    /**
     * Returns the quotient of this DoubleValue and the given SpecificDoubleValue.
     */
    public DoubleValue divide(SpecificDoubleValue other)
    {
        return divide((DoubleValue)other);
    }

    /**
     * Returns the quotient of the given SpecificDoubleValue and this
     * DoubleValue.
     */
    public DoubleValue divideOf(SpecificDoubleValue other)
    {
        return divideOf((DoubleValue)other);
    }

    /**
     * Returns the remainder of this DoubleValue divided by the given
     * SpecificDoubleValue.
     */
    public DoubleValue remainder(SpecificDoubleValue other)
    {
        return remainder((DoubleValue)other);
    }

    /**
     * Returns the remainder of the given SpecificDoubleValue and this
     * DoubleValue.
     */
    public DoubleValue remainderOf(SpecificDoubleValue other)
    {
        return remainderOf((DoubleValue)other);
    }

    /**
     * Returns an IntegerValue with value -1, 0, or 1, if this DoubleValue is
     * less than, equal to, or greater than the given SpecificDoubleValue,
     * respectively.
     */
    public IntegerValue compare(SpecificDoubleValue other)
    {
        return compare((DoubleValue)other);
    }

    // Derived binary methods.

    /**
     * Returns an IntegerValue with value 1, 0, or -1, if this DoubleValue is
     * less than, equal to, or greater than the given SpecificDoubleValue,
     * respectively.
     */
    public final IntegerValue compareReverse(SpecificDoubleValue other)
    {
        return compare(other).negate();
    }
// Similar binary methods, but this time with particular arguments.
// Each one narrows by delegating to the SpecificDoubleValue overload.

/**
 * Returns the generalization of this DoubleValue and the given other
 * ParticularDoubleValue.
 */
public DoubleValue generalize(ParticularDoubleValue other)
{
    return generalize((SpecificDoubleValue)other);
}

/**
 * Returns the sum of this DoubleValue and the given ParticularDoubleValue.
 */
public DoubleValue add(ParticularDoubleValue other)
{
    return add((SpecificDoubleValue)other);
}

/**
 * Returns the difference of this DoubleValue and the given ParticularDoubleValue.
 */
public DoubleValue subtract(ParticularDoubleValue other)
{
    return subtract((SpecificDoubleValue)other);
}

/**
 * Returns the difference of the given ParticularDoubleValue and this DoubleValue.
 */
public DoubleValue subtractFrom(ParticularDoubleValue other)
{
    return subtractFrom((SpecificDoubleValue)other);
}

/**
 * Returns the product of this DoubleValue and the given ParticularDoubleValue.
 */
public DoubleValue multiply(ParticularDoubleValue other)
{
    return multiply((SpecificDoubleValue)other);
}

/**
 * Returns the quotient of this DoubleValue and the given ParticularDoubleValue.
 */
public DoubleValue divide(ParticularDoubleValue other)
{
    return divide((SpecificDoubleValue)other);
}

/**
 * Returns the quotient of the given ParticularDoubleValue and this
 * DoubleValue.
 */
public DoubleValue divideOf(ParticularDoubleValue other)
{
    return divideOf((SpecificDoubleValue)other);
}

/**
 * Returns the remainder of this DoubleValue divided by the given
 * ParticularDoubleValue.
 */
public DoubleValue remainder(ParticularDoubleValue other)
{
    return remainder((SpecificDoubleValue)other);
}

/**
 * Returns the remainder of the given ParticularDoubleValue divided by this
 * DoubleValue.
 */
public DoubleValue remainderOf(ParticularDoubleValue other)
{
    return remainderOf((SpecificDoubleValue)other);
}

/**
 * Returns an IntegerValue with value -1, 0, or 1, if this DoubleValue is
 * less than, equal to, or greater than the given ParticularDoubleValue,
 * respectively.
 */
public IntegerValue compare(ParticularDoubleValue other)
{
    return compare((SpecificDoubleValue)other);
}

// Derived binary methods.

/**
 * Returns an IntegerValue with value 1, 0, or -1, if this DoubleValue is
 * less than, equal to, or greater than the given ParticularDoubleValue,
 * respectively — i.e. the negation of compare(ParticularDoubleValue).
 */
public final IntegerValue compareReverse(ParticularDoubleValue other)
{
    return compare(other).negate();
}
// Implementations for Value.

/** Returns this value itself, already being a DoubleValue. */
public final DoubleValue doubleValue()
{
    return this;
}

/** Generalizes with another Value by first narrowing it to a DoubleValue. */
public final Value generalize(Value other)
{
    return this.generalize(other.doubleValue());
}

/** Returns the computational-type tag for doubles. */
public final int computationalType()
{
    return TYPE_DOUBLE;
}

/** Returns the internal type descriptor for double, derived from ClassConstants.TYPE_DOUBLE. */
public final String internalType()
{
    return String.valueOf(ClassConstants.TYPE_DOUBLE);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import org.apache.logging.log4j.Logger;
import org.apache.geode.cache.AttributesMutator;
import org.apache.geode.cache.CacheLoaderException;
import org.apache.geode.cache.CacheStatistics;
import org.apache.geode.cache.CacheWriterException;
import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.EntryExistsException;
import org.apache.geode.cache.EntryNotFoundException;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.RegionExistsException;
import org.apache.geode.cache.RegionService;
import org.apache.geode.cache.StatisticsDisabledException;
import org.apache.geode.cache.TimeoutException;
import org.apache.geode.cache.query.FunctionDomainException;
import org.apache.geode.cache.query.NameResolutionException;
import org.apache.geode.cache.query.QueryInvocationTargetException;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.TypeMismatchException;
import org.apache.geode.cache.query.internal.DefaultQuery;
import org.apache.geode.cache.query.internal.ExecutionContext;
import org.apache.geode.cache.query.internal.QueryExecutionContext;
import org.apache.geode.cache.query.internal.QueryExecutor;
import org.apache.geode.cache.query.internal.QueryObserver;
import org.apache.geode.cache.snapshot.RegionSnapshotService;
import org.apache.geode.internal.NanoTimer;
import org.apache.geode.internal.cache.LocalRegion.IteratorType;
import org.apache.geode.internal.cache.execute.BucketMovedException;
import org.apache.geode.internal.cache.execute.InternalRegionFunctionContext;
import org.apache.geode.internal.cache.snapshot.RegionSnapshotServiceImpl;
import org.apache.geode.internal.logging.LogService;
public class LocalDataSet implements Region, QueryExecutor {
private static final Logger logger = LogService.getLogger();

/** The partitioned region this data set is a restricted view of. */
private final PartitionedRegion proxy;
/** Ids of the buckets that make up this view. */
private final Set<Integer> buckets;
/** Function-execution context, set only when this view backs a function invocation. */
private InternalRegionFunctionContext rfContext;

/**
 * Creates a view over the given buckets of a partitioned region. The bucket
 * set is kept by reference, not copied.
 */
public LocalDataSet(PartitionedRegion pr, Set<Integer> buckets) {
    this.proxy = pr;
    this.buckets = buckets;
}
@Override
public Set<Region.Entry> entrySet(boolean recursive) {
    // NOTE(review): the 'recursive' flag is ignored; only this view's buckets
    // are consulted — confirm callers never expect subregion recursion.
    return this.proxy.entrySet(getBucketSet());
}

@Override
public Set<Region.Entry> entrySet() {
    return entrySet(false);
}

/** Values across this view's buckets, backed by the partitioned region. */
@Override
public Collection values() {
    this.proxy.checkReadiness();
    return this.proxy.new ValuesSet(getBucketSet());
}

/** Keys across this view's buckets. */
public Set keys() {
    return this.proxy.keySet(getBucketSet());
}

@Override
public Set keySet() {
    return keys();
}

/** Values present in locally hosted bucket regions only. */
public Collection localValues() {
    return new LocalEntriesSet(IteratorType.VALUES);
}

/** Entries present in locally hosted bucket regions only. */
public Set<Region.Entry> localEntrySet() {
    return new LocalEntriesSet(IteratorType.ENTRIES);
}

/**
 * Keys present in locally hosted bucket regions only.
 * NOTE(review): declared as Set of Region.Entry but the set iterates KEYS,
 * so the element type does not match the declaration — confirm intent.
 */
public Set<Region.Entry> localKeys() {
    return new LocalEntriesSet(IteratorType.KEYS);
}
/**
 * Computes the bucket id the given operation/key/value hashes to.
 * This instance method was added so that unit tests could mock it.
 */
int getHashKey(Operation op, Object key, Object value, Object callbackArg) {
    return PartitionedRegionHelper.getHashKey(this.proxy, op, key, value, callbackArg);
}

/** Returns true if the key hashes to one of the buckets in this view. */
private boolean isInDataSet(Object key, Object callbackArgument) {
    int bucketId = getHashKey(Operation.CONTAINS_KEY, key, null, callbackArgument);
    Integer bucketIdInt = Integer.valueOf(bucketId);
    return buckets.contains(bucketIdInt);
}

/** The function-execution context this view was created for; null until set. */
public InternalRegionFunctionContext getFunctionContext() {
    return this.rfContext;
}

public void setFunctionContext(InternalRegionFunctionContext fContext) {
    this.rfContext = fContext;
}
/**
 * Runs {@code select * from <fullPath> this where <queryPredicate>} against
 * only this data set's buckets, via the cache's local query service.
 * The predicate is spliced directly into the OQL string; it is caller-supplied
 * input by design of this query API.
 */
@Override
public SelectResults query(String queryPredicate) throws FunctionDomainException,
    TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
    QueryService qs = getCache().getLocalQueryService();
    DefaultQuery query = (DefaultQuery) qs
        .newQuery("select * from " + getFullPath() + " this where " + queryPredicate);
    final ExecutionContext executionContext = new QueryExecutionContext(null, getCache(), query);
    Object[] params = null;
    return (SelectResults) this.executeQuery(query, executionContext, params, getBucketSet());
}
/**
 * Runs the predicate as a query over this data set and returns the single
 * matching value, null when nothing matches, or throws
 * FunctionDomainException when more than one value matches.
 */
@Override
public Object selectValue(String queryPredicate) throws FunctionDomainException,
    TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
    SelectResults results = query(queryPredicate);
    int matches = results.size();
    if (matches > 1) {
        throw new FunctionDomainException(
            String.format("selectValue expects results of size 1, but found results of size %s",
                Integer.valueOf(matches)));
    }
    return matches == 0 ? null : results.iterator().next();
}
/**
 * Executes the given query against only this data set's buckets, tracing the
 * execution when query verbosity is enabled.
 * <p>
 * WARNING: this method must not be used for equi-join queries across multiple
 * partitioned regions unless the regions are colocated and the colocated
 * columns actually appear in the WHERE clause (joined with AND in the
 * multi-column partitioning case). Colocation criteria are NOT detected in
 * multi-region PR based queries, so violating this precondition silently
 * returns wrong results.
 */
@Override
public Object executeQuery(DefaultQuery query,
    final ExecutionContext executionContext,
    Object[] parameters, Set buckets)
    throws FunctionDomainException, TypeMismatchException, NameResolutionException,
    QueryInvocationTargetException {
    long startTime = 0L;
    Object result = null;
    boolean traceOn = DefaultQuery.QUERY_VERBOSE || query.isTraced();
    if (traceOn && this.proxy != null) {
        startTime = NanoTimer.getTime();
    }
    // startTrace/endTrace bracket the delegated execution even on exception.
    QueryObserver indexObserver = query.startTrace();
    try {
        result = this.proxy.executeQuery(query, executionContext, parameters, buckets);
    } finally {
        query.endTrace(indexObserver, startTime, result);
    }
    return result;
}
/** The partitioned region this local data set delegates to. */
public PartitionedRegion getProxy() {
    return this.proxy;
}

/** The bucket ids making up this view (live reference, not a copy). */
public Set<Integer> getBucketSet() {
    return this.buckets;
}
// / Proxied calls
@Override
public void becomeLockGrantor() {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
@Override
public void close() {
this.proxy.close();
}
@Override
public boolean containsKeyOnServer(Object key) {
return this.proxy.containsKeyOnServer(key);
}
@Override
public boolean containsValue(Object value) {
throw new UnsupportedOperationException();
}
@Override
public void create(Object key, Object value)
throws TimeoutException, EntryExistsException, CacheWriterException {
create(key, value, null);
}
@Override
public void create(Object key, Object value, Object callbackArgument)
throws TimeoutException, EntryExistsException, CacheWriterException {
this.proxy.create(key, value, callbackArgument);
}
@Override
public Region createSubregion(String subregionName, RegionAttributes regionAttributes)
throws RegionExistsException, TimeoutException {
throw new UnsupportedOperationException();
}
@Override
public Object destroy(Object key)
throws TimeoutException, EntryNotFoundException, CacheWriterException {
return destroy(key, null);
}
@Override
public Object destroy(Object key, Object callbackArgument)
throws TimeoutException, EntryNotFoundException, CacheWriterException {
return this.proxy.destroy(key, callbackArgument);
}
@Override
public void destroyRegion() throws CacheWriterException, TimeoutException {
destroyRegion(null);
}
@Override
public void destroyRegion(Object callbackArgument) throws CacheWriterException, TimeoutException {
this.proxy.destroyRegion(callbackArgument);
}
@Override
public boolean existsValue(String queryPredicate) throws FunctionDomainException,
TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
return this.proxy.existsValue(queryPredicate);
}
@Override
public void forceRolling() {
this.proxy.forceRolling();
}
@Override
public InternalCache getCache() {
return this.proxy.getCache();
}
@Override
public String getFullPath() {
return this.proxy.getFullPath();
}
@Override
public List getInterestList() throws CacheWriterException {
return this.proxy.getInterestList();
}
@Override
public List getInterestListRegex() throws CacheWriterException {
return this.proxy.getInterestListRegex();
}
@Override
public String getName() {
return this.proxy.getName();
}
@Override
public Region getParentRegion() {
return this.proxy.getParentRegion();
}
@Override
public Lock getRegionDistributedLock() throws IllegalStateException {
throw new UnsupportedOperationException();
}
@Override
public CacheStatistics getStatistics() throws StatisticsDisabledException {
throw new UnsupportedOperationException();
}
@Override
public Region getSubregion(String path) {
return this.proxy.getSubregion(path);
}
@Override
public RegionAttributes getAttributes() {
return this.proxy.getAttributes();
}
@Override
public AttributesMutator getAttributesMutator() {
return this.proxy.getAttributesMutator();
}
@Override
public Lock getDistributedLock(Object key) throws IllegalStateException {
throw new UnsupportedOperationException();
}
@Override
public void invalidate(Object key) throws TimeoutException, EntryNotFoundException {
invalidate(key, null);
}
@Override
public void invalidate(Object key, Object callbackArgument)
throws TimeoutException, EntryNotFoundException {
this.proxy.invalidate(key, callbackArgument);
}
@Override
public void invalidateRegion() throws TimeoutException {
invalidateRegion(null);
}
@Override
public void invalidateRegion(Object callbackArgument) throws TimeoutException {
throw new UnsupportedOperationException();
}
@Override
public boolean isDestroyed() {
return this.proxy.isDestroyed();
}
@Override
public boolean isEmpty() {
return size() == 0;
}
@Override
public Object getUserAttribute() {
return this.proxy.getUserAttribute();
}
public int[] getDiskDirSizes() {
return this.proxy.getDiskDirSizes();
}
@Override
public Set subregions(boolean recursive) {
return this.proxy.subregions(recursive);
}
@Override
public void unregisterInterest(Object key) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void unregisterInterestRegex(String regex) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void writeToDisk() {
throw new UnsupportedOperationException();
}
@Override
public void setUserAttribute(Object value) {
this.proxy.setUserAttribute(value);
}
@Override
public Object remove(Object key) {
return this.proxy.remove(key);
}
@Override
public void registerInterest(Object key) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterest(Object key, boolean isDurable) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterest(Object key, InterestResultPolicy policy)
throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterest(Object key, InterestResultPolicy policy, boolean isDurable)
throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterestRegex(String regex) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterestRegex(String regex, boolean isDurable) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterestRegex(String regex, InterestResultPolicy policy)
throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterestRegex(String regex, InterestResultPolicy policy, boolean isDurable)
throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public Set keySetOnServer() {
return this.proxy.keySetOnServer();
}
@Override
public int sizeOnServer() {
return this.proxy.sizeOnServer();
}
@Override
public boolean isEmptyOnServer() {
return this.proxy.isEmptyOnServer();
}
@Override
public void loadSnapshot(InputStream inputStream)
throws IOException, ClassNotFoundException, CacheWriterException, TimeoutException {
throw new UnsupportedOperationException();
}
@Override
public Map getAll(Collection keys) {
    return getAll(keys, null);
}

/**
 * Best-effort bulk get: a failure on any single key is logged and skipped so
 * the remaining keys are still fetched. Keys outside this view's buckets map
 * to null, matching get(key, callback).
 */
@Override
public Map getAll(Collection keys, Object callback) {
    Map result = new HashMap();
    for (Object key : keys) {
        try {
            result.put(key, get(key, callback));
        } catch (Exception e) {
            logger.warn(String.format("The following exception occurred attempting to get key=%s",
                key),
                e);
        }
    }
    return result;
}
@Override
public void localClear() {
this.proxy.localClear();
}
@Override
public void localDestroyRegion() {
localDestroyRegion(null);
}
@Override
public void localDestroyRegion(Object callbackArgument) {
this.proxy.localDestroyRegion(callbackArgument);
}
@Override
public void localInvalidateRegion() {
localInvalidateRegion(null);
}
@Override
public void localInvalidateRegion(Object callbackArgument) {
this.proxy.localInvalidateRegion(callbackArgument);
}
@Override
public void localDestroy(Object key) throws EntryNotFoundException {
localDestroy(key, null);
}
// TODO, we could actually perform a local destroy
@Override
public void localDestroy(Object key, Object callbackArgument) throws EntryNotFoundException {
throw new UnsupportedOperationException();
}
@Override
public void localInvalidate(Object key) throws EntryNotFoundException {
localInvalidate(key, null);
}
// TODO, we could actually perform a local invalidate
@Override
public void localInvalidate(Object key, Object callbackArgument) throws EntryNotFoundException {
throw new UnsupportedOperationException();
}
@Override
public Object put(Object key, Object value) throws TimeoutException, CacheWriterException {
return put(key, value, null);
}
@Override
public Object put(Object key, Object value, Object callbackArgument)
throws TimeoutException, CacheWriterException {
return this.proxy.put(key, value, callbackArgument);
}
@Override
public void putAll(Map map) {
this.proxy.putAll(map);
}
@Override
public void putAll(Map map, Object callbackArg) {
this.proxy.putAll(map, callbackArg);
}
// /
// / Read only calls — each one first checks whether the key hashes into this
// / view's buckets and short-circuits to false/null when it does not.
// /
@Override
public boolean containsKey(Object key) {
    return isInDataSet(key, null) && this.proxy.containsKey(key);
}

@Override
public boolean containsValueForKey(Object key) {
    return isInDataSet(key, null) && this.proxy.containsValueForKey(key);
}

@Override
public Entry getEntry(Object key) {
    return isInDataSet(key, null) ? this.proxy.getEntry(key) : null;
}

/** Number of entries across this view's buckets. */
@Override
public int size() {
    return this.proxy.entryCount(getBucketSet());
}

@Override
public Object get(Object key) throws CacheLoaderException, TimeoutException {
    return get(key, null);
}

@Override
public Object get(Object key, Object aCallbackArgument)
    throws TimeoutException, CacheLoaderException {
    return isInDataSet(key, aCallbackArgument)
        ? this.proxy.get(key, aCallbackArgument)
        : null;
}
/**
 * Returns a debug string of the form
 * {@code ClassName[path='/pr';scope=SCOPE;dataPolicy=POLICY;bucketIds=[...]]}.
 * Fixed: the original emitted a stray quote after the scope value and a stray
 * space before the bucketIds separator.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append(getClass().getName());
    sb.append("[path='").append(getFullPath());
    sb.append("';scope=").append(this.proxy.getScope());
    sb.append(";dataPolicy=").append(this.proxy.getDataPolicy());
    sb.append(";bucketIds=").append(this.buckets);
    return sb.append(']').toString();
}
// NOTE(review): intentionally(?) a silent no-op — other unsupported operations
// on this class throw UnsupportedOperationException instead; confirm intent.
@Override
public void saveSnapshot(OutputStream outputStream) throws IOException {}
@Override
public void registerInterest(Object key, boolean isDurable, boolean receiveValues)
throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterest(Object key, InterestResultPolicy policy, boolean isDurable,
boolean receiveValues) throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterestRegex(String regex, boolean isDurable, boolean receiveValues)
throws CacheWriterException {
throw new UnsupportedOperationException();
}
@Override
public void registerInterestRegex(String regex, InterestResultPolicy policy, boolean isDurable,
boolean receiveValues) throws CacheWriterException {
throw new UnsupportedOperationException();
}
/*
* (non-Javadoc)
*
* @see java.util.concurrent.ConcurrentMap#putIfAbsent(java.lang.Object, java.lang.Object)
*/
@Override
public Object putIfAbsent(Object key, Object value) {
return this.proxy.putIfAbsent(key, value);
}
/*
* (non-Javadoc)
*
* @see java.util.concurrent.ConcurrentMap#remove(java.lang.Object, java.lang.Object)
*/
@Override
public boolean remove(Object key, Object value) {
return this.proxy.remove(key, value);
}
/*
* (non-Javadoc)
*
* @see java.util.concurrent.ConcurrentMap#replace(java.lang.Object, java.lang.Object)
*/
@Override
public Object replace(Object key, Object value) {
return this.proxy.replace(key, value);
}
/*
* (non-Javadoc)
*
* @see java.util.concurrent.ConcurrentMap#replace(java.lang.Object, java.lang.Object,
* java.lang.Object)
*/
@Override
public boolean replace(Object key, Object oldValue, Object newValue) {
return this.proxy.replace(key, oldValue, newValue);
}
@Override
public RegionService getRegionService() {
return getCache();
}
@Override
public RegionSnapshotService<?, ?> getSnapshotService() {
return new RegionSnapshotServiceImpl(this);
}
protected class LocalEntriesSet extends EntriesSet {
public LocalEntriesSet(IteratorType type) {
super(proxy, false, type, false);
}
public LocalEntriesSet() {
this(IteratorType.ENTRIES);
}
@Override
public Iterator iterator() {
return new LocalEntriesSetIterator();
}
protected class LocalEntriesSetIterator implements Iterator<Object> {
Iterator curBucketIter = null;
Integer curBucketId;
List<Integer> localBuckets = new ArrayList<Integer>(buckets);
int index = 0;
int localBucketsSize = localBuckets.size();
boolean hasNext = false;
Object next = null;
LocalEntriesSetIterator() {
this.next = moveNext();
}
@Override
public Object next() {
Object result = next;
if (result != null) {
next = moveNext();
return result;
}
throw new NoSuchElementException();
}
@Override
public boolean hasNext() {
return (this.next != null);
}
/**
 * Advances to the element that the following call to next() will hand out,
 * walking local buckets one at a time. Returns null when all buckets are
 * exhausted. Invalid/removed value tokens and destroyed entries are skipped;
 * a missing bucket or a destroyed region surfaces as BucketMovedException so
 * the function execution can be retried elsewhere.
 */
private Object moveNext() {
    // Check if PR is destroyed.
    proxy.checkReadiness();
    try {
        for (;;) { // Loop till we get valid value
            // Advance to the next bucket whenever the current bucket's
            // iterator is null or exhausted.
            while (curBucketIter == null || !(hasNext = curBucketIter.hasNext())) { // Loop all the
                // buckets.
                if (index >= localBucketsSize) {
                    return null;
                }
                curBucketId = localBuckets.get(index++);
                BucketRegion br = proxy.getDataStore().getLocalBucketById(curBucketId);
                if (br == null) {
                    // Bucket is no longer hosted here.
                    throw new BucketMovedException(
                        "The Bucket region with id " + curBucketId + " is moved/destroyed.");
                }
                br.waitForData();
                curBucketIter = br.entrySet().iterator();
            }
            // Check if there is a valid value.
            if (hasNext) {
                Map.Entry e = (Map.Entry) curBucketIter.next();
                try {
                    if (iterType == IteratorType.VALUES) {
                        if (isKeepSerialized()) {
                            next = ((NonTXEntry) e).getRawValue();
                        } else if (ignoreCopyOnReadForQuery) {
                            next = ((NonTXEntry) e).getValue(true);
                        } else {
                            next = e.getValue();
                        }
                        // Skip tombstones and concurrently removed values.
                        if (next == null || Token.isInvalidOrRemoved(next)) {
                            continue;
                        }
                    } else if (iterType == IteratorType.KEYS) {
                        next = e.getKey();
                    } else {
                        if (((NonTXEntry) e).isDestroyed()) {
                            throw new EntryDestroyedException();
                        }
                        next = e;
                    }
                } catch (EntryDestroyedException ede) {
                    // Entry is destroyed, continue to the next element.
                    continue;
                }
            }
            return next;
        }
    } catch (RegionDestroyedException rde) {
        // Treat a destroyed (sub)region the same as a moved bucket.
        throw new BucketMovedException(
            "The Bucket region with id " + curBucketId + " is moved/destroyed.");
    }
}
/** Removal is not supported by this read-only iterator. */
@Override
public void remove() {
    throw new UnsupportedOperationException(
        "This iterator does not support modification");
}
}
/**
 * Sums the sizes of the local bucket regions backing this entry set.
 * Fixed: the original dereferenced a null BucketRegion (NPE) when a bucket
 * had moved; now throws BucketMovedException, consistent with the iterator.
 */
@Override
public int size() {
    int size = 0;
    for (Integer bId : buckets) {
        BucketRegion br = proxy.getDataStore().getLocalBucketById(bId);
        if (br == null) {
            throw new BucketMovedException(
                "The Bucket region with id " + bId + " is moved/destroyed.");
        }
        size += br.size();
    }
    return size;
}
}
@Override
public void removeAll(Collection keys) {
    this.proxy.removeAll(keys);
}

// Write operations are not restricted to this view's buckets; they are
// forwarded to the whole partitioned region.
@Override
public void removeAll(Collection keys, Object aCallbackArgument) {
    this.proxy.removeAll(keys, aCallbackArgument);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task.batch.parallel;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import org.apache.druid.client.indexing.IndexingServiceClient;
import org.apache.druid.data.input.FiniteFirehoseFactory;
import org.apache.druid.data.input.InputSplit;
import org.apache.druid.data.input.impl.StringInputRowParser;
import org.apache.druid.indexer.RunnerTaskState;
import org.apache.druid.indexer.TaskLocation;
import org.apache.druid.indexer.TaskState;
import org.apache.druid.indexer.TaskStatus;
import org.apache.druid.indexer.TaskStatusPlus;
import org.apache.druid.indexing.common.TaskLock;
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.actions.LockListAction;
import org.apache.druid.indexing.common.actions.SurrogateAction;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.task.AbstractTask;
import org.apache.druid.indexing.common.task.IndexTaskClientFactory;
import org.apache.druid.indexing.common.task.TaskResource;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
public class ParallelIndexSupervisorTaskResourceTest extends AbstractParallelIndexSupervisorTaskTest
{
private static final int NUM_SUB_TASKS = 10;

/** specId -> spec */
private final ConcurrentMap<String, ParallelIndexSubTaskSpec> subTaskSpecs = new ConcurrentHashMap<>();

/** specId -> taskStatusPlus of the currently running attempt */
private final ConcurrentMap<String, TaskStatusPlus> runningSpecs = new ConcurrentHashMap<>();

/** specId -> taskStatusPlus list of completed attempts */
private final ConcurrentHashMap<String, List<TaskStatusPlus>> taskHistories = new ConcurrentHashMap<>();

/** taskId -> subTaskSpec */
private final ConcurrentMap<String, ParallelIndexSubTaskSpec> taskIdToSpec = new ConcurrentHashMap<>();

/** currently running sub tasks (a list — the original "taskId -> task" label was wrong) */
private final CopyOnWriteArrayList<TestSubTask> runningTasks = new CopyOnWriteArrayList<>();

// Drives the supervisor task off the test thread; created in setup().
private ExecutorService service;
private TestSupervisorTask task;
@Before
public void setup() throws IOException
{
    // Single-threaded executor runs the supervisor task in the background
    // while the test thread polls its HTTP-style endpoints.
    service = Execs.singleThreaded("parallel-index-supervisor-task-resource-test-%d");
    indexingServiceClient = new LocalIndexingServiceClient();
    localDeepStorage = temporaryFolder.newFolder("localStorage");
}

@After
public void teardown()
{
    indexingServiceClient.shutdown();
    temporaryFolder.delete();
    service.shutdownNow();
}
@Test(timeout = 20000L)
public void testAPIs() throws Exception
{
task = newTask(
Intervals.of("2017/2018"),
new ParallelIndexIOConfig(
new TestFirehose(IntStream.range(0, NUM_SUB_TASKS).boxed().collect(Collectors.toList())),
false
)
);
actionClient = createActionClient(task);
toolbox = createTaskToolbox(task);
prepareTaskForLocking(task);
Assert.assertTrue(task.isReady(actionClient));
final Future<TaskStatus> supervisorTaskFuture = service.submit(() -> task.run(toolbox));
Thread.sleep(1000);
final SinglePhaseParallelIndexTaskRunner runner = (SinglePhaseParallelIndexTaskRunner) task.getRunner();
Assert.assertNotNull("runner is null", runner);
// test getMode
Response response = task.getMode(newRequest());
Assert.assertEquals(200, response.getStatus());
Assert.assertEquals("parallel", response.getEntity());
// test expectedNumSucceededTasks
response = task.getProgress(newRequest());
Assert.assertEquals(200, response.getStatus());
Assert.assertEquals(NUM_SUB_TASKS, ((SinglePhaseParallelIndexingProgress) response.getEntity()).getExpectedSucceeded());
// Since taskMonitor works based on polling, it's hard to use a fancier way to check its state.
// We use polling to check the state of taskMonitor in this test.
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getRunning) < NUM_SUB_TASKS) {
Thread.sleep(100);
}
int succeededTasks = 0;
int failedTasks = 0;
checkState(
succeededTasks,
failedTasks,
buildStateMap()
);
// numRunningTasks and numSucceededTasks after some successful subTasks
succeededTasks += 2;
for (int i = 0; i < succeededTasks; i++) {
runningTasks.get(0).setState(TaskState.SUCCESS);
}
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getSucceeded) < succeededTasks) {
Thread.sleep(100);
}
checkState(
succeededTasks,
failedTasks,
buildStateMap()
);
// numRunningTasks and numSucceededTasks after some failed subTasks
failedTasks += 3;
for (int i = 0; i < failedTasks; i++) {
runningTasks.get(0).setState(TaskState.FAILED);
}
// Wait for new tasks to be started
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getFailed) < failedTasks || runningTasks.size() < NUM_SUB_TASKS - succeededTasks) {
Thread.sleep(100);
}
checkState(
succeededTasks,
failedTasks,
buildStateMap()
);
// Make sure only one subTask is running
succeededTasks += 7;
for (int i = 0; i < 7; i++) {
runningTasks.get(0).setState(TaskState.SUCCESS);
}
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getSucceeded) < succeededTasks) {
Thread.sleep(100);
}
checkState(
succeededTasks,
failedTasks,
buildStateMap()
);
Assert.assertEquals(1, runningSpecs.size());
final String lastRunningSpecId = runningSpecs.keySet().iterator().next();
final List<TaskStatusPlus> taskHistory = taskHistories.get(lastRunningSpecId);
// This should be a failed task history because new tasks appear later in runningTasks.
Assert.assertEquals(1, taskHistory.size());
// Test one more failure
runningTasks.get(0).setState(TaskState.FAILED);
failedTasks++;
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getFailed) < failedTasks) {
Thread.sleep(100);
}
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getRunning) < 1) {
Thread.sleep(100);
}
checkState(
succeededTasks,
failedTasks,
buildStateMap()
);
Assert.assertEquals(2, taskHistory.size());
runningTasks.get(0).setState(TaskState.SUCCESS);
succeededTasks++;
while (getNumSubTasks(SinglePhaseParallelIndexingProgress::getSucceeded) < succeededTasks) {
Thread.sleep(100);
}
Assert.assertEquals(TaskState.SUCCESS, supervisorTaskFuture.get(1000, TimeUnit.MILLISECONDS).getStatusCode());
}
/** Polls the supervisor's progress endpoint and extracts one counter via {@code func}. */
@SuppressWarnings({"ConstantConditions"})
private int getNumSubTasks(Function<SinglePhaseParallelIndexingProgress, Integer> func)
{
    final Response response = task.getProgress(newRequest());
    Assert.assertEquals(200, response.getStatus());
    return func.apply((SinglePhaseParallelIndexingProgress) response.getEntity());
}
private Map<String, SubTaskSpecStatus> buildStateMap()
{
  // Snapshot the expected status of every sub task spec: its currently running task
  // (if any) plus the recorded attempt history, keyed by sub task spec id.
  final Map<String, SubTaskSpecStatus> stateMap = new HashMap<>();
  for (String specId : subTaskSpecs.keySet()) {
    final List<TaskStatusPlus> history = taskHistories.get(specId);
    stateMap.put(
        specId,
        new SubTaskSpecStatus(
            subTaskSpecs.get(specId),
            runningSpecs.get(specId),
            history == null ? Collections.emptyList() : history
        )
    );
  }
  return stateMap;
}
/**
 * Test all endpoints of {@link ParallelIndexSupervisorTask}.
 *
 * Issues a request against every HTTP endpoint of the supervisor task and asserts that the
 * reported counters and per-spec payloads agree with the test's own bookkeeping
 * (runningTasks, runningSpecs, subTaskSpecs and the caller-supplied expectations).
 *
 * @param expectedSucceededTasks number of sub tasks expected to have succeeded so far
 * @param expectedFailedTask number of sub tasks expected to have failed so far
 * @param expectedSubTaskStateResponses expected status per sub task spec id
 */
private void checkState(
    int expectedSucceededTasks,
    int expectedFailedTask,
    Map<String, SubTaskSpecStatus> expectedSubTaskStateResponses // subTaskSpecId -> response
)
{
  // Overall progress counters.
  Response response = task.getProgress(newRequest());
  Assert.assertEquals(200, response.getStatus());
  final SinglePhaseParallelIndexingProgress monitorStatus = (SinglePhaseParallelIndexingProgress) response.getEntity();
  // numRunningTasks
  Assert.assertEquals(runningTasks.size(), monitorStatus.getRunning());
  // numSucceededTasks
  Assert.assertEquals(expectedSucceededTasks, monitorStatus.getSucceeded());
  // numFailedTasks
  Assert.assertEquals(expectedFailedTask, monitorStatus.getFailed());
  // numCompleteTasks
  Assert.assertEquals(expectedSucceededTasks + expectedFailedTask, monitorStatus.getComplete());
  // numTotalTasks
  Assert.assertEquals(runningTasks.size() + expectedSucceededTasks + expectedFailedTask, monitorStatus.getTotal());
  // runningSubTasks: ids of the currently running sub tasks.
  response = task.getRunningTasks(newRequest());
  Assert.assertEquals(200, response.getStatus());
  Assert.assertEquals(
      runningTasks.stream().map(AbstractTask::getId).collect(Collectors.toSet()),
      new HashSet<>((Collection<String>) response.getEntity())
  );
  // subTaskSpecs: every spec the runner has created so far.
  response = task.getSubTaskSpecs(newRequest());
  Assert.assertEquals(200, response.getStatus());
  List<SubTaskSpec<ParallelIndexSubTask>> actualSubTaskSpecMap =
      (List<SubTaskSpec<ParallelIndexSubTask>>) response.getEntity();
  Assert.assertEquals(
      subTaskSpecs.keySet(),
      actualSubTaskSpecMap.stream().map(SubTaskSpec::getId).collect(Collectors.toSet())
  );
  // runningSubTaskSpecs: specs that currently have a running task.
  response = task.getRunningSubTaskSpecs(newRequest());
  Assert.assertEquals(200, response.getStatus());
  actualSubTaskSpecMap =
      (List<SubTaskSpec<ParallelIndexSubTask>>) response.getEntity();
  Assert.assertEquals(
      runningSpecs.keySet(),
      actualSubTaskSpecMap.stream().map(SubTaskSpec::getId).collect(Collectors.toSet())
  );
  // completeSubTaskSpecs: expected specs minus the ones that are still running.
  final List<SubTaskSpec<ParallelIndexSubTask>> completeSubTaskSpecs = expectedSubTaskStateResponses
      .entrySet()
      .stream()
      .filter(entry -> !runningSpecs.containsKey(entry.getKey()))
      .map(entry -> entry.getValue().getSpec())
      .collect(Collectors.toList());
  response = task.getCompleteSubTaskSpecs(newRequest());
  Assert.assertEquals(200, response.getStatus());
  Assert.assertEquals(completeSubTaskSpecs, response.getEntity());
  // subTaskSpec: look a single running spec up by id.
  final String subTaskId = runningSpecs.keySet().iterator().next();
  response = task.getSubTaskSpec(subTaskId, newRequest());
  Assert.assertEquals(200, response.getStatus());
  final SubTaskSpec<ParallelIndexSubTask> subTaskSpec =
      (SubTaskSpec<ParallelIndexSubTask>) response.getEntity();
  Assert.assertEquals(subTaskId, subTaskSpec.getId());
  // subTaskState: full status (spec + current status + history) for the same spec.
  response = task.getSubTaskState(subTaskId, newRequest());
  Assert.assertEquals(200, response.getStatus());
  final SubTaskSpecStatus expectedResponse = Preconditions.checkNotNull(
      expectedSubTaskStateResponses.get(subTaskId),
      "response for task[%s]",
      subTaskId
  );
  final SubTaskSpecStatus actualResponse = (SubTaskSpecStatus) response.getEntity();
  Assert.assertEquals(expectedResponse.getSpec().getId(), actualResponse.getSpec().getId());
  Assert.assertEquals(expectedResponse.getCurrentStatus(), actualResponse.getCurrentStatus());
  Assert.assertEquals(expectedResponse.getTaskHistory(), actualResponse.getTaskHistory());
  // completeSubTaskSpecAttemptHistory: checked only if some spec already reached a
  // terminal state (SUCCESS or FAILED).
  final String completeSubTaskSpecId = expectedSubTaskStateResponses
      .entrySet()
      .stream()
      .filter(entry -> {
        final TaskStatusPlus currentStatus = entry.getValue().getCurrentStatus();
        return currentStatus != null &&
            (currentStatus.getStatusCode() == TaskState.SUCCESS || currentStatus.getStatusCode() == TaskState.FAILED);
      })
      .map(Entry::getKey)
      .findFirst()
      .orElse(null);
  if (completeSubTaskSpecId != null) {
    response = task.getCompleteSubTaskSpecAttemptHistory(completeSubTaskSpecId, newRequest());
    Assert.assertEquals(200, response.getStatus());
    Assert.assertEquals(
        expectedSubTaskStateResponses.get(completeSubTaskSpecId).getTaskHistory(),
        response.getEntity()
    );
  }
}
private static HttpServletRequest newRequest()
{
  // Build a mock request carrying the attributes the task's HTTP endpoints read for
  // auth: "already authorization-checked" is unset, and a canned test identity is
  // attached as the authentication result.
  final HttpServletRequest mockRequest = EasyMock.niceMock(HttpServletRequest.class);
  EasyMock.expect(mockRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null);
  EasyMock
      .expect(mockRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
      .andReturn(new AuthenticationResult("test", "test", "test", Collections.emptyMap()));
  EasyMock.replay(mockRequest);
  return mockRequest;
}
/**
 * Builds a {@link TestSupervisorTask} over the given interval and IO config.
 *
 * The ingestion spec uses DAY segment / MINUTE query granularity and a single
 * long-sum aggregator on "val". All tuning-config parameters are left null
 * (defaults) except the one set to NUM_SUB_TASKS — presumably the maximum number
 * of concurrent sub tasks; TODO confirm against ParallelIndexTuningConfig's
 * constructor parameter order.
 *
 * @param interval interval for the granularity spec, or null for no fixed intervals
 * @param ioConfig IO config supplying the (test) firehose factory
 */
private TestSupervisorTask newTask(
    Interval interval,
    ParallelIndexIOConfig ioConfig
)
{
  // set up ingestion spec
  final ParallelIndexIngestionSpec ingestionSpec = new ParallelIndexIngestionSpec(
      new DataSchema(
          "dataSource",
          getObjectMapper().convertValue(
              new StringInputRowParser(
                  DEFAULT_PARSE_SPEC,
                  null
              ),
              Map.class
          ),
          new AggregatorFactory[]{
              new LongSumAggregatorFactory("val", "val")
          },
          new UniformGranularitySpec(
              Granularities.DAY,
              Granularities.MINUTE,
              interval == null ? null : Collections.singletonList(interval)
          ),
          null,
          getObjectMapper()
      ),
      ioConfig,
      new ParallelIndexTuningConfig(
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          NUM_SUB_TASKS,
          null,
          null,
          null,
          null,
          null,
          null,
          null
      )
  );
  // set up test tools
  return new TestSupervisorTask(
      null,
      null,
      ingestionSpec,
      Collections.emptyMap(),
      indexingServiceClient
  );
}
/**
 * Firehose factory used as the test input source: each integer id corresponds to one
 * input split, and {@link #withSplit} narrows the factory down to a single id.
 */
private static class TestFirehose implements FiniteFirehoseFactory<StringInputRowParser, Integer>
{
  // One id per split; a narrowed (per-sub-task) instance holds exactly one id.
  private final List<Integer> ids;

  TestFirehose(List<Integer> ids)
  {
    this.ids = ids;
  }

  @Override
  public Stream<InputSplit<Integer>> getSplits()
  {
    // Each id becomes its own split.
    return ids.stream().map(InputSplit::new);
  }

  @Override
  public int getNumSplits()
  {
    return ids.size();
  }

  @Override
  public FiniteFirehoseFactory<StringInputRowParser, Integer> withSplit(InputSplit<Integer> split)
  {
    // Narrow this factory to the single id carried by the given split.
    return new TestFirehose(Collections.singletonList(split.get()));
  }
}
/**
 * Supervisor task whose runner is replaced by {@link TestRunner}, so that created sub task
 * specs and their statuses are captured in the enclosing test's bookkeeping maps.
 */
private class TestSupervisorTask extends TestParallelIndexSupervisorTask
{
  TestSupervisorTask(
      String id,
      TaskResource taskResource,
      ParallelIndexIngestionSpec ingestionSchema,
      Map<String, Object> context,
      IndexingServiceClient indexingServiceClient
  )
  {
    super(
        id,
        taskResource,
        ingestionSchema,
        context,
        indexingServiceClient
    );
  }

  @Override
  ParallelIndexTaskRunner createRunner(TaskToolbox toolbox)
  {
    // Install a TestRunner so newTaskSpec() creates TestSubTaskSpecs (and thus TestSubTasks).
    setRunner(
        new TestRunner(
            toolbox,
            this,
            indexingServiceClient
        )
    );
    return getRunner();
  }
}
/**
 * Runner that creates {@link TestSubTaskSpec}s with deterministic, monotonically increasing
 * spec ids and records every created spec in the enclosing test's {@code subTaskSpecs} map.
 */
private class TestRunner extends TestParallelIndexTaskRunner
{
  // Supervisor that owns this runner; sub task specs are created on its behalf.
  private final ParallelIndexSupervisorTask supervisorTask;

  TestRunner(
      TaskToolbox toolbox,
      ParallelIndexSupervisorTask supervisorTask,
      @Nullable IndexingServiceClient indexingServiceClient
  )
  {
    super(
        toolbox,
        supervisorTask.getId(),
        supervisorTask.getGroupId(),
        supervisorTask.getIngestionSchema(),
        supervisorTask.getContext(),
        indexingServiceClient
    );
    this.supervisorTask = supervisorTask;
  }

  @Override
  ParallelIndexSubTaskSpec newTaskSpec(InputSplit split)
  {
    // Derive a per-split ingestion spec by narrowing the base firehose to this split.
    final FiniteFirehoseFactory baseFirehoseFactory = (FiniteFirehoseFactory) getIngestionSchema()
        .getIOConfig()
        .getFirehoseFactory();
    final TestSubTaskSpec spec = new TestSubTaskSpec(
        supervisorTask.getId() + "_" + getAndIncrementNextSpecId(),
        supervisorTask.getGroupId(),
        supervisorTask,
        this,
        new ParallelIndexIngestionSpec(
            getIngestionSchema().getDataSchema(),
            new ParallelIndexIOConfig(
                baseFirehoseFactory.withSplit(split),
                getIngestionSchema().getIOConfig().isAppendToExisting()
            ),
            getIngestionSchema().getTuningConfig()
        ),
        supervisorTask.getContext(),
        split
    );
    // Record the spec so the test can verify the supervisor's spec-related endpoints.
    subTaskSpecs.put(spec.getId(), spec);
    return spec;
  }
}
/**
 * Sub task spec that creates {@link TestSubTask}s and registers each new attempt in the
 * enclosing test's bookkeeping (runningTasks, taskIdToSpec, runningSpecs).
 */
private class TestSubTaskSpec extends ParallelIndexSubTaskSpec
{
  private final ParallelIndexSupervisorTask supervisorTask;

  TestSubTaskSpec(
      String id,
      String groupId,
      ParallelIndexSupervisorTask supervisorTask,
      SinglePhaseParallelIndexTaskRunner runner,
      ParallelIndexIngestionSpec ingestionSpec,
      Map<String, Object> context,
      InputSplit inputSplit
  )
  {
    super(id, groupId, supervisorTask.getId(), ingestionSpec, context, inputSplit);
    this.supervisorTask = supervisorTask;
  }

  @Override
  public ParallelIndexSubTask newSubTask(int numAttempts)
  {
    try {
      // taskId is suffixed by the current time and this sleep is to make sure that every sub task has different id
      Thread.sleep(10);
    }
    catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    final TestSubTask subTask = new TestSubTask(
        getGroupId(),
        getSupervisorTaskId(),
        numAttempts,
        getIngestionSpec(),
        getContext(),
        new LocalParallelIndexTaskClientFactory(supervisorTask)
    );
    // The per-spec firehose was narrowed to exactly one split by TestRunner.newTaskSpec().
    final TestFirehose firehose = (TestFirehose) getIngestionSpec().getIOConfig().getFirehoseFactory();
    final InputSplit<Integer> split = firehose.getSplits().findFirst().orElse(null);
    if (split == null) {
      throw new ISE("Split is null");
    }
    // Register the new attempt so checkState() can compare against the task's endpoints.
    runningTasks.add(subTask);
    taskIdToSpec.put(subTask.getId(), this);
    runningSpecs.put(
        getId(),
        new TaskStatusPlus(
            subTask.getId(),
            subTask.getType(),
            DateTimes.EPOCH,
            DateTimes.EPOCH,
            TaskState.RUNNING,
            RunnerTaskState.RUNNING,
            -1L,
            TaskLocation.unknown(),
            null,
            null
        )
    );
    return subTask;
  }
}
/**
 * Sub task stub which blocks in {@link #run} until the test moves it to a terminal state via
 * {@link #setState}, then reports a single pushed segment to the supervisor's runner and
 * returns a status with that terminal state.
 */
private class TestSubTask extends ParallelIndexSubTask
{
  // Flipped by setState() from the test thread; run() spins until it leaves RUNNING.
  private volatile TaskState state = TaskState.RUNNING;

  TestSubTask(
      String groupId,
      String supervisorTaskId,
      int numAttempts,
      ParallelIndexIngestionSpec ingestionSchema,
      Map<String, Object> context,
      IndexTaskClientFactory<ParallelIndexTaskClient> taskClientFactory
  )
  {
    super(
        null,
        groupId,
        null,
        supervisorTaskId,
        numAttempts,
        ingestionSchema,
        context,
        null,
        taskClientFactory
    );
  }

  @Override
  public boolean isReady(TaskActionClient taskActionClient)
  {
    return true;
  }

  @Override
  public TaskStatus run(final TaskToolbox toolbox) throws Exception
  {
    // Poll until the test flips the state to SUCCESS or FAILED.
    while (state == TaskState.RUNNING) {
      Thread.sleep(100);
    }
    final TestFirehose firehose = (TestFirehose) getIngestionSchema().getIOConfig().getFirehoseFactory();
    // Sub tasks run under the supervisor's lock, fetched via a surrogate action.
    final List<TaskLock> locks = toolbox.getTaskActionClient()
                                       .submit(new SurrogateAction<>(getSupervisorTaskId(), new LockListAction()));
    Preconditions.checkState(locks.size() == 1, "There should be a single lock");
    // Report one segment; the shard number is this sub task's single split id.
    task.getRunner().collectReport(
        new PushedSegmentsReport(
            getId(),
            Collections.singletonList(
                new DataSegment(
                    getDataSource(),
                    Intervals.of("2017/2018"),
                    locks.get(0).getVersion(),
                    null,
                    null,
                    null,
                    new NumberedShardSpec(firehose.ids.get(0), NUM_SUB_TASKS),
                    0,
                    1L
                )
            )
        )
    );
    return TaskStatus.fromCode(getId(), state);
  }

  /**
   * Moves this task to a terminal state and updates the test's bookkeeping: removes the task
   * from runningTasks, removes its spec from runningSpecs, and appends a terminal status to
   * the spec's attempt history.
   *
   * @param state must be SUCCESS or FAILED
   */
  void setState(TaskState state)
  {
    Preconditions.checkArgument(
        state == TaskState.SUCCESS || state == TaskState.FAILED,
        // Fixed message typo: was "SUCCESS of FAILED".
        "state[%s] should be SUCCESS or FAILED",
        state
    );
    this.state = state;
    final int taskIndex = IntStream.range(0, runningTasks.size())
                                   .filter(i -> runningTasks.get(i).getId().equals(getId())).findAny()
                                   .orElse(-1);
    if (taskIndex == -1) {
      throw new ISE("Can't find an index for task[%s]", getId());
    }
    runningTasks.remove(taskIndex);
    final String specId = Preconditions.checkNotNull(taskIdToSpec.get(getId()), "spec for task[%s]", getId()).getId();
    runningSpecs.remove(specId);
    taskHistories.computeIfAbsent(specId, k -> new ArrayList<>()).add(
        new TaskStatusPlus(
            getId(),
            getType(),
            DateTimes.EPOCH,
            DateTimes.EPOCH,
            state,
            RunnerTaskState.NONE,
            -1L,
            TaskLocation.unknown(),
            null,
            null
        )
    );
  }
}
}
| |
/*
* Copyright 2018 Hippo Seven
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hippo.ehviewer.ui;
/*
* Created by Hippo on 2018/3/23.
*/
import android.app.Dialog;
import android.content.DialogInterface;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.google.android.material.floatingactionbutton.FloatingActionButton;
import com.google.android.material.textfield.TextInputLayout;
import com.hippo.android.resource.AttrResources;
import com.hippo.easyrecyclerview.EasyRecyclerView;
import com.hippo.easyrecyclerview.LinearDividerItemDecoration;
import com.hippo.ehviewer.EhApplication;
import com.hippo.ehviewer.Hosts;
import com.hippo.ehviewer.R;
import com.hippo.ripple.Ripple;
import com.hippo.yorozuya.LayoutUtils;
import java.util.List;
import java.util.Locale;
/**
 * Activity that lists the user-defined host-to-IP mappings stored in {@link Hosts} and lets
 * the user add new entries (FAB), or edit/delete an existing entry (tap a list item).
 */
public class HostsActivity extends ToolbarActivity
    implements EasyRecyclerView.OnItemClickListener, View.OnClickListener {

  private static final String DIALOG_TAG_ADD_HOST = AddHostDialogFragment.class.getName();
  private static final String DIALOG_TAG_EDIT_HOST = EditHostDialogFragment.class.getName();

  // Argument keys used to prefill the edit dialog.
  private static final String KEY_HOST = "com.hippo.ehviewer.ui.HostsActivity.HOST";
  private static final String KEY_IP = "com.hippo.ehviewer.ui.HostsActivity.IP";

  // Backing store for host mappings.
  private Hosts hosts;
  // Current snapshot of (host, ip) pairs shown in the list.
  private List<Pair<String, String>> data;
  private EasyRecyclerView recyclerView;
  // "No hosts yet" hint, shown when the list is empty.
  private View tip;
  private HostsAdapter adapter;

  @Override
  protected void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    hosts = EhApplication.getHosts(this);
    data = hosts.getAll();

    setContentView(R.layout.activity_hosts);
    setNavigationIcon(R.drawable.v_arrow_left_dark_x24);
    recyclerView = findViewById(R.id.recycler_view);
    tip = findViewById(R.id.tip);
    FloatingActionButton fab = findViewById(R.id.fab);

    adapter = new HostsAdapter();
    recyclerView.setAdapter(adapter);
    recyclerView.setLayoutManager(new LinearLayoutManager(this, RecyclerView.VERTICAL, false));
    LinearDividerItemDecoration decoration = new LinearDividerItemDecoration(
        LinearDividerItemDecoration.VERTICAL,
        AttrResources.getAttrColor(this, R.attr.dividerColor),
        LayoutUtils.dp2pix(this, 1));
    decoration.setShowLastDivider(true);
    recyclerView.addItemDecoration(decoration);
    recyclerView.setSelector(Ripple.generateRippleDrawable(this, !AttrResources.getAttrBoolean(this, R.attr.isLightTheme), new ColorDrawable(Color.TRANSPARENT)));
    recyclerView.setHasFixedSize(true);
    recyclerView.setOnItemClickListener(this);
    // Extra bottom padding so the last row is not covered by the FAB.
    recyclerView.setPadding(
        recyclerView.getPaddingLeft(),
        recyclerView.getPaddingTop(),
        recyclerView.getPaddingRight(),
        recyclerView.getPaddingBottom() + getResources().getDimensionPixelOffset(R.dimen.gallery_padding_bottom_fab));
    fab.setOnClickListener(this);

    // Show the hint instead of the list when there are no entries.
    recyclerView.setVisibility(data.isEmpty() ? View.GONE : View.VISIBLE);
    tip.setVisibility(data.isEmpty() ? View.VISIBLE : View.GONE);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
      case android.R.id.home:
        // Toolbar back arrow closes the activity.
        finish();
        return true;
      default:
        return super.onOptionsItemSelected(item);
    }
  }

  @Override
  public boolean onItemClick(EasyRecyclerView easyRecyclerView, View view, int position, long id) {
    // Open the edit dialog prefilled with the tapped entry.
    Pair<String, String> pair = data.get(position);
    Bundle args = new Bundle();
    args.putString(KEY_HOST, pair.first);
    args.putString(KEY_IP, pair.second);
    DialogFragment fragment = new EditHostDialogFragment();
    fragment.setArguments(args);
    fragment.show(getSupportFragmentManager(), DIALOG_TAG_EDIT_HOST);
    return true;
  }

  @Override
  public void onClick(View v) {
    // FAB tapped: open the add-host dialog.
    new AddHostDialogFragment().show(getSupportFragmentManager(), DIALOG_TAG_ADD_HOST);
  }

  // Reloads the data set after a put/delete and refreshes list/hint visibility.
  private void notifyHostsChanges() {
    data = hosts.getAll();
    recyclerView.setVisibility(data.isEmpty() ? View.GONE : View.VISIBLE);
    tip.setVisibility(data.isEmpty() ? View.VISIBLE : View.GONE);
    adapter.notifyDataSetChanged();
  }

  // View holder for one host row (host name + resolved IP).
  private class HostsHolder extends RecyclerView.ViewHolder {

    public final TextView host;
    public final TextView ip;

    public HostsHolder(View itemView) {
      super(itemView);
      host = itemView.findViewById(R.id.host);
      ip = itemView.findViewById(R.id.ip);
    }
  }

  // Adapter backed directly by the outer activity's data list.
  private class HostsAdapter extends RecyclerView.Adapter<HostsHolder> {

    private final LayoutInflater inflater = getLayoutInflater();

    @NonNull
    @Override
    public HostsHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
      return new HostsHolder(inflater.inflate(R.layout.item_hosts, parent, false));
    }

    @Override
    public void onBindViewHolder(@NonNull HostsHolder holder, int position) {
      Pair<String, String> pair = data.get(position);
      holder.host.setText(pair.first);
      holder.ip.setText(pair.second);
    }

    @Override
    public int getItemCount() {
      return data.size();
    }
  }

  /**
   * Base dialog with host/ip input fields; subclasses configure the buttons and wire them to
   * {@link #put} / {@link #delete}.
   */
  public abstract static class HostDialogFragment extends DialogFragment {

    private TextView host;
    private TextView ip;

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
      View view = getActivity().getLayoutInflater().inflate(R.layout.dialog_hosts, null, false);
      host = view.findViewById(R.id.host);
      ip = view.findViewById(R.id.ip);

      // Prefill only on first creation; on recreation the views restore their own state.
      Bundle arguments = getArguments();
      if (savedInstanceState == null && arguments != null) {
        host.setText(arguments.getString(KEY_HOST));
        ip.setText(arguments.getString(KEY_IP));
      }

      AlertDialog.Builder builder = new AlertDialog.Builder(getContext()).setView(view);
      onCreateDialogBuilder(builder);
      AlertDialog dialog = builder.create();
      // Button listeners are attached on show so a click can validate without auto-dismissing.
      dialog.setOnShowListener(d -> onCreateDialog((AlertDialog) d));

      return dialog;
    }

    // Configure title/buttons on the builder before the dialog is created.
    protected abstract void onCreateDialogBuilder(AlertDialog.Builder builder);

    // Called when the dialog is shown; attach button click listeners here.
    protected abstract void onCreateDialog(AlertDialog dialog);

    // Validates the inputs and saves the mapping; keeps the dialog open on invalid input.
    protected void put(AlertDialog dialog) {
      TextView host = dialog.findViewById(R.id.host);
      TextView ip = dialog.findViewById(R.id.ip);
      String hostString = host.getText().toString().trim().toLowerCase(Locale.US);
      String ipString = ip.getText().toString().trim();

      if (!Hosts.isValidHost(hostString)) {
        TextInputLayout hostInputLayout = dialog.findViewById(R.id.host_input_layout);
        hostInputLayout.setError(getContext().getString(R.string.invalid_host));
        return;
      }

      if (!Hosts.isValidIp(ipString)) {
        TextInputLayout ipInputLayout = dialog.findViewById(R.id.ip_input_layout);
        ipInputLayout.setError(getContext().getString(R.string.invalid_ip));
        return;
      }

      // NOTE(review): relies on the dialog's owner activity being set (DialogFragment does
      // this once attached) — confirm getOwnerActivity() is non-null here.
      HostsActivity activity = (HostsActivity) dialog.getOwnerActivity();
      activity.hosts.put(hostString, ipString);
      activity.notifyHostsChanges();

      dialog.dismiss();
    }

    // Deletes the mapping for the host currently in the input field.
    protected void delete(AlertDialog dialog) {
      TextView host = dialog.findViewById(R.id.host);
      String hostString = host.getText().toString().trim().toLowerCase(Locale.US);

      HostsActivity activity = (HostsActivity) dialog.getOwnerActivity();
      activity.hosts.delete(hostString);
      activity.notifyHostsChanges();

      dialog.dismiss();
    }
  }

  // Dialog for adding a new host mapping.
  public static class AddHostDialogFragment extends HostDialogFragment {

    @Override
    protected void onCreateDialogBuilder(AlertDialog.Builder builder) {
      builder.setTitle(R.string.add_host);
      builder.setPositiveButton(R.string.add_host_add, null);
    }

    @Override
    protected void onCreateDialog(AlertDialog dialog) {
      dialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(v -> put(dialog));
    }
  }

  // Dialog for editing or deleting an existing host mapping; the host field is locked.
  public static class EditHostDialogFragment extends HostDialogFragment {

    @Override
    protected void onCreateDialogBuilder(AlertDialog.Builder builder) {
      builder.setTitle(R.string.edit_host);
      builder.setPositiveButton(R.string.edit_host_confirm, null);
      builder.setNegativeButton(R.string.edit_host_delete, null);
    }

    @Override
    protected void onCreateDialog(AlertDialog dialog) {
      // The host identifies the entry being edited, so it must not change.
      dialog.findViewById(R.id.host_input_layout).setEnabled(false);
      dialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(v -> put(dialog));
      dialog.getButton(DialogInterface.BUTTON_NEGATIVE).setOnClickListener(v -> delete(dialog));
    }
  }
}
| |
// HTMLParser Library $Name: v1_5 $ - A java-based parser for HTML
// http://sourceforge.org/projects/htmlparser
// Copyright (C) 2004 Derrick Oswald
//
// Revision Control Information
//
// $Source: /cvsroot/htmlparser/htmlparser/src/org/htmlparser/sax/Attributes.java,v $
// $Author: derrickoswald $
// $Date: 2004/07/14 01:58:02 $
// $Revision: 1.1 $
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
package org.htmlparser.sax;
import java.util.List;
import org.htmlparser.Attribute;
import org.htmlparser.Tag;
import org.xml.sax.helpers.NamespaceSupport;
/**
 * Provides access to the tag attributes.
 *
 * Adapts an htmlparser {@link Tag}'s attribute list to the SAX
 * {@link org.xml.sax.Attributes} interface. Element zero of the tag's internal
 * attribute list is the tag name itself, so indexed access is shifted by one:
 * SAX index i maps to internal list element i + 1.
 */
public class Attributes
    implements
        org.xml.sax.Attributes
{
    /**
     * The tag from which attributes are exposed.
     */
    protected Tag mTag;

    /**
     * The utility class that converts namespaces.
     */
    protected NamespaceSupport mSupport;

    /**
     * Elements of the qname.
     * Allocated once for all uses of {@link #mSupport}.
     */
    protected String[] mParts;

    /**
     * Create an attibute access object.
     * @param tag The tag to expose.
     * @param support The namespace converter.
     * @param parts The elements of the qualified name.
     */
    public Attributes (Tag tag, NamespaceSupport support, String[] parts)
    {
        mTag = tag;
        mSupport = support;
        mParts = parts;
    }

    ////////////////////////////////////////////////////////////////////
    // Indexed access.
    ////////////////////////////////////////////////////////////////////

    /**
     * Return the number of attributes in the list.
     *
     * <p>Once you know the number of attributes, you can iterate
     * through the list.</p>
     *
     * @return The number of attributes in the list.
     * @see #getURI(int)
     * @see #getLocalName(int)
     * @see #getQName(int)
     * @see #getType(int)
     * @see #getValue(int)
     */
    public int getLength ()
    {
        // Subtract one for element zero (the tag name itself).
        return (mTag.getAttributesEx ().size () - 1);
    }

    /**
     * Look up an attribute's Namespace URI by index.
     *
     * @param index The attribute index (zero-based).
     * @return The Namespace URI, or the empty string if none
     *         is available, or null if the index is out of
     *         range.
     * @see #getLength
     */
    public String getURI (int index)
    {
        mSupport.processName (getQName (index), mParts, true);
        return (mParts[0]);
    }

    /**
     * Look up an attribute's local name by index.
     *
     * @param index The attribute index (zero-based).
     * @return The local name, or the empty string if Namespace
     *         processing is not being performed, or null
     *         if the index is out of range.
     * @see #getLength
     */
    public String getLocalName (int index)
    {
        mSupport.processName (getQName (index), mParts, true);
        return (mParts[1]);
    }

    /**
     * Look up an attribute's XML qualified (prefixed) name by index.
     *
     * @param index The attribute index (zero-based).
     * @return The XML qualified name, or the empty string
     *         if none is available, or null if the index
     *         is out of range.
     * @see #getLength
     */
    public String getQName (int index)
    {
        Attribute attribute;
        String ret;

        // Shift past element zero (the tag name) to reach the requested attribute.
        attribute = (Attribute)(mTag.getAttributesEx ().get (index + 1));
        if (attribute.isWhitespace ())
            ret = "#text";
        else
            ret = attribute.getName ();

        return (ret);
    }

    /**
     * Look up an attribute's type by index.
     *
     * <p>The attribute type is one of the strings "CDATA", "ID",
     * "IDREF", "IDREFS", "NMTOKEN", "NMTOKENS", "ENTITY", "ENTITIES",
     * or "NOTATION" (always in upper case).</p>
     *
     * <p>If the parser has not read a declaration for the attribute,
     * or if the parser does not report attribute types, then it must
     * return the value "CDATA" as stated in the XML 1.0 Recommendation
     * (clause 3.3.3, "Attribute-Value Normalization").</p>
     *
     * <p>For an enumerated attribute that is not a notation, the
     * parser will report the type as "NMTOKEN".</p>
     *
     * @param index The attribute index (zero-based).
     * @return The attribute's type as a string, or null if the
     *         index is out of range.
     * @see #getLength
     */
    public String getType (int index)
    {
        // No DTD information is available; report everything as CDATA per the SAX contract.
        return ("CDATA");
    }

    /**
     * Look up an attribute's value by index.
     *
     * <p>If the attribute value is a list of tokens (IDREFS,
     * ENTITIES, or NMTOKENS), the tokens will be concatenated
     * into a single string with each token separated by a
     * single space.</p>
     *
     * @param index The attribute index (zero-based).
     * @return The attribute's value as a string, or null if the
     *         index is out of range.
     * @see #getLength
     */
    public String getValue (int index)
    {
        Attribute attribute;
        String ret;

        attribute = (Attribute)(mTag.getAttributesEx ().get (index + 1));
        ret = attribute.getValue ();
        if (null == ret)
            ret = "";

        return (ret);
    }

    ////////////////////////////////////////////////////////////////////
    // Name-based query.
    ////////////////////////////////////////////////////////////////////

    /**
     * Look up the index of an attribute by Namespace name.
     *
     * @param uri The Namespace URI, or the empty string if
     *        the name has no Namespace URI.
     * @param localName The attribute's local name.
     * @return The index of the attribute, or -1 if it does not
     *         appear in the list.
     */
    public int getIndex (String uri, String localName)
    {
        List attributes;
        int size;
        Attribute attribute;
        String string;
        int ret;

        ret = -1;

        attributes = mTag.getAttributesEx ();
        if (null != attributes)
        {
            size = attributes.size ();
            // Start at 1: element zero of the list is the tag name itself.
            for (int i = 1; i < size; i++)
            {
                attribute = (Attribute)attributes.get (i);
                string = attribute.getName ();
                if (null != string) // not whitespace
                {
                    mSupport.processName (string, mParts, true);
                    // && (short-circuit) instead of the original non-short-circuit &.
                    if (   uri.equals (mParts[0])
                        && localName.equalsIgnoreCase (mParts[1]))
                    {
                        // Convert the internal (1-based) list position to the zero-based
                        // SAX index expected by getQName(int)/getValue(int), which add one
                        // back. The original returned i, off by one relative to the
                        // indexed accessors.
                        ret = i - 1;
                        break;
                    }
                }
            }
        }

        return (ret);
    }

    /**
     * Look up the index of an attribute by XML qualified (prefixed) name.
     *
     * @param qName The qualified (prefixed) name.
     * @return The index of the attribute, or -1 if it does not
     *         appear in the list.
     */
    public int getIndex (String qName)
    {
        mSupport.processName (qName, mParts, true);
        return (getIndex (mParts[0], mParts[1]));
    }

    /**
     * Look up an attribute's type by Namespace name.
     *
     * <p>See {@link #getType(int) getType(int)} for a description
     * of the possible types.</p>
     *
     * @param uri The Namespace URI, or the empty String if the
     *        name has no Namespace URI.
     * @param localName The local name of the attribute.
     * @return The attribute type as a string, or null if the
     *         attribute is not in the list or if Namespace
     *         processing is not being performed.
     */
    public String getType (String uri, String localName)
    {
        return (null);
    }

    /**
     * Look up an attribute's type by XML qualified (prefixed) name.
     *
     * <p>See {@link #getType(int) getType(int)} for a description
     * of the possible types.</p>
     *
     * @param qName The XML qualified name.
     * @return The attribute type as a string, or null if the
     *         attribute is not in the list or if qualified names
     *         are not available.
     */
    public String getType (String qName)
    {
        return (null);
    }

    /**
     * Look up an attribute's value by Namespace name.
     *
     * <p>See {@link #getValue(int) getValue(int)} for a description
     * of the possible values.</p>
     *
     * @param uri The Namespace URI, or the empty String if the
     *        name has no Namespace URI.
     * @param localName The local name of the attribute.
     * @return The attribute value as a string, or null if the
     *         attribute is not in the list.
     */
    public String getValue (String uri, String localName)
    {
        return (mTag.getAttribute (localName));
    }

    /**
     * Look up an attribute's value by XML qualified (prefixed) name.
     *
     * <p>See {@link #getValue(int) getValue(int)} for a description
     * of the possible values.</p>
     *
     * @param qName The XML qualified name.
     * @return The attribute value as a string, or null if the
     *         attribute is not in the list or if qualified names
     *         are not available.
     */
    public String getValue (String qName)
    {
        mSupport.processName (qName, mParts, true);
        return (getValue (mParts[0], mParts[1]));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.vector.complex.BaseRepeatedValueVector;
import org.apache.arrow.vector.complex.ListVector;
import org.apache.arrow.vector.complex.impl.UnionListWriter;
import org.apache.arrow.vector.complex.reader.FieldReader;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.TransferPair;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import io.netty.buffer.ArrowBuf;
public class TestListVector {
private BufferAllocator allocator;
@Before
public void init() {
  // Unbounded allocator; DirtyRootAllocator presumably fills fresh buffers with the
  // marker byte 100 so tests fail if they assume zeroed memory — TODO confirm.
  allocator = new DirtyRootAllocator(Long.MAX_VALUE, (byte) 100);
}
@After
public void terminate() throws Exception {
  // Closing the allocator also verifies that no child buffers were leaked.
  allocator.close();
}
/**
 * Verifies ListVector.copyFrom: copies a non-empty list, a null entry, and an empty list
 * from one vector to another, then checks validity, values and density on the source.
 */
@Test
public void testCopyFrom() throws Exception {
  try (ListVector inVector = ListVector.empty("input", allocator);
       ListVector outVector = ListVector.empty("output", allocator)) {
    UnionListWriter writer = inVector.getWriter();
    writer.allocate();

    // populate input vector with the following records
    // [1, 2, 3]
    // null
    // []
    writer.setPosition(0); // optional
    writer.startList();
    writer.bigInt().writeBigInt(1);
    writer.bigInt().writeBigInt(2);
    writer.bigInt().writeBigInt(3);
    writer.endList();

    // position 1 is skipped on purpose, leaving a null entry there
    writer.setPosition(2);
    writer.startList();
    writer.endList();

    writer.setValueCount(3);

    // copy values from input to output
    outVector.allocateNew();
    for (int i = 0; i < 3; i++) {
      outVector.copyFrom(i, i, inVector);
    }
    outVector.setValueCount(3);

    // assert the output vector is correct
    FieldReader reader = outVector.getReader();
    Assert.assertTrue("shouldn't be null", reader.isSet());
    reader.setPosition(1);
    Assert.assertFalse("should be null", reader.isSet());
    reader.setPosition(2);
    Assert.assertTrue("shouldn't be null", reader.isSet());

    /* index 0 */
    Object result = outVector.getObject(0);
    ArrayList<Long> resultSet = (ArrayList<Long>) result;
    assertEquals(3, resultSet.size());
    // Long.valueOf instead of the deprecated boxing constructor new Long(n).
    assertEquals(Long.valueOf(1), resultSet.get(0));
    assertEquals(Long.valueOf(2), resultSet.get(1));
    assertEquals(Long.valueOf(3), resultSet.get(2));

    /* index 1 */
    result = outVector.getObject(1);
    assertNull(result);

    /* index 2 */
    result = outVector.getObject(2);
    resultSet = (ArrayList<Long>) result;
    assertEquals(0, resultSet.size());

    /* density = average list length = (3+0+0)/3 */
    assertEquals(1.0D, inVector.getDensity(), 0);
  }
}
@Test
public void testSetLastSetUsage() throws Exception {
  // Populates a ListVector by writing directly into its buffers (bypassing
  // the writer) and verifies that setLastSet() must be called before
  // setValueCount() to keep the offset buffer intact.
  try (ListVector listVector = ListVector.empty("input", allocator)) {
    /* Explicitly add the dataVector */
    MinorType type = MinorType.BIGINT;
    listVector.addOrGetVector(FieldType.nullable(type.getType()));
    /* allocate memory */
    listVector.allocateNew();
    /* get inner buffers; validityBuffer and offsetBuffer */
    ArrowBuf validityBuffer = listVector.getValidityBuffer();
    ArrowBuf offsetBuffer = listVector.getOffsetBuffer();
    /* get the underlying data vector -- BigIntVector */
    BigIntVector dataVector = (BigIntVector) listVector.getDataVector();
    /* lastSet is -1 while nothing has been written through the vector API */
    assertEquals(-1, listVector.getLastSet());
    int index = 0;
    int offset = 0;
    /* write [10, 11, 12] to the list vector at index 0 */
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    dataVector.setSafe(0, 1, 10);
    dataVector.setSafe(1, 1, 11);
    dataVector.setSafe(2, 1, 12);
    offsetBuffer.setInt((index + 1) * ListVector.OFFSET_WIDTH, 3);
    index += 1;
    /* write [13, 14] to the list vector at index 1 */
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    dataVector.setSafe(3, 1, 13);
    dataVector.setSafe(4, 1, 14);
    offsetBuffer.setInt((index + 1) * ListVector.OFFSET_WIDTH, 5);
    index += 1;
    /* write [15, 16, 17] to the list vector at index 2 */
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    dataVector.setSafe(5, 1, 15);
    dataVector.setSafe(6, 1, 16);
    dataVector.setSafe(7, 1, 17);
    offsetBuffer.setInt((index + 1) * ListVector.OFFSET_WIDTH, 8);
    /* writing into the buffers directly does not update lastSet */
    assertEquals(-1, listVector.getLastSet());
    /*
     * setLastSet() must be executed before setValueCount() whenever the
     * vector was populated by writing directly into its buffers (as above):
     * setValueCount() fills offsets beyond the last set position, so skipping
     * setLastSet() would zero out the offsets written above and the accessors
     * would then read the wrong ranges from the data buffer. (Comment out the
     * setLastSet(2) call below to see the asserts fail.)
     *
     * correct state of the listVector after setLastSet(2) + setValueCount(10):
     *   bitvector    {1, 1, 1, 0, 0 ...}
     *   offsetvector {0, 3, 5, 8, 8, 8 ...}
     *   datavector   { [10, 11, 12], [13, 14], [15, 16, 17] }
     *
     * lastSet is the index of the last list actually written (2 here); the
     * offset buffer stays valid through index lastSet + 1.
     */
    listVector.setLastSet(2);
    listVector.setValueCount(10);
    /* density = (3 + 2 + 3) / 10 */
    assertEquals(0.8D, listVector.getDensity(), 0);
    /* list 0 starts at offset 0 and holds {10, 11, 12} */
    index = 0;
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(0, offset);
    Object actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(10), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(11), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(12), actual);
    /* list 1 starts at offset 3 and holds {13, 14} */
    index++;
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(3, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(13), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(14), actual);
    /* list 2 starts at offset 5 and holds {15, 16, 17} */
    index++;
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(5, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(15), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(16), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(17), actual);
    /* offset 8 is past all written data, so the element there is null */
    index++;
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(8, offset);
    actual = dataVector.getObject(offset);
    assertNull(actual);
  }
}
@Test
public void testSplitAndTransfer() throws Exception {
  // Writes five lists, checks the raw validity/offset/data layout, then
  // verifies splitAndTransfer() copies each requested range faithfully.
  try (ListVector listVector = ListVector.empty("sourceVector", allocator)) {
    /* Explicitly add the dataVector */
    MinorType type = MinorType.BIGINT;
    listVector.addOrGetVector(FieldType.nullable(type.getType()));
    UnionListWriter listWriter = listVector.getWriter();
    /* allocate memory */
    listWriter.allocate();
    /* populate: [10,11,12], [13,14], [15,16,17,18], [19], [20,21,22,23] */
    listWriter.setPosition(0);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(10);
    listWriter.bigInt().writeBigInt(11);
    listWriter.bigInt().writeBigInt(12);
    listWriter.endList();
    listWriter.setPosition(1);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(13);
    listWriter.bigInt().writeBigInt(14);
    listWriter.endList();
    listWriter.setPosition(2);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(15);
    listWriter.bigInt().writeBigInt(16);
    listWriter.bigInt().writeBigInt(17);
    listWriter.bigInt().writeBigInt(18);
    listWriter.endList();
    listWriter.setPosition(3);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(19);
    listWriter.endList();
    listWriter.setPosition(4);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(20);
    listWriter.bigInt().writeBigInt(21);
    listWriter.bigInt().writeBigInt(22);
    listWriter.bigInt().writeBigInt(23);
    listWriter.endList();
    listVector.setValueCount(5);
    assertEquals(4, listVector.getLastSet());
    /* get offset buffer */
    final ArrowBuf offsetBuffer = listVector.getOffsetBuffer();
    /* get dataVector */
    BigIntVector dataVector = (BigIntVector) listVector.getDataVector();
    /* check the vector output */
    int index = 0;
    int offset = 0;
    Object actual = null;
    /* index 0: [10, 11, 12] starting at offset 0 */
    assertFalse(listVector.isNull(index));
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(0, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(10), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(11), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(12), actual);
    /* index 1: [13, 14] starting at offset 3 */
    index++;
    assertFalse(listVector.isNull(index));
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(3, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(13), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(14), actual);
    /* index 2: [15, 16, 17, 18] starting at offset 5 */
    index++;
    assertFalse(listVector.isNull(index));
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(5, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(15), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(16), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(17), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(18), actual);
    /* index 3: [19] starting at offset 9 */
    index++;
    assertFalse(listVector.isNull(index));
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(9, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(19), actual);
    /* index 4: [20, 21, 22, 23] starting at offset 10 */
    index++;
    assertFalse(listVector.isNull(index));
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(10, offset);
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(20), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(21), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(22), actual);
    offset++;
    actual = dataVector.getObject(offset);
    assertEquals(Long.valueOf(23), actual);
    /* index 5 was never written, so it is null; the end offset is 14 */
    index++;
    assertTrue(listVector.isNull(index));
    offset = offsetBuffer.getInt(index * ListVector.OFFSET_WIDTH);
    assertEquals(14, offset);
    /* do split and transfer over (start, length) ranges of the source */
    try (ListVector toVector = ListVector.empty("toVector", allocator)) {
      TransferPair transferPair = listVector.makeTransferPair(toVector);
      int[][] transferLengths = {{0, 2}, {3, 1}, {4, 1}};
      for (final int[] transferLength : transferLengths) {
        int start = transferLength[0];
        int splitLength = transferLength[1];
        int dataLength1 = 0;
        int dataLength2 = 0;
        int offset1 = 0;
        int offset2 = 0;
        transferPair.splitAndTransfer(start, splitLength);
        /* get offsetBuffer of toVector */
        final ArrowBuf toOffsetBuffer = toVector.getOffsetBuffer();
        /* get dataVector of toVector */
        BigIntVector dataVector1 = (BigIntVector) toVector.getDataVector();
        /* each transferred list must match the source in length and content */
        for (int i = 0; i < splitLength; i++) {
          dataLength1 = offsetBuffer.getInt((start + i + 1) * ListVector.OFFSET_WIDTH) -
              offsetBuffer.getInt((start + i) * ListVector.OFFSET_WIDTH);
          dataLength2 = toOffsetBuffer.getInt((i + 1) * ListVector.OFFSET_WIDTH) -
              toOffsetBuffer.getInt(i * ListVector.OFFSET_WIDTH);
          assertEquals("Different data lengths at index: " + i + " and start: " + start,
              dataLength1, dataLength2);
          offset1 = offsetBuffer.getInt((start + i) * ListVector.OFFSET_WIDTH);
          offset2 = toOffsetBuffer.getInt(i * ListVector.OFFSET_WIDTH);
          for (int j = 0; j < dataLength1; j++) {
            assertEquals("Different data at indexes: " + offset1 + " and " + offset2,
                dataVector.getObject(offset1), dataVector1.getObject(offset2));
            offset1++;
            offset2++;
          }
        }
      }
    }
  }
}
@Test
public void testNestedListVector() throws Exception {
  // A list-of-list vector: the dataVector backing the outer listVector is
  // itself a ListVector. Checks getObject(), validity bits and offsets.
  try (ListVector listVector = ListVector.empty("sourceVector", allocator)) {
    UnionListWriter listWriter = listVector.getWriter();
    /* allocate memory */
    listWriter.allocate();
    /* write [[50, 100, 200], [75, 125, 150, 175]] at index 0 */
    listWriter.setPosition(0);
    listWriter.startList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(50);
    listWriter.list().bigInt().writeBigInt(100);
    listWriter.list().bigInt().writeBigInt(200);
    listWriter.list().endList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(75);
    listWriter.list().bigInt().writeBigInt(125);
    listWriter.list().bigInt().writeBigInt(150);
    listWriter.list().bigInt().writeBigInt(175);
    listWriter.list().endList();
    listWriter.endList();
    /* write [[10], [15, 20], [25, 30, 35]] at index 1 */
    listWriter.setPosition(1);
    listWriter.startList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(10);
    listWriter.list().endList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(15);
    listWriter.list().bigInt().writeBigInt(20);
    listWriter.list().endList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(25);
    listWriter.list().bigInt().writeBigInt(30);
    listWriter.list().bigInt().writeBigInt(35);
    listWriter.list().endList();
    listWriter.endList();
    assertEquals(1, listVector.getLastSet());
    listVector.setValueCount(2);
    assertEquals(2, listVector.getValueCount());
    /* get listVector value at index 0 -- the value itself is a listvector */
    Object result = listVector.getObject(0);
    ArrayList<ArrayList<Long>> resultSet = (ArrayList<ArrayList<Long>>) result;
    ArrayList<Long> list;
    assertEquals(2, resultSet.size()); /* 2 inner lists at index 0 */
    assertEquals(3, resultSet.get(0).size()); /* size of first inner list */
    assertEquals(4, resultSet.get(1).size()); /* size of second inner list */
    list = resultSet.get(0);
    // Long.valueOf replaces the deprecated Long(long) constructor
    assertEquals(Long.valueOf(50), list.get(0));
    assertEquals(Long.valueOf(100), list.get(1));
    assertEquals(Long.valueOf(200), list.get(2));
    list = resultSet.get(1);
    assertEquals(Long.valueOf(75), list.get(0));
    assertEquals(Long.valueOf(125), list.get(1));
    assertEquals(Long.valueOf(150), list.get(2));
    assertEquals(Long.valueOf(175), list.get(3));
    /* get listVector value at index 1 -- the value itself is a listvector */
    result = listVector.getObject(1);
    resultSet = (ArrayList<ArrayList<Long>>) result;
    assertEquals(3, resultSet.size()); /* 3 inner lists at index 1 */
    assertEquals(1, resultSet.get(0).size()); /* size of first inner list */
    assertEquals(2, resultSet.get(1).size()); /* size of second inner list */
    assertEquals(3, resultSet.get(2).size()); /* size of third inner list */
    list = resultSet.get(0);
    assertEquals(Long.valueOf(10), list.get(0));
    list = resultSet.get(1);
    assertEquals(Long.valueOf(15), list.get(0));
    assertEquals(Long.valueOf(20), list.get(1));
    list = resultSet.get(2);
    assertEquals(Long.valueOf(25), list.get(0));
    assertEquals(Long.valueOf(30), list.get(1));
    assertEquals(Long.valueOf(35), list.get(2));
    /* check underlying bitVector */
    assertFalse(listVector.isNull(0));
    assertFalse(listVector.isNull(1));
    /* check underlying offsets */
    final ArrowBuf offsetBuffer = listVector.getOffsetBuffer();
    /* listVector has 2 lists at index 0 and 3 lists at index 1 */
    assertEquals(0, offsetBuffer.getInt(0 * ListVector.OFFSET_WIDTH));
    assertEquals(2, offsetBuffer.getInt(1 * ListVector.OFFSET_WIDTH));
    assertEquals(5, offsetBuffer.getInt(2 * ListVector.OFFSET_WIDTH));
  }
}
@Test
public void testNestedListVector1() throws Exception {
  // Builds a six-level-deep chain of nested LIST vectors with a BIGINT leaf
  // and checks that setInitialCapacity() propagates without blowing up.
  try (ListVector listVector = ListVector.empty("sourceVector", allocator)) {
    final FieldType listType = FieldType.nullable(MinorType.LIST.getType());
    final FieldType scalarType = FieldType.nullable(MinorType.BIGINT.getType());
    // Attach a LIST child at each of the six nesting levels.
    ListVector current = listVector;
    for (int depth = 0; depth < 6; depth++) {
      current.addOrGetVector(listType);
      current = (ListVector) current.getDataVector();
    }
    // The innermost level holds the scalar values.
    current.addOrGetVector(scalarType);
    // Capacity must cascade down the whole hierarchy without error.
    listVector.setInitialCapacity(128);
  }
}
@Test
public void testNestedListVector2() throws Exception {
  // Same nested-list exercise as testNestedListVector but starting from a
  // deliberately tiny initial capacity (1) to force reallocation paths.
  try (ListVector listVector = ListVector.empty("sourceVector", allocator)) {
    listVector.setInitialCapacity(1);
    UnionListWriter listWriter = listVector.getWriter();
    /* allocate memory */
    listWriter.allocate();
    /* write [[50, 100, 200], [75, 125]] at index 0 */
    listWriter.setPosition(0);
    listWriter.startList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(50);
    listWriter.list().bigInt().writeBigInt(100);
    listWriter.list().bigInt().writeBigInt(200);
    listWriter.list().endList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(75);
    listWriter.list().bigInt().writeBigInt(125);
    listWriter.list().endList();
    listWriter.endList();
    /* write [[15, 20], [25, 30, 35]] at index 1 */
    listWriter.setPosition(1);
    listWriter.startList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(15);
    listWriter.list().bigInt().writeBigInt(20);
    listWriter.list().endList();
    listWriter.list().startList();
    listWriter.list().bigInt().writeBigInt(25);
    listWriter.list().bigInt().writeBigInt(30);
    listWriter.list().bigInt().writeBigInt(35);
    listWriter.list().endList();
    listWriter.endList();
    assertEquals(1, listVector.getLastSet());
    listVector.setValueCount(2);
    assertEquals(2, listVector.getValueCount());
    /* get listVector value at index 0 -- the value itself is a listvector */
    Object result = listVector.getObject(0);
    ArrayList<ArrayList<Long>> resultSet = (ArrayList<ArrayList<Long>>) result;
    ArrayList<Long> list;
    assertEquals(2, resultSet.size()); /* 2 inner lists at index 0 */
    assertEquals(3, resultSet.get(0).size()); /* size of first inner list */
    assertEquals(2, resultSet.get(1).size()); /* size of second inner list */
    list = resultSet.get(0);
    // Long.valueOf replaces the deprecated Long(long) constructor
    assertEquals(Long.valueOf(50), list.get(0));
    assertEquals(Long.valueOf(100), list.get(1));
    assertEquals(Long.valueOf(200), list.get(2));
    list = resultSet.get(1);
    assertEquals(Long.valueOf(75), list.get(0));
    assertEquals(Long.valueOf(125), list.get(1));
    /* get listVector value at index 1 -- the value itself is a listvector */
    result = listVector.getObject(1);
    resultSet = (ArrayList<ArrayList<Long>>) result;
    assertEquals(2, resultSet.size()); /* 2 inner lists at index 1 */
    assertEquals(2, resultSet.get(0).size()); /* size of first inner list */
    assertEquals(3, resultSet.get(1).size()); /* size of second inner list */
    list = resultSet.get(0);
    assertEquals(Long.valueOf(15), list.get(0));
    assertEquals(Long.valueOf(20), list.get(1));
    list = resultSet.get(1);
    assertEquals(Long.valueOf(25), list.get(0));
    assertEquals(Long.valueOf(30), list.get(1));
    assertEquals(Long.valueOf(35), list.get(2));
    /* check underlying bitVector */
    assertFalse(listVector.isNull(0));
    assertFalse(listVector.isNull(1));
    /* check underlying offsets */
    final ArrowBuf offsetBuffer = listVector.getOffsetBuffer();
    /* listVector has 2 lists at index 0 and 2 lists at index 1 */
    assertEquals(0, offsetBuffer.getInt(0 * ListVector.OFFSET_WIDTH));
    assertEquals(2, offsetBuffer.getInt(1 * ListVector.OFFSET_WIDTH));
    assertEquals(4, offsetBuffer.getInt(2 * ListVector.OFFSET_WIDTH));
  }
}
@Test
public void testGetBufferAddress() throws Exception {
  // A ListVector exposes only validity and offset buffers; asking for a data
  // buffer address must throw UnsupportedOperationException.
  try (ListVector listVector = ListVector.empty("vector", allocator)) {
    UnionListWriter listWriter = listVector.getWriter();
    listWriter.allocate();
    /* write [50, 100, 200] at index 0 and [250, 300] at index 1 */
    listWriter.setPosition(0);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(50);
    listWriter.bigInt().writeBigInt(100);
    listWriter.bigInt().writeBigInt(200);
    listWriter.endList();
    listWriter.setPosition(1);
    listWriter.startList();
    listWriter.bigInt().writeBigInt(250);
    listWriter.bigInt().writeBigInt(300);
    listWriter.endList();
    listVector.setValueCount(2);
    /* check listVector contents */
    Object result = listVector.getObject(0);
    ArrayList<Long> resultSet = (ArrayList<Long>) result;
    assertEquals(3, resultSet.size());
    // Long.valueOf replaces the deprecated Long(long) constructor
    assertEquals(Long.valueOf(50), resultSet.get(0));
    assertEquals(Long.valueOf(100), resultSet.get(1));
    assertEquals(Long.valueOf(200), resultSet.get(2));
    result = listVector.getObject(1);
    resultSet = (ArrayList<Long>) result;
    assertEquals(2, resultSet.size());
    assertEquals(Long.valueOf(250), resultSet.get(0));
    assertEquals(Long.valueOf(300), resultSet.get(1));
    List<ArrowBuf> buffers = listVector.getFieldBuffers();
    long bitAddress = listVector.getValidityBufferAddress();
    long offsetAddress = listVector.getOffsetBufferAddress();
    /* the list vector has no data buffer of its own */
    try {
      listVector.getDataBufferAddress();
      Assert.fail("getDataBufferAddress() should not be supported for ListVector");
    } catch (UnsupportedOperationException ue) {
      // expected
    }
    /* only validity and offset buffers are exposed as field buffers */
    assertEquals(2, buffers.size());
    assertEquals(bitAddress, buffers.get(0).memoryAddress());
    assertEquals(offsetAddress, buffers.get(1).memoryAddress());
    /* density = (3 + 2) / 2 */
    assertEquals(2.5, listVector.getDensity(), 0);
  }
}
@Test
public void testConsistentChildName() throws Exception {
  // The inner data vector must keep the same well-known name whether or not
  // it has been materialized yet.
  try (ListVector listVector = ListVector.empty("sourceVector", allocator)) {
    // ... before the child vector exists ...
    String fieldBefore = listVector.getField().toString();
    assertTrue(fieldBefore.contains(ListVector.DATA_VECTOR_NAME));
    // ... and after it has been explicitly added.
    listVector.addOrGetVector(FieldType.nullable(MinorType.INT.getType()));
    String fieldAfter = listVector.getField().toString();
    assertTrue(fieldAfter.contains(ListVector.DATA_VECTOR_NAME));
  }
}
@Test
public void testSetInitialCapacity() {
  // Verifies how the density argument of setInitialCapacity() scales the
  // inner data vector's allocation relative to the list's value capacity.
  try (final ListVector vector = ListVector.empty("", allocator)) {
    vector.addOrGetVector(FieldType.nullable(MinorType.INT.getType()));
    /**
     * use the default multiplier of 5,
     * 512 * 5 => 2560 * 4 => 10240 bytes => 16KB => 4096 value capacity.
     */
    vector.setInitialCapacity(512);
    vector.allocateNew();
    assertEquals(512, vector.getValueCapacity());
    assertTrue(vector.getDataVector().getValueCapacity() >= 512 * 5);
    /* use density as 4 */
    vector.setInitialCapacity(512, 4);
    vector.allocateNew();
    assertEquals(512, vector.getValueCapacity());
    assertTrue(vector.getDataVector().getValueCapacity() >= 512 * 4);
    /**
     * inner value capacity we pass to data vector is 512 * 0.1 => 51
     * For an int vector this is 204 bytes of memory for data buffer
     * and 7 bytes for validity buffer.
     * and with power of 2 allocation, we allocate 256 bytes and 8 bytes
     * for the data buffer and validity buffer of the inner vector. Thus
     * value capacity of inner vector is 64
     */
    vector.setInitialCapacity(512, 0.1);
    vector.allocateNew();
    assertEquals(512, vector.getValueCapacity());
    assertTrue(vector.getDataVector().getValueCapacity() >= 51);
    /**
     * inner value capacity we pass to data vector is 512 * 0.01 => 5
     * For an int vector this is 20 bytes of memory for data buffer
     * and 1 byte for validity buffer.
     * and with power of 2 allocation, we allocate 32 bytes and 1 bytes
     * for the data buffer and validity buffer of the inner vector. Thus
     * value capacity of inner vector is 8
     */
    vector.setInitialCapacity(512, 0.01);
    vector.allocateNew();
    assertEquals(512, vector.getValueCapacity());
    assertTrue(vector.getDataVector().getValueCapacity() >= 5);
    /**
     * inner value capacity we pass to data vector is 5 * 0.1 => 0
     * which is then rounded off to 1. So we pass value count as 1
     * to the inner int vector.
     * the offset buffer of the list vector is allocated for 6 values
     * which is 24 bytes and then rounded off to 32 bytes (8 values)
     * the validity buffer of the list vector is allocated for 5
     * values which is 1 byte. This is why value capacity of the list
     * vector is 7 as we take the min of validity buffer value capacity
     * and offset buffer value capacity.
     */
    vector.setInitialCapacity(5, 0.1);
    vector.allocateNew();
    assertEquals(7, vector.getValueCapacity());
    assertTrue(vector.getDataVector().getValueCapacity() >= 1);
  }
}
@Test
public void testClearAndReuse() {
  // Ensures a cleared vector can be repopulated and read back correctly.
  try (final ListVector vector = ListVector.empty("list", allocator)) {
    BigIntVector bigIntVector =
        (BigIntVector) vector.addOrGetVector(FieldType.nullable(MinorType.BIGINT.getType())).getVector();
    vector.setInitialCapacity(10);
    vector.allocateNew();
    vector.startNewValue(0);
    bigIntVector.setSafe(0, 7);
    vector.endValue(0, 1);
    vector.startNewValue(1);
    bigIntVector.setSafe(1, 8);
    vector.endValue(1, 1);
    vector.setValueCount(2);
    Object result = vector.getObject(0);
    ArrayList<Long> resultSet = (ArrayList<Long>) result;
    // Long.valueOf replaces the deprecated Long(long) constructor
    assertEquals(Long.valueOf(7), resultSet.get(0));
    result = vector.getObject(1);
    resultSet = (ArrayList<Long>) result;
    assertEquals(Long.valueOf(8), resultSet.get(0));
    // Clear and release the buffers to trigger a realloc when adding next value
    vector.clear();
    // The list vector should reuse a buffer when reallocating the offset buffer
    vector.startNewValue(0);
    bigIntVector.setSafe(0, 7);
    vector.endValue(0, 1);
    vector.startNewValue(1);
    bigIntVector.setSafe(1, 8);
    vector.endValue(1, 1);
    vector.setValueCount(2);
    result = vector.getObject(0);
    resultSet = (ArrayList<Long>) result;
    assertEquals(Long.valueOf(7), resultSet.get(0));
    result = vector.getObject(1);
    resultSet = (ArrayList<Long>) result;
    assertEquals(Long.valueOf(8), resultSet.get(0));
  }
}
@Test
public void testGetBufferSizeFor() {
  // getBufferSizeFor(n) must equal validity + offsets + data sizes for the
  // first n lists.
  try (final ListVector vector = ListVector.empty("list", allocator)) {
    UnionListWriter writer = vector.getWriter();
    writer.allocate();
    // five lists holding 2 + 2 + 2 + 4 + 4 = 14 ints in total
    int[][] rows = {{1, 2}, {3, 4}, {5, 6}, {7, 8, 9, 10}, {11, 12, 13, 14}};
    for (int[] row : rows) {
      writeIntValues(writer, row);
    }
    writer.setValueCount(5);
    IntVector dataVector = (IntVector) vector.getDataVector();
    // cumulative number of ints written after each list
    int[] cumulativeCounts = {0, 2, 4, 6, 10, 14};
    for (int count = 1; count <= 5; count++) {
      int expected = BitVectorHelper.getValidityBufferSize(count)
          + (count + 1) * BaseRepeatedValueVector.OFFSET_WIDTH
          + dataVector.getBufferSizeFor(cumulativeCounts[count]);
      assertEquals(expected, vector.getBufferSizeFor(count));
    }
  }
}
// Appends a single list entry containing every given int, in order.
private void writeIntValues(UnionListWriter writer, int[] values) {
  writer.startList();
  for (int i = 0; i < values.length; i++) {
    writer.integer().writeInt(values[i]);
  }
  writer.endList();
}
}
| |
/*******************************************************************************
* Copyright (c) 2016 comtel inc.
*
* Licensed under the Apache License, version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*******************************************************************************/
package org.jfxvnc.ui.service;
import java.util.function.BiConsumer;
import org.jfxvnc.net.rfb.VncConnection;
import org.jfxvnc.net.rfb.codec.ProtocolState;
import org.jfxvnc.net.rfb.codec.decoder.BellEvent;
import org.jfxvnc.net.rfb.codec.decoder.ColourMapEvent;
import org.jfxvnc.net.rfb.codec.decoder.ServerCutTextEvent;
import org.jfxvnc.net.rfb.codec.decoder.ServerDecoderEvent;
import org.jfxvnc.net.rfb.codec.encoder.InputEventListener;
import org.jfxvnc.net.rfb.render.ConnectInfoEvent;
import org.jfxvnc.net.rfb.render.ProtocolConfiguration;
import org.jfxvnc.net.rfb.render.RenderCallback;
import org.jfxvnc.net.rfb.render.RenderProtocol;
import org.jfxvnc.net.rfb.render.rect.ImageRect;
import org.slf4j.LoggerFactory;
import javafx.application.Platform;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.property.ReadOnlyStringProperty;
import javafx.beans.property.ReadOnlyStringWrapper;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleDoubleProperty;
/**
 * Bridges a {@link VncConnection} to JavaFX: implements {@link RenderProtocol}
 * and republishes decoder events, rendered rectangles and connection state as
 * observable properties that UI code can bind to.
 */
public class VncRenderService implements RenderProtocol {

  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(VncRenderService.class);

  private final VncConnection con;

  /** Optional sink invoked with (event, null) or (null, rect) pairs. */
  private BiConsumer<ServerDecoderEvent, ImageRect> eventConsumer;

  private final BooleanProperty listeningMode = new SimpleBooleanProperty(false);
  private final ReadOnlyBooleanWrapper online = new ReadOnlyBooleanWrapper(false);
  /* toggled on every BellEvent so change listeners fire for repeated bells */
  private final ReadOnlyBooleanWrapper bell = new ReadOnlyBooleanWrapper(false);
  private final ReadOnlyStringWrapper serverCutText = new ReadOnlyStringWrapper();
  private final ReadOnlyObjectWrapper<ConnectInfoEvent> connectInfo = new ReadOnlyObjectWrapper<>();
  private final ReadOnlyObjectWrapper<ProtocolState> protocolState = new ReadOnlyObjectWrapper<>(ProtocolState.CLOSED);
  private final ReadOnlyObjectWrapper<InputEventListener> inputEventListener = new ReadOnlyObjectWrapper<>();
  private final ReadOnlyObjectWrapper<ColourMapEvent> colourMapEvent = new ReadOnlyObjectWrapper<>();
  private final ReadOnlyObjectWrapper<Throwable> exceptionCaught = new ReadOnlyObjectWrapper<>();

  /* created lazily in imageProperty(); render() publishes only if requested */
  private ReadOnlyObjectWrapper<ImageRect> image;

  private final double minZoomLevel = 0.2;
  private final double maxZoomLevel = 5.0;
  private final DoubleProperty zoomLevel = new SimpleDoubleProperty(1);
  /* NOTE: "fullSceen" (sic) is kept because the accessor name is public API */
  private final BooleanProperty fullSceen = new SimpleBooleanProperty(false);
  private final BooleanProperty restart = new SimpleBooleanProperty(false);

  public VncRenderService() {
    this(new VncConnection());
  }

  /**
   * @param con the connection whose events this service republishes
   */
  public VncRenderService(VncConnection con) {
    this.con = con;
    // keep the zoom level clamped to [minZoomLevel, maxZoomLevel]
    zoomLevel.addListener((l, a, b) -> {
      if (b.doubleValue() > maxZoomLevel) {
        zoomLevel.set(maxZoomLevel);
      } else if (b.doubleValue() < minZoomLevel) {
        zoomLevel.set(minZoomLevel);
      }
    });
  }

  public void setEventConsumer(BiConsumer<ServerDecoderEvent, ImageRect> c) {
    eventConsumer = c;
  }

  public ProtocolConfiguration getConfiguration() {
    return con.getConfiguration();
  }

  /**
   * Registers this service as the render protocol and starts either listening
   * mode or an outgoing connection; failures are published through
   * {@link #exceptionCaughtProperty()} and trigger a disconnect.
   */
  public void connect() {
    con.setRenderProtocol(this);
    con.addFaultListener(exceptionCaught::set);
    if (listeningMode.get()) {
      con.startListeningMode().whenComplete((c, th) -> {
        if (th != null) {
          exceptionCaught(th);
          disconnect();
        }
      });
      return;
    }
    con.connect().whenComplete((c, th) -> {
      if (th != null) {
        exceptionCaught(th);
        disconnect();
      }
    });
  }

  public void disconnect() {
    con.disconnect();
    online.set(false);
  }

  @Override
  public void render(ImageRect rect) {
    if (eventConsumer != null) {
      eventConsumer.accept(null, rect);
    }
    // image is only non-null once somebody requested imageProperty()
    if (image != null) {
      image.set(rect);
    }
  }

  @Override
  public void renderComplete(RenderCallback callback) {
    // acknowledge on the JavaFX application thread
    Platform.runLater(() -> callback.renderComplete());
  }

  /**
   * Dispatches a decoder event to the matching property; unhandled event
   * types are logged and dropped.
   */
  @Override
  public void eventReceived(ServerDecoderEvent event) {
    logger.debug("event received: {}", event);
    if (eventConsumer != null) {
      eventConsumer.accept(event, null);
    }
    if (event instanceof ConnectInfoEvent) {
      connectInfo.set((ConnectInfoEvent) event);
      online.set(true);
      return;
    }
    if (event instanceof BellEvent) {
      bell.set(!bell.get());
      return;
    }
    if (event instanceof ServerCutTextEvent) {
      serverCutText.set(((ServerCutTextEvent) event).getText());
      return;
    }
    if (event instanceof ColourMapEvent) {
      colourMapEvent.set((ColourMapEvent) event);
      return;
    }
    logger.warn("not handled event: {}", event);
  }

  @Override
  public void exceptionCaught(Throwable t) {
    exceptionCaught.set(t);
  }

  @Override
  public void stateChanged(ProtocolState state) {
    protocolState.set(state);
    if (state == ProtocolState.CLOSED) {
      disconnect();
    }
  }

  @Override
  public void registerInputEventListener(InputEventListener listener) {
    inputEventListener.set(listener);
  }

  public ReadOnlyObjectProperty<ConnectInfoEvent> connectInfoProperty() {
    return connectInfo.getReadOnlyProperty();
  }

  public ReadOnlyObjectProperty<ProtocolState> protocolStateProperty() {
    // Return the read-only view, not the mutable wrapper, so callers cannot
    // cast the result back and mutate the state (consistent with the other
    // accessors of this class).
    return protocolState.getReadOnlyProperty();
  }

  public ReadOnlyObjectProperty<InputEventListener> inputEventListenerProperty() {
    return inputEventListener.getReadOnlyProperty();
  }

  public ReadOnlyObjectProperty<ImageRect> imageProperty() {
    if (image == null) {
      image = new ReadOnlyObjectWrapper<>();
    }
    return image.getReadOnlyProperty();
  }

  public ReadOnlyObjectProperty<ColourMapEvent> colourMapEventProperty() {
    return colourMapEvent.getReadOnlyProperty();
  }

  public ReadOnlyObjectProperty<Throwable> exceptionCaughtProperty() {
    return exceptionCaught.getReadOnlyProperty();
  }

  public ReadOnlyBooleanProperty connectingProperty() {
    return con.connectingProperty();
  }

  public ReadOnlyBooleanProperty connectedProperty() {
    return con.connectedProperty();
  }

  public ReadOnlyBooleanProperty onlineProperty() {
    return online.getReadOnlyProperty();
  }

  public ReadOnlyBooleanProperty bellProperty() {
    return bell.getReadOnlyProperty();
  }

  public ReadOnlyStringProperty serverCutTextProperty() {
    return serverCutText.getReadOnlyProperty();
  }

  public DoubleProperty zoomLevelProperty() {
    return zoomLevel;
  }

  /** Name keeps the historical "Sceen" typo for API compatibility. */
  public BooleanProperty fullSceenProperty() {
    return fullSceen;
  }

  public BooleanProperty restartProperty() {
    return restart;
  }

  public BooleanProperty listeningModeProperty() {
    return listeningMode;
  }

  public IntegerProperty listeningPortProperty() {
    return getConfiguration().listeningPortProperty();
  }
}
| |
/**
*
*/
package org.vaadin.maps.client.ui;
import java.util.Stack;
import org.vaadin.gwtgraphics.client.shape.Circle;
import org.vaadin.gwtgraphics.client.shape.Path;
import org.vaadin.gwtgraphics.client.shape.path.LineTo;
import org.vaadin.maps.client.drawing.Utils;
import org.vaadin.maps.client.geometry.Coordinate;
import org.vaadin.maps.client.geometry.LineString;
import org.vaadin.maps.shared.ui.Style;
import com.google.gwt.core.client.Duration;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.dom.client.MouseMoveEvent;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.vaadin.shared.MouseEventDetails;
/**
 * Drawing handler that lets the user digitize a poly-line (path) by clicking
 * its vertices. Depending on the configured {@link FinishStrategy} the drawing
 * is finished with an Alt-click, a double click, or a click near the start
 * point (which closes the line string).
 *
 * @author Kamil Morong
 */
public class VPathHandler extends VPointHandler {

    /** User gesture that finishes the path currently being drawn. */
    public enum FinishStrategy {
        AltClick, DoubleClick, NearStartClick
    }

    public static final String CLASSNAME = "v-pathhandler";

    /** Maximum interval between two clicks counted as a double click. */
    protected static final int DOUBLE_CLICK_THRESHOLD = 400; // milliseconds

    /**
     * Visual representation of the line start point.
     */
    protected Circle startPoint = null;
    protected Style startPointStyle = Style.DEFAULT_DRAW_START_POINT;
    protected Style startPointHoverStyle = Style.DEFAULT_HOVER_START_POINT;

    /**
     * Stack of line vertices, excluding first and last vertex.
     */
    protected Stack<Circle> vertices = new Stack<Circle>();
    protected Style vertexStyle = Style.DEFAULT_DRAW_VERTEX;

    /** Visual representation of the line being drawn. */
    protected Path line = null;
    protected Style lineStyle = Style.DEFAULT_DRAW_LINE;

    protected FinishStrategy strategy = FinishStrategy.AltClick;

    /** Measures time since the previous click for double-click detection. */
    protected Duration clickDuration = null;

    /**
     * Geometry of the line actually drawn.
     */
    protected LineString lineString = null;

    public VPathHandler() {
        super();
        setStyleName(CLASSNAME);
    }

    /** Creates the start-point marker with hover feedback and adds it to the container. */
    private void addStartPoint(int x, int y) {
        startPoint = new Circle(x, y, 0);
        startPoint.addMouseOverHandler(new MouseOverHandler() {
            @Override
            public void onMouseOver(MouseOverEvent event) {
                // Highlight only when clicking the start point would actually
                // close the line string (>= 3 points drawn).
                if (canCloseLineString()) {
                    updateStartPointHoverStyle();
                } else {
                    updateStartPointStyle();
                }
            }
        });
        startPoint.addMouseOutHandler(new MouseOutHandler() {
            @Override
            public void onMouseOut(MouseOutEvent event) {
                updateStartPointStyle();
            }
        });
        updateStartPointStyle();
        container.add(startPoint);
    }

    private void updateStartPointStyle() {
        if (startPoint != null && startPointStyle != null) {
            Utils.updateDrawingStyle(startPoint, startPointStyle);
        }
    }

    private void updateStartPointHoverStyle() {
        if (startPoint != null && startPointHoverStyle != null) {
            Utils.updateDrawingStyle(startPoint, startPointHoverStyle);
        }
    }

    private void removeStartPoint() {
        // FIX: guard against cancel()/deactivate() being invoked before any
        // drawing started, which used to pass null to container.remove().
        if (startPoint != null) {
            container.remove(startPoint);
            startPoint = null;
        }
    }

    /** Starts the rubber-band line at (x, y) with a zero-length first segment. */
    private void addLine(int x, int y) {
        line = new Path(x, y);
        line.setFillAllowed(false);
        line.lineTo(x, y);
        updateLineStyle();
        container.add(line);
    }

    private void updateLineStyle() {
        if (line != null && lineStyle != null) {
            Utils.updateDrawingStyle(line, lineStyle);
        }
    }

    private void removeLine() {
        // FIX: same null guard as removeStartPoint() — see above.
        if (line != null) {
            container.remove(line);
            line = null;
        }
        removeVertices();
    }

    private void addVertex(int x, int y) {
        Circle vertex = new Circle(x, y, 0);
        updateDrawVertexStyle(vertex);
        vertices.add(vertex);
        container.add(vertex);
    }

    private void updateDrawVertexStyle(Circle vertex) {
        if (vertex != null && vertexStyle != null) {
            Utils.updateDrawingStyle(vertex, vertexStyle);
        }
    }

    private void updateVerticesStyle() {
        for (Circle vertex : vertices) {
            updateDrawVertexStyle(vertex);
        }
    }

    private void removeVertices() {
        while (!vertices.isEmpty()) {
            Circle vertex = vertices.pop();
            container.remove(vertex);
        }
    }

    protected void removeLastVertex() {
        if (!vertices.isEmpty()) {
            Circle vertex = vertices.pop();
            container.remove(vertex);
        }
    }

    protected void removeLastLineStringVertex() {
        lineString.getCoordinateSequence().removeLast();
    }

    /** Appends a drawn vertex and extends the rubber-band line to (x, y). */
    protected void addLineSegment(int x, int y) {
        addVertex(x, y);
        line.lineTo(x, y);
    }

    protected void addLineStringVertex(int[] xy) {
        lineString.getCoordinateSequence().add(createCoordinate(xy));
    }

    /** Moves the free endpoint of the last line segment to the cursor position. */
    private void updateLineSegment(int x, int y) {
        if (line != null) {
            LineTo lastStep = (LineTo) line.getStep(line.getStepCount() - 1);
            lastStep.setX(x);
            lastStep.setY(y);
            line.issueRedraw(true);
        }
    }

    protected void prepareDrawing(int x, int y) {
        addStartPoint(x, y);
        addLine(x, y);
    }

    protected void prepareLineString(int[] xy) {
        lineString = new LineString(createCoordinate(xy));
    }

    protected void closeLineString() {
        lineString.close();
    }

    protected void cleanDrawing() {
        removeStartPoint();
        removeLine();
    }

    protected void cleanLineString() {
        lineString = null;
    }

    public Style getStartPointStyle() {
        return startPointStyle;
    }

    public void setStartPointStyle(Style style) {
        if (style != null) {
            startPointStyle = style;
        } else {
            // FIX: fall back to the same default the field is initialized
            // with. The original fell back to Style.DEFAULT_DRAW_CURSOR,
            // which appears to be a copy-paste slip from VPointHandler.
            startPointStyle = Style.DEFAULT_DRAW_START_POINT;
        }
        updateStartPointStyle();
    }

    public Style getStartPointHoverStyle() {
        return startPointHoverStyle;
    }

    public void setStartPointHoverStyle(Style style) {
        if (style != null) {
            startPointHoverStyle = style;
        } else {
            startPointHoverStyle = Style.DEFAULT_HOVER_START_POINT;
        }
    }

    public Style getLineStyle() {
        return lineStyle;
    }

    public void setLineStyle(Style style) {
        if (style != null) {
            lineStyle = style;
        } else {
            lineStyle = Style.DEFAULT_DRAW_LINE;
        }
        updateLineStyle();
    }

    public Style getVertexStyle() {
        return vertexStyle;
    }

    public void setVertexStyle(Style style) {
        if (style != null) {
            vertexStyle = style;
        } else {
            vertexStyle = Style.DEFAULT_DRAW_VERTEX;
        }
        updateVerticesStyle();
    }

    /**
     * Handles one (possibly synthetic) click: starts a drawing on the first
     * click, appends vertices on subsequent clicks, and finishes the drawing
     * when the configured {@link FinishStrategy}'s condition is met, firing a
     * {@link GeometryEvent} with the resulting line string.
     */
    @Override
    protected void syntheticClick(MouseEventDetails details, Element relativeElement) {
        cleanMouseState();
        if (!active || frozen) {
            return;
        }
        boolean finish = false;
        boolean isDoubleClick = false;
        // Double-click detection: two clicks within DOUBLE_CLICK_THRESHOLD ms.
        if (null == clickDuration) {
            clickDuration = new Duration();
        } else {
            if (clickDuration.elapsedMillis() <= DOUBLE_CLICK_THRESHOLD) {
                isDoubleClick = true;
                clickDuration = null;
            } else {
                clickDuration = new Duration();
            }
        }
        // The second click of a double click is only meaningful for the
        // double-click based finish strategies; otherwise ignore it.
        if (isDoubleClick
                && !(FinishStrategy.DoubleClick.equals(strategy) || FinishStrategy.NearStartClick.equals(strategy))) {
            return;
        }
        /*
         * if (clickHandlerSlave != null) {
         * clickHandlerSlave.syntheticClick(details, relativeElement); }
         */
        int[] xy = getMouseEventXY(details, relativeElement);
        if (null == startPoint) {
            // First click: create and insert the start point and begin both
            // the visual line and the geometry being built.
            prepareDrawing(xy[0], xy[1]);
            prepareLineString(xy);
        } else {
            if ((details.isShiftKey() && (FinishStrategy.AltClick.equals(strategy)
                    || (isDoubleClick && FinishStrategy.DoubleClick.equals(strategy))))
                    || FinishStrategy.NearStartClick.equals(strategy)) {
                // Finish drawing with a closed line if shift has been pressed
                // and the click lands inside the start point's circle.
                if (canCloseLineString()
                        && isWithinCircle(startPoint.getX(), startPoint.getY(), startPoint.getRadius(), xy[0], xy[1])) {
                    finish = true;
                    if (isDoubleClick) {
                        // remove last vertex from everywhere
                        removeLastLineStringVertex();
                        removeLastVertex();
                    }
                    // close line
                    closeLineString();
                } else if (isDoubleClick && FinishStrategy.NearStartClick.equals(strategy)) {
                    finish = true;
                }
            } else if ((FinishStrategy.AltClick.equals(strategy) && details.isAltKey())) {
                // Finish drawing (open line) when the Alt-click condition passes.
                finish = true;
                // append last vertex
                addLineStringVertex(xy);
            } else if ((FinishStrategy.DoubleClick.equals(strategy) || FinishStrategy.NearStartClick.equals(strategy))
                    && isDoubleClick) {
                finish = true;
            }
            if (finish) {
                fireEvent(new GeometryEvent(VPathHandler.this, lineString));
                cleanDrawing();
                cleanLineString();
            } else {
                addLineSegment(xy[0], xy[1]);
                // append vertex
                addLineStringVertex(xy);
            }
        }
    }

    /** @return true if (pointX, pointY) lies within or on the given circle. */
    protected boolean isWithinCircle(int circleX, int circleY, int radius, int pointX, int pointY) {
        return Math.sqrt(Math.pow(pointX - circleX, 2) + Math.pow(pointY - circleY, 2)) <= radius;
    }

    @Override
    public void onMouseMove(MouseMoveEvent event) {
        if (!active) {
            return;
        }
        super.onMouseMove(event);
        int[] xy = getMouseEventXY(event);
        // Rubber-band: keep the open segment attached to the cursor.
        updateLineSegment(xy[0], xy[1]);
    }

    /**
     * Selects the finish strategy by its ordinal; unknown codes fall back to
     * {@link FinishStrategy#AltClick}.
     */
    public void setStrategyFromCode(int code) {
        for (FinishStrategy finishStrategy : FinishStrategy.values()) {
            if (code == finishStrategy.ordinal()) {
                strategy = finishStrategy;
                return;
            }
        }
        strategy = FinishStrategy.AltClick;
    }

    /** Shifts every drawn widget and the geometry by (deltaX, deltaY), e.g. on pan. */
    @Override
    protected void updateDrawings(int deltaX, int deltaY) {
        if (startPoint != null) {
            startPoint.setX(startPoint.getX() + deltaX);
            startPoint.setY(startPoint.getY() + deltaY);
        }
        if (line != null) {
            line.setX(line.getX() + deltaX);
            line.setY(line.getY() + deltaY);
        }
        for (Circle vertex : vertices) {
            vertex.setX(vertex.getX() + deltaX);
            vertex.setY(vertex.getY() + deltaY);
        }
        if (lineString != null) {
            Coordinate[] coordinates = lineString.getCoordinates();
            for (Coordinate coordinate : coordinates) {
                coordinate.x += deltaX;
                coordinate.y += deltaY;
            }
        }
    }

    /** A line string can only be closed once it has more than two points. */
    protected boolean canCloseLineString() {
        return lineString != null && lineString.getNumPoints() > 2;
    }

    @Override
    public void cancel() {
        super.cancel();
        cleanDrawing();
        cleanLineString();
    }

    @Override
    public void deactivate() {
        super.deactivate();
        cancel();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.slice;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import java.util.concurrent.ThreadLocalRandom;
import static com.google.common.base.Verify.verify;
import static io.airlift.slice.JvmUtils.unsafe;
/**
 * JMH benchmark comparing three strategies for copying bytes within a
 * {@link Slice} — the Slice API itself, a hand-rolled long/byte loop, and
 * direct {@code Unsafe.copyMemory} — across copy sizes from 0 B to 128 MB.
 */
@SuppressWarnings("restriction")
@BenchmarkMode(Mode.Throughput)
@Fork(5)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
public class MemoryCopyBenchmark
{
    static final int PAGE_SIZE = 4 * 1024;
    static final int N_PAGES = 256 * 1024;
    static final int ALLOC_SIZE = PAGE_SIZE * N_PAGES;

    /**
     * Per-thread 1 GB slice plus random page-aligned source/destination
     * offsets chosen so that even the largest (128 MB) copy never overlaps.
     */
    @State(Scope.Thread)
    public static class Buffers
    {
        Slice data;
        long startOffset;
        long destOffset;

        @Setup
        public void fillWithBogusData()
        {
            data = Slices.allocate(ALLOC_SIZE);
            // Fill the whole buffer with random longs. FIX: Slice.setLong
            // takes a BYTE index; the original passed the long index
            // directly, writing overlapping longs into only the first
            // eighth of the buffer and leaving the rest zeroed.
            for (int idx = 0; idx < data.length() / 8; idx++) {
                data.setLong(idx * SizeOf.SIZE_OF_LONG, ThreadLocalRandom.current().nextLong());
            }
            // Source in the first quarter of pages, destination in the third
            // quarter, so source and destination ranges cannot overlap.
            long startOffsetPages = ThreadLocalRandom.current().nextInt(N_PAGES / 4);
            long destOffsetPages = ThreadLocalRandom.current().nextInt(N_PAGES / 4) + N_PAGES / 2;
            startOffset = startOffsetPages * PAGE_SIZE;
            destOffset = destOffsetPages * PAGE_SIZE;
        }
    }

    // One benchmark method per (strategy, size) pair. The bNN prefix keeps
    // JMH's alphabetical ordering aligned with increasing copy size.

    @Benchmark
    public Slice b00sliceZero(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 0);
    }

    @Benchmark
    public Slice b01customLoopZero(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 0);
    }

    @Benchmark
    public Slice b02unsafeZero(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 0);
    }

    @Benchmark
    public Slice b03slice32B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 32);
    }

    @Benchmark
    public Slice b04customLoop32B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 32);
    }

    @Benchmark
    public Slice b05unsafe32B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 32);
    }

    @Benchmark
    public Slice b06slice128B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 128);
    }

    @Benchmark
    public Slice b07customLoop128B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 128);
    }

    @Benchmark
    public Slice b08unsafe128B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 128);
    }

    @Benchmark
    public Slice b09slice512B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 512);
    }

    @Benchmark
    public Slice b10customLoop512B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 512);
    }

    @Benchmark
    public Slice b11unsafe512B(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 512);
    }

    @Benchmark
    public Slice b12slice1K(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 1024);
    }

    @Benchmark
    public Slice b13customLoop1K(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 1024);
    }

    @Benchmark
    public Slice b14unsafe1K(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 1024);
    }

    @Benchmark
    public Slice b15slice1M(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 1024 * 1024);
    }

    @Benchmark
    public Slice b16customLoop1M(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 1024 * 1024);
    }

    @Benchmark
    public Slice b17unsafe1M(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 1024 * 1024);
    }

    @Benchmark
    public Slice b18slice128M(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.SLICE, 128 * 1024 * 1024);
    }

    @Benchmark
    public Slice b19customLoop128M(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.CUSTOM_LOOP, 128 * 1024 * 1024);
    }

    @Benchmark
    public Slice b20unsafe128M(Buffers buffers)
    {
        return doCopy(buffers, CopyStrategy.UNSAFE, 128 * 1024 * 1024);
    }

    /**
     * Performs one copy of {@code length} bytes after sanity-checking that
     * both ranges lie inside the allocated slice. Returns the slice so JMH
     * treats the result as consumed (no dead-code elimination).
     */
    static Slice doCopy(Buffers buffers, CopyStrategy strategy, int length)
    {
        verify(buffers.startOffset >= 0, "startOffset < 0");
        verify(buffers.destOffset >= 0, "destOffset < 0");
        verify(buffers.startOffset + length < ALLOC_SIZE, "startOffset + length >= ALLOC_SIZE");
        verify(buffers.destOffset + length < ALLOC_SIZE, "destOffset + length >= ALLOC_SIZE");
        strategy.doCopy(buffers.data, buffers.startOffset, buffers.destOffset, length);
        return buffers.data;
    }

    private enum CopyStrategy
    {
        /** Copy through the public Slice API (setBytes). */
        SLICE {
            @Override
            public void doCopy(Slice data, long src, long dest, int length)
            {
                data.setBytes((int) dest, data, (int) src, length);
            }
        },
        /** Hand-rolled loop: long-at-a-time, then the byte-sized tail. */
        CUSTOM_LOOP {
            @Override
            public void doCopy(Slice data, long src, long dest, int length)
            {
                Object base = data.getBase();
                // 'offset' doubles as the slice base address and the moving cursor.
                long offset = data.getAddress();
                while (length >= SizeOf.SIZE_OF_LONG) {
                    long srcLong = unsafe.getLong(base, src + offset);
                    unsafe.putLong(base, dest + offset, srcLong);
                    offset += SizeOf.SIZE_OF_LONG;
                    length -= SizeOf.SIZE_OF_LONG;
                }
                while (length > 0) {
                    byte srcByte = unsafe.getByte(base, src + offset);
                    unsafe.putByte(base, dest + offset, srcByte);
                    offset++;
                    length--;
                }
            }
        },
        /** Direct Unsafe.copyMemory: 8-byte-aligned bulk, then the tail. */
        UNSAFE {
            @Override
            public void doCopy(Slice data, long srcOffset, long destOffset, int length)
            {
                Object base = data.getBase();
                srcOffset += data.getAddress();
                destOffset += data.getAddress();
                int bytesToCopy = length - (length % 8);
                unsafe.copyMemory(base, srcOffset, base, destOffset, bytesToCopy);
                unsafe.copyMemory(base, srcOffset + bytesToCopy, base, destOffset + bytesToCopy, length - bytesToCopy);
            }
        };

        public abstract void doCopy(Slice data, long src, long dest, int length);
    }

    public static void main(String[] args)
            throws RunnerException
    {
        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .include(".*" + MemoryCopyBenchmark.class.getSimpleName() + ".*")
                .build();
        new Runner(options).run();
    }
}
| |
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
// Start of user code for imports
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreAdapterFactory;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.emf.eef.runtime.EEFRuntimePlugin;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep;
import org.eclipse.emf.eef.runtime.ui.widgets.ButtonsModeEnum;
import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer;
import org.eclipse.emf.eef.runtime.ui.widgets.EObjectFlatComboViewer;
import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.eobjflatcombo.EObjectFlatComboSettings;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code
/**
*
*
*/
public class AbstractCommonTargetPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, AbstractCommonTargetPropertiesEditionPart {
protected EMFComboViewer sequenceType;
protected EObjectFlatComboViewer sequence;
protected EMFComboViewer endpointType;
/**
* For {@link ISection} use only.
*/
public AbstractCommonTargetPropertiesEditionPartForm() { super(); }
/**
* Default constructor
* @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
*
*/
public AbstractCommonTargetPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
super(editionComponent);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
*
*/
public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent);
Form form = scrolledForm.getForm();
view = form.getBody();
GridLayout layout = new GridLayout();
layout.numColumns = 3;
view.setLayout(layout);
createControls(widgetFactory, view);
return scrolledForm;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
*
*/
public void createControls(final FormToolkit widgetFactory, Composite view) {
CompositionSequence abstractCommonTargetStep = new BindingCompositionSequence(propertiesEditionComponent);
CompositionStep propertiesStep = abstractCommonTargetStep.addStep(EsbViewsRepository.AbstractCommonTarget.Properties.class);
propertiesStep.addStep(EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType);
propertiesStep.addStep(EsbViewsRepository.AbstractCommonTarget.Properties.sequence);
propertiesStep.addStep(EsbViewsRepository.AbstractCommonTarget.Properties.endpointType);
composer = new PartComposer(abstractCommonTargetStep) {
@Override
public Composite addToPart(Composite parent, Object key) {
if (key == EsbViewsRepository.AbstractCommonTarget.Properties.class) {
return createPropertiesGroup(widgetFactory, parent);
}
if (key == EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType) {
return createSequenceTypeEMFComboViewer(widgetFactory, parent);
}
if (key == EsbViewsRepository.AbstractCommonTarget.Properties.sequence) {
return createSequenceFlatComboViewer(parent, widgetFactory);
}
if (key == EsbViewsRepository.AbstractCommonTarget.Properties.endpointType) {
return createEndpointTypeEMFComboViewer(widgetFactory, parent);
}
return parent;
}
};
composer.compose(view);
}
/**
*
*/
protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
propertiesSection.setText(EsbMessages.AbstractCommonTargetPropertiesEditionPart_PropertiesGroupLabel);
GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
propertiesSectionData.horizontalSpan = 3;
propertiesSection.setLayoutData(propertiesSectionData);
Composite propertiesGroup = widgetFactory.createComposite(propertiesSection);
GridLayout propertiesGroupLayout = new GridLayout();
propertiesGroupLayout.numColumns = 3;
propertiesGroup.setLayout(propertiesGroupLayout);
propertiesSection.setClient(propertiesGroup);
return propertiesGroup;
}
/**
* @generated NOT
*/
protected Composite createSequenceTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType, EsbMessages.AbstractCommonTargetPropertiesEditionPart_SequenceTypeLabel);
sequenceType = new EMFComboViewer(parent);
sequenceType.setContentProvider(new ArrayContentProvider());
sequenceType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
GridData sequenceTypeData = new GridData(GridData.FILL_HORIZONTAL);
sequenceType.getCombo().setLayoutData(sequenceTypeData);
sequenceType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
@Override
public void handleEvent(Event arg0) {
arg0.doit = false;
}
});
sequenceType.addSelectionChangedListener(new ISelectionChangedListener() {
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
*
*/
public void selectionChanged(SelectionChangedEvent event) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(AbstractCommonTargetPropertiesEditionPartForm.this, EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getSequenceType()));
}
});
sequenceType.setID(EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType);
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createSequenceTypeEMFComboViewer
// End of user code
return parent;
}
/**
* @param parent the parent composite
* @param widgetFactory factory to use to instanciante widget of the form
*
*/
protected Composite createSequenceFlatComboViewer(Composite parent, FormToolkit widgetFactory) {
createDescription(parent, EsbViewsRepository.AbstractCommonTarget.Properties.sequence, EsbMessages.AbstractCommonTargetPropertiesEditionPart_SequenceLabel);
sequence = new EObjectFlatComboViewer(parent, !propertiesEditionComponent.isRequired(EsbViewsRepository.AbstractCommonTarget.Properties.sequence, EsbViewsRepository.FORM_KIND));
widgetFactory.adapt(sequence);
sequence.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
GridData sequenceData = new GridData(GridData.FILL_HORIZONTAL);
sequence.setLayoutData(sequenceData);
sequence.addSelectionChangedListener(new ISelectionChangedListener() {
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
*/
public void selectionChanged(SelectionChangedEvent event) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(AbstractCommonTargetPropertiesEditionPartForm.this, EsbViewsRepository.AbstractCommonTarget.Properties.sequence, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getSequence()));
}
});
sequence.setID(EsbViewsRepository.AbstractCommonTarget.Properties.sequence);
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.AbstractCommonTarget.Properties.sequence, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createSequenceFlatComboViewer
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createEndpointTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.AbstractCommonTarget.Properties.endpointType, EsbMessages.AbstractCommonTargetPropertiesEditionPart_EndpointTypeLabel);
endpointType = new EMFComboViewer(parent);
endpointType.setContentProvider(new ArrayContentProvider());
endpointType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
GridData endpointTypeData = new GridData(GridData.FILL_HORIZONTAL);
endpointType.getCombo().setLayoutData(endpointTypeData);
endpointType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
@Override
public void handleEvent(Event arg0) {
arg0.doit = false;
}
});
endpointType.addSelectionChangedListener(new ISelectionChangedListener() {
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
*
*/
public void selectionChanged(SelectionChangedEvent event) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(AbstractCommonTargetPropertiesEditionPartForm.this, EsbViewsRepository.AbstractCommonTarget.Properties.endpointType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getEndpointType()));
}
});
endpointType.setID(EsbViewsRepository.AbstractCommonTarget.Properties.endpointType);
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.AbstractCommonTarget.Properties.endpointType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createEndpointTypeEMFComboViewer
// End of user code
return parent;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
*
*/
public void firePropertiesChanged(IPropertiesEditionEvent event) {
// Start of user code for tab synchronization
// End of user code
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#getSequenceType()
*
*/
public Enumerator getSequenceType() {
Enumerator selection = (Enumerator) ((StructuredSelection) sequenceType.getSelection()).getFirstElement();
return selection;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#initSequenceType(Object input, Enumerator current)
*/
public void initSequenceType(Object input, Enumerator current) {
sequenceType.setInput(input);
sequenceType.modelUpdating(new StructuredSelection(current));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType);
if (eefElementEditorReadOnlyState && sequenceType.isEnabled()) {
sequenceType.setEnabled(false);
sequenceType.setToolTipText(EsbMessages.AbstractCommonTarget_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !sequenceType.isEnabled()) {
sequenceType.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#setSequenceType(Enumerator newValue)
*
*/
public void setSequenceType(Enumerator newValue) {
sequenceType.modelUpdating(new StructuredSelection(newValue));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.AbstractCommonTarget.Properties.sequenceType);
if (eefElementEditorReadOnlyState && sequenceType.isEnabled()) {
sequenceType.setEnabled(false);
sequenceType.setToolTipText(EsbMessages.AbstractCommonTarget_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !sequenceType.isEnabled()) {
sequenceType.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#getSequence()
*
*/
public EObject getSequence() {
if (sequence.getSelection() instanceof StructuredSelection) {
Object firstElement = ((StructuredSelection) sequence.getSelection()).getFirstElement();
if (firstElement instanceof EObject)
return (EObject) firstElement;
}
return null;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#initSequence(EObjectFlatComboSettings)
*/
public void initSequence(EObjectFlatComboSettings settings) {
sequence.setInput(settings);
if (current != null) {
sequence.setSelection(new StructuredSelection(settings.getValue()));
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.AbstractCommonTarget.Properties.sequence);
if (eefElementEditorReadOnlyState && sequence.isEnabled()) {
sequence.setEnabled(false);
sequence.setToolTipText(EsbMessages.AbstractCommonTarget_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !sequence.isEnabled()) {
sequence.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#setSequence(EObject newValue)
*
*/
public void setSequence(EObject newValue) {
if (newValue != null) {
sequence.setSelection(new StructuredSelection(newValue));
} else {
sequence.setSelection(new StructuredSelection()); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.AbstractCommonTarget.Properties.sequence);
if (eefElementEditorReadOnlyState && sequence.isEnabled()) {
sequence.setEnabled(false);
sequence.setToolTipText(EsbMessages.AbstractCommonTarget_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !sequence.isEnabled()) {
sequence.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#setSequenceButtonMode(ButtonsModeEnum newValue)
*/
public void setSequenceButtonMode(ButtonsModeEnum newValue) {
sequence.setButtonMode(newValue);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#addFilterSequence(ViewerFilter filter)
*
*/
public void addFilterToSequence(ViewerFilter filter) {
sequence.addFilter(filter);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#addBusinessFilterSequence(ViewerFilter filter)
*
*/
public void addBusinessFilterToSequence(ViewerFilter filter) {
sequence.addBusinessRuleFilter(filter);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#getEndpointType()
*
*/
public Enumerator getEndpointType() {
Enumerator selection = (Enumerator) ((StructuredSelection) endpointType.getSelection()).getFirstElement();
return selection;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#initEndpointType(Object input, Enumerator current)
*/
public void initEndpointType(Object input, Enumerator current) {
endpointType.setInput(input);
endpointType.modelUpdating(new StructuredSelection(current));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.AbstractCommonTarget.Properties.endpointType);
if (eefElementEditorReadOnlyState && endpointType.isEnabled()) {
endpointType.setEnabled(false);
endpointType.setToolTipText(EsbMessages.AbstractCommonTarget_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !endpointType.isEnabled()) {
endpointType.setEnabled(true);
}
}
/**
 * {@inheritDoc}
 * <p>
 * Programmatically selects the given enumeration literal in the endpoint-type
 * widget and synchronizes the widget's enabled state with the read-only state
 * of the {@code endpointType} property.
 *
 * @param newValue the enumeration literal to select
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.AbstractCommonTargetPropertiesEditionPart#setEndpointType(Enumerator newValue)
 */
public void setEndpointType(Enumerator newValue) {
    endpointType.modelUpdating(new StructuredSelection(newValue));
    boolean readOnly = isReadOnly(EsbViewsRepository.AbstractCommonTarget.Properties.endpointType);
    // Mutually exclusive branches: re-enable an editable widget, or
    // disable (and annotate) a read-only one.
    if (!readOnly && !endpointType.isEnabled()) {
        endpointType.setEnabled(true);
    } else if (readOnly && endpointType.isEnabled()) {
        endpointType.setEnabled(false);
        endpointType.setToolTipText(EsbMessages.AbstractCommonTarget_ReadOnly);
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Returns the localized title used for this properties-edition part.
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
 */
public String getTitle() {
    return EsbMessages.AbstractCommonTarget_Part_Title;
}
// Start of user code additional methods
// End of user code
}
| |
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.server;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import azkaban.executor.ExecutionOptions;
import azkaban.executor.ExecutionOptions.FailureAction;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.mail.DefaultMailCreator;
import azkaban.project.DirectoryFlowLoader;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.Role;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.utils.JSONUtils;
public class HttpRequestUtils {

    /**
     * Separators accepted between email addresses in the "failureEmails" and
     * "successEmails" parameters: a comma or semicolon (each with optional
     * surrounding whitespace), or plain whitespace.
     */
    private static final String EMAIL_SEPARATOR_REGEX = "\\s*,\\s*|\\s*;\\s*|\\s+";

    /**
     * Builds an {@link ExecutionOptions} from the flow-execution related
     * parameters of the given request. Parameters that are absent leave the
     * corresponding {@link ExecutionOptions} defaults untouched.
     *
     * @param req the http request to read the options from
     * @return the parsed execution options, never null
     * @throws ServletException if a required dependent parameter is missing
     *           (e.g. "pipelineLevel" when concurrentOption is "pipeline")
     */
    public static ExecutionOptions parseFlowOptions(HttpServletRequest req)
        throws ServletException {
        ExecutionOptions execOptions = new ExecutionOptions();

        if (hasParam(req, "failureAction")) {
            String option = getParam(req, "failureAction");
            if (option.equals("finishCurrent")) {
                execOptions.setFailureAction(FailureAction.FINISH_CURRENTLY_RUNNING);
            } else if (option.equals("cancelImmediately")) {
                execOptions.setFailureAction(FailureAction.CANCEL_ALL);
            } else if (option.equals("finishPossible")) {
                execOptions.setFailureAction(FailureAction.FINISH_ALL_POSSIBLE);
            }
        }

        if (hasParam(req, "failureEmailsOverride")) {
            boolean override = getBooleanParam(req, "failureEmailsOverride", false);
            execOptions.setFailureEmailsOverridden(override);
        }
        if (hasParam(req, "successEmailsOverride")) {
            boolean override = getBooleanParam(req, "successEmailsOverride", false);
            execOptions.setSuccessEmailsOverridden(override);
        }

        if (hasParam(req, "failureEmails")) {
            String emails = getParam(req, "failureEmails");
            if (!emails.isEmpty()) {
                execOptions.setFailureEmails(splitEmailList(emails));
            }
        }
        if (hasParam(req, "successEmails")) {
            String emails = getParam(req, "successEmails");
            if (!emails.isEmpty()) {
                execOptions.setSuccessEmails(splitEmailList(emails));
            }
        }

        if (hasParam(req, "notifyFailureFirst")) {
            execOptions.setNotifyOnFirstFailure(Boolean.parseBoolean(getParam(req,
                "notifyFailureFirst")));
        }
        if (hasParam(req, "notifyFailureLast")) {
            execOptions.setNotifyOnLastFailure(Boolean.parseBoolean(getParam(req,
                "notifyFailureLast")));
        }

        String concurrentOption = "skip";
        if (hasParam(req, "concurrentOption")) {
            concurrentOption = getParam(req, "concurrentOption");
            execOptions.setConcurrentOption(concurrentOption);
            if (concurrentOption.equals("pipeline")) {
                int pipelineLevel = getIntParam(req, "pipelineLevel");
                execOptions.setPipelineLevel(pipelineLevel);
            } else if (concurrentOption.equals("queue")) {
                // Not yet implemented.
                // NOTE(review): this stores the queue level into the *pipeline*
                // level field (there is no setQueueLevel on ExecutionOptions);
                // behavior intentionally preserved — confirm before relying on it.
                int queueLevel = getIntParam(req, "queueLevel", 1);
                execOptions.setPipelineLevel(queueLevel);
            }
        }

        String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
        if (hasParam(req, "mailCreator")) {
            mailCreator = getParam(req, "mailCreator");
            execOptions.setMailCreator(mailCreator);
        }

        // Collect all flowOverride[...] parameters as flow parameters.
        Map<String, String> flowParamGroup = getParamGroup(req, "flowOverride");
        execOptions.addAllFlowParameters(flowParamGroup);

        if (hasParam(req, "disabled")) {
            String disabled = getParam(req, "disabled");
            if (!disabled.isEmpty()) {
                // The parameter value is a JSON list of job names;
                // parseJSONFromStringQuiet returns null on parse failure.
                @SuppressWarnings("unchecked")
                List<Object> disabledList =
                    (List<Object>) JSONUtils.parseJSONFromStringQuiet(disabled);
                execOptions.setDisabledJobs(disabledList);
            }
        }
        return execOptions;
    }

    /**
     * Splits a user supplied email list on commas, semicolons or whitespace.
     *
     * @param emails the raw, non-empty parameter value
     * @return the individual addresses, in their original order
     */
    private static List<String> splitEmailList(String emails) {
        return Arrays.asList(emails.split(EMAIL_SEPARATOR_REGEX));
    }

    /**
     * Removes the admin-only flow params (FLOW_PRIORITY, USE_EXECUTOR) if the
     * submitting user is not an Azkaban admin; if the user is an admin, the
     * params are instead validated to be integers.
     *
     * @param userManager user manager used to resolve the user's roles
     * @param options execution options whose flow parameters are filtered in place
     * @param user the submitting user
     * @throws ExecutorManagerException if the user is an admin and one of the
     *           admin-only params is present but not a valid integer
     */
    public static void filterAdminOnlyFlowParams(UserManager userManager,
        ExecutionOptions options, User user) throws ExecutorManagerException {
        if (options == null || options.getFlowParameters() == null) {
            return;
        }

        Map<String, String> params = options.getFlowParameters();
        // is azkaban Admin
        if (!hasPermission(userManager, user, Type.ADMIN)) {
            params.remove(ExecutionOptions.FLOW_PRIORITY);
            params.remove(ExecutionOptions.USE_EXECUTOR);
        } else {
            validateIntegerParam(params, ExecutionOptions.FLOW_PRIORITY);
            validateIntegerParam(params, ExecutionOptions.USE_EXECUTOR);
        }
    }

    /**
     * Validates that the named parameter, when present, parses as a
     * non-negative integer (commons-lang isNumeric accepts digits only).
     *
     * @param params the parameter map; may be null
     * @param paramName the name of the parameter to validate
     * @return true when the parameter is absent or numeric
     * @throws ExecutorManagerException if the parameter is present but not numeric
     */
    public static boolean validateIntegerParam(Map<String, String> params,
        String paramName) throws ExecutorManagerException {
        if (params != null && params.containsKey(paramName)
            && !StringUtils.isNumeric(params.get(paramName))) {
            throw new ExecutorManagerException(paramName + " should be an integer");
        }
        return true;
    }

    /**
     * Returns true if the user holds the given permission type through any of
     * its roles; the ADMIN permission implicitly grants every type.
     *
     * @param userManager user manager used to resolve role definitions
     * @param user the user whose roles are checked
     * @param type the permission type being requested
     * @return true if any role grants the type (or ADMIN)
     */
    public static boolean hasPermission(UserManager userManager, User user,
        Permission.Type type) {
        for (String roleName : user.getRoles()) {
            Role role = userManager.getRole(roleName);
            if (role.getPermission().isPermissionSet(type)
                || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks for the existence of the parameter in the request.
     *
     * @param request the http request
     * @param param the parameter name
     * @return true if the parameter is present
     */
    public static boolean hasParam(HttpServletRequest request, String param) {
        return request.getParameter(param) != null;
    }

    /**
     * Retrieves the param from the http servlet request. Will throw an
     * exception if not found.
     *
     * @param request the http request
     * @param name the parameter name
     * @return the parameter value, never null
     * @throws ServletException if the parameter is missing
     */
    public static String getParam(HttpServletRequest request, String name)
        throws ServletException {
        String p = request.getParameter(name);
        if (p == null) {
            throw new ServletException("Missing required parameter '" + name + "'.");
        }
        return p;
    }

    /**
     * Retrieves the param from the http servlet request, falling back to the
     * given default when absent.
     *
     * @param request the http request
     * @param name the parameter name
     * @param defaultVal value returned when the parameter is missing
     * @return the parameter value, or {@code defaultVal} when absent
     */
    public static String getParam(HttpServletRequest request, String name,
        String defaultVal) {
        String p = request.getParameter(name);
        return p == null ? defaultVal : p;
    }

    /**
     * Returns the param parsed as an int. Will throw an exception if not
     * found, or a NumberFormatException if the value is not an integer.
     *
     * @param request the http request
     * @param name the parameter name
     * @return the parsed int value
     * @throws ServletException if the parameter is missing
     */
    public static int getIntParam(HttpServletRequest request, String name)
        throws ServletException {
        String p = getParam(request, name);
        return Integer.parseInt(p);
    }

    /**
     * Returns the param parsed as an int, or the default when the parameter is
     * absent or unparseable.
     *
     * @param request the http request
     * @param name the parameter name
     * @param defaultVal fallback value
     * @return the parsed int value, or {@code defaultVal}
     */
    public static int getIntParam(HttpServletRequest request, String name,
        int defaultVal) {
        if (hasParam(request, name)) {
            try {
                return getIntParam(request, name);
            } catch (Exception e) {
                // Best-effort parse: swallow and fall through to the default.
                return defaultVal;
            }
        }
        return defaultVal;
    }

    /**
     * Returns the param parsed as a boolean ("true" ignoring case; anything
     * else is false). Throws if the parameter is missing.
     *
     * @param request the http request
     * @param name the parameter name
     * @return the parsed boolean value
     * @throws ServletException if the parameter is missing
     */
    public static boolean getBooleanParam(HttpServletRequest request, String name)
        throws ServletException {
        String p = getParam(request, name);
        return Boolean.parseBoolean(p);
    }

    /**
     * Returns the param parsed as a boolean, or the default when the parameter
     * is absent.
     *
     * @param request the http request
     * @param name the parameter name
     * @param defaultVal fallback value
     * @return the parsed boolean value, or {@code defaultVal}
     */
    public static boolean getBooleanParam(HttpServletRequest request,
        String name, boolean defaultVal) {
        if (hasParam(request, name)) {
            try {
                return getBooleanParam(request, name);
            } catch (Exception e) {
                // Best-effort parse: swallow and fall through to the default.
                return defaultVal;
            }
        }
        return defaultVal;
    }

    /**
     * Returns the param parsed as a long. Throws if the parameter is missing,
     * or a NumberFormatException if unparseable.
     *
     * @param request the http request
     * @param name the parameter name
     * @return the parsed long value
     * @throws ServletException if the parameter is missing
     */
    public static long getLongParam(HttpServletRequest request, String name)
        throws ServletException {
        String p = getParam(request, name);
        // parseLong avoids the needless boxing of Long.valueOf.
        return Long.parseLong(p);
    }

    /**
     * Returns the param parsed as a long, or the default when the parameter is
     * absent or unparseable.
     *
     * @param request the http request
     * @param name the parameter name
     * @param defaultVal fallback value
     * @return the parsed long value, or {@code defaultVal}
     */
    public static long getLongParam(HttpServletRequest request, String name,
        long defaultVal) {
        if (hasParam(request, name)) {
            try {
                return getLongParam(request, name);
            } catch (Exception e) {
                // Best-effort parse: swallow and fall through to the default.
                return defaultVal;
            }
        }
        return defaultVal;
    }

    /**
     * Collects all request parameters of the form {@code groupName[key]} into
     * a map of key to value. For example with groupName "flowOverride", the
     * parameter "flowOverride[priority]=5" yields the entry priority=5.
     *
     * @param request the http request
     * @param groupName the parameter-group prefix
     * @return a mutable map of group keys to values; empty when none match
     * @throws ServletException declared for API compatibility
     */
    public static Map<String, String> getParamGroup(HttpServletRequest request,
        String groupName) throws ServletException {
        @SuppressWarnings("unchecked")
        Enumeration<Object> enumerate =
            (Enumeration<Object>) request.getParameterNames();
        String matchString = groupName + "[";

        HashMap<String, String> groupParam = new HashMap<>();
        while (enumerate.hasMoreElements()) {
            String str = (String) enumerate.nextElement();
            if (str.startsWith(matchString)) {
                // Strip "groupName[" from the front and "]" from the back.
                groupParam.put(str.substring(matchString.length(), str.length() - 1),
                    request.getParameter(str));
            }
        }
        return groupParam;
    }

    /**
     * Set correct trigger spec using runtime-config or .json file. A
     * user-specified TRIGGER_SPEC flow param takes priority over a
     * TRIGGER_FILE lookup against the project metadata.
     *
     * @param flowParams the flow parameters; updated in place
     * @param metaData the project metadata containing trigger definitions
     * @throws IllegalArgumentException if the project has no trigger data or
     *           the named trigger file is unknown
     */
    public static void setTriggerSpecification(Map<String, String> flowParams,
        Map<String, Object> metaData) {
        // User specific TRIGGER_SPEC takes higher priority
        if (flowParams != null
            && !flowParams.containsKey(ExecutionOptions.TRIGGER_SPEC)
            && flowParams.containsKey(ExecutionOptions.TRIGGER_FILE)
            && metaData != null) {
            if (!metaData.containsKey(ProjectManager.TRIGGER_DATA)) {
                throw new IllegalArgumentException("No trigger file in project zip");
            }
            String triggerName = flowParams.get(ExecutionOptions.TRIGGER_FILE);
            @SuppressWarnings("unchecked")
            Map<String, String> triggers =
                (Map<String, String>) metaData.get(ProjectManager.TRIGGER_DATA);
            // Look up the trigger by its exact name first, then with the
            // conventional trigger-file suffix appended.
            if (triggers.containsKey(triggerName)) {
                flowParams.put(ExecutionOptions.TRIGGER_SPEC,
                    triggers.get(triggerName));
            } else if (triggers
                .containsKey(triggerName + DirectoryFlowLoader.TRIGGER_SUFFIX)) {
                flowParams.put(ExecutionOptions.TRIGGER_SPEC,
                    triggers.get(triggerName + DirectoryFlowLoader.TRIGGER_SUFFIX));
            } else {
                throw new IllegalArgumentException("Unknown trigger file " + triggerName);
            }
        }
    }
}
| |
/*
* Copyright (C) 2015 Brian Wernick
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.devbrackets.android.exomedia;
import android.app.PendingIntent;
import android.app.Service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.support.annotation.DrawableRes;
import android.support.v4.media.MediaMetadataCompat;
import android.support.v4.media.session.MediaSessionCompat;
import android.support.v4.media.session.PlaybackStateCompat;
import android.util.Log;
import com.devbrackets.android.exomedia.receiver.MediaControlsReceiver;
/**
* A class to help simplify lock screen artwork and playback
* controls similar to how the {@link EMNotification} simplifies notifications
*/
public class EMLockScreen {
    private static final String TAG = "EMLockScreen";
    public static final String SESSION_TAG = "EMLockScreen.Session";
    public static final String RECEIVER_EXTRA_CLASS = "com.devbrackets.android.exomedia.RECEIVER_EXTRA_CLASS";

    private Context context;
    private Class<? extends Service> mediaServiceClass;

    // Whether the lock screen controls should be activated when media info is updated.
    private boolean showLockScreen = true;

    // Application icon shown on the lock screen; set via setLockScreenBaseInformation.
    private Bitmap appIconBitmap;
    private MediaSessionCompat mediaSession;

    /**
     * Creates a new EMLockScreen object
     *
     * @param context The context to use for holding a MediaSession and sending action intents
     * @param mediaServiceClass The class for the service that owns the backing MediaService and to notify of playback actions
     */
    public EMLockScreen(Context context, Class<? extends Service> mediaServiceClass) {
        this.context = context;
        this.mediaServiceClass = mediaServiceClass;

        ComponentName componentName = new ComponentName(context, MediaControlsReceiver.class.getName());

        mediaSession = new MediaSessionCompat(context, SESSION_TAG, componentName, getMediaButtonReceiverPendingIntent(componentName));
        mediaSession.setFlags(MediaSessionCompat.FLAG_HANDLES_MEDIA_BUTTONS | MediaSessionCompat.FLAG_HANDLES_TRANSPORT_CONTROLS);
        mediaSession.setCallback(new SessionCallback());
    }

    /**
     * Releases the backing media session. Call this when the lock screen
     * controls are no longer needed (e.g. when the owning service is destroyed).
     */
    public void release() {
        if (mediaSession != null) {
            mediaSession.release();
        }
    }

    /**
     * Sets whether the lock screen is shown when audio is playing or
     * ready for playback (e.g. paused). The information
     * will need to be updated by calling {@link #setLockScreenBaseInformation(int)}
     * and {@link #updateLockScreenInformation(String, String, String, Bitmap, EMNotification.NotificationMediaState)}
     *
     * @param enabled True if the lock screen should be shown
     */
    public void setLockScreenEnabled(boolean enabled) {
        if (showLockScreen == enabled) {
            return;
        }

        showLockScreen = enabled;

        //Remove the lock screen when disabling
        if (!enabled) {
            mediaSession.setActive(false);
        }
    }

    /**
     * Sets the basic information for the lock screen that doesn't need to be updated. Additionally, when
     * the mediaServiceClass is set the big notification will send intents to that service to notify of
     * button clicks. These intents will have an action from
     * <ul>
     * <li>{@link EMRemoteActions#ACTION_PLAY_PAUSE}</li>
     * <li>{@link EMRemoteActions#ACTION_PREVIOUS}</li>
     * <li>{@link EMRemoteActions#ACTION_NEXT}</li>
     * </ul>
     *
     * @param appIcon The applications icon resource
     */
    public void setLockScreenBaseInformation(@DrawableRes int appIcon) {
        appIconBitmap = BitmapFactory.decodeResource(context.getResources(), appIcon);
    }

    /**
     * Sets the volatile information for the lock screen controls. This information is expected to
     * change frequently.
     *
     * @param title The title to display for the notification (e.g. A song name)
     * @param album The name of the album the media is found in
     * @param artist The name of the artist for the media item
     * @param mediaArtwork The artwork to show on the lock screen (e.g. album art); may be null
     * @param notificationMediaState The current media state for the expanded (big) notification
     */
    public void updateLockScreenInformation(String title, String album, String artist, Bitmap mediaArtwork, EMNotification.NotificationMediaState notificationMediaState) {
        //Updates the current media MetaData
        MediaMetadataCompat.Builder metaDataBuilder = new MediaMetadataCompat.Builder();
        metaDataBuilder.putBitmap(MediaMetadataCompat.METADATA_KEY_DISPLAY_ICON, appIconBitmap);
        metaDataBuilder.putString(MediaMetadataCompat.METADATA_KEY_TITLE, title);
        metaDataBuilder.putString(MediaMetadataCompat.METADATA_KEY_ALBUM, album);
        metaDataBuilder.putString(MediaMetadataCompat.METADATA_KEY_ARTIST, artist);
        if (mediaArtwork != null) {
            metaDataBuilder.putBitmap(MediaMetadataCompat.METADATA_KEY_ALBUM_ART, mediaArtwork);
        }

        mediaSession.setMetadata(metaDataBuilder.build());

        //Updates the available playback controls
        PlaybackStateCompat.Builder playbackStateBuilder = new PlaybackStateCompat.Builder();
        playbackStateBuilder.setActions(getPlaybackOptions(notificationMediaState));
        playbackStateBuilder.setState(getPlaybackState(notificationMediaState.isPlaying()), PlaybackStateCompat.PLAYBACK_POSITION_UNKNOWN, 1.0f);

        mediaSession.setPlaybackState(playbackStateBuilder.build());
        // Boolean concatenation prints "true"/"false", same as the old ternary.
        Log.d(TAG, "update, controller is null ? " + (mediaSession.getController() == null));

        if (showLockScreen && !mediaSession.isActive()) {
            mediaSession.setActive(true);
        }
    }

    /**
     * Creates the PendingIntent that the media session uses to deliver media
     * button events to the {@link MediaControlsReceiver}, tagging it with the
     * service class that should ultimately handle the action.
     *
     * @param componentName The receiver component to target
     * @return The broadcast PendingIntent for media button events
     */
    private PendingIntent getMediaButtonReceiverPendingIntent(ComponentName componentName) {
        Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
        mediaButtonIntent.setComponent(componentName);

        mediaButtonIntent.putExtra(RECEIVER_EXTRA_CLASS, mediaServiceClass.getName());
        return PendingIntent.getBroadcast(context, 0, mediaButtonIntent, PendingIntent.FLAG_CANCEL_CURRENT);
    }

    /**
     * Maps the playing flag to the corresponding PlaybackStateCompat constant.
     *
     * @param isPlaying True when media is currently playing
     * @return STATE_PLAYING or STATE_PAUSED
     */
    private int getPlaybackState(boolean isPlaying) {
        return isPlaying ? PlaybackStateCompat.STATE_PLAYING : PlaybackStateCompat.STATE_PAUSED;
    }

    /**
     * Determines the available playback commands supported for the current media state
     *
     * @param mediaState The current media playback state
     * @return The available playback options
     */
    private long getPlaybackOptions(EMNotification.NotificationMediaState mediaState) {
        long availableActions = PlaybackStateCompat.ACTION_PLAY | PlaybackStateCompat.ACTION_PAUSE | PlaybackStateCompat.ACTION_PLAY_PAUSE;

        if (mediaState.isNextEnabled()) {
            availableActions |= PlaybackStateCompat.ACTION_SKIP_TO_NEXT;
        }

        if (mediaState.isPreviousEnabled()) {
            availableActions |= PlaybackStateCompat.ACTION_SKIP_TO_PREVIOUS;
        }

        return availableActions;
    }

    /**
     * Creates a PendingIntent for the given action to the specified service
     *
     * @param action The action to use
     * @param serviceClass The service class to notify of intents
     * @return The resulting PendingIntent
     */
    private PendingIntent createPendingIntent(String action, Class<? extends Service> serviceClass) {
        Intent intent = new Intent(context, serviceClass);
        intent.setAction(action);

        return PendingIntent.getService(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
    }

    /**
     * A simple callback class to listen to the notifications received from the lock screen
     * and forward them to the {@link #mediaServiceClass}
     */
    private class SessionCallback extends MediaSessionCompat.Callback {
        private PendingIntent playPausePendingIntent, nextPendingIntent, previousPendingIntent;

        public SessionCallback() {
            super();

            playPausePendingIntent = createPendingIntent(EMRemoteActions.ACTION_PLAY_PAUSE, mediaServiceClass);
            nextPendingIntent = createPendingIntent(EMRemoteActions.ACTION_NEXT, mediaServiceClass);
            previousPendingIntent = createPendingIntent(EMRemoteActions.ACTION_PREVIOUS, mediaServiceClass);
        }

        @Override
        public void onPlay() {
            sendPendingIntent(playPausePendingIntent);
        }

        @Override
        public void onPause() {
            sendPendingIntent(playPausePendingIntent);
        }

        @Override
        public void onSkipToNext() {
            sendPendingIntent(nextPendingIntent);
        }

        @Override
        public void onSkipToPrevious() {
            sendPendingIntent(previousPendingIntent);
        }

        /**
         * Fires the given PendingIntent, logging (not propagating) any failure so a
         * dead intent cannot crash the lock screen callbacks.
         */
        private void sendPendingIntent(PendingIntent pi) {
            try {
                pi.send();
            } catch (Exception e) {
                Log.d(TAG, "Error sending lock screen pending intent", e);
            }
        }
    }
}
| |
/*
* Copyright (c) 2020. Red Hat, Inc. and/or its affiliates.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import org.drools.core.base.ClassFieldAccessorCache;
import org.drools.core.base.ClassFieldAccessorStore;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.ReteEvaluator;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.definitions.impl.KnowledgePackageImpl;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.kiesession.rulebase.InternalKnowledgeBase;
import org.drools.core.rule.Pattern;
import org.drools.core.spi.AlphaNodeFieldConstraint;
import org.drools.core.spi.Consequence;
import org.drools.core.spi.KnowledgeHelper;
import org.drools.core.test.model.Cheese;
import org.drools.kiesession.rulebase.KnowledgeBaseFactory;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.event.kiebase.AfterFunctionRemovedEvent;
import org.kie.api.event.kiebase.AfterKieBaseLockedEvent;
import org.kie.api.event.kiebase.AfterKieBaseUnlockedEvent;
import org.kie.api.event.kiebase.AfterKiePackageAddedEvent;
import org.kie.api.event.kiebase.AfterKiePackageRemovedEvent;
import org.kie.api.event.kiebase.AfterProcessAddedEvent;
import org.kie.api.event.kiebase.AfterProcessRemovedEvent;
import org.kie.api.event.kiebase.AfterRuleAddedEvent;
import org.kie.api.event.kiebase.AfterRuleRemovedEvent;
import org.kie.api.event.kiebase.BeforeFunctionRemovedEvent;
import org.kie.api.event.kiebase.BeforeKieBaseLockedEvent;
import org.kie.api.event.kiebase.BeforeKieBaseUnlockedEvent;
import org.kie.api.event.kiebase.BeforeKiePackageAddedEvent;
import org.kie.api.event.kiebase.BeforeKiePackageRemovedEvent;
import org.kie.api.event.kiebase.BeforeProcessAddedEvent;
import org.kie.api.event.kiebase.BeforeProcessRemovedEvent;
import org.kie.api.event.kiebase.BeforeRuleAddedEvent;
import org.kie.api.event.kiebase.BeforeRuleRemovedEvent;
import org.kie.api.event.kiebase.KieBaseEventListener;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class KnowledgeBaseEventSupportTest {

    private InternalKnowledgeBase kbase;
    private TestRuleBaseListener listener1;
    private TestRuleBaseListener listener2;
    private InternalKnowledgePackage pkg;

    private final boolean useLambdaConstraint;

    public KnowledgeBaseEventSupportTest(boolean useLambdaConstraint) {
        this.useLambdaConstraint = useLambdaConstraint;
    }

    @Parameterized.Parameters(name = "useLambdaConstraint={0}")
    public static Collection<Object[]> getParameters() {
        Collection<Object[]> parameters = new ArrayList<>();
        parameters.add(new Object[]{false});
        parameters.add(new Object[]{true});
        return parameters;
    }

    /* (non-Javadoc)
     * @see junit.framework.TestCase#setUp()
     */
    @Before
    public void setUp() throws Exception {
        kbase = KnowledgeBaseFactory.newKnowledgeBase();
        listener1 = new TestRuleBaseListener( "(listener-1) " );
        listener2 = new TestRuleBaseListener( "(listener-2) " );
        kbase.addEventListener( listener1 );
        kbase.addEventListener( listener2 );

        // Field accessor store shared by both rules' constraints.
        ClassFieldAccessorStore store = new ClassFieldAccessorStore();
        store.setClassFieldAccessorCache( new ClassFieldAccessorCache( Thread.currentThread().getContextClassLoader() ) );
        store.setEagerWire( true );

        // One package with two rules so rule-level events fire twice per package event.
        pkg = new KnowledgePackageImpl( "org.drools.test1" );
        pkg.addRule( createRule( "test1", "cheddar", store ) );
        pkg.addRule( createRule( "test2", "stilton", store ) );
    }

    /**
     * Builds a rule matching {@link Cheese} objects whose type equals the given
     * value, with a no-op consequence. Extracted to remove the duplicated
     * rule-construction code for "test1"/"test2".
     */
    private RuleImpl createRule(String ruleName, String cheeseType, ClassFieldAccessorStore store) {
        final RuleImpl rule = new RuleImpl( ruleName );
        final Pattern pattern = new Pattern( 0,
                                             new ClassObjectType( Cheese.class ) );

        AlphaNodeFieldConstraint constraint =
                ConstraintTestUtil.createCheeseTypeEqualsConstraint( store, cheeseType, useLambdaConstraint );
        pattern.addConstraint( constraint );
        rule.addPattern( pattern );

        rule.setConsequence( new NoOpConsequence() );
        return rule;
    }

    /**
     * A consequence that does nothing when evaluated; replaces the two
     * byte-identical anonymous Consequence classes from the original setup.
     */
    private static class NoOpConsequence implements Consequence {
        private static final long serialVersionUID = 510l;

        public void evaluate(final KnowledgeHelper knowledgeHelper,
                             final ReteEvaluator reteEvaluator) throws Exception {
        }

        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
        }

        public void writeExternal(ObjectOutput out) throws IOException {
        }

        public String getName() {
            return "default";
        }
    }

    /**
     * Adding a package must fire before/after package-added once per listener
     * and before/after rule-added once per rule (two rules in the package).
     */
    @Test
    public void testAddPackageEvents() throws Exception {
        assertEquals( 0, listener1.getBeforePackageAdded() );
        assertEquals( 0, listener1.getAfterPackageAdded() );
        assertEquals( 0, listener2.getBeforePackageAdded() );
        assertEquals( 0, listener2.getAfterPackageAdded() );
        assertEquals( 0, listener1.getBeforeRuleAdded() );
        assertEquals( 0, listener1.getAfterRuleAdded() );
        assertEquals( 0, listener2.getBeforeRuleAdded() );
        assertEquals( 0, listener2.getAfterRuleAdded() );

        this.kbase.addPackages( Collections.singleton( pkg ) );

        assertEquals( 1, listener1.getBeforePackageAdded() );
        assertEquals( 1, listener1.getAfterPackageAdded() );
        assertEquals( 1, listener2.getBeforePackageAdded() );
        assertEquals( 1, listener2.getAfterPackageAdded() );
        assertEquals( 2, listener1.getBeforeRuleAdded() );
        assertEquals( 2, listener1.getAfterRuleAdded() );
        assertEquals( 2, listener2.getBeforeRuleAdded() );
        assertEquals( 2, listener2.getAfterRuleAdded() );
    }

    /**
     * Removing a package must fire before/after package-removed once per
     * listener and before/after rule-removed once per rule.
     */
    @Test
    public void testRemovePackageEvents() throws Exception {
        this.kbase.addPackages( Collections.singleton( pkg ) );

        assertEquals( 0, listener1.getBeforeKnowledgePackageRemoved() );
        assertEquals( 0, listener1.getAfterKnowledgePackageRemoved() );
        assertEquals( 0, listener2.getBeforeKnowledgePackageRemoved() );
        assertEquals( 0, listener2.getAfterKnowledgePackageRemoved() );
        assertEquals( 0, listener1.getBeforeRuleRemoved() );
        assertEquals( 0, listener1.getAfterRuleRemoved() );
        assertEquals( 0, listener2.getBeforeRuleRemoved() );
        assertEquals( 0, listener2.getAfterRuleRemoved() );

        this.kbase.removeKiePackage( "org.drools.test1" );

        assertEquals( 1, listener1.getBeforeKnowledgePackageRemoved() );
        assertEquals( 1, listener1.getAfterKnowledgePackageRemoved() );
        assertEquals( 1, listener2.getBeforeKnowledgePackageRemoved() );
        assertEquals( 1, listener2.getAfterKnowledgePackageRemoved() );
        assertEquals( 2, listener1.getBeforeRuleRemoved() );
        assertEquals( 2, listener1.getAfterRuleRemoved() );
        assertEquals( 2, listener2.getBeforeRuleRemoved() );
        assertEquals( 2, listener2.getAfterRuleRemoved() );
    }

    /**
     * Event listener that counts each kie-base event it receives; the counters
     * are asserted by the tests above. Process/function/lock events are ignored.
     */
    public static class TestRuleBaseListener
        implements
        KieBaseEventListener {
        private String id;
        private int beforePackageAdded = 0;
        private int afterPackageAdded = 0;
        private int beforePackageRemoved = 0;
        private int afterPackageRemoved = 0;
        private int beforeRuleAdded = 0;
        private int afterRuleAdded = 0;
        private int beforeRuleRemoved = 0;
        private int afterRuleRemoved = 0;

        public TestRuleBaseListener() {
        }

        public TestRuleBaseListener(String id) {
            super();
            this.id = id;
        }

        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
            id = (String) in.readObject();
            beforePackageAdded = in.readInt();
            afterPackageAdded = in.readInt();
            beforePackageRemoved = in.readInt();
            afterPackageRemoved = in.readInt();
            beforeRuleAdded = in.readInt();
            afterRuleAdded = in.readInt();
            beforeRuleRemoved = in.readInt();
            afterRuleRemoved = in.readInt();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeObject( id );
            out.writeInt( beforePackageAdded );
            out.writeInt( afterPackageAdded );
            out.writeInt( beforePackageRemoved );
            out.writeInt( afterPackageRemoved );
            out.writeInt( beforeRuleAdded );
            out.writeInt( afterRuleAdded );
            out.writeInt( beforeRuleRemoved );
            out.writeInt( afterRuleRemoved );
        }

        public void afterKiePackageAdded(AfterKiePackageAddedEvent event) {
            this.afterPackageAdded++;
        }

        public void beforeKiePackageAdded(BeforeKiePackageAddedEvent event) {
            this.beforePackageAdded++;
        }

        protected int getAfterPackageAdded() {
            return afterPackageAdded;
        }

        protected int getBeforePackageAdded() {
            return beforePackageAdded;
        }

        protected String getId() {
            return id;
        }

        public void afterKiePackageRemoved(AfterKiePackageRemovedEvent event) {
            this.afterPackageRemoved++;
        }

        public void beforeKiePackageRemoved(BeforeKiePackageRemovedEvent event) {
            this.beforePackageRemoved++;
        }

        protected int getAfterKnowledgePackageRemoved() {
            return afterPackageRemoved;
        }

        protected int getBeforeKnowledgePackageRemoved() {
            return beforePackageRemoved;
        }

        public int getAfterRuleAdded() {
            return afterRuleAdded;
        }

        public int getBeforeRuleAdded() {
            return beforeRuleAdded;
        }

        public void afterRuleAdded(AfterRuleAddedEvent event) {
            this.afterRuleAdded++;
        }

        public void beforeRuleAdded(BeforeRuleAddedEvent event) {
            this.beforeRuleAdded++;
        }

        public int getAfterRuleRemoved() {
            return afterRuleRemoved;
        }

        public int getBeforeRuleRemoved() {
            return beforeRuleRemoved;
        }

        public void afterRuleRemoved(AfterRuleRemovedEvent event) {
            this.afterRuleRemoved++;
        }

        public void beforeRuleRemoved(BeforeRuleRemovedEvent event) {
            this.beforeRuleRemoved++;
        }

        // The remaining KieBaseEventListener callbacks are intentionally no-ops;
        // these event types are not exercised by this test.

        public void afterFunctionRemoved(AfterFunctionRemovedEvent event) {
        }

        public void afterKieBaseLocked(AfterKieBaseLockedEvent event) {
        }

        public void afterKieBaseUnlocked(AfterKieBaseUnlockedEvent event) {
        }

        public void beforeFunctionRemoved(BeforeFunctionRemovedEvent event) {
        }

        public void beforeKieBaseLocked(BeforeKieBaseLockedEvent event) {
        }

        public void beforeKieBaseUnlocked(BeforeKieBaseUnlockedEvent event) {
        }

        public void afterProcessAdded(AfterProcessAddedEvent arg0) {
        }

        public void afterProcessRemoved(AfterProcessRemovedEvent arg0) {
        }

        public void beforeProcessAdded(BeforeProcessAddedEvent arg0) {
        }

        public void beforeProcessRemoved(BeforeProcessRemovedEvent arg0) {
        }
    }
}
| |
package services.sso.token;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.InvalidParameterSpecException;
import java.security.spec.KeySpec;
import java.util.UUID;
/**
* AES password based encryptor/decryptor. May use 128-bit encryption without JCE Unlimited Strength Jurisdiction
* Policy jars. 192 and 256-bit encryption available with these jars installed.
* Encrypted message contains: key size/128, generated salt size and salt itself, initialization vector size and vector
* itself, encrypted message.
*/
public class AesPasswordBasedEncryptor implements PasswordBasedEncryptor {

    /** Cipher transformation: AES in CBC mode with PKCS5 padding. */
    private static final String ALGORITHM = "AES/CBC/PKCS5Padding";

    /** Algorithm name used when wrapping the derived key bytes into a {@link SecretKeySpec}. */
    private static final String ALGORITHM_FOR_KEY_SPEC = "AES";

    /** Password-based key derivation function used to derive the AES key from the password. */
    private static final String KEY_GENERATOR = "PBKDF2WithHmacSHA1";

    /**
     * Thread local {@link SecureRandom} for efficiency (no cross-thread contention) and uniform
     * distribution. Each thread's instance is additionally seeded with a random UUID.
     */
    private static final ThreadLocal<SecureRandom> secureRandom =
            ThreadLocal.withInitial(() ->
                    new SecureRandom(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8)));

    /** Password for encryption/decryption. */
    private final char[] password;

    /** Key size in bits (128, 192 or 256). */
    private final short keySize;

    /** Salt size in bytes generated for each encryption. */
    private final short saltSize;

    /** Number of PBKDF2 iterations used to derive a key hash from the password. */
    private final int passwordIterations;

    /** Buffer size in bytes for streaming reads during encryption/decryption. */
    private final int readBufferSize;

    /**
     * Constructs 128-bit AES PBE with 1024 iterations, salt of 16 bytes and 512 bytes for read buffer.
     *
     * @param password Password
     */
    public AesPasswordBasedEncryptor(char[] password) {
        this(password, 1024, (short) 128, (short) 16, 512);
    }

    /**
     * Constructs arbitrary AES PBE with 1024 iterations, salt of 16 bytes and 512 bytes for read buffer.
     *
     * @param password Password.
     * @param keySize Key size
     */
    public AesPasswordBasedEncryptor(char[] password, short keySize) {
        this(password, 1024, keySize, (short) 16, 512);
    }

    /**
     * Constructs arbitrary AES password based encryptor (PBE).
     *
     * @param password Password.
     * @param passwordIterations Iterations to construct a hash from password.
     * @param keySize Key size in bits.
     * @param saltSize Salt size.
     * @param readBufferSize Read buffer size for encryption/decryption.
     */
    public AesPasswordBasedEncryptor(char[] password, int passwordIterations, short keySize, short saltSize,
                                     int readBufferSize) {
        this.password = password;
        this.keySize = keySize;
        this.saltSize = saltSize;
        this.passwordIterations = passwordIterations;
        this.readBufferSize = readBufferSize;
    }

    /**
     * Encrypts the input stream into the output stream.
     *
     * <p>Output layout: 1 byte key-size marker (keySize / 128), 2 bytes salt length, 2 bytes IV
     * length, the salt, the IV, then the ciphertext. Neither stream is closed by this method.</p>
     *
     * @param inputStream plaintext source
     * @param outputStream ciphertext destination
     * @throws IOException on stream errors
     * @throws EncryptionException when the cipher cannot be initialized or finalized
     */
    @Override
    public void encrypt(InputStream inputStream, OutputStream outputStream) throws IOException, EncryptionException {
        try {
            Cipher cipher = Cipher.getInstance(ALGORITHM);
            KeySpecAndSalt keySpecAndSalt = new KeySpecAndSalt(password, passwordIterations, keySize, saltSize);
            cipher.init(Cipher.ENCRYPT_MODE, keySpecAndSalt.encryptionKeySpec);
            byte[] iv = cipher.getParameters().getParameterSpec(IvParameterSpec.class).getIV();
            DataOutputStream dos = new DataOutputStream(outputStream);
            // 1. Key size into the data stream.
            // Since this is power of two and no one needs keys less than 128 bits we can divide it
            // by 2^7. This allows to use keys up to 16384 bits and have it packed in single byte.
            dos.write(keySize >>> 7);
            // 2. Write salt length.
            dos.writeShort(keySpecAndSalt.salt.length);
            // 3. Write initialization vector length.
            dos.writeShort(iv.length);
            // 4. Write the salt itself.
            dos.write(keySpecAndSalt.salt);
            // 5. Write the initialization vector itself.
            dos.write(iv);
            // 6. Write encrypted data until the end of the input stream.
            byte[] readBuffer = new byte[readBufferSize];
            int readBytes;
            byte[] encrypted;
            while ((readBytes = inputStream.read(readBuffer)) > 0) {
                // update() may buffer internally and return null until a full cipher block is ready.
                encrypted = cipher.update(readBuffer, 0, readBytes);
                if (encrypted != null) {
                    outputStream.write(encrypted);
                }
            }
            // Flush the final (padded) block.
            encrypted = cipher.doFinal();
            if (encrypted != null) {
                outputStream.write(encrypted);
            }
        } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidParameterSpecException |
                BadPaddingException | IllegalBlockSizeException e) {
            throw new EncryptionException(e.getMessage(), e);
        } catch (InvalidKeyException ike) {
            // Typically means the JCE Unlimited Strength policy jars are missing for keys > 128 bits.
            throw new EncryptionException("Unable to use strong encryption.", ike);
        }
    }

    /**
     * Decrypts the input stream (produced by {@link #encrypt}) into the output stream.
     * Neither stream is closed by this method.
     *
     * @param inputStream ciphertext source, starting with the header written by {@link #encrypt}
     * @param outputStream plaintext destination
     * @throws IOException on stream errors or a truncated/corrupted header
     * @throws DecryptionException when the cipher cannot be initialized or the data fails to decrypt
     */
    @Override
    public void decrypt(InputStream inputStream, OutputStream outputStream) throws IOException, DecryptionException {
        DataInputStream dis = new DataInputStream(inputStream);
        // 1. Read key size marker; fail fast on an empty/truncated stream instead of
        // silently deriving a bogus key size from the EOF marker (-1).
        int keySizeMarker = dis.read();
        if (keySizeMarker < 0) {
            throw new IOException("Unexpected end of stream: missing key size header.");
        }
        // Restore key size by multiplying the marker by 2^7.
        short encryptedKeySize = (short) (keySizeMarker << 7);
        // 2. Read salt length.
        short encryptedSaltLength = dis.readShort();
        // 3. Read initialization vector length.
        short ivLength = dis.readShort();
        // Reject corrupted headers before attempting to allocate negative-sized buffers.
        if (encryptedSaltLength < 0 || ivLength < 0) {
            throw new IOException("Corrupted stream: negative salt or IV length.");
        }
        // 4. Read the salt. readFully() (unlike read()) guarantees the buffer is
        // completely filled or an EOFException is thrown.
        byte[] salt = new byte[encryptedSaltLength];
        dis.readFully(salt);
        // 5. Read the initialization vector.
        byte[] iv = new byte[ivLength];
        dis.readFully(iv);
        try {
            // 6. Initialize cipher, key specification and salt.
            Cipher cipher = Cipher.getInstance(ALGORITHM);
            KeySpecAndSalt keySpecAndSalt = new KeySpecAndSalt(password, passwordIterations, encryptedKeySize, salt);
            cipher.init(Cipher.DECRYPT_MODE, keySpecAndSalt.encryptionKeySpec, new IvParameterSpec(iv));
            // 7. Create read buffer.
            byte[] readBuffer = new byte[readBufferSize];
            int readBytes;
            byte[] decrypted;
            // 8. Read and decrypt data.
            while ((readBytes = dis.read(readBuffer)) > 0) {
                decrypted = cipher.update(readBuffer, 0, readBytes);
                if (decrypted != null) {
                    outputStream.write(decrypted);
                }
            }
            decrypted = cipher.doFinal();
            if (decrypted != null) {
                outputStream.write(decrypted);
            }
        } catch (NoSuchAlgorithmException | NoSuchPaddingException | BadPaddingException | IllegalBlockSizeException |
                InvalidAlgorithmParameterException e) {
            throw new DecryptionException(e.getMessage(), e);
        } catch (InvalidKeyException ike) {
            throw new DecryptionException("Unable to use strong encryption.", ike);
        }
    }

    /**
     * Encryption/decryption key plus the salt it was derived with.
     */
    private static class KeySpecAndSalt {

        /** Derived AES key. */
        private final SecretKey encryptionKeySpec;

        /** Salt used for key derivation. */
        private final byte[] salt;

        /**
         * Constructs key spec with a freshly generated salt of the given size (encryption path).
         *
         * @param password Password for key.
         * @param passwordIterations Number of iterations to use for building safe hash.
         * @param keyLength Key length (128, 192 or 256).
         * @param saltSize Salt size.
         * @throws EncryptionException Exception in case of key building.
         */
        public KeySpecAndSalt(char[] password, int passwordIterations, short keyLength, short saltSize)
                throws EncryptionException {
            byte[] newSalt = new byte[saltSize];
            secureRandom.get().nextBytes(newSalt);
            try {
                SecretKeyFactory factory = SecretKeyFactory.getInstance(KEY_GENERATOR);
                KeySpec keySpec = new PBEKeySpec(password, newSalt, passwordIterations, keyLength);
                SecretKey secretKey = factory.generateSecret(keySpec);
                this.encryptionKeySpec = new SecretKeySpec(secretKey.getEncoded(), ALGORITHM_FOR_KEY_SPEC);
                this.salt = newSalt;
            } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
                throw new EncryptionException("Initialization error.", e);
            }
        }

        /**
         * Constructs key spec with a given salt (decryption path).
         *
         * @param password Password for key.
         * @param passwordIterations Number of iterations to use for building safe hash.
         * @param keyLength Key length (128, 192 or 256).
         * @param salt Salt.
         * @throws DecryptionException Exception in case of key building.
         */
        public KeySpecAndSalt(char[] password, int passwordIterations, short keyLength, byte[] salt)
                throws DecryptionException {
            try {
                SecretKeyFactory factory = SecretKeyFactory.getInstance(KEY_GENERATOR);
                KeySpec keySpec = new PBEKeySpec(password, salt, passwordIterations, keyLength);
                SecretKey secretKey = factory.generateSecret(keySpec);
                this.encryptionKeySpec = new SecretKeySpec(secretKey.getEncoded(), ALGORITHM_FOR_KEY_SPEC);
                this.salt = salt;
            } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
                throw new DecryptionException("Initialization error.", e);
            }
        }
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.karaf.itest.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.io.Resource;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.conf.ClockTypeOption;
import org.kie.scanner.KieMavenRepository;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Utility methods for testing KIE-Scanner.
*/
public class KieScannerTestUtils {

    /** Manages temporary files (e.g. generated pom.xml files) for the lifetime of a test. */
    private final FileManager fileManager;

    public KieScannerTestUtils() {
        this.fileManager = new FileManager();
    }

    /** Prepares the temporary file manager; call before using this helper. */
    public void setUp() {
        this.fileManager.setUp();
    }

    /** Cleans up temporary files; call after the test finishes. */
    public void tearDown() {
        this.fileManager.tearDown();
    }

    /**
     * Builds a KJar containing the named rules and installs it into the local KIE Maven repository.
     *
     * @param releaseId GAV of the KJar to build
     * @param rules rule names; each becomes a generated DRL file
     */
    public void createAndInstallKJar(final ReleaseId releaseId, final String... rules) {
        KieServices ks = KieServices.Factory.get();
        try {
            InternalKieModule kJar1 = createKieJar(ks, releaseId, rules);
            ks.newKieContainer(releaseId);
            KieMavenRepository repository = KieMavenRepository.getKieMavenRepository();
            repository.installArtifact(releaseId, kJar1, createKPom(fileManager, releaseId));
        } catch (IOException e) {
            throw new RuntimeException("Failed to install artifact " + releaseId, e);
        }
    }

    /**
     * Builds a KJar from a classpath DRL resource, with pom dependencies, and installs it into the
     * local KIE Maven repository.
     *
     * @param releaseId GAV of the KJar to build
     * @param rule classpath resource path of the DRL file
     * @param dependencies GAVs added as pom dependencies
     */
    public void createAndInstallKJarWithDependencies(final ReleaseId releaseId, final String rule, ReleaseId... dependencies) {
        KieServices ks = KieServices.Factory.get();
        try {
            InternalKieModule kJar1 = createKieJarWithDependencies(ks, releaseId, true, rule, dependencies);
            ks.newKieContainer(releaseId);
            KieMavenRepository repository = KieMavenRepository.getKieMavenRepository();
            repository.installArtifact(releaseId, kJar1, createKPom(fileManager, releaseId));
        } catch (IOException e) {
            throw new RuntimeException("Failed to install artifact " + releaseId, e);
        }
    }

    /**
     * Builds an in-memory KJar containing one generated DRL per rule name.
     *
     * @param isdefault whether the KBase/KSession models are marked as default
     */
    protected InternalKieModule createKieJar(KieServices ks, ReleaseId releaseId, boolean isdefault, String... rules) throws IOException {
        KieFileSystem kfs = createKieFileSystemWithKProject(ks, isdefault);
        kfs.writePomXML(getPom(releaseId));
        for (String rule : rules) {
            String file = "org/test/" + rule + ".drl";
            kfs.write("src/main/resources/KBase1/" + file, createDRL(rule));
        }
        KieBuilder kieBuilder = ks.newKieBuilder(kfs);
        assertTrue(kieBuilder.buildAll().getResults().getMessages().isEmpty());
        return (InternalKieModule) kieBuilder.getKieModule();
    }

    /**
     * Builds an in-memory KJar whose rules come from a classpath DRL resource and whose pom
     * declares the given dependencies.
     */
    protected InternalKieModule createKieJarWithDependencies(KieServices ks, ReleaseId releaseId, boolean isdefault,
                                                             String rule, ReleaseId... dependencies) throws IOException {
        KieFileSystem kfs = createKieFileSystemWithKProject(ks, isdefault);
        kfs.writePomXML(getPom(releaseId, dependencies));
        String file = "org/test/rules.drl";
        // Read the rule content from the classpath; the stream is closed via
        // try-with-resources (it was previously leaked), and a missing resource
        // fails with a clear message instead of an NPE.
        // NOTE(review): reads with the platform default charset, as before — confirm
        // whether the DRL resources are guaranteed ASCII/UTF-8.
        final String ruleContent;
        try (InputStream ruleStream = KieScannerTestUtils.class.getResourceAsStream(rule)) {
            if (ruleStream == null) {
                throw new IOException("Rule resource not found on classpath: " + rule);
            }
            ruleContent = new BufferedReader(new InputStreamReader(ruleStream))
                    .lines().collect(Collectors.joining("\n"));
        }
        kfs.write("src/main/resources/KBase1/" + file, ruleContent);
        KieBuilder kieBuilder = ks.newKieBuilder(kfs);
        assertTrue(kieBuilder.buildAll().getResults().getMessages().isEmpty());
        return (InternalKieModule) kieBuilder.getKieModule();
    }

    /**
     * Builds an in-memory KJar with an explicitly supplied pom.
     * Note: {@code releaseId} is kept for signature compatibility; the GAV actually
     * used comes from {@code pomXml}.
     */
    protected InternalKieModule createKieJar(KieServices ks, ReleaseId releaseId, String pomXml, boolean isdefault, String... rules) throws IOException {
        KieFileSystem kfs = createKieFileSystemWithKProject(ks, isdefault);
        kfs.writePomXML(pomXml);
        for (String rule : rules) {
            String file = "org/test/" + rule + ".drl";
            kfs.write("src/main/resources/KBase1/" + file, createDRL(rule));
        }
        KieBuilder kieBuilder = ks.newKieBuilder(kfs);
        assertTrue(kieBuilder.buildAll().getResults().getMessages().isEmpty());
        return (InternalKieModule) kieBuilder.getKieModule();
    }

    /** Convenience overload with non-default KBase/KSession models. */
    protected InternalKieModule createKieJar(KieServices ks, ReleaseId releaseId, String... rules) throws IOException {
        return createKieJar(ks, releaseId, false, rules);
    }

    /**
     * Builds an in-memory KJar containing a Bean fact type, either as a DRL type declaration or as
     * compiled Java source, plus rules using it.
     *
     * @param useTypeDeclaration when true the Bean is declared in DRL, otherwise generated as Java
     * @param value value inserted by the Init rule
     * @param factor multiplier applied by the Bean/rule
     */
    protected InternalKieModule createKieJarWithClass(KieServices ks, ReleaseId releaseId, boolean useTypeDeclaration, int value, int factor, ReleaseId... dependencies) throws IOException {
        KieFileSystem kfs = createKieFileSystemWithKProject(ks, false);
        kfs.writePomXML(getPom(releaseId, dependencies));
        if (useTypeDeclaration) {
            kfs.write("src/main/resources/KBase1/rule1.drl", createDRLWithTypeDeclaration(value, factor));
        } else {
            kfs.write("src/main/resources/KBase1/rule1.drl", createDRLForJavaSource(value))
                    .write("src/main/java/org/kie/test/Bean.java", createJavaSource(factor));
        }
        KieBuilder kieBuilder = ks.newKieBuilder(kfs);
        assertTrue(kieBuilder.buildAll().getResults().getMessages().isEmpty());
        return (InternalKieModule) kieBuilder.getKieModule();
    }

    protected KieFileSystem createKieFileSystemWithKProject(KieServices ks) {
        return createKieFileSystemWithKProject(ks, false);
    }

    /**
     * Creates a KieFileSystem with a kmodule.xml declaring "KBase1" (equality behavior, stream
     * mode) and a stateful "KSession1" with a realtime clock.
     */
    protected KieFileSystem createKieFileSystemWithKProject(KieServices ks, boolean isdefault) {
        KieModuleModel kproj = ks.newKieModuleModel();
        KieBaseModel kieBaseModel1 = kproj.newKieBaseModel("KBase1").setDefault(isdefault)
                .setEqualsBehavior(EqualityBehaviorOption.EQUALITY)
                .setEventProcessingMode(EventProcessingOption.STREAM);
        kieBaseModel1.newKieSessionModel("KSession1").setDefault(isdefault)
                .setType(KieSessionModel.KieSessionType.STATEFUL)
                .setClockType(ClockTypeOption.get("realtime"));
        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.writeKModuleXML(kproj.toXML());
        return kfs;
    }

    /**
     * Generates a minimal pom.xml for the given GAV, optionally declaring dependencies.
     */
    protected String getPom(ReleaseId releaseId, ReleaseId... dependencies) {
        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder pom = new StringBuilder()
                .append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
                .append("<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n")
                .append("         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n")
                .append("    <modelVersion>4.0.0</modelVersion>\n")
                .append("\n")
                .append("    <groupId>").append(releaseId.getGroupId()).append("</groupId>\n")
                .append("    <artifactId>").append(releaseId.getArtifactId()).append("</artifactId>\n")
                .append("    <version>").append(releaseId.getVersion()).append("</version>\n")
                .append("\n");
        if (dependencies != null && dependencies.length > 0) {
            pom.append("<dependencies>\n");
            for (ReleaseId dep : dependencies) {
                pom.append("<dependency>\n");
                pom.append(" <groupId>").append(dep.getGroupId()).append("</groupId>\n");
                pom.append(" <artifactId>").append(dep.getArtifactId()).append("</artifactId>\n");
                pom.append(" <version>").append(dep.getVersion()).append("</version>\n");
                pom.append("</dependency>\n");
            }
            pom.append("</dependencies>\n");
        }
        pom.append("</project>");
        return pom.toString();
    }

    /** Generates a trivial rule that records its own name into the "list" global. */
    protected String createDRL(String ruleName) {
        return "package org.kie.test\n" +
                "global java.util.List list\n" +
                "rule " + ruleName + "\n" +
                "when\n" +
                "then\n" +
                "list.add( drools.getRule().getName() );\n" +
                "end\n";
    }

    /** Generates a Java event Bean whose getValue() multiplies its value by the given factor. */
    private String createJavaSource(int factor) {
        return "package org.kie.test;\n" +
                "import org.kie.api.definition.type.Role;\n" +
                "@Role(Role.Type.EVENT)\n" +
                "public class Bean {\n" +
                "    private final int value;\n" +
                "    public Bean(int value) {\n" +
                "        this.value = value;\n" +
                "    }\n" +
                "    public int getValue() {\n" +
                "        return value * " + factor + ";\n" +
                "    }\n" +
                "}";
    }

    /** Generates rules that insert a Bean with the given value and collect its getValue(). */
    private String createDRLForJavaSource(int value) {
        return "package org.kie.test\n" +
                "global java.util.List list\n" +
                "rule Init salience 100\n" +
                "when\n" +
                "then\n" +
                "insert( new Bean(" + value + ") );\n" +
                "end\n" +
                "rule R1\n" +
                "when\n" +
                "   $b : Bean( value > 0 )\n" +
                "then\n" +
                "   list.add( $b.getValue() );\n" +
                "end\n";
    }

    /** Generates DRL with an in-DRL Bean type declaration plus the value/factor rules. */
    protected String createDRLWithTypeDeclaration(int value, int factor) {
        return "package org.kie.test\n" +
                getDRLWithType() +
                getDRLWithRules(value, factor);
    }

    /** Generates the DRL declaration of the Bean event type. */
    protected String getDRLWithType() {
        return "declare Bean @role(event)\n" +
                "   value : int\n" +
                "end\n";
    }

    /** Generates rules inserting a Bean and collecting value * factor into the "list" global. */
    protected String getDRLWithRules(int value, int factor) {
        return "global java.util.List list\n" +
                "rule Init salience 100\n" +
                "when\n" +
                "then\n" +
                "insert( new Bean(" + value + ") );\n" +
                "end\n" +
                "rule R1\n" +
                "when\n" +
                "   $b : Bean()\n" +
                "then\n" +
                "   list.add( $b.getValue() * " + factor + " );\n" +
                "end\n";
    }

    /**
     * Builds a KJar from raw DRL strings and returns its bytes, or null when the build produced
     * errors (each error is printed to stdout).
     *
     * @param pom explicit pom.xml content, or null to generate one from the releaseId
     */
    public static byte[] createKJar(KieServices ks,
                                    ReleaseId releaseId,
                                    String pom,
                                    String... drls) {
        KieFileSystem kfs = ks.newKieFileSystem();
        if (pom != null) {
            kfs.write("pom.xml", pom);
        } else {
            kfs.generateAndWritePomXML(releaseId);
        }
        for (int i = 0; i < drls.length; i++) {
            if (drls[i] != null) {
                kfs.write("src/main/resources/r" + i + ".drl", drls[i]);
            }
        }
        KieBuilder kb = ks.newKieBuilder(kfs).buildAll();
        if (kb.getResults().hasMessages(org.kie.api.builder.Message.Level.ERROR)) {
            for (org.kie.api.builder.Message result : kb.getResults().getMessages()) {
                System.out.println(result.getText());
            }
            return null;
        }
        InternalKieModule kieModule = (InternalKieModule) ks.getRepository()
                .getKieModule(releaseId);
        byte[] jar = kieModule.getBytes();
        return jar;
    }

    /** Deploys a KJar's bytes into the in-memory KIE repository and returns the module. */
    public static KieModule deployJar(KieServices ks, byte[] jar) {
        // Deploy jar into the repository
        Resource jarRes = ks.getResources().newByteArrayResource(jar);
        KieModule km = ks.getRepository().addKieModule(jarRes);
        return km;
    }

    /** Writes a generated pom.xml for the GAV (plus dependencies) into a temp file. */
    protected File createKPom(FileManager fileManager, ReleaseId releaseId, ReleaseId... dependencies) throws IOException {
        File pomFile = fileManager.newFile("pom.xml");
        fileManager.write(pomFile, getPom(releaseId, dependencies));
        return pomFile;
    }

    /**
     * Fires all rules in the session and asserts that the "list" global contains exactly the
     * expected results (order-insensitive).
     *
     * @param dispose whether to dispose the session after firing
     */
    public void checkKSession(boolean dispose, KieSession ksession, Object... results) {
        List<String> list = new ArrayList<String>();
        ksession.setGlobal( "list", list );
        ksession.fireAllRules();
        if (dispose) {
            ksession.dispose();
        }
        assertEquals(results.length, list.size());
        for (Object result : results) {
            assertTrue( String.format( "Expected to contain: %s, got: %s", result, Arrays.toString( list.toArray() ) ),
                    list.contains( result ) );
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool.impl;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Stack;
import org.apache.commons.pool.BaseObjectPool;
import org.apache.commons.pool.ObjectPool;
import org.apache.commons.pool.PoolUtils;
import org.apache.commons.pool.PoolableObjectFactory;
/**
* A simple, {@link java.util.Stack Stack}-based {@link ObjectPool} implementation.
* <p>
* Given a {@link PoolableObjectFactory}, this class will maintain
* a simple pool of instances. A finite number of "sleeping"
* or idle instances is enforced, but when the pool is
* empty, new instances are created to support the new load.
* Hence this class places no limit on the number of "active"
* instances created by the pool, but is quite useful for
* re-using <tt>Object</tt>s without introducing
* artificial limits.
*
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision: 965338 $ $Date: 2010-07-18 18:09:29 -0700 (Sun, 18 Jul 2010) $
* @since Pool 1.0
*/
public class StackObjectPool extends BaseObjectPool implements ObjectPool {
/**
 * Creates a pool with no factory. A factory must first be
 * {@link #setFactory(PoolableObjectFactory) set} for the pool to behave
 * correctly; alternatively (discouraged) the pool may be populated via
 * {@link #returnObject(java.lang.Object)} before instances are
 * {@link #borrowObject borrowed}.
 *
 * @see #StackObjectPool(PoolableObjectFactory)
 * @deprecated to be removed in pool 2.0 - use {@link #StackObjectPool(PoolableObjectFactory)}
 */
public StackObjectPool() {
    this((PoolableObjectFactory) null, DEFAULT_MAX_SLEEPING, DEFAULT_INIT_SLEEPING_CAPACITY);
}
/**
 * Creates a pool with no factory and the given idle cap. A factory must first
 * be {@link #setFactory(PoolableObjectFactory) set} for the pool to behave
 * correctly; alternatively (discouraged) the pool may be populated via
 * {@link #returnObject(java.lang.Object)} before instances are
 * {@link #borrowObject borrowed}.
 *
 * @param maxIdle cap on the number of "sleeping" instances in the pool
 * @see #StackObjectPool(PoolableObjectFactory, int)
 * @deprecated to be removed in pool 2.0 - use {@link #StackObjectPool(PoolableObjectFactory, int)}
 */
public StackObjectPool(int maxIdle) {
    this((PoolableObjectFactory) null, maxIdle, DEFAULT_INIT_SLEEPING_CAPACITY);
}
/**
 * Creates a pool with no factory, the given idle cap and initial container
 * capacity. A factory must first be {@link #setFactory(PoolableObjectFactory) set}
 * for the pool to behave correctly; alternatively (discouraged) the pool may be
 * populated via {@link #returnObject(java.lang.Object)} before instances are
 * {@link #borrowObject borrowed}.
 *
 * @param maxIdle cap on the number of "sleeping" instances in the pool
 * @param initIdleCapacity initial size of the pool (sizes the container only;
 *        the pool is not pre-populated)
 * @see #StackObjectPool(PoolableObjectFactory, int, int)
 * @deprecated to be removed in pool 2.0 - use {@link #StackObjectPool(PoolableObjectFactory, int, int)}
 */
public StackObjectPool(int maxIdle, int initIdleCapacity) {
    this((PoolableObjectFactory) null, maxIdle, initIdleCapacity);
}
/**
 * Creates a <tt>StackObjectPool</tt> that obtains new instances from the
 * supplied factory, with default idle cap and initial capacity.
 *
 * @param factory the {@link PoolableObjectFactory} used to populate the pool
 */
public StackObjectPool(PoolableObjectFactory factory) {
    this(factory, DEFAULT_MAX_SLEEPING, DEFAULT_INIT_SLEEPING_CAPACITY);
}
/**
 * Creates a <tt>StackObjectPool</tt> that obtains new instances from the
 * supplied factory, capping the number of "sleeping" instances at
 * <i>maxIdle</i>.
 *
 * @param factory the {@link PoolableObjectFactory} used to populate the pool
 * @param maxIdle cap on the number of "sleeping" instances in the pool
 */
public StackObjectPool(PoolableObjectFactory factory, int maxIdle) {
    this(factory, maxIdle, DEFAULT_INIT_SLEEPING_CAPACITY);
}
/**
 * <p>Creates a <tt>StackObjectPool</tt> that obtains new instances from the supplied
 * <code>factory</code>, caps the number of "sleeping" instances at <code>maxIdle</code>, and
 * initially allocates a container capable of holding at least <code>initIdleCapacity</code>
 * instances. The pool is not pre-populated: <code>initIdleCapacity</code> only sizes the
 * underlying container, which may still grow if <code>maxIdle &gt; initIdleCapacity</code>.</p>
 *
 * <p>A negative <code>maxIdle</code> is replaced by {@link #DEFAULT_MAX_SLEEPING}; a
 * non-positive <code>initIdleCapacity</code> is replaced by
 * {@link #DEFAULT_INIT_SLEEPING_CAPACITY}.</p>
 *
 * @param factory the {@link PoolableObjectFactory} used to populate the pool
 * @param maxIdle cap on the number of "sleeping" instances in the pool
 * @param initIdleCapacity initial size of the pool (sizes the container only;
 *        the pool is not pre-populated)
 */
public StackObjectPool(PoolableObjectFactory factory, int maxIdle, int initIdleCapacity) {
    _factory = factory;
    // Negative caps fall back to the default.
    _maxSleeping = maxIdle < 0 ? DEFAULT_MAX_SLEEPING : maxIdle;
    // Non-positive initial capacities fall back to the default.
    int initialCapacity = initIdleCapacity < 1 ? DEFAULT_INIT_SLEEPING_CAPACITY : initIdleCapacity;
    _pool = new Stack();
    // Never pre-size the container beyond the sleeping cap.
    _pool.ensureCapacity(Math.min(initialCapacity, _maxSleeping));
}
/**
 * <p>Borrows an object from the pool. If there are idle instances available on the stack,
 * the top element of the stack is popped to activate, validate and return to the client. If there
 * are no idle instances available, the {@link PoolableObjectFactory#makeObject() makeObject}
 * method of the pool's {@link PoolableObjectFactory} is invoked to create a new instance.</p>
 *
 * <p>All instances are {@link PoolableObjectFactory#activateObject(Object) activated} and
 * {@link PoolableObjectFactory#validateObject(Object) validated} before being returned to the
 * client. If validation fails or an exception occurs activating or validating an instance
 * popped from the idle instance stack, the failing instance is
 * {@link PoolableObjectFactory#destroyObject(Object) destroyed} and the next instance on
 * the stack is popped, validated and activated. This process continues until either the
 * stack is empty or an instance passes validation. If the stack is empty on activation or
 * it does not contain any valid instances, the factory's <code>makeObject</code> method is used
 * to create a new instance. If a null instance is returned by the factory or the created
 * instance either raises an exception on activation or fails validation, <code>NoSuchElementException</code>
 * is thrown. Exceptions thrown by <code>MakeObject</code> are propagated to the caller; but
 * other than <code>ThreadDeath</code> or <code>VirtualMachineError</code>, exceptions generated by
 * activation, validation or destroy methods are swallowed silently.</p>
 *
 * @return an instance from the pool
 */
public synchronized Object borrowObject() throws Exception {
    assertOpen();
    Object obj = null;
    // Tracks whether the current candidate came from makeObject() rather than the
    // idle stack; a freshly-made instance that fails activation/validation must
    // surface as NoSuchElementException instead of being silently retried.
    boolean newlyCreated = false;
    // Loop until a candidate survives activation + validation (obj stays non-null).
    while (null == obj) {
        if (!_pool.empty()) {
            // Prefer an idle instance from the top of the stack.
            obj = _pool.pop();
        } else {
            if(null == _factory) {
                // No idle instances and no factory to make one.
                throw new NoSuchElementException();
            } else {
                obj = _factory.makeObject();
                newlyCreated = true;
                if (obj == null) {
                    throw new NoSuchElementException("PoolableObjectFactory.makeObject() returned null.");
                }
            }
        }
        if (null != _factory && null != obj) {
            try {
                _factory.activateObject(obj);
                if (!_factory.validateObject(obj)) {
                    // Treated like any activation failure by the catch below.
                    throw new Exception("ValidateObject failed");
                }
            } catch (Throwable t) {
                // Rethrows only ThreadDeath / VirtualMachineError; anything else is handled here.
                PoolUtils.checkRethrow(t);
                try {
                    _factory.destroyObject(obj);
                } catch (Throwable t2) {
                    PoolUtils.checkRethrow(t2);
                    // swallowed
                } finally {
                    // Null out so the while-loop tries the next idle instance
                    // (or makes a new one).
                    obj = null;
                }
                if (newlyCreated) {
                    // A brand-new instance failing means retrying is pointless.
                    throw new NoSuchElementException(
                        "Could not create a validated object, cause: " +
                        t.getMessage());
                }
            }
        }
    }
    _numActive++;
    return obj;
}
/**
 * <p>Returns an instance to the pool, pushing it on top of the idle instance stack after successful
 * validation and passivation. The returning instance is destroyed if any of the following are true:<ul>
 * <li>the pool is closed</li>
 * <li>{@link PoolableObjectFactory#validateObject(Object) validation} fails</li>
 * <li>{@link PoolableObjectFactory#passivateObject(Object) passivation} throws an exception</li>
 * </ul>
 * If adding a validated, passivated returning instance to the stack would cause
 * {@link #getMaxSleeping() maxSleeping} to be exceeded, the oldest (bottom) instance on the stack
 * is destroyed to make room for the returning instance, which is pushed on top of the stack.</p>
 *
 * <p>Exceptions passivating or destroying instances are silently swallowed. Exceptions validating
 * instances are propagated to the client. When no factory is set, instances that should be
 * destroyed are simply dropped and left to the garbage collector.</p>
 *
 * @param obj instance to return to the pool
 */
public synchronized void returnObject(Object obj) throws Exception {
    // The instance is only pushed back if the pool is open and the factory
    // (when present) both validates and passivates it.
    boolean success = !isClosed();
    if (null != _factory) {
        if (!_factory.validateObject(obj)) {
            success = false;
        } else {
            try {
                _factory.passivateObject(obj);
            } catch (Exception e) {
                // Passivation failure: do not return a possibly-broken instance to the pool.
                success = false;
            }
        }
    }
    boolean shouldDestroy = !success;
    _numActive--;
    if (success) {
        Object toBeDestroyed = null;
        if (_pool.size() >= _maxSleeping) {
            shouldDestroy = true;
            toBeDestroyed = _pool.remove(0); // remove the stalest object
        }
        _pool.push(obj);
        obj = toBeDestroyed; // swap returned obj with the stalest one so it can be destroyed
    }
    notifyAll(); // _numActive has changed
    // BUG FIX: the original code assumed shouldDestroy implies a non-null _factory,
    // but _factory may be null (deprecated no-factory constructors) while the pool
    // is closed or the idle stack is full, which caused a NullPointerException here.
    if (shouldDestroy && null != _factory) {
        try {
            _factory.destroyObject(obj);
        } catch (Exception e) {
            // ignored
        }
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Decrements the active count and, when a factory is set, destroys the
 * invalidated instance. Exceptions from the factory propagate to the caller.</p>
 */
public synchronized void invalidateObject(Object obj) throws Exception {
    _numActive--;
    final PoolableObjectFactory currentFactory = _factory;
    if (currentFactory != null) {
        currentFactory.destroyObject(obj);
    }
    notifyAll(); // _numActive has changed
}
/**
 * Returns the number of instances currently sitting idle in this pool.
 *
 * @return the number of instances currently idle in this pool
 */
public synchronized int getNumIdle() {
    final int idleCount = _pool.size();
    return idleCount;
}
/**
 * Returns the number of instances currently borrowed from this pool
 * and not yet returned.
 *
 * @return the number of instances currently borrowed from this pool
 */
public synchronized int getNumActive() {
    final int activeCount = _numActive;
    return activeCount;
}
/**
 * Destroys and removes every object sitting idle in the pool. Exceptions thrown
 * by {@link PoolableObjectFactory#destroyObject(Object)} are silently swallowed
 * so that all remaining idle instances are still destroyed.
 */
public synchronized void clear() {
    if (_factory != null) {
        for (Iterator it = _pool.iterator(); it.hasNext(); ) {
            try {
                _factory.destroyObject(it.next());
            } catch (Exception e) {
                // ignore error, keep destroying the rest
            }
        }
    }
    _pool.clear();
}
/**
 * <p>Close this pool, and free any resources associated with it. Invokes
 * {@link #clear()} to destroy and remove instances in the pool.</p>
 *
 * <p>Calling {@link #addObject} or {@link #borrowObject} after invoking
 * this method on a pool will cause them to throw an
 * {@link IllegalStateException}.</p>
 *
 * @throws Exception never - exceptions clearing the pool are swallowed
 */
public void close() throws Exception {
    // Mark the pool closed first so concurrent borrow/add attempts fail fast,
    // then destroy whatever is still idle.
    super.close();
    clear();
}
/**
 * <p>Create an object, and place it on top of the stack.
 * This method is useful for "pre-loading" a pool with idle objects.</p>
 *
 * <p>Before being added to the pool, the newly created instance is
 * {@link PoolableObjectFactory#validateObject(Object) validated} and
 * {@link PoolableObjectFactory#passivateObject(Object) passivated}. If validation
 * fails, the new instance is {@link PoolableObjectFactory#destroyObject(Object) destroyed}.
 * Exceptions generated by the factory <code>makeObject</code> or <code>passivate</code> are
 * propagated to the caller. Exceptions destroying instances are silently swallowed.</p>
 *
 * <p>If a new instance is created and successfully validated and passivated and adding this
 * instance to the pool causes {@link #getMaxSleeping() maxSleeping} to be exceeded, the oldest
 * (bottom) instance in the pool is destroyed to make room for the newly created instance, which
 * is pushed on top of the stack.
 *
 * @throws Exception when the {@link #getFactory() factory} has a problem creating or passivating an object.
 */
public synchronized void addObject() throws Exception {
    assertOpen();
    // Unlike returnObject(), a factory is mandatory here: there is nothing to add without one.
    if (_factory == null) {
        throw new IllegalStateException("Cannot add objects without a factory.");
    }
    Object obj = _factory.makeObject();
    boolean success = true;
    if(!_factory.validateObject(obj)) {
        success = false;
    } else {
        // Passivation exceptions propagate to the caller (per the contract above).
        _factory.passivateObject(obj);
    }
    boolean shouldDestroy = !success;
    if (success) {
        Object toBeDestroyed = null;
        // Evict the oldest idle instance when the stack is already at capacity.
        if(_pool.size() >= _maxSleeping) {
            shouldDestroy = true;
            toBeDestroyed = _pool.remove(0); // remove the stalest object
        }
        _pool.push(obj);
        obj = toBeDestroyed; // swap returned obj with the stalest one so it can be destroyed
    }
    notifyAll(); // _numIdle has changed
    if(shouldDestroy) { // by constructor, shouldDestroy is false when _factory is null
        try {
            _factory.destroyObject(obj);
        } catch(Exception e) {
            // ignored
        }
    }
}
/**
* Sets the {@link PoolableObjectFactory factory} this pool uses
* to create new instances. Trying to change
* the <code>factory</code> while there are borrowed objects will
* throw an {@link IllegalStateException}.
*
* @param factory the {@link PoolableObjectFactory} used to create new instances.
* @throws IllegalStateException when the factory cannot be set at this time
* @deprecated to be removed in pool 2.0
*/
public synchronized void setFactory(PoolableObjectFactory factory) throws IllegalStateException {
assertOpen();
if(0 < getNumActive()) {
throw new IllegalStateException("Objects are already active");
} else {
clear();
_factory = factory;
}
}
    /**
     * The default cap on the number of "sleeping" (idle) instances in the pool.
     */
    protected static final int DEFAULT_MAX_SLEEPING = 8;
    /**
     * The default initial size of the pool
     * (this specifies the size of the container, it does not
     * cause the pool to be pre-populated.)
     */
    protected static final int DEFAULT_INIT_SLEEPING_CAPACITY = 4;
    /**
     * My pool. Idle instances live here; the top of the stack is the most
     * recently returned (freshest) instance.
     * @deprecated to be made private in pool 2.0
     */
    protected Stack _pool = null;
    /**
     * My {@link PoolableObjectFactory}. May be null, in which case instances
     * cannot be created and returns are destroyed best-effort.
     * @deprecated to be made private in pool 2.0 - use {@link #getFactory()}
     */
    protected PoolableObjectFactory _factory = null;
    /**
     * The cap on the number of "sleeping" instances in the pool.
     * @deprecated to be made private in pool 2.0 - use {@link #getMaxSleeping()}
     */
    protected int _maxSleeping = DEFAULT_MAX_SLEEPING;
    /**
     * Number of objects borrowed but not yet returned to the pool.
     * @deprecated to be made private in pool 2.0 - use {@link #getNumActive()}
     */
    protected int _numActive = 0;
/**
* Returns the {@link PoolableObjectFactory} used by this pool to create and manage object instances.
*
* @return the factory
* @since 1.5.5
*/
public synchronized PoolableObjectFactory getFactory() {
return _factory;
}
/**
* Returns the maximum number of idle instances in the pool.
*
* @return maxSleeping
* @since 1.5.5
*/
public int getMaxSleeping() {
return _maxSleeping;
}
}
| |
/*
* Copyright 2018 The Data Transfer Project Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.datatransferproject.datatransfer.flickr.photos;
import com.flickr4java.flickr.Flickr;
import com.flickr4java.flickr.FlickrException;
import com.flickr4java.flickr.REST;
import com.flickr4java.flickr.RequestContext;
import com.flickr4java.flickr.auth.Auth;
import com.flickr4java.flickr.photosets.Photoset;
import com.flickr4java.flickr.photosets.PhotosetsInterface;
import com.flickr4java.flickr.uploader.UploadMetaData;
import com.flickr4java.flickr.uploader.Uploader;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.util.concurrent.RateLimiter;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Collection;
import java.util.UUID;
import org.datatransferproject.api.launcher.Monitor;
import org.datatransferproject.spi.cloud.storage.TemporaryPerJobDataStore;
import org.datatransferproject.spi.cloud.storage.TemporaryPerJobDataStore.InputStreamWrapper;
import org.datatransferproject.spi.transfer.idempotentexecutor.IdempotentImportExecutor;
import org.datatransferproject.spi.transfer.idempotentexecutor.IdempotentImportExecutorHelper;
import org.datatransferproject.spi.transfer.provider.ImportResult;
import org.datatransferproject.spi.transfer.provider.Importer;
import org.datatransferproject.spi.transfer.types.DestinationMemoryFullException;
import org.datatransferproject.types.common.models.photos.PhotoAlbum;
import org.datatransferproject.types.common.models.photos.PhotoModel;
import org.datatransferproject.types.common.models.photos.PhotosContainerResource;
import org.datatransferproject.types.transfer.auth.AppCredentials;
import org.datatransferproject.types.transfer.auth.AuthData;
import org.datatransferproject.types.transfer.serviceconfig.TransferServiceConfig;
/**
 * Imports albums and photos into Flickr.
 *
 * <p>Flickr only allows creating an album together with its first photo, so album
 * metadata is parked in the job store ({@link #storeAlbums}) and the photoset is
 * created lazily when the first photo of that album is uploaded.
 */
public class FlickrPhotosImporter implements Importer<AuthData, PhotosContainerResource> {
  @VisibleForTesting static final String ORIGINAL_ALBUM_PREFIX = "original-album-";
  private final TemporaryPerJobDataStore jobStore;
  private final Flickr flickr;
  private final Uploader uploader;
  private final ImageStreamProvider imageStreamProvider;
  private final PhotosetsInterface photosetsInterface;
  private final Monitor monitor;
  // Throttles all Flickr calls for a single user to stay under API rate limits.
  private final RateLimiter perUserRateLimiter;

  public FlickrPhotosImporter(
      AppCredentials appCredentials,
      TemporaryPerJobDataStore jobStore,
      Monitor monitor,
      TransferServiceConfig serviceConfig) {
    this.jobStore = jobStore;
    this.flickr = new Flickr(appCredentials.getKey(), appCredentials.getSecret(), new REST());
    this.uploader = flickr.getUploader();
    this.imageStreamProvider = new ImageStreamProvider();
    this.photosetsInterface = flickr.getPhotosetsInterface();
    this.monitor = monitor;
    this.perUserRateLimiter = serviceConfig.getPerUserRateLimiter();
  }

  @VisibleForTesting
  FlickrPhotosImporter(
      Flickr flickr,
      TemporaryPerJobDataStore jobstore,
      ImageStreamProvider imageStreamProvider,
      Monitor monitor,
      TransferServiceConfig serviceConfig) {
    this.flickr = flickr;
    this.imageStreamProvider = imageStreamProvider;
    this.jobStore = jobstore;
    this.uploader = flickr.getUploader();
    this.photosetsInterface = flickr.getPhotosetsInterface();
    this.monitor = monitor;
    this.perUserRateLimiter = serviceConfig.getPerUserRateLimiter();
  }

  /**
   * Imports the albums and photos in {@code data}, skipping photos Flickr reports as
   * already present and surfacing quota exhaustion as {@link DestinationMemoryFullException}.
   */
  @Override
  public ImportResult importItem(
      UUID jobId,
      IdempotentImportExecutor idempotentExecutor,
      AuthData authData,
      PhotosContainerResource data)
      throws Exception {
    Auth auth;
    try {
      auth = FlickrUtils.getAuth(authData, flickr);
    } catch (FlickrException e) {
      return new ImportResult(e);
    }
    RequestContext.getRequestContext().setAuth(auth);
    Preconditions.checkArgument(
        data.getAlbums() != null || data.getPhotos() != null, "Error: There is no data to import");
    if (data.getAlbums() != null) {
      storeAlbums(jobId, data.getAlbums());
    }
    if (data.getPhotos() != null) {
      for (PhotoModel photo : data.getPhotos()) {
        try {
          importSinglePhoto(idempotentExecutor, jobId, photo);
        } catch (FlickrException e) {
          // FlickrException messages may be null; normalize to avoid an NPE while
          // classifying the failure.
          String message = Strings.nullToEmpty(e.getMessage());
          if (message.contains("Upload limit reached")) {
            throw new DestinationMemoryFullException("Flickr destination memory reached", e);
          } else if (message.contains("Photo already in set")) {
            // This can happen if we got a server error on our end, but the request went through.
            // When our retry strategy kicked in the request was complete and the photo already
            // uploaded
            continue;
          }
          throw new IOException(e);
        }
      }
    }
    return new ImportResult(ImportResult.ResultType.OK);
  }

  // Store any album data in the cache because Flickr only allows you to create an album with a
  // photo in it, so we have to wait for the first photo to create the album
  private void storeAlbums(UUID jobId, Collection<PhotoAlbum> albums) throws IOException {
    for (PhotoAlbum album : albums) {
      jobStore.create(
          jobId,
          ORIGINAL_ALBUM_PREFIX + album.getId(),
          new FlickrTempPhotoData(album.getName(), album.getDescription()));
    }
  }

  /** Uploads one photo and, if it belonged to an album, files it into the new album. */
  private void importSinglePhoto(
      IdempotentImportExecutor idempotentExecutor, UUID id, PhotoModel photo) throws Exception {
    String photoId =
        idempotentExecutor.executeAndSwallowIOExceptions(IdempotentImportExecutorHelper.getPhotoIdempotentId(photo),
            photo.getTitle(),
            () -> uploadPhoto(photo, id));
    if (photoId == null) {
      // Upload was skipped (swallowed IO failure); nothing further to do.
      return;
    }
    String oldAlbumId = photo.getAlbumId();
    // If the photo wasn't associated with an album, we don't have to do anything else, since we've
    // already uploaded it above. This will mean it lives in the user's cameraroll and not in an
    // album.
    // If the uploadPhoto() call fails above, an exception will be thrown, so we don't have to worry
    // about the photo not being uploaded here.
    if (Strings.isNullOrEmpty(oldAlbumId)) {
      return;
    }
    createOrAddToAlbum(idempotentExecutor, id, photo.getAlbumId(), photoId);
  }

  /** Adds {@code photoId} to the destination album, creating the album on first use. */
  private void createOrAddToAlbum(
      IdempotentImportExecutor idempotentExecutor, UUID jobId, String oldAlbumId, String photoId)
      throws Exception {
    if (idempotentExecutor.isKeyCached(oldAlbumId)) {
      String newAlbumId = idempotentExecutor.getCachedValue(oldAlbumId);
      // We've already created the album this photo belongs in, simply add it to the new album
      photosetsInterface.addPhoto(newAlbumId, photoId);
    } else {
      createAlbum(idempotentExecutor, jobId, oldAlbumId, photoId);
    }
  }

  /** Creates the Flickr photoset for {@code oldAlbumId}, seeded with {@code firstPhotoId}. */
  private void createAlbum(
      IdempotentImportExecutor idempotentExecutor,
      UUID jobId,
      String oldAlbumId,
      String firstPhotoId)
      throws Exception {
    // This means that we havent created the new album yet, create the photoset
    FlickrTempPhotoData album =
        jobStore.findData(jobId, ORIGINAL_ALBUM_PREFIX + oldAlbumId, FlickrTempPhotoData.class);
    // TODO: handle what happens if the album doesn't exist. One of the things we can do here is
    // throw them into a default album or add a finalize() step in the Importer which can deal
    // with these (in case the album exists later).
    Preconditions.checkNotNull(album, "Album not found: " + oldAlbumId);
    idempotentExecutor.executeAndSwallowIOExceptions(
        oldAlbumId,
        album.getName(),
        () -> {
          String albumName =
              Strings.isNullOrEmpty(album.getName()) ? "untitled" : album.getName();
          String albumDescription = cleanString(album.getDescription());
          perUserRateLimiter.acquire();
          Photoset photoset = photosetsInterface.create(albumName, albumDescription, firstPhotoId);
          monitor.debug(() -> String.format("Flickr importer created album: %s", album));
          return photoset.getId();
        });
  }

  /**
   * Streams the photo bytes (from the temp store or the original URL) into Flickr and
   * returns the uploaded photo's id. The input stream is always closed, even when the
   * upload fails (the original code leaked it on the failure path).
   */
  private String uploadPhoto(PhotoModel photo, UUID jobId) throws IOException, FlickrException {
    String photoTitle =
        Strings.isNullOrEmpty(photo.getTitle()) ? "" : photo.getTitle();
    String photoDescription = cleanString(photo.getDescription());
    UploadMetaData uploadMetaData =
        new UploadMetaData()
            .setAsync(false)
            .setPublicFlag(false)
            .setFriendFlag(false)
            .setFamilyFlag(false)
            .setTitle(photoTitle)
            .setDescription(photoDescription);
    // try-with-resources guarantees the stream is closed even if upload() throws.
    try (InputStream inStream =
        photo.isInTempStore()
            ? jobStore.getStream(jobId, photo.getFetchableUrl()).getStream()
            : imageStreamProvider.get(photo.getFetchableUrl())) {
      perUserRateLimiter.acquire();
      String uploadResult = uploader.upload(inStream, uploadMetaData);
      monitor.debug(() -> String.format("%s: Flickr importer uploading photo: %s", jobId, photo));
      return uploadResult;
    }
  }

  /** Returns the empty string for null/empty input, otherwise the string unchanged. */
  private static String cleanString(String string) {
    return Strings.isNullOrEmpty(string) ? "" : string;
  }

  @VisibleForTesting
  class ImageStreamProvider {
    /**
     * Gets an input stream to an image, given its URL. Used by {@link FlickrPhotosImporter} to
     * upload the image.
     */
    public BufferedInputStream get(String urlStr) throws IOException {
      URL url = new URL(urlStr);
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.connect();
      return new BufferedInputStream(conn.getInputStream());
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.benchmark.query;
import com.fasterxml.jackson.databind.InjectableValues.Std;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.BenchmarkSchemas;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
import org.apache.druid.client.CachingClusteredClient;
import org.apache.druid.client.DruidServer;
import org.apache.druid.client.ImmutableDruidServer;
import org.apache.druid.client.TimelineServerView;
import org.apache.druid.client.cache.CacheConfig;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.client.cache.ForegroundCachePopulator;
import org.apache.druid.client.cache.MapCache;
import org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy;
import org.apache.druid.client.selector.QueryableDruidServer;
import org.apache.druid.client.selector.RandomServerSelectorStrategy;
import org.apache.druid.client.selector.ServerSelector;
import org.apache.druid.client.selector.TierSelectorStrategy;
import org.apache.druid.collections.BlockingPool;
import org.apache.druid.collections.DefaultBlockingPool;
import org.apache.druid.collections.NonBlockingPool;
import org.apache.druid.collections.StupidPool;
import org.apache.druid.data.input.Row;
import org.apache.druid.guice.http.DruidHttpClientConfig;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.BySegmentQueryRunner;
import org.apache.druid.query.DataSource;
import org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate;
import org.apache.druid.query.DruidProcessingConfig;
import org.apache.druid.query.Druids;
import org.apache.druid.query.FinalizeResultsQueryRunner;
import org.apache.druid.query.FluentQueryRunnerBuilder;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.QueryToolChest;
import org.apache.druid.query.QueryToolChestWarehouse;
import org.apache.druid.query.Result;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.context.ResponseContext;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
import org.apache.druid.query.groupby.GroupByQueryRunnerTest;
import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesQueryEngine;
import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory;
import org.apache.druid.query.timeseries.TimeseriesResultValue;
import org.apache.druid.query.topn.TopNQuery;
import org.apache.druid.query.topn.TopNQueryBuilder;
import org.apache.druid.query.topn.TopNQueryConfig;
import org.apache.druid.query.topn.TopNQueryQueryToolChest;
import org.apache.druid.query.topn.TopNQueryRunnerFactory;
import org.apache.druid.query.topn.TopNResultValue;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.server.coordination.ServerType;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.DataSegment.PruneSpecsHolder;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.TimelineLookup;
import org.apache.druid.timeline.VersionedIntervalTimeline;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * JMH benchmark measuring broker-side ("caching clustered client") merge time for
 * timeseries, topN, and groupBy queries across {@code numServers} simulated
 * historical servers, each holding one generated segment of {@code rowsPerSegment}
 * rows. The simulated servers run queries in-process via {@link SimpleQueryRunner},
 * so the measured cost is dominated by result merging on the "broker".
 */
@State(Scope.Benchmark)
@Fork(value = 1, jvmArgsAppend = "-XX:+UseG1GC")
@Warmup(iterations = 15)
@Measurement(iterations = 30)
public class CachingClusteredClientBenchmark
{
  private static final Logger LOG = new Logger(CachingClusteredClientBenchmark.class);
  private static final int PROCESSING_BUFFER_SIZE = 10 * 1024 * 1024; // ~10MB
  private static final String DATA_SOURCE = "ds";
  public static final ObjectMapper JSON_MAPPER;
  // Number of simulated historical servers (one segment each).
  @Param({"8"})
  private int numServers;
  // Processing threads available to the per-server query engines.
  @Param({"4", "2", "1"})
  private int numProcessingThreads;
  @Param({"75000"})
  private int rowsPerSegment;
  @Param({"all"})
  private String queryGranularity;
  private QueryToolChestWarehouse toolChestWarehouse;
  private QueryRunnerFactoryConglomerate conglomerate;
  private CachingClusteredClient cachingClusteredClient;
  private ExecutorService processingPool;
  // Set by each @Benchmark method before runQuery() reads it.
  private Query query;
  // Collects per-trial resources (segment generators) for tearDown().
  private final Closer closer = Closer.create();
  private final BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");
  private final QuerySegmentSpec basicSchemaIntervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(basicSchema.getDataInterval())
  );
  static {
    // Mapper needs injectables for deserializing expressions, itself, and segment specs.
    JSON_MAPPER = new DefaultObjectMapper();
    JSON_MAPPER.setInjectableValues(
        new Std()
            .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
            .addValue(ObjectMapper.class.getName(), JSON_MAPPER)
            .addValue(PruneSpecsHolder.class, PruneSpecsHolder.DEFAULT)
    );
  }
  /**
   * Builds one generated segment per simulated server, wires up the query runner
   * factories for the three benchmarked query types, and constructs the
   * {@link CachingClusteredClient} under test.
   */
  @Setup(Level.Trial)
  public void setup()
  {
    final String schemaName = "basic";
    BenchmarkSchemaInfo schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
    Map<DataSegment, QueryableIndex> queryableIndexes = new HashMap<>(numServers);
    for (int i = 0; i < numServers; i++) {
      final DataSegment dataSegment = DataSegment.builder()
                                                 .dataSource(DATA_SOURCE)
                                                 .interval(schemaInfo.getDataInterval())
                                                 .version("1")
                                                 .shardSpec(new LinearShardSpec(i))
                                                 .build();
      final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
      LOG.info("Starting benchmark setup using cacheDir[%s], rows[%,d].", segmentGenerator.getCacheDir(), rowsPerSegment);
      final QueryableIndex index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment);
      queryableIndexes.put(dataSegment, index);
    }
    final DruidProcessingConfig processingConfig = new DruidProcessingConfig()
    {
      @Override
      public String getFormatString()
      {
        return null;
      }
      @Override
      public int intermediateComputeSizeBytes()
      {
        return PROCESSING_BUFFER_SIZE;
      }
      @Override
      public int getNumMergeBuffers()
      {
        return 1;
      }
      @Override
      public int getNumThreads()
      {
        return numProcessingThreads;
      }
    };
    // One factory per benchmarked query type: timeseries, topN, groupBy (v2).
    conglomerate = new DefaultQueryRunnerFactoryConglomerate(
        ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>builder()
            .put(
                TimeseriesQuery.class,
                new TimeseriesQueryRunnerFactory(
                    new TimeseriesQueryQueryToolChest(
                        QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
                    ),
                    new TimeseriesQueryEngine(),
                    QueryRunnerTestHelper.NOOP_QUERYWATCHER
                )
            )
            .put(
                TopNQuery.class,
                new TopNQueryRunnerFactory(
                    new StupidPool<>(
                        "TopNQueryRunnerFactory-bufferPool",
                        () -> ByteBuffer.allocate(PROCESSING_BUFFER_SIZE)
                    ),
                    new TopNQueryQueryToolChest(
                        new TopNQueryConfig(),
                        QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
                    ),
                    QueryRunnerTestHelper.NOOP_QUERYWATCHER
                )
            )
            .put(
                GroupByQuery.class,
                makeGroupByQueryRunnerFactory(
                    GroupByQueryRunnerTest.DEFAULT_MAPPER,
                    new GroupByQueryConfig()
                    {
                      @Override
                      public String getDefaultStrategy()
                      {
                        return GroupByStrategySelector.STRATEGY_V2;
                      }
                    },
                    processingConfig
                )
            )
            .build()
    );
    toolChestWarehouse = new QueryToolChestWarehouse()
    {
      @Override
      public <T, QueryType extends Query<T>> QueryToolChest<T, QueryType> getToolChest(final QueryType query)
      {
        return conglomerate.findFactory(query).getToolchest();
      }
    };
    SimpleServerView serverView = new SimpleServerView();
    int serverSuffx = 1; // NOTE(review): "Suffx" typo — rename candidate
    for (Entry<DataSegment, QueryableIndex> entry : queryableIndexes.entrySet()) {
      serverView.addServer(
          createServer(serverSuffx++),
          entry.getKey(),
          entry.getValue()
      );
    }
    processingPool = Execs.multiThreaded(processingConfig.getNumThreads(), "caching-clustered-client-benchmark");
    cachingClusteredClient = new CachingClusteredClient(
        toolChestWarehouse,
        serverView,
        MapCache.create(0),
        JSON_MAPPER,
        new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), 0),
        new CacheConfig(),
        new DruidHttpClientConfig()
    );
  }
  /**
   * Assembles a groupBy runner factory backed by direct-memory processing and merge
   * buffer pools sized from {@code processingConfig}.
   */
  private static GroupByQueryRunnerFactory makeGroupByQueryRunnerFactory(
      final ObjectMapper mapper,
      final GroupByQueryConfig config,
      final DruidProcessingConfig processingConfig
  )
  {
    final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
    final Supplier<ByteBuffer> bufferSupplier =
        () -> ByteBuffer.allocateDirect(processingConfig.intermediateComputeSizeBytes());
    final NonBlockingPool<ByteBuffer> bufferPool = new StupidPool<>(
        "GroupByQueryEngine-bufferPool",
        bufferSupplier
    );
    final BlockingPool<ByteBuffer> mergeBufferPool = new DefaultBlockingPool<>(
        bufferSupplier,
        processingConfig.getNumMergeBuffers()
    );
    final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
        configSupplier,
        new GroupByStrategyV1(
            configSupplier,
            new GroupByQueryEngine(configSupplier, bufferPool),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER,
            bufferPool
        ),
        new GroupByStrategyV2(
            processingConfig,
            configSupplier,
            bufferPool,
            mergeBufferPool,
            mapper,
            QueryRunnerTestHelper.NOOP_QUERYWATCHER
        )
    );
    final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(
        strategySelector,
        QueryRunnerTestHelper.sameThreadIntervalChunkingQueryRunnerDecorator()
    );
    return new GroupByQueryRunnerFactory(
        strategySelector,
        toolChest
    );
  }
  @TearDown(Level.Trial)
  public void tearDown() throws IOException
  {
    // Releases segment generators registered in setup(), then stops the pool.
    closer.close();
    processingPool.shutdown();
  }
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void timeseriesQuery(Blackhole blackhole)
  {
    query = Druids.newTimeseriesQueryBuilder()
                  .dataSource(DATA_SOURCE)
                  .intervals(basicSchemaIntervalSpec)
                  .aggregators(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"))
                  .granularity(Granularity.fromString(queryGranularity))
                  .build();
    final List<Result<TimeseriesResultValue>> results = runQuery();
    for (Result<TimeseriesResultValue> result : results) {
      blackhole.consume(result);
    }
  }
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void topNQuery(Blackhole blackhole)
  {
    query = new TopNQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(basicSchemaIntervalSpec)
        .dimension(new DefaultDimensionSpec("dimUniform", null))
        .aggregators(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"))
        .granularity(Granularity.fromString(queryGranularity))
        .metric("sumLongSequential")
        .threshold(10_000) // we are primarily measuring 'broker' merge time, so collect a significant number of results
        .build();
    final List<Result<TopNResultValue>> results = runQuery();
    for (Result<TopNResultValue> result : results) {
      blackhole.consume(result);
    }
  }
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void groupByQuery(Blackhole blackhole)
  {
    query = GroupByQuery
        .builder()
        .setDataSource(DATA_SOURCE)
        .setQuerySegmentSpec(basicSchemaIntervalSpec)
        .setDimensions(
            new DefaultDimensionSpec("dimUniform", null),
            new DefaultDimensionSpec("dimZipf", null)
        )
        .setAggregatorSpecs(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"))
        .setGranularity(Granularity.fromString(queryGranularity))
        .build();
    final List<Row> results = runQuery();
    for (Row result : results) {
      blackhole.consume(result);
    }
  }
  /**
   * Runs {@link #query} through the clustered client with the broker-side
   * pre-merge / merge / post-merge decoration applied, and materializes the result.
   */
  private <T> List<T> runQuery()
  {
    //noinspection unchecked
    QueryRunner<T> theRunner = new FluentQueryRunnerBuilder<>(toolChestWarehouse.getToolChest(query))
        .create(cachingClusteredClient.getQueryRunnerForIntervals(query, query.getIntervals()))
        .applyPreMergeDecoration()
        .mergeResults()
        .applyPostMergeDecoration();
    //noinspection unchecked
    Sequence<T> queryResult = theRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
    return queryResult.toList();
  }
  /**
   * Minimal in-process {@link TimelineServerView}: each added server owns exactly one
   * segment and answers queries through a {@link SimpleQueryRunner}.
   */
  private class SimpleServerView implements TimelineServerView
  {
    private final TierSelectorStrategy tierSelectorStrategy = new HighestPriorityTierSelectorStrategy(
        new RandomServerSelectorStrategy()
    );
    // server -> queryRunner
    private final Map<DruidServer, SingleSegmentDruidServer> servers = new HashMap<>();
    // segmentId -> serverSelector
    private final Map<String, ServerSelector> selectors = new HashMap<>();
    // dataSource -> version -> serverSelector
    private final Map<String, VersionedIntervalTimeline<String, ServerSelector>> timelines = new HashMap<>();
    void addServer(DruidServer server, DataSegment dataSegment, QueryableIndex queryableIndex)
    {
      servers.put(
          server,
          new SingleSegmentDruidServer(
              server,
              new SimpleQueryRunner(
                  conglomerate,
                  dataSegment.getId(),
                  queryableIndex
              )
          )
      );
      addSegmentToServer(server, dataSegment);
    }
    void addSegmentToServer(DruidServer server, DataSegment segment)
    {
      final ServerSelector selector = selectors.computeIfAbsent(
          segment.getId().toString(),
          k -> new ServerSelector(segment, tierSelectorStrategy)
      );
      selector.addServerAndUpdateSegment(servers.get(server), segment);
      timelines.computeIfAbsent(segment.getDataSource(), k -> new VersionedIntervalTimeline<>(Ordering.natural()))
               .add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector));
    }
    @Nullable
    @Override
    public TimelineLookup<String, ServerSelector> getTimeline(DataSource dataSource)
    {
      final String table = Iterables.getOnlyElement(dataSource.getNames());
      return timelines.get(table);
    }
    @Override
    public List<ImmutableDruidServer> getDruidServers()
    {
      return Collections.emptyList();
    }
    @Override
    public <T> QueryRunner<T> getQueryRunner(DruidServer server)
    {
      final SingleSegmentDruidServer queryableDruidServer = Preconditions.checkNotNull(servers.get(server), "server");
      return (QueryRunner<T>) queryableDruidServer.getQueryRunner();
    }
    @Override
    public void registerTimelineCallback(Executor exec, TimelineCallback callback)
    {
      // do nothing
    }
    @Override
    public void registerServerRemovedCallback(Executor exec, ServerRemovedCallback callback)
    {
      // do nothing
    }
    @Override
    public void registerSegmentCallback(Executor exec, SegmentCallback callback)
    {
      // do nothing
    }
  }
  /**
   * Runs a query directly against a single {@link QueryableIndexSegment}, standing in
   * for the network hop to a real historical.
   */
  private class SimpleQueryRunner implements QueryRunner<Object>
  {
    private final QueryRunnerFactoryConglomerate conglomerate;
    private final QueryableIndexSegment segment;
    public SimpleQueryRunner(QueryRunnerFactoryConglomerate conglomerate, SegmentId segmentId, QueryableIndex queryableIndex)
    {
      this.conglomerate = conglomerate;
      this.segment = new QueryableIndexSegment(queryableIndex, segmentId);
    }
    @Override
    public Sequence<Object> run(QueryPlus<Object> queryPlus, ResponseContext responseContext)
    {
      final QueryRunnerFactory factory = conglomerate.findFactory(queryPlus.getQuery());
      //noinspection unchecked
      return factory.getToolchest().preMergeQueryDecoration(
          new FinalizeResultsQueryRunner<>(
              new BySegmentQueryRunner<>(
                  segment.getId(),
                  segment.getDataInterval().getStart(),
                  factory.createRunner(segment)
              ),
              factory.getToolchest()
          )
      ).run(queryPlus, responseContext);
    }
  }
  // Pairs a DruidServer identity with its single-segment runner for the server view.
  private class SingleSegmentDruidServer extends QueryableDruidServer<SimpleQueryRunner>
  {
    SingleSegmentDruidServer(DruidServer server, SimpleQueryRunner runner)
    {
      super(server, runner);
    }
  }
  // NOTE(review): "nameSuiffix" typo — rename candidate
  private static DruidServer createServer(int nameSuiffix)
  {
    return new DruidServer(
        "server_" + nameSuiffix,
        "127.0.0." + nameSuiffix,
        null,
        Long.MAX_VALUE,
        ServerType.HISTORICAL,
        "default",
        0
    );
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.aggregations.support.AggregationPath.PathElement;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
*
*/
public class AggregatorFactories {
    /** Shared empty instance: no parent, no sub-aggregations, no pipeline aggregations. */
    public static final AggregatorFactories EMPTY = new AggregatorFactories(null, new AggregatorFactory<?>[0],
            new ArrayList<PipelineAggregatorBuilder<?>>());
    // Factory of the enclosing (parent) aggregation; null at the top level.
    private AggregatorFactory<?> parent;
    // Factories for the sub-aggregations registered at this level.
    private AggregatorFactory<?>[] factories;
    // Builders for the pipeline aggregations attached at this level.
    private List<PipelineAggregatorBuilder<?>> pipelineAggregatorFactories;
    /** Returns a new, empty {@link Builder} for assembling an {@link AggregatorFactories}. */
    public static Builder builder() {
        return new Builder();
    }
    // Instances are only built via builder() / EMPTY; arguments are stored as-is
    // (no defensive copies), so callers must not mutate them afterwards.
    private AggregatorFactories(AggregatorFactory<?> parent, AggregatorFactory<?>[] factories,
            List<PipelineAggregatorBuilder<?>> pipelineAggregators) {
        this.parent = parent;
        this.factories = factories;
        this.pipelineAggregatorFactories = pipelineAggregators;
    }
public List<PipelineAggregator> createPipelineAggregators() throws IOException {
List<PipelineAggregator> pipelineAggregators = new ArrayList<>();
for (PipelineAggregatorBuilder<?> factory : this.pipelineAggregatorFactories) {
pipelineAggregators.add(factory.create());
}
return pipelineAggregators;
}
/**
* Create all aggregators so that they can be consumed with multiple
* buckets.
*/
public Aggregator[] createSubAggregators(Aggregator parent) throws IOException {
Aggregator[] aggregators = new Aggregator[countAggregators()];
for (int i = 0; i < factories.length; ++i) {
// TODO: sometimes even sub aggregations always get called with bucket 0, eg. if
// you have a terms agg under a top-level filter agg. We should have a way to
// propagate the fact that only bucket 0 will be collected with single-bucket
// aggs
final boolean collectsFromSingleBucket = false;
aggregators[i] = factories[i].create(parent, collectsFromSingleBucket);
}
return aggregators;
}
public Aggregator[] createTopLevelAggregators() throws IOException {
// These aggregators are going to be used with a single bucket ordinal, no need to wrap the PER_BUCKET ones
Aggregator[] aggregators = new Aggregator[factories.length];
for (int i = 0; i < factories.length; i++) {
// top-level aggs only get called with bucket 0
final boolean collectsFromSingleBucket = true;
aggregators[i] = factories[i].create(null, collectsFromSingleBucket);
}
return aggregators;
}
/**
* @return the number of sub-aggregator factories not including pipeline
* aggregator factories
*/
public int countAggregators() {
return factories.length;
}
/**
* @return the number of pipeline aggregator factories
*/
public int countPipelineAggregators() {
return pipelineAggregatorFactories.size();
}
public void validate() {
for (AggregatorFactory<?> factory : factories) {
factory.validate();
}
for (PipelineAggregatorBuilder<?> factory : pipelineAggregatorFactories) {
factory.validate(parent, factories, pipelineAggregatorFactories);
}
}
public static class Builder extends ToXContentToBytes implements Writeable<Builder> {
public final static Builder PROTOTYPE = new Builder();
private final Set<String> names = new HashSet<>();
private final List<AggregatorBuilder<?>> aggregatorBuilders = new ArrayList<>();
private final List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders = new ArrayList<>();
private boolean skipResolveOrder;
public Builder addAggregators(AggregatorFactories factories) {
throw new UnsupportedOperationException("This needs to be removed");
}
public Builder addAggregator(AggregatorBuilder<?> factory) {
if (!names.add(factory.name)) {
throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
}
aggregatorBuilders.add(factory);
return this;
}
public Builder addPipelineAggregator(PipelineAggregatorBuilder<?> pipelineAggregatorFactory) {
this.pipelineAggregatorBuilders.add(pipelineAggregatorFactory);
return this;
}
/**
* FOR TESTING ONLY
*/
Builder skipResolveOrder() {
this.skipResolveOrder = true;
return this;
}
public AggregatorFactories build(AggregationContext context, AggregatorFactory<?> parent) throws IOException {
if (aggregatorBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
return EMPTY;
}
List<PipelineAggregatorBuilder<?>> orderedpipelineAggregators = null;
if (skipResolveOrder) {
orderedpipelineAggregators = new ArrayList<>(pipelineAggregatorBuilders);
} else {
orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregatorBuilders);
}
AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregatorBuilders.size()];
for (int i = 0; i < aggregatorBuilders.size(); i++) {
aggFactories[i] = aggregatorBuilders.get(i).build(context, parent);
}
return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators);
}
private List<PipelineAggregatorBuilder<?>> resolvePipelineAggregatorOrder(
List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregatorBuilder<?>> aggBuilders) {
Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap = new HashMap<>();
for (PipelineAggregatorBuilder<?> builder : pipelineAggregatorBuilders) {
pipelineAggregatorBuildersMap.put(builder.getName(), builder);
}
Map<String, AggregatorBuilder<?>> aggBuildersMap = new HashMap<>();
for (AggregatorBuilder<?> aggBuilder : aggBuilders) {
aggBuildersMap.put(aggBuilder.name, aggBuilder);
}
List<PipelineAggregatorBuilder<?>> orderedPipelineAggregatorrs = new LinkedList<>();
List<PipelineAggregatorBuilder<?>> unmarkedBuilders = new ArrayList<PipelineAggregatorBuilder<?>>(pipelineAggregatorBuilders);
Set<PipelineAggregatorBuilder<?>> temporarilyMarked = new HashSet<PipelineAggregatorBuilder<?>>();
while (!unmarkedBuilders.isEmpty()) {
PipelineAggregatorBuilder<?> builder = unmarkedBuilders.get(0);
resolvePipelineAggregatorOrder(aggBuildersMap, pipelineAggregatorBuildersMap, orderedPipelineAggregatorrs, unmarkedBuilders,
temporarilyMarked, builder);
}
return orderedPipelineAggregatorrs;
}
private void resolvePipelineAggregatorOrder(Map<String, AggregatorBuilder<?>> aggBuildersMap,
Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap,
List<PipelineAggregatorBuilder<?>> orderedPipelineAggregators, List<PipelineAggregatorBuilder<?>> unmarkedBuilders,
Set<PipelineAggregatorBuilder<?>> temporarilyMarked, PipelineAggregatorBuilder<?> builder) {
if (temporarilyMarked.contains(builder)) {
throw new IllegalArgumentException("Cyclical dependency found with pipeline aggregator [" + builder.getName() + "]");
} else if (unmarkedBuilders.contains(builder)) {
temporarilyMarked.add(builder);
String[] bucketsPaths = builder.getBucketsPaths();
for (String bucketsPath : bucketsPaths) {
List<AggregationPath.PathElement> bucketsPathElements = AggregationPath.parse(bucketsPath).getPathElements();
String firstAggName = bucketsPathElements.get(0).name;
if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) {
continue;
} else if (aggBuildersMap.containsKey(firstAggName)) {
AggregatorBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
for (int i = 1; i < bucketsPathElements.size(); i++) {
PathElement pathElement = bucketsPathElements.get(i);
String aggName = pathElement.name;
if ((i == bucketsPathElements.size() - 1) && (aggName.equalsIgnoreCase("_key") || aggName.equals("_count"))) {
break;
} else {
// Check the non-pipeline sub-aggregator
// factories
AggregatorBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
boolean foundSubBuilder = false;
for (AggregatorBuilder<?> subBuilder : subBuilders) {
if (aggName.equals(subBuilder.name)) {
aggBuilder = subBuilder;
foundSubBuilder = true;
break;
}
}
// Check the pipeline sub-aggregator factories
if (!foundSubBuilder && (i == bucketsPathElements.size() - 1)) {
List<PipelineAggregatorBuilder<?>> subPipelineBuilders = aggBuilder.factoriesBuilder.pipelineAggregatorBuilders;
for (PipelineAggregatorBuilder<?> subFactory : subPipelineBuilders) {
if (aggName.equals(subFactory.name())) {
foundSubBuilder = true;
break;
}
}
}
if (!foundSubBuilder) {
throw new IllegalArgumentException("No aggregation [" + aggName + "] found for path [" + bucketsPath
+ "]");
}
}
}
continue;
} else {
PipelineAggregatorBuilder<?> matchingBuilder = pipelineAggregatorBuildersMap.get(firstAggName);
if (matchingBuilder != null) {
resolvePipelineAggregatorOrder(aggBuildersMap, pipelineAggregatorBuildersMap, orderedPipelineAggregators,
unmarkedBuilders, temporarilyMarked, matchingBuilder);
} else {
throw new IllegalArgumentException("No aggregation found for path [" + bucketsPath + "]");
}
}
}
unmarkedBuilders.remove(builder);
temporarilyMarked.remove(builder);
orderedPipelineAggregators.add(builder);
}
}
AggregatorBuilder<?>[] getAggregatorFactories() {
return this.aggregatorBuilders.toArray(new AggregatorBuilder<?>[this.aggregatorBuilders.size()]);
}
List<PipelineAggregatorBuilder<?>> getPipelineAggregatorFactories() {
return this.pipelineAggregatorBuilders;
}
public int count() {
return aggregatorBuilders.size() + pipelineAggregatorBuilders.size();
}
@Override
public Builder readFrom(StreamInput in) throws IOException {
Builder builder = new Builder();
int factoriesSize = in.readVInt();
for (int i = 0; i < factoriesSize; i++) {
builder.addAggregator(in.readNamedWriteable(AggregatorBuilder.class));
}
int pipelineFactoriesSize = in.readVInt();
for (int i = 0; i < pipelineFactoriesSize; i++) {
builder.addPipelineAggregator(in.readNamedWriteable(PipelineAggregatorBuilder.class));
}
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.aggregatorBuilders.size());
for (AggregatorBuilder<?> builder : aggregatorBuilders) {
out.writeNamedWriteable(builder);
}
out.writeVInt(this.pipelineAggregatorBuilders.size());
for (PipelineAggregatorBuilder<?> builder : pipelineAggregatorBuilders) {
out.writeNamedWriteable(builder);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (aggregatorBuilders != null) {
for (AggregatorBuilder<?> subAgg : aggregatorBuilders) {
subAgg.toXContent(builder, params);
}
}
if (pipelineAggregatorBuilders != null) {
for (PipelineAggregatorBuilder<?> subAgg : pipelineAggregatorBuilders) {
subAgg.toXContent(builder, params);
}
}
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(aggregatorBuilders, pipelineAggregatorBuilders);
}
@Override
public boolean equals(Object obj) {
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Builder other = (Builder) obj;
if (!Objects.equals(aggregatorBuilders, other.aggregatorBuilders))
return false;
if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders))
return false;
return true;
}
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.project;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xmlb.annotations.Property;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.idea.maven.execution.MavenRunnerSettings;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
 * Persistent user settings controlling how Maven projects are imported into
 * the IDE: module layout, source/target folder handling, automatic artifact
 * downloads, and the JDK/VM options used by the importer process.
 * Instances are cloneable; registered {@link Listener}s are notified when the
 * module-group or aggregator-module options change.
 */
public class MavenImportingSettings implements Cloneable {
  private static final Logger LOG = Logger.getInstance(MavenImportingSettings.class);

  private static final String PROCESS_RESOURCES_PHASE = "process-resources";
  // Maven lifecycle phases selectable for the "update folders on import" option.
  public static final String[] UPDATE_FOLDERS_PHASES = new String[]{
    "generate-sources",
    "process-sources",
    "generate-resources",
    PROCESS_RESOURCES_PHASE,
    "generate-test-sources",
    "process-test-sources",
    "generate-test-resources",
    "process-test-resources"};
  public static final String UPDATE_FOLDERS_DEFAULT_PHASE = PROCESS_RESOURCES_PHASE;

  // Comma-separated list of dependency types imported by default.
  public static final String DEFAULT_DEPENDENCY_TYPES =
    "jar, test-jar, maven-plugin, ejb, ejb-client, jboss-har, jboss-sar, war, ear, bundle";

  @NotNull @NlsSafe private String dedicatedModuleDir = "";
  private boolean lookForNested = false;
  private boolean importAutomatically = false;
  private boolean createModulesForAggregators = true;
  private boolean createModuleGroups = false;
  private boolean excludeTargetFolder = true;
  private boolean keepSourceFolders = true;
  private boolean useMavenOutput = true;
  @NlsSafe private String updateFoldersOnImportPhase = UPDATE_FOLDERS_DEFAULT_PHASE;

  private boolean downloadSourcesAutomatically = false;
  private boolean downloadDocsAutomatically = false;
  private boolean downloadAnnotationsAutomatically = false;
  private boolean autoDetectCompiler = true;
  private GeneratedSourcesFolder generatedSourcesFolder = GeneratedSourcesFolder.AUTODETECT;

  private String dependencyTypes = DEFAULT_DEPENDENCY_TYPES;
  // Lazily computed set view of dependencyTypes; invalidated by setDependencyTypes().
  private Set<String> myDependencyTypesAsSet;

  @NotNull @NlsSafe private String vmOptionsForImporter = "";
  @NotNull @NlsSafe private String jdkForImporter = MavenRunnerSettings.USE_PROJECT_JDK;

  private List<Listener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  /** Strategy for registering generated-source directories of imported projects. */
  public enum GeneratedSourcesFolder {
    IGNORE("maven.settings.generated.folder.ignore"),
    AUTODETECT("maven.settings.generated.folder.autodetect"),
    GENERATED_SOURCE_FOLDER("maven.settings.generated.folder.targerdir"),
    SUBFOLDER("maven.settings.generated.folder.targersubdir");

    public final String myMessageKey;

    GeneratedSourcesFolder(String messageKey) {
      myMessageKey = messageKey;
    }

    /** @return the localized display title for this option. */
    @NlsContexts.ListItem
    public String getTitle() {
      return MavenConfigurableBundle.message(myMessageKey);
    }
  }

  @NotNull
  @NlsSafe
  public String getDedicatedModuleDir() {
    return dedicatedModuleDir;
  }

  public void setDedicatedModuleDir(@NotNull String dedicatedModuleDir) {
    this.dedicatedModuleDir = dedicatedModuleDir;
  }

  public boolean isLookForNested() {
    return lookForNested;
  }

  public void setLookForNested(boolean lookForNested) {
    this.lookForNested = lookForNested;
  }

  /**
   * @deprecated see {@link MavenImportingSettings#setImportAutomatically(boolean)} for details
   */
  @SuppressWarnings("DeprecatedIsStillUsed")
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  public boolean isImportAutomatically() {
    return importAutomatically;
  }

  /**
   * @see com.intellij.openapi.externalSystem.autoimport.ExternalSystemProjectTracker for details
   * @deprecated Auto-import cannot be disabled
   */
  @SuppressWarnings("DeprecatedIsStillUsed")
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  public void setImportAutomatically(boolean importAutomatically) {
    // Value is still stored for settings round-tripping even though auto-import
    // can no longer be disabled (see deprecation note above).
    this.importAutomatically = importAutomatically;
  }

  @NotNull
  public String getDependencyTypes() {
    return dependencyTypes;
  }

  public void setDependencyTypes(@NotNull String dependencyTypes) {
    this.dependencyTypes = dependencyTypes;
    myDependencyTypesAsSet = null; // drop cached set view; recomputed on next access
  }

  /**
   * @return the dependency types as an insertion-ordered set, parsed lazily
   * from {@link #getDependencyTypes()} (split on whitespace, ',' and ';').
   */
  @NotNull
  public Set<String> getDependencyTypesAsSet() {
    if (myDependencyTypesAsSet == null) {
      Set<String> res = new LinkedHashSet<>();
      for (String type : StringUtil.tokenize(dependencyTypes, " \n\r\t,;")) {
        res.add(type);
      }
      myDependencyTypesAsSet = res;
    }
    return myDependencyTypesAsSet;
  }

  public boolean isCreateModuleGroups() {
    return createModuleGroups;
  }

  public void setCreateModuleGroups(boolean createModuleGroups) {
    this.createModuleGroups = createModuleGroups;
    fireCreateModuleGroupsChanged();
  }

  public boolean isCreateModulesForAggregators() {
    return createModulesForAggregators;
  }

  public void setCreateModulesForAggregators(boolean createModulesForAggregators) {
    this.createModulesForAggregators = createModulesForAggregators;
    fireCreateModuleForAggregatorsChanged();
  }

  public boolean isKeepSourceFolders() {
    return keepSourceFolders;
  }

  public void setKeepSourceFolders(boolean keepSourceFolders) {
    this.keepSourceFolders = keepSourceFolders;
  }

  public boolean isExcludeTargetFolder() {
    return excludeTargetFolder;
  }

  public void setExcludeTargetFolder(boolean excludeTargetFolder) {
    this.excludeTargetFolder = excludeTargetFolder;
  }

  public boolean isUseMavenOutput() {
    return useMavenOutput;
  }

  public void setUseMavenOutput(boolean useMavenOutput) {
    this.useMavenOutput = useMavenOutput;
  }

  @NlsSafe
  public String getUpdateFoldersOnImportPhase() {
    return updateFoldersOnImportPhase;
  }

  public void setUpdateFoldersOnImportPhase(String updateFoldersOnImportPhase) {
    this.updateFoldersOnImportPhase = updateFoldersOnImportPhase;
  }

  public boolean isDownloadSourcesAutomatically() {
    return downloadSourcesAutomatically;
  }

  public void setDownloadSourcesAutomatically(boolean value) {
    this.downloadSourcesAutomatically = value;
  }

  public boolean isDownloadDocsAutomatically() {
    return downloadDocsAutomatically;
  }

  public void setDownloadDocsAutomatically(boolean value) {
    this.downloadDocsAutomatically = value;
  }

  public boolean isDownloadAnnotationsAutomatically() {
    return downloadAnnotationsAutomatically;
  }

  public void setDownloadAnnotationsAutomatically(boolean value) {
    this.downloadAnnotationsAutomatically = value;
  }

  public boolean isAutoDetectCompiler() {
    return autoDetectCompiler;
  }

  public void setAutoDetectCompiler(boolean autoDetectCompiler) {
    this.autoDetectCompiler = autoDetectCompiler;
  }

  @Property
  @NotNull
  public GeneratedSourcesFolder getGeneratedSourcesFolder() {
    return generatedSourcesFolder;
  }

  public void setGeneratedSourcesFolder(GeneratedSourcesFolder generatedSourcesFolder) {
    if (generatedSourcesFolder == null) return; // null may come from deserializator
    this.generatedSourcesFolder = generatedSourcesFolder;
  }

  @NotNull
  public String getVmOptionsForImporter() {
    return vmOptionsForImporter;
  }

  public void setVmOptionsForImporter(String vmOptionsForImporter) {
    // Never store null; the field is contractually @NotNull.
    this.vmOptionsForImporter = StringUtil.notNullize(vmOptionsForImporter);
  }

  @NotNull
  public String getJdkForImporter() {
    return jdkForImporter;
  }

  public void setJdkForImporter(@NotNull String jdkForImporter) {
    this.jdkForImporter = jdkForImporter;
  }

  // equals/hashCode intentionally skip lookForNested (see commented lines),
  // importAutomatically (deprecated) and myListeners (transient observers).
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    MavenImportingSettings that = (MavenImportingSettings)o;

    if (createModuleGroups != that.createModuleGroups) return false;
    if (createModulesForAggregators != that.createModulesForAggregators) return false;
    if (!dependencyTypes.equals(that.dependencyTypes)) return false;
    if (downloadDocsAutomatically != that.downloadDocsAutomatically) return false;
    if (downloadSourcesAutomatically != that.downloadSourcesAutomatically) return false;
    if (downloadAnnotationsAutomatically != that.downloadAnnotationsAutomatically) return false;
    if (autoDetectCompiler != that.autoDetectCompiler) return false;
    //if (lookForNested != that.lookForNested) return false;
    if (keepSourceFolders != that.keepSourceFolders) return false;
    if (excludeTargetFolder != that.excludeTargetFolder) return false;
    if (useMavenOutput != that.useMavenOutput) return false;
    if (generatedSourcesFolder != that.generatedSourcesFolder) return false;
    if (!dedicatedModuleDir.equals(that.dedicatedModuleDir)) return false;
    if (!jdkForImporter.equals(that.jdkForImporter)) return false;
    if (!vmOptionsForImporter.equals(that.vmOptionsForImporter)) return false;
    if (updateFoldersOnImportPhase != null
        ? !updateFoldersOnImportPhase.equals(that.updateFoldersOnImportPhase)
        : that.updateFoldersOnImportPhase != null) {
      return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    // Boolean flags are packed into the low bits via shift-and-add; the
    // remaining fields are mixed in with the conventional 31 multiplier.
    int result = 0;
    //if (lookForNested) result++;
    //result <<= 1;
    if (createModulesForAggregators) result++;
    result <<= 1;
    if (createModuleGroups) result++;
    result <<= 1;
    if (keepSourceFolders) result++;
    result <<= 1;
    if (useMavenOutput) result++;
    result <<= 1;
    if (downloadSourcesAutomatically) result++;
    result <<= 1;
    if (downloadDocsAutomatically) result++;
    result <<= 1;
    if (downloadAnnotationsAutomatically) result++;
    result <<= 1;
    if (autoDetectCompiler) result++;
    result <<= 1;
    result = 31 * result + (updateFoldersOnImportPhase != null ? updateFoldersOnImportPhase.hashCode() : 0);
    result = 31 * result + dedicatedModuleDir.hashCode();
    result = 31 * result + generatedSourcesFolder.hashCode();
    result = 31 * result + dependencyTypes.hashCode();
    return result;
  }

  /** Deep-enough copy: settings fields are shared-immutable, listeners are NOT copied. */
  @Override
  public MavenImportingSettings clone() {
    try {
      MavenImportingSettings result = (MavenImportingSettings)super.clone();
      result.myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
      return result;
    }
    catch (CloneNotSupportedException e) {
      throw new Error(e);
    }
  }

  public void addListener(Listener l) {
    myListeners.add(l);
  }

  public void removeListener(Listener l) {
    myListeners.remove(l);
  }

  private void fireCreateModuleGroupsChanged() {
    for (Listener each : myListeners) {
      each.createModuleGroupsChanged();
    }
  }

  private void fireCreateModuleForAggregatorsChanged() {
    for (Listener each : myListeners) {
      each.createModuleForAggregatorsChanged();
    }
  }

  /** Callback interface for UI components reacting to module-layout option changes. */
  public interface Listener {
    void createModuleGroupsChanged();

    void createModuleForAggregatorsChanged();
  }
}
| |
/*
* DownloadClient Geodateninfrastruktur Bayern
*
* (c) 2016 GSt. GDI-BY (gdi.bayern.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.bayern.gdi.gui.controller;
import de.bayern.gdi.config.ApplicationSettings;
import de.bayern.gdi.config.Config;
import de.bayern.gdi.config.Credentials;
import de.bayern.gdi.gui.ServiceModel;
import de.bayern.gdi.services.Atom;
import de.bayern.gdi.services.Service;
import de.bayern.gdi.services.WFSMeta;
import de.bayern.gdi.services.WFSMetaExtractor;
import de.bayern.gdi.config.DownloadConfig;
import de.bayern.gdi.utils.I18n;
import de.bayern.gdi.utils.ServiceChecker;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.concurrent.Task;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.Cursor;
import javafx.scene.control.Button;
import javafx.scene.control.CheckBox;
import javafx.scene.control.ListView;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.List;
/**
* @author <a href="mailto:goltz@lat-lon.de">Lyn Goltz </a>
*/
@Named
@Singleton
public class ServiceSelectionController {
private static final Logger LOG = LoggerFactory.getLogger(ServiceSelectionController.class.getName());
private static final String STATUS_SERVICE_BROKEN = "status.service.broken";
private static final String STATUS_READY = "status.ready";
@Inject
private Controller controller;
@Inject
private StatusLogController statusLogController;
@Inject
private ServiceTypeSelectionController serviceTypeSelectionController;
@Inject
private ProcessingChainController processingChainController;
@FXML
private TextField searchField;
@FXML
private Button searchButton;
@FXML
private ProgressIndicator progressSearch;
@FXML
private ListView serviceList;
@FXML
private TextField serviceURL;
@FXML
private Button serviceSelectionBt;
@FXML
private CheckBox serviceAuthenticationCbx;
@FXML
private TextField serviceUser;
@FXML
private TextField servicePW;
/**
* Handle the service selection button event.
*
* @param event
* The mouse click event.
*/
@FXML
protected void handleServiceSelectButton(MouseEvent event) {
if (event.getButton().equals(MouseButton.PRIMARY)) {
controller.downloadConfig = null;
doSelectService();
}
}
/**
* Handle the service selection.
*
* @param event
* The mouse click event.
*/
@FXML
protected void handleServiceSelect(MouseEvent event) {
if (event.getEventType().equals(MouseEvent.MOUSE_CLICKED)) {
if (event.getClickCount() == 1) {
clearUserNamePassword();
ServiceModel serviceModel =
(ServiceModel) this.serviceList.getSelectionModel()
.getSelectedItems().get(0);
if (serviceModel != null) {
serviceSelectionBt.setDisable(true);
serviceURL.getScene().setCursor(Cursor.WAIT);
statusLogController.setStatusTextUI(
I18n.format("status.checking-auth"));
Task task = new Task() {
protected Integer call() {
try {
selectService(serviceModel.getItem());
return 0;
} finally {
serviceSelectionBt.setDisable(false);
serviceURL.getScene().setCursor(Cursor.DEFAULT);
}
}
};
Thread th = new Thread(task);
th.setDaemon(true);
th.start();
}
} else if (event.getClickCount() > 1) {
clearUserNamePassword();
controller.resetGui();
}
}
}
/**
* Handle authentication required selection.
*
* @param event
* the event
*/
@FXML
protected void handleAuthenticationRequired(ActionEvent event) {
boolean flag = !this.serviceAuthenticationCbx.isSelected();
this.serviceUser.setDisable(flag);
this.servicePW.setDisable(flag);
}
/**
* Handle search button clicks.
* Hide search button and start search
*
* @param event
* the event
*/
@FXML
protected void handleSearchButtonClick(MouseEvent event) {
handleSearch(null);
}
    /**
     * Handle search and filter the service list: the locally known services
     * are filtered synchronously (case-insensitive substring match); when
     * more than two characters were entered, the catalog service is
     * additionally queried on a background thread and its hits appended.
     *
     * @param event
     *         the event
     */
    @FXML
    protected void handleSearch(KeyEvent event) {
        if (!controller.catalogReachable) {
            statusLogController.setStatusTextUI(I18n.getMsg("status.catalog-not-available"));
        }
        String currentText = this.searchField.getText();
        this.serviceList.getItems().clear();
        controller.dataBean.resetCatalogLists();
        // NOTE(review): for empty text the full list is shown here AND again
        // via setServices(subentries) below (everything matches "") — the
        // second call is redundant but harmless. Confirm before simplifying.
        if (currentText == null || currentText.isEmpty()) {
            setServices(controller.dataBean.getServicesAsList());
        }
        String searchValue = currentText == null
                ? ""
                : currentText.toUpperCase();
        ObservableList<ServiceModel> subentries
                = FXCollections.observableArrayList();
        ObservableList<ServiceModel> all = controller.dataBean.getServicesAsList();
        // Synchronous filter over the services already known locally.
        for (ServiceModel entry : all) {
            boolean match
                    = entry.getName().toUpperCase().contains(searchValue);
            if (match) {
                subentries.add(entry);
            }
        }
        // Only query the remote catalog for reasonably specific input (>2 chars).
        if (currentText != null && currentText.length() > 2) {
            Task task = new Task() {
                @Override
                protected Integer call()
                        throws Exception {
                    // Swap the search button for the progress indicator on the FX thread.
                    Platform.runLater(() -> {
                        searchButton.setVisible(false);
                        searchButton.setManaged(false);
                        progressSearch.setVisible(true);
                        progressSearch.setManaged(true);
                    });
                    if (controller.catalogReachable) {
                        List<Service> catalog =
                                controller.dataBean.getCatalogService()
                                        .getServicesByFilter(currentText);
                        for (Service entry : catalog) {
                            controller.dataBean.addCatalogServiceToList(entry);
                        }
                        // Mutating 'subentries' on the FX thread updates the
                        // already-displayed list (see setServices below).
                        Platform.runLater(() -> {
                            for (Service entry : catalog) {
                                subentries.add(new ServiceModel(entry));
                            }
                        });
                    }
                    // Restore the search button once the catalog query is done.
                    Platform.runLater(() -> {
                        progressSearch.setVisible(false);
                        progressSearch.setManaged(false);
                        searchButton.setManaged(true);
                        searchButton.setVisible(true);
                    });
                    return 0;
                }
            };
            Thread th = new Thread(task);
            if (controller.catalogReachable) {
                statusLogController.setStatusTextUI(I18n.getMsg("status.calling-service"));
            }
            th.setDaemon(true);
            th.start();
        }
        // Show the synchronous matches immediately; catalog hits arrive later.
        setServices(subentries);
    }
/**
* Initialise the GUI with the passed url and config.
*
* @param url
* never <code>null</code>
* @param downloadConfig
* never <code>null</code>
*/
public void loadDownloadConfig(String url, DownloadConfig downloadConfig) {
this.serviceURL.setText(url);
doSelectService(downloadConfig);
}
/**
* Sets the services.
*
* @param servicesAsList
* may be <code>empty</code>> but never <code>null</code>
*/
public void setServices(ObservableList<ServiceModel> servicesAsList) {
this.serviceList.setItems(servicesAsList);
}
/**
* Resets GUI.
*/
public void resetGui() {
this.progressSearch.setVisible(false);
this.serviceUser.setDisable(true);
this.servicePW.setDisable(true);
}
private void clearUserNamePassword() {
this.serviceUser.setText("");
this.servicePW.setText("");
}
private void setUserNamePasswordFromServiceOrConfig(Service selectedService) {
ApplicationSettings settings = Config
.getInstance()
.getApplicationSettings();
Credentials credentials = settings.getCredentials();
if (selectedService.getUsername() != null && !selectedService.getUsername().isEmpty()) {
this.serviceUser.setText(selectedService.getUsername());
this.servicePW.setText(selectedService.getPassword());
} else if (credentials != null) {
this.serviceUser.setText(credentials.getUsername());
this.servicePW.setText(credentials.getPassword());
}
}
/**
* Select a service according to service url textfield.
*/
private void doSelectService() {
doSelectService(null);
}
    /**
     * Select a service according to service url textfield. The resolution
     * (reachability check, service construction) runs on a worker thread;
     * the selection controls are locked while it is in progress.
     *
     * @param downloadConf
     *         Loaded download config, null if a service is chosen
     *         from an URL or the service List
     */
    private void doSelectService(DownloadConfig downloadConf) {
        LOG.info("Using download config: " + downloadConf);
        controller.dataBean.resetSelectedService();
        // Lock the selection controls while the service is resolved.
        serviceSelectionBt.setDisable(true);
        serviceURL.getScene().setCursor(Cursor.WAIT);
        serviceURL.setDisable(true);
        controller.resetGui();
        // Resolve off the FX thread: reachability checks may block on I/O.
        new Thread(() -> {
            try {
                ObservableList selectedItems = serviceList.
                        getSelectionModel().getSelectedItems();
                ServiceModel serviceModel = selectedItems.isEmpty() ? null
                        : (ServiceModel) selectedItems.get(0);
                Service service = null;
                // Prefer the list selection, but only when its URL still matches
                // the text field; otherwise treat the field content as a new URL.
                if (serviceModel != null
                        && serviceModel.getUrl().toString().equals(
                        serviceURL.getText())
                ) {
                    if (ServiceChecker.isReachable(serviceModel
                            .getItem().getServiceURL())) {
                        service = serviceModel.getItem();
                        service.setPassword(servicePW.getText());
                        service.setUsername(serviceUser.getText());
                    }
                } else {
                    URL sURL = new URL(serviceURL.getText());
                    LOG.info("Connecting " + sURL + "...");
                    if (ServiceChecker.isReachable(sURL)) {
                        service = new Service(
                                sURL,
                                "",
                                true,
                                serviceUser.getText(),
                                servicePW.getText());
                    }
                }
                // Unreachable or unresolved: restore the UI and report a timeout.
                if (service == null) {
                    statusLogController.setStatusTextUI(
                            I18n.format("status.service-timeout"));
                    controller.dataBean.setSelectedService(null);
                    serviceSelectionBt.setDisable(false);
                    serviceURL.setDisable(false);
                    serviceURL.getScene().setCursor(Cursor.DEFAULT);
                    return;
                }
                // Keep the UI locked for the authentication check below.
                serviceSelectionBt.setDisable(true);
                serviceURL.getScene().setCursor(Cursor.WAIT);
                statusLogController.setStatusTextUI(
                        I18n.format("status.checking-auth"));
                serviceURL.setDisable(true);
                // Effectively-final copy required for capture by the Task below.
                Service finalService = service;
                Task task = new Task() {
                    protected Integer call() {
                        try {
                            boolean serviceSelected = selectService(
                                    finalService);
                            if (serviceSelected) {
                                chooseSelectedService(downloadConf);
                            }
                            persistUsernameAndPasswordInSettingsXml(finalService);
                            return 0;
                        } finally {
                            // Always unlock the UI, even if selection failed.
                            serviceSelectionBt.setDisable(false);
                            serviceURL.getScene()
                                    .setCursor(Cursor.DEFAULT);
                            serviceURL.setDisable(false);
                            processingChainController.validateChainContainerItems();
                        }
                    }
                };
                Thread th = new Thread(task);
                th.setDaemon(true);
                th.start();
            } catch (MalformedURLException e) {
                // Bad text-field input: report and unlock the UI.
                statusLogController.setStatusTextUI(
                        I18n.format("status.no-url"));
                LOG.error(e.getMessage(), e);
                serviceSelectionBt.setDisable(false);
                serviceURL.getScene()
                        .setCursor(Cursor.DEFAULT);
                serviceURL.setDisable(false);
            }
        }).start();
    }
    /**
     * Loads and activates the given service. Returns true when the service is
     * usable and fully selected; false when it is unreachable, broken, or
     * still needs credentials from the user. Runs on a worker thread — all UI
     * mutations are funneled through Platform.runLater.
     */
    private boolean selectService(Service service) {
        LOG.info("User selected: " + service.toString());
        if (ServiceChecker.isReachable(service.getServiceURL())) {
            try {
                service.load();
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
                Platform.runLater(() ->
                        statusLogController.setStatusTextUI(
                                I18n.format(STATUS_SERVICE_BROKEN))
                );
                return false;
            }
        } else {
            Platform.runLater(() ->
                    statusLogController.setStatusTextUI(
                            I18n.format("status.service-not-available"))
            );
            return false;
        }
        // Re-selecting the already selected service is a cheap no-op success.
        if (controller.dataBean.getSelectedService() != null
                && controller.dataBean.getSelectedService().equals(service)) {
            Platform.runLater(() ->
                    statusLogController.setStatusTextUI(
                            I18n.format(STATUS_READY))
            );
            return true;
        }
        controller.dataBean.setSelectedService(service);
        Platform.runLater(() -> {
            controller.resetGui();
            this.serviceURL.setText(
                    controller.dataBean.getSelectedService().getServiceURL().toString()
            );
        });
        //Check if Username and Password are given
        // NOTE(review): if getUsername() returns null (and so the first clause
        // is false), the second clause dereferences the null username and
        // throws a NullPointerException — the short-circuit does not protect
        // it. Confirm restricted services always carry non-null credentials.
        if (((controller.dataBean.getSelectedService().getUsername() != null
                && controller.dataBean.getSelectedService().getPassword() != null)
                || (controller.dataBean.getSelectedService().getUsername().isEmpty()
                && controller.dataBean.getSelectedService().getPassword().isEmpty()))
                && controller.dataBean.getSelectedService().isRestricted()) {
            // Restricted service: enable the credential fields, pre-fill them
            // from the service/config, and ask the user to confirm.
            Platform.runLater(() -> {
                statusLogController.setStatusTextUI(
                        I18n.format("status.service-needs-auth"));
                this.serviceAuthenticationCbx.setSelected(true);
                this.serviceUser.setDisable(false);
                this.servicePW.setDisable(false);
                setUserNamePasswordFromServiceOrConfig(controller.dataBean.getSelectedService());
            });
            return false;
        } else {
            Platform.runLater(() -> {
                this.serviceAuthenticationCbx.setSelected(false);
                this.serviceUser.setDisable(true);
                this.servicePW.setDisable(true);
                clearUserNamePassword();
            });
        }
        //Check if this thing could be loaded
        if (controller.dataBean.getSelectedService().getServiceType() == null) {
            Platform.runLater(() ->
                    statusLogController.setStatusTextUI(
                            I18n.format(STATUS_SERVICE_BROKEN))
            );
            return false;
        }
        Platform.runLater(() ->
                statusLogController.setStatusTextUI(
                        I18n.format(STATUS_READY))
        );
        return true;
    }
/**
 * Use selection to request the service data and fill the UI.
 *
 * Parses the selected service's capabilities according to its type (ATOM,
 * WFS 1.x or WFS 2.x) and stores the resulting metadata in the data bean.
 * Runs on a background thread; UI updates go through {@link Platform#runLater}.
 *
 * @param downloadConf
 * Loaded download config, null if service
 * was chosen from an URL or the service list
 */
private void chooseSelectedService(DownloadConfig downloadConf) {
    switch (controller.dataBean.getSelectedService().getServiceType()) {
        case ATOM:
            Platform.runLater(() ->
                statusLogController.setStatusTextUI(
                    I18n.getMsg("status.type.atom"))
            );
            Atom atom = null;
            try {
                atom = new Atom(
                    controller.dataBean.getSelectedService()
                        .getServiceURL().toString(),
                    controller.dataBean.getSelectedService().getUsername(),
                    controller.dataBean.getSelectedService().getPassword());
            } catch (IllegalArgumentException
                | URISyntaxException
                | ParserConfigurationException
                | IOException e) {
                LOG.error(e.getMessage(), e);
                Platform.runLater(() ->
                    statusLogController.setStatusTextUI(
                        I18n.getMsg(STATUS_SERVICE_BROKEN)
                    )
                );
                controller.resetGui();
                return;
            } finally {
                // Runs before the catch-branch return: on failure this
                // deliberately clears any previously stored Atom service.
                controller.dataBean.setAtomService(atom);
            }
            break;
        case WFS_ONE:
            Platform.runLater(() ->
                statusLogController.setStatusTextUI(
                    I18n.getMsg("status.type.wfsone"))
            );
            WFSMetaExtractor wfsOne =
                new WFSMetaExtractor(
                    controller.dataBean.getSelectedService()
                        .getServiceURL().toString(),
                    controller.dataBean.getSelectedService().getUsername(),
                    controller.dataBean.getSelectedService().getPassword());
            WFSMeta metaOne = null;
            try {
                metaOne = wfsOne.parse();
            } catch (IOException
                | URISyntaxException e) {
                LOG.error(e.getMessage(), e);
                Platform.runLater(() ->
                    statusLogController.setStatusTextUI(
                        I18n.getMsg(STATUS_SERVICE_BROKEN)
                    )
                );
            } finally {
                // On failure stores null, clearing any stale WFS metadata.
                controller.dataBean.setWFSService(metaOne);
            }
            break;
        case WFS_TWO:
            Platform.runLater(() ->
                statusLogController.setStatusTextUI(
                    I18n.getMsg("status.type.wfstwo"))
            );
            WFSMetaExtractor extractor =
                new WFSMetaExtractor(
                    controller.dataBean.getSelectedService()
                        .getServiceURL().toString(),
                    controller.dataBean.getSelectedService().getUsername(),
                    controller.dataBean.getSelectedService().getPassword());
            WFSMeta meta = null;
            try {
                meta = extractor.parse();
            } catch (IOException
                | URISyntaxException e) {
                LOG.error(e.getMessage(), e);
                Platform.runLater(() ->
                    statusLogController.setStatusTextUI(
                        I18n.getMsg(STATUS_SERVICE_BROKEN))
                );
            } finally {
                controller.dataBean.setWFSService(meta);
            }
            break;
        default:
            // Fixed: the previous call passed the service as a second argument
            // to warn() without a placeholder, so it was never logged.
            LOG.warn(
                "Could not determine URL: {}",
                controller.dataBean.getSelectedService());
            Platform.runLater(() ->
                statusLogController.setStatusTextUI(I18n.getMsg("status.no-url"))
            );
            break;
    }
    if (controller.dataBean.isWebServiceSet()) {
        Platform.runLater(serviceTypeSelectionController::setServiceTypes);
    } else {
        return;
    }
    Platform.runLater(() -> {
        serviceTypeSelectionController.loadDownloadConfig(downloadConf);
        statusLogController.setStatusTextUI(I18n.getMsg(STATUS_READY));
    });
}
/**
 * Persists the service credentials to the application settings (settings.xml)
 * when a non-empty username is present on the service.
 *
 * @param service the service whose credentials should be stored
 */
private void persistUsernameAndPasswordInSettingsXml(Service service) {
    String username = service.getUsername();
    if (username == null || username.isEmpty()) {
        return;
    }
    ApplicationSettings settings =
        Config.getInstance().getApplicationSettings();
    settings.persistCredentials(
        new Credentials(username, service.getPassword()));
}
}
| |
/*
* Copyright 2017-2021 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.gchq.gaffer.store.library;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import uk.gov.gchq.gaffer.commonutil.JsonAssert;
import uk.gov.gchq.gaffer.commonutil.JsonUtil;
import uk.gov.gchq.gaffer.commonutil.TestGroups;
import uk.gov.gchq.gaffer.commonutil.exception.OverwritingException;
import uk.gov.gchq.gaffer.commonutil.pair.Pair;
import uk.gov.gchq.gaffer.store.StoreProperties;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.Schema.Builder;
import uk.gov.gchq.gaffer.store.schema.SchemaEdgeDefinition;
import uk.gov.gchq.gaffer.store.schema.SchemaEntityDefinition;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Contract tests for {@link GraphLibrary} implementations. Subclasses supply
 * the concrete implementation via {@link #createGraphLibraryInstance()}.
 */
public abstract class AbstractGraphLibraryTest {
    protected GraphLibrary graphLibrary;

    private static final String TEST_GRAPH_ID = "testGraphId";
    private static final String TEST_GRAPH_ID_1 = "testGraphId1";
    private static final String TEST_UNKNOWN_ID = "unknownId";
    private static final String TEST_SCHEMA_ID = "testSchemaId";
    private static final String TEST_PROPERTIES_ID = "testPropertiesId";

    private Schema schema = new Schema.Builder().build();
    private Schema schema1 = new Schema.Builder().build();
    private StoreProperties storeProperties = new StoreProperties();
    private StoreProperties storeProperties1 = new StoreProperties();

    /**
     * @return a fresh {@link GraphLibrary} instance to test; invoked before each test.
     */
    public abstract GraphLibrary createGraphLibraryInstance();

    @BeforeEach
    public void beforeEach() {
        graphLibrary = createGraphLibraryInstance();
        // HashMapGraphLibrary is backed by static maps, so reset them between tests.
        if (graphLibrary instanceof HashMapGraphLibrary) {
            HashMapGraphLibrary.clear();
        }
    }

    @Test
    public void shouldAddAndGetMultipleIdsInGraphLibrary() {
        // When
        graphLibrary.add(TEST_GRAPH_ID, schema, storeProperties);
        graphLibrary.add(TEST_GRAPH_ID_1, schema1, storeProperties1);

        // Then
        assertEquals(new Pair<>(TEST_GRAPH_ID, TEST_GRAPH_ID), graphLibrary.getIds(TEST_GRAPH_ID));
        assertEquals(new Pair<>(TEST_GRAPH_ID_1, TEST_GRAPH_ID_1), graphLibrary.getIds(TEST_GRAPH_ID_1));
    }

    @Test
    public void shouldAddAndGetIdsInGraphLibrary() {
        // When
        graphLibrary.add(TEST_GRAPH_ID, schema, storeProperties);

        // Then
        assertEquals(new Pair<>(TEST_GRAPH_ID, TEST_GRAPH_ID), graphLibrary.getIds(TEST_GRAPH_ID));
    }

    @Test
    public void shouldThrowExceptionWithInvalidGraphId() {
        // When / Then
        assertThatIllegalArgumentException().isThrownBy(() -> graphLibrary.add(TEST_GRAPH_ID + "@#", schema, storeProperties)).extracting("message").isNotNull();
    }

    @Test
    public void shouldAddAndGetSchema() {
        // When
        graphLibrary.addSchema(TEST_SCHEMA_ID, schema);

        // Then
        JsonAssert.assertEquals(schema.toJson(false), graphLibrary.getSchema(TEST_SCHEMA_ID).toJson(false));
    }

    @Test
    public void shouldNotAddNullSchema() {
        // When / Then
        // Fixed: the previous try/catch silently passed when no exception was thrown.
        assertThatIllegalArgumentException()
                .isThrownBy(() -> graphLibrary.addSchema(null, null))
                .withMessageContaining("Schema cannot be null");
    }

    @Test
    public void shouldThrowExceptionWhenGraphIdWithDifferentSchemaExists() {
        // Given
        graphLibrary.add(TEST_GRAPH_ID, schema, storeProperties);
        Schema tempSchema = new Schema.Builder().edge("testEdge", new SchemaEdgeDefinition()).build();

        // When / Then
        assertThatExceptionOfType(OverwritingException.class)
                .isThrownBy(() -> graphLibrary.add(TEST_GRAPH_ID, tempSchema, storeProperties))
                .withMessageContaining("already exists with a different schema");
    }

    @Test
    public void shouldUpdateSchema() {
        // Given
        graphLibrary.addOrUpdateSchema(TEST_SCHEMA_ID, schema);
        Schema tempSchema = new Schema.Builder().edge("testEdge", new SchemaEdgeDefinition()).build();

        // Then
        JsonAssert.assertEquals(schema.toJson(false), graphLibrary.getSchema(TEST_SCHEMA_ID).toJson(false));

        // When
        graphLibrary.addOrUpdateSchema(TEST_SCHEMA_ID, tempSchema);

        // Then
        JsonAssert.assertEquals(tempSchema.toJson(false), graphLibrary.getSchema(TEST_SCHEMA_ID).toJson(false));
    }

    @Test
    public void shouldAddAndGetProperties() {
        // When
        graphLibrary.addProperties(TEST_PROPERTIES_ID, storeProperties);

        // Then
        assertEquals(storeProperties, graphLibrary.getProperties(TEST_PROPERTIES_ID));
    }

    @Test
    public void shouldNotAddNullProperties() {
        // When / Then
        // Fixed: the previous try/catch silently passed when no exception was thrown.
        assertThatIllegalArgumentException()
                .isThrownBy(() -> graphLibrary.addProperties(null, null))
                .withMessageContaining("Store properties cannot be null");
    }

    @Test
    public void shouldThrowExceptionWhenGraphIdWithDifferentPropertiesExists() {
        // Given
        graphLibrary.add(TEST_GRAPH_ID, schema, storeProperties);
        StoreProperties tempStoreProperties = storeProperties.clone();
        tempStoreProperties.set("testKey", "testValue");

        // When / Then
        assertThatExceptionOfType(Exception.class)
                .isThrownBy(() -> graphLibrary.add(TEST_GRAPH_ID, schema, tempStoreProperties))
                .withMessageContaining("already exists with a different store properties");
    }

    @Test
    public void shouldUpdateStoreProperties() {
        // Given
        graphLibrary.addOrUpdateProperties(TEST_PROPERTIES_ID, storeProperties);
        StoreProperties tempStoreProperties = storeProperties.clone();
        tempStoreProperties.set("testKey", "testValue");

        // Then
        assertEquals(storeProperties.getProperties(), graphLibrary.getProperties(TEST_PROPERTIES_ID).getProperties());

        // When
        graphLibrary.addOrUpdateProperties(TEST_PROPERTIES_ID, tempStoreProperties);

        // Then
        assertEquals(tempStoreProperties.getProperties(), graphLibrary.getProperties(TEST_PROPERTIES_ID).getProperties());
    }

    @Test
    public void shouldNotThrowExceptionWhenGraphIdWithSameSchemaExists() {
        // Given
        graphLibrary.add(TEST_GRAPH_ID, schema1, storeProperties);
        final Schema schema1Clone = schema1.clone();

        // When
        graphLibrary.checkExisting(TEST_GRAPH_ID, schema1Clone, storeProperties);

        // Then - no exceptions
    }

    @Test
    public void shouldNotThrowExceptionWhenGraphIdWithSamePropertiesExists() {
        // Given
        graphLibrary.add(TEST_GRAPH_ID, schema1, storeProperties);
        final StoreProperties storePropertiesClone = storeProperties.clone();

        // When
        graphLibrary.checkExisting(TEST_GRAPH_ID, schema1, storePropertiesClone);

        // Then - no exceptions
    }

    @Test
    public void shouldUpdateWhenGraphIdExists() {
        // When
        graphLibrary.addOrUpdate(TEST_GRAPH_ID, schema, storeProperties);

        // Then
        assertEquals(storeProperties, graphLibrary.getProperties(TEST_GRAPH_ID));

        // When
        graphLibrary.addOrUpdate(TEST_GRAPH_ID, schema, storeProperties1);

        // Then
        assertEquals(storeProperties1, graphLibrary.getProperties(TEST_GRAPH_ID));
    }

    @Test
    public void shouldReturnNullWhenPropertyIdIsNotFound() {
        // When
        final StoreProperties unknownStoreProperties = graphLibrary.getProperties(TEST_UNKNOWN_ID);

        // Then
        assertNull(unknownStoreProperties);
    }

    @Test
    public void shouldReturnNullWhenSchemaIdIsNotFound() {
        // When
        final Schema unknownSchema = graphLibrary.getSchema(TEST_UNKNOWN_ID);

        // Then
        assertNull(unknownSchema);
    }

    @Test
    public void shouldThrowExceptionWhenNewStorePropertiesAreAddedWithSamePropertiesIdAndDifferentProperties() {
        // Given
        final StoreProperties tempStoreProperties = storeProperties.clone();
        tempStoreProperties.set("randomKey", "randomValue");

        // When
        graphLibrary.addProperties(TEST_PROPERTIES_ID, storeProperties);

        // Then
        assertThatExceptionOfType(OverwritingException.class)
                .isThrownBy(() -> graphLibrary.addProperties(TEST_PROPERTIES_ID, tempStoreProperties))
                .withMessageContaining("already exists with a different store properties");
    }

    @Test
    public void shouldThrowExceptionWhenNewSchemaIsAddedWithSameSchemaIdAndDifferentSchema() {
        // Given
        final Schema tempSchema = new Schema.Builder()
                .edge(TestGroups.ENTITY, new SchemaEdgeDefinition.Builder()
                        .build())
                .build();

        // When
        graphLibrary.addSchema(TEST_SCHEMA_ID, schema);

        // Then
        assertThatExceptionOfType(OverwritingException.class)
                .isThrownBy(() -> graphLibrary.addSchema(TEST_SCHEMA_ID, tempSchema))
                .withMessageContaining("already exists with a different schema");
    }

    @Test
    public void shouldIgnoreDuplicateAdditionWhenStorePropertiesAreIdentical() {
        // Given
        final StoreProperties tempStoreProperties = storeProperties.clone();

        // When
        graphLibrary.addProperties(TEST_PROPERTIES_ID, storeProperties);
        graphLibrary.addProperties(TEST_PROPERTIES_ID, tempStoreProperties);

        // Then - no exception
    }

    @Test
    public void shouldIgnoreDuplicateAdditionWhenSchemasAreIdentical() {
        // Given
        final Schema tempSchema = schema.clone();

        // When
        graphLibrary.addSchema(TEST_SCHEMA_ID, schema);
        graphLibrary.addSchema(TEST_SCHEMA_ID, tempSchema);

        // Then - no exceptions
    }

    @Test
    public void shouldNotOverwriteSchemaWithClashingName() throws Exception {
        final String clashingId = "clashingId";
        byte[] entitySchema = new Builder().entity("e1", new SchemaEntityDefinition.Builder().property("p1", "string").build()).type("string", String.class).build().toJson(true);
        byte[] edgeSchema = new Builder().edge("e1", new SchemaEdgeDefinition.Builder().property("p1", "string").build()).type("string", String.class).build().toJson(true);

        graphLibrary.addSchema(clashingId, Schema.fromJson(entitySchema));

        assertThatExceptionOfType(OverwritingException.class)
                .isThrownBy(() -> graphLibrary.add("graph", clashingId, Schema.fromJson(edgeSchema), TEST_PROPERTIES_ID, new StoreProperties()))
                .withMessageContaining("schemaId clashingId already exists with a different schema");

        // The original schema must be untouched by the failed add.
        Schema schemaFromLibrary = graphLibrary.getSchema(clashingId);
        assertTrue(JsonUtil.equals(entitySchema, schemaFromLibrary.toJson(true)));
        assertFalse(JsonUtil.equals(schemaFromLibrary.toJson(true), edgeSchema));
    }

    @Test
    public void shouldNotOverwriteStorePropertiesWithClashingName() throws Exception {
        final String clashingId = "clashingId";
        StoreProperties propsA = new StoreProperties();
        propsA.set("a", "a");
        StoreProperties propsB = new StoreProperties();
        propsB.set("b", "b");

        graphLibrary.addProperties(clashingId, propsA);

        assertThatExceptionOfType(OverwritingException.class)
                .isThrownBy(() -> graphLibrary.add("graph", TEST_SCHEMA_ID, new Schema(), clashingId, propsB))
                .withMessageContaining("propertiesId clashingId already exists with a different store properties");

        // The original properties must be untouched by the failed add.
        StoreProperties storePropertiesFromLibrary = graphLibrary.getProperties(clashingId);
        assertEquals(propsA.getProperties(), storePropertiesFromLibrary.getProperties());
        assertNotEquals(propsB.getProperties(), storePropertiesFromLibrary.getProperties());
    }

    @Test
    public void shouldThrowExceptionWhenAddingAFullLibraryWithNullSchema() throws Exception {
        assertThatIllegalArgumentException().isThrownBy(() -> graphLibrary.add(TEST_GRAPH_ID, null, storeProperties)).withMessage(String.format(GraphLibrary.A_GRAPH_LIBRARY_CAN_T_BE_ADDED_WITH_A_NULL_S_GRAPH_ID_S, Schema.class.getSimpleName(), TEST_GRAPH_ID));
    }

    @Test
    public void shouldThrowExceptionWhenAddingAFullLibraryWithNullStoreProperties() throws Exception {
        assertThatIllegalArgumentException().isThrownBy(() -> graphLibrary.add(TEST_GRAPH_ID, schema, null)).withMessage(String.format(GraphLibrary.A_GRAPH_LIBRARY_CAN_T_BE_ADDED_WITH_A_NULL_S_GRAPH_ID_S, StoreProperties.class.getSimpleName(), TEST_GRAPH_ID));
    }

    @Test
    public void shouldThrowExceptionWhenAddingAFullLibraryWithNullSchemaAndStoreProperties() throws Exception {
        assertThatIllegalArgumentException().isThrownBy(() -> graphLibrary.add(TEST_GRAPH_ID, null, null)).withMessage(String.format(GraphLibrary.A_GRAPH_LIBRARY_CAN_T_BE_ADDED_WITH_A_NULL_S_GRAPH_ID_S, Schema.class.getSimpleName() + " and " + StoreProperties.class.getSimpleName(), TEST_GRAPH_ID));
    }
}
| |
package jenkins.model;
import hudson.Functions;
import hudson.init.InitMilestone;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Ignore;
import org.junit.Test;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.LoggerRule;
import org.jvnet.hudson.test.MockFolder;
import org.jvnet.hudson.test.RestartableJenkinsRule;
import org.jvnet.hudson.test.recipes.LocalData;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Stream;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
/**
* Since JENKINS-50164, Jenkins#workspacesDir and Jenkins#buildsDir had their associated UI deleted.
* So instead of configuring through the UI, we now have to use sysprops for this.
* <p>
* So this test class uses a {@link RestartableJenkinsRule} to check the behaviour of this sysprop being
* present or not between two restarts.
*/
public class JenkinsBuildsAndWorkspacesDirectoriesTest {
private static final String LOG_WHEN_CHANGING_BUILDS_DIR = "Changing builds directories from ";
private static final String LOG_WHEN_CHANGING_WORKSPACES_DIR = "Changing workspaces directories from ";
@Rule
public RestartableJenkinsRule story = new RestartableJenkinsRule();
@Rule
public LoggerRule loggerRule = new LoggerRule();
@Before
public void before() {
clearSystemProperties();
}
@After
public void after() {
clearSystemProperties();
}
private void clearSystemProperties() {
Stream.of(Jenkins.BUILDS_DIR_PROP, Jenkins.WORKSPACES_DIR_PROP)
.forEach(System::clearProperty);
}
@Issue("JENKINS-53284")
@Test
public void changeWorkspacesDirLog() throws Exception {
loggerRule.record(Jenkins.class, Level.WARNING)
.record(Jenkins.class, Level.INFO).capture(1000);
story.then(step -> {
assertFalse(logWasFound(LOG_WHEN_CHANGING_WORKSPACES_DIR));
setWorkspacesDirProperty("testdir1");
});
story.then(step -> {
assertTrue(logWasFoundAtLevel(LOG_WHEN_CHANGING_WORKSPACES_DIR,
Level.WARNING));
setWorkspacesDirProperty("testdir2");
});
story.then(step -> {
assertTrue(logWasFoundAtLevel(LOG_WHEN_CHANGING_WORKSPACES_DIR,
Level.WARNING));
});
}
@Issue("JENKINS-50164")
@Test
public void badValueForBuildsDir() {
story.then(rule -> {
final List<String> badValues = new ArrayList<>(Arrays.asList(
"blah",
"$JENKINS_HOME",
"$JENKINS_HOME/builds",
"$ITEM_FULL_NAME",
"/path/to/builds",
"/invalid/$JENKINS_HOME",
"relative/ITEM_FULL_NAME"));
if (!new File("/").canWrite()) {
badValues.add("/foo/$ITEM_FULL_NAME");
badValues.add("/$ITEM_FULLNAME");
} // else perhaps running as root
for (String badValue : badValues) {
try {
Jenkins.checkRawBuildsDir(badValue);
fail(badValue + " should have been rejected");
} catch (InvalidBuildsDir invalidBuildsDir) {
// expected failure
}
}
});
}
@Issue("JENKINS-50164")
@Test
public void goodValueForBuildsDir() {
story.then(rule -> {
final List<String> badValues = Arrays.asList(
"$JENKINS_HOME/foo/$ITEM_FULL_NAME",
"${ITEM_ROOTDIR}/builds");
for (String goodValue : badValues) {
Jenkins.checkRawBuildsDir(goodValue);
}
});
}
@Issue("JENKINS-50164")
@Test
public void jenkinsDoesNotStartWithBadSysProp() {
loggerRule.record(Jenkins.class, Level.WARNING)
.record(Jenkins.class, Level.INFO)
.capture(100);
story.then(rule -> {
assertTrue(story.j.getInstance().isDefaultBuildDir());
setBuildsDirProperty("/bluh");
});
story.thenDoesNotStart();
}
@Issue("JENKINS-50164")
@Test
public void jenkinsDoesNotStartWithScrewedUpConfigXml() {
loggerRule.record(Jenkins.class, Level.WARNING)
.record(Jenkins.class, Level.INFO)
.capture(100);
story.then(rule -> {
assertTrue(story.j.getInstance().isDefaultBuildDir());
// Now screw up the value by writing into the file directly, like one could do using external XML manipulation tools
final File configFile = new File(rule.jenkins.getRootDir(), "config.xml");
final String screwedUp = FileUtils.readFileToString(configFile).
replaceFirst("<buildsDir>.*</buildsDir>", "<buildsDir>eeeeeeeeek</buildsDir>");
FileUtils.write(configFile, screwedUp);
});
story.thenDoesNotStart();
}
@Issue("JENKINS-50164")
@Test
public void buildsDir() throws Exception {
loggerRule.record(Jenkins.class, Level.WARNING)
.record(Jenkins.class, Level.INFO)
.capture(100);
story.then(step -> {
assertFalse(logWasFound("Using non default builds directories"));
}
);
story.then(steps -> {
assertTrue(story.j.getInstance().isDefaultBuildDir());
setBuildsDirProperty("$JENKINS_HOME/plouf/$ITEM_FULL_NAME/bluh");
assertFalse(JenkinsBuildsAndWorkspacesDirectoriesTest.this.logWasFound(LOG_WHEN_CHANGING_BUILDS_DIR));
});
story.then(step -> {
assertFalse(story.j.getInstance().isDefaultBuildDir());
assertEquals("$JENKINS_HOME/plouf/$ITEM_FULL_NAME/bluh", story.j.getInstance().getRawBuildsDir());
assertTrue(logWasFound("Changing builds directories from "));
}
);
story.then(step -> assertTrue(logWasFound("Using non default builds directories"))
);
}
@Issue("JENKINS-50164")
@Test
public void workspacesDir() throws Exception {
loggerRule.record(Jenkins.class, Level.WARNING)
.record(Jenkins.class, Level.INFO)
.capture(1000);
story.then(step -> assertFalse(logWasFound("Using non default workspaces directories")));
story.then(step -> {
assertTrue(story.j.getInstance().isDefaultWorkspaceDir());
final String workspacesDir = "bluh";
setWorkspacesDirProperty(workspacesDir);
assertFalse(logWasFound("Changing workspaces directories from "));
});
story.then(step -> {
assertFalse(story.j.getInstance().isDefaultWorkspaceDir());
assertEquals("bluh", story.j.getInstance().getRawWorkspaceDir());
assertTrue(logWasFound("Changing workspaces directories from "));
});
story.then(step -> {
assertFalse(story.j.getInstance().isDefaultWorkspaceDir());
assertTrue(logWasFound("Using non default workspaces directories"));
}
);
}
@Ignore("TODO calling restart seems to break Surefire")
@Issue("JENKINS-50164")
@LocalData
@Test
public void fromPreviousCustomSetup() {
assumeFalse("Default Windows lifecycle does not support restart.", Functions.isWindows());
// check starting point and change config for next run
final String newBuildsDirValueBySysprop = "/tmp/${ITEM_ROOTDIR}/bluh";
story.then(j -> {
assertEquals("${ITEM_ROOTDIR}/ze-previous-custom-builds", j.jenkins.getRawBuildsDir());
setBuildsDirProperty(newBuildsDirValueBySysprop);
});
// Check the sysprop setting was taken in account
story.then(j -> {
assertEquals(newBuildsDirValueBySysprop, j.jenkins.getRawBuildsDir());
// ** HACK AROUND JENKINS-50422: manually restarting ** //
// Check the disk (cannot just restart normally with the rule, )
assertThat(FileUtils.readFileToString(new File(j.jenkins.getRootDir(), "config.xml")),
containsString("<buildsDir>" + newBuildsDirValueBySysprop + "</buildsDir>"));
String rootDirBeforeRestart = j.jenkins.getRootDir().toString();
clearSystemProperties();
j.jenkins.restart();
int maxLoops = 50;
while (j.jenkins.getInitLevel() != InitMilestone.COMPLETED && maxLoops-- > 0) {
Thread.sleep(300);
}
assertEquals(rootDirBeforeRestart, j.jenkins.getRootDir().toString());
assertThat(FileUtils.readFileToString(new File(j.jenkins.getRootDir(), "config.xml")),
containsString("<buildsDir>" + newBuildsDirValueBySysprop + "</buildsDir>"));
assertEquals(newBuildsDirValueBySysprop, j.jenkins.getRawBuildsDir());
// ** END HACK ** //
});
}
private void setWorkspacesDirProperty(String workspacesDir) {
System.setProperty(Jenkins.WORKSPACES_DIR_PROP, workspacesDir);
}
private void setBuildsDirProperty(String buildsDir) {
System.setProperty(Jenkins.BUILDS_DIR_PROP, buildsDir);
}
private boolean logWasFound(String searched) {
return loggerRule.getRecords().stream()
.anyMatch(record -> record.getMessage().contains(searched));
}
private boolean logWasFoundAtLevel(String searched, Level level) {
return loggerRule.getRecords().stream()
.filter(record -> record.getMessage().contains(searched)).anyMatch(record -> record.getLevel().equals(level));
}
@Test
@Issue("JENKINS-17138")
public void externalBuildDirectoryRenameDelete() throws Exception {
// Hack to get String builds usable in lambda below
final List<String> builds = new ArrayList<>();
story.then(steps -> {
builds.add(story.j.createTmpDir().toString());
assertTrue(story.j.getInstance().isDefaultBuildDir());
setBuildsDirProperty(builds.get(0) + "/${ITEM_FULL_NAME}");
});
story.then(steps -> {
assertEquals(builds.get(0) + "/${ITEM_FULL_NAME}", story.j.jenkins.getRawBuildsDir());
FreeStyleProject p = story.j.jenkins.createProject(MockFolder.class, "d").createProject(FreeStyleProject.class, "prj");
FreeStyleBuild b = p.scheduleBuild2(0).get();
File oldBuildDir = new File(builds.get(0), "d/prj");
assertEquals(new File(oldBuildDir, b.getId()), b.getRootDir());
assertTrue(b.getRootDir().isDirectory());
p.renameTo("proj");
File newBuildDir = new File(builds.get(0), "d/proj");
assertEquals(new File(newBuildDir, b.getId()), b.getRootDir());
assertTrue(b.getRootDir().isDirectory());
p.delete();
assertFalse(b.getRootDir().isDirectory());
});
}
}
| |
/*
* Copyright (c) 2008, The Codehaus. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.codehaus.httpcache4j.cache;
import org.codehaus.httpcache4j.*;
import org.codehaus.httpcache4j.resolver.ResponseResolver;
import org.codehaus.httpcache4j.uri.URIBuilder;
import org.codehaus.httpcache4j.util.OptionalUtils;
import java.io.IOException;
import java.net.URI;
import java.time.LocalDateTime;
import java.util.Objects;
import java.util.Optional;
/**
* The main HTTPCache class.
*
* @author <a href="mailto:hamnis@codehaus.org">Erlend Hamnaberg</a>
*/
public class HTTPCache {
private final HTTPCacheHelper helper;
private final CacheStatistics statistics = new CacheStatistics();
private final CacheStorage storage;
private final ResponseResolver resolver;
private final Mutex<URI> mutex = new Mutex<>();
private boolean translateHEADToGET = false;
/**
 * Creates a cache facade over the given storage and origin resolver.
 *
 * @param storage  backing cache storage; must not be {@code null}
 * @param resolver resolver used to contact the origin server; must not be {@code null}
 */
public HTTPCache(CacheStorage storage, ResponseResolver resolver) {
    this.storage = Objects.requireNonNull(storage, "Cache storage may not be null");
    this.resolver = Objects.requireNonNull(resolver, "Resolver may not be null");
    this.helper = new HTTPCacheHelper(CacheHeaderBuilder.getBuilder());
}
/** Removes every entry from the underlying cache storage. */
public void clear() {
    storage.clear();
}
/** @return the backing cache storage */
public CacheStorage getStorage() {
    return storage;
}
/** @return the resolver used to contact the origin server */
public ResponseResolver getResolver() {
    return resolver;
}
/** @return hit/miss statistics collected by this cache */
public CacheStatistics getStatistics() {
    return statistics;
}
/**
 * Executes the request through the cache, honouring end-to-end reload
 * directives present on the request itself.
 *
 * @param request the request to execute
 * @return the response, served from cache or from the origin
 */
public HTTPResponse execute(final HTTPRequest request) {
    final boolean forceReload = helper.isEndToEndReloadRequest(request);
    return execute(request, forceReload);
}
/**
 * Executes the request while bypassing any cached value, forcing a
 * refresh from the origin server.
 *
 * @param request the request to execute
 * @return the freshly resolved response
 */
public HTTPResponse executeRefresh(final HTTPRequest request) {
    return execute(request, true);
}
/** Shuts down the storage backend and the resolver, releasing their resources. */
public void shutdown() {
    storage.shutdown();
    resolver.shutdown();
}
/**
 * Central dispatch: non-cacheable requests go straight to the origin;
 * cacheable requests are serialized per URI via {@link Mutex} before being
 * served from storage or resolved.
 *
 * @param request the request to execute
 * @param force   when true, bypass the cached value and hit the origin
 * @return the response; never {@code null}
 * @throws HTTPException if no response could be produced
 */
private HTTPResponse execute(final HTTPRequest request, boolean force) {
    HTTPResponse response;
    if (!helper.isCacheableRequest(request)) {
        response = unconditionalResolve(request);
    } else {
        //request is cacheable
        boolean shouldUnlock = true;
        try {
            // OPTIONS and TRACE are never served from cache.
            force = force || request.getMethod() == HTTPMethod.OPTIONS || request.getMethod() == HTTPMethod.TRACE;
            if (mutex.acquire(request.getNormalizedURI())) {
                // Cache-Control: no-store on the request also forces a resolve.
                response = doRequest(request, force || (OptionalUtils.exists(request.getHeaders().getCacheControl(), CacheControl::isNoStore)));
            } else {
                // Lock acquisition failed (e.g. interrupted): answer 502 and
                // make sure we do NOT release a lock we never obtained.
                response = new HTTPResponse(null, Status.BAD_GATEWAY, new Headers());
                shouldUnlock = false;
            }
        } finally {
            if (shouldUnlock) {
                mutex.release(request.getNormalizedURI());
            }
        }
    }
    if (response == null) {
        throw new HTTPException("No response produced");
    }
    return response;
}
/**
 * Optionally rewrites HEAD to GET (so the payload can be cached), then either
 * resolves unconditionally or attempts to serve from storage.
 *
 * @param request the request to serve
 * @param force   when true, skip the cache lookup entirely
 * @return the resulting response
 */
private HTTPResponse doRequest(HTTPRequest request, final boolean force) {
    final HTTPRequest effective =
            (request.getMethod() == HTTPMethod.HEAD && isTranslateHEADToGET())
                    ? request.withMethod(HTTPMethod.GET)
                    : request;
    return force ? unconditionalResolve(effective) : getFromStorage(effective);
}
/**
 * Attempts to serve the request from storage. A stored response that is stale
 * or marked {@code must-revalidate} triggers revalidation; a storage miss
 * falls through to an unconditional resolve.
 *
 * @param request the request to serve
 * @return the (possibly rewritten) response
 */
private HTTPResponse getFromStorage(HTTPRequest request) {
    final LocalDateTime now = LocalDateTime.now();
    final CacheItem cached = storage.get(request);
    if (cached == null) {
        statistics.miss();
        return unconditionalResolve(request);
    }
    statistics.hit();
    final HTTPResponse stored = cached.getResponse();
    final boolean mustRevalidate = stored.getHeaders()
            .getCacheControl()
            .orElse(CacheControl.empty())
            .isMustRevalidate();
    if (mustRevalidate || cached.isStale(now)) {
        // Stale (or must-revalidate): revalidate against the origin, falling
        // back to an unconditional GET if the stored payload is gone.
        final HTTPRequest conditional = maybePrepareConditionalResponse(request, stored);
        return handleStaleResponse(conditional, request, cached, now);
    }
    return helper.rewriteResponse(request, stored, cached.getAge(now));
}
/**
 * Serves a stale cache item: either the stale value directly (when the request
 * permits serving stale), or the result of revalidating against the origin.
 */
private HTTPResponse handleStaleResponse(HTTPRequest conditionalRequest, HTTPRequest originalRequest, CacheItem item, LocalDateTime requestTime) {
    final long age = item.getAge(LocalDateTime.now());
    if (helper.allowStale(item, originalRequest, requestTime)) {
        return helper.rewriteStaleResponse(originalRequest, item.getResponse(), age);
    }
    final HTTPResponse revalidated = executeImpl(conditionalRequest, item);
    return helper.rewriteResponse(originalRequest, revalidated, age);
}
/**
 * Builds the revalidation request. If the stored payload has disappeared, the
 * conditionals are stripped so the origin returns a full response instead of
 * 304 Not Modified; otherwise a conditional GET is prepared.
 */
private HTTPRequest maybePrepareConditionalResponse(HTTPRequest request, HTTPResponse staleResponse) {
    final boolean payloadMissing =
            staleResponse.hasPayload() && !staleResponse.getPayload().get().isAvailable();
    if (payloadMissing) {
        return request.headers(request.getHeaders().withConditionals(new Conditionals()));
    }
    return helper.prepareConditionalGETRequest(request, staleResponse);
}
/** Resolves against the origin with no cached item, then rewrites the response. */
private HTTPResponse unconditionalResolve(final HTTPRequest request) {
    final HTTPResponse resolved = executeImpl(request, null);
    return helper.rewriteResponse(request, resolved, -1);
}
/**
 * Resolves the request against the origin and reconciles the result with the
 * cache: invalidating entries for unsafe/HEAD responses, storing cacheable
 * responses, and merging headers for 304/206 revalidations.
 *
 * @param request the (possibly conditional) request to resolve
 * @param item    the stored cache item being revalidated, or {@code null}
 * @return the response to return to the caller
 * @throws HTTPException if resolving fails and there is no cached fallback
 */
private HTTPResponse executeImpl(final HTTPRequest request, final CacheItem item) {
    HTTPResponse response = null;
    HTTPResponse resolvedResponse = null;
    try {
        resolvedResponse = resolver.resolve(request);
    } catch (IOException e) {
        //No cached item found, we throw an exception.
        if (item == null) {
            throw new HTTPException(e);
        } else {
            // Serve the stale cached response, tagged with a Warning header.
            Headers headers = helper.warn(item.getResponse().getHeaders(), e);
            response = item.getResponse().withHeaders(headers);
        }
    }
    if (resolvedResponse != null) {
        boolean updated = false;
        if (isInvalidating(request, resolvedResponse, item)) {
            response = resolvedResponse;
            URI requestUri = request.getNormalizedURI();
            storage.invalidate(requestUri);
            updated = true;
            if (!request.getMethod().isSafe()) {
                // http://tools.ietf.org/html/rfc2616#section-13.10
                invalidateIfSameHostAsRequest(resolvedResponse.getHeaders().getLocation(), requestUri);
                invalidateIfSameHostAsRequest(resolvedResponse.getHeaders().getContentLocation(), requestUri);
            }
        }
        else if (helper.isCacheableResponse(resolvedResponse) && helper.shouldBeStored(resolvedResponse)) {
            response = storage.insert(request, resolvedResponse);
            updated = true;
        } else {
            //Response could not be cached
            response = resolvedResponse;
        }
        if (item != null) {
            //from http://tools.ietf.org/html/rfc2616#section-13.5.3
            if (resolvedResponse.getStatus() == Status.NOT_MODIFIED || resolvedResponse.getStatus() == Status.PARTIAL_CONTENT) {
                // Revalidation succeeded: merge origin headers into the cached entry.
                response = updateHeadersFromResolved(request, item, resolvedResponse);
            } else if (updated) {
                // Cache contents changed: mark the response as an X-Cache MISS.
                Headers newHeaders = response.getHeaders().add(CacheHeaderBuilder.getBuilder().createMISSXCacheHeader());
                response = response.withHeaders(newHeaders);
            }
        }
    }
    return response;
}
// http://tools.ietf.org/html/rfc2616#section-9.4
// A HEAD response that is not a 304 for an existing entry means the cached
// representation is out of date and must be invalidated.
private boolean isInvalidatingHEADResponse(HTTPRequest request, CacheItem item, HTTPResponse resolvedResponse) {
    if (request.getMethod() != HTTPMethod.HEAD || item == null) {
        return false;
    }
    return resolvedResponse.getStatus() != Status.NOT_MODIFIED;
}
/**
 * Whether this request/response pair must invalidate the cached entry:
 * either a successful/redirecting response to an unsafe method, or an
 * invalidating HEAD response (see isInvalidatingHEADResponse).
 */
private boolean isInvalidating(HTTPRequest request, HTTPResponse resolvedResponse, CacheItem item) {
    if (!request.getMethod().isSafe() && isSuccessfulResponseToUnsafeRequest(resolvedResponse)) {
        return true;
    }
    return isInvalidatingHEADResponse(request, item, resolvedResponse);
}
// http://tools.ietf.org/html/draft-ietf-httpbis-p6-cache-22#section-6
// 2xx and 3xx responses count as "successful" for invalidation purposes.
private boolean isSuccessfulResponseToUnsafeRequest(HTTPResponse resolvedResponse) {
    final Status.Category category = resolvedResponse.getStatus().getCategory();
    if (category == Status.Category.SUCCESS) {
        return true;
    }
    return category == Status.Category.REDIRECTION;
}
/**
 * Merges headers from a revalidation response into the cached response and
 * persists the updated entry: cached headers are kept, then overwritten by
 * every fresh header that is allowed to change (unmodifiable headers are
 * filtered out first).
 */
HTTPResponse updateHeadersFromResolved(final HTTPRequest request, final CacheItem item, final HTTPResponse resolvedResponse) {
    final HTTPResponse cached = item.getResponse();
    final Headers modifiable = helper.removeUnmodifiableHeaders(resolvedResponse.getHeaders());
    final Headers merged = new Headers(cached.getHeaders()).set(modifiable);
    return storage.update(request, cached.withHeaders(merged));
}
/**
 * @return whether HEAD requests are internally translated to GET requests
 *         before being resolved
 */
public boolean isTranslateHEADToGET() {
    return this.translateHEADToGET;
}
/**
 * Enables/disables internal translation of HEAD requests to GET requests.
 */
public void setTranslateHEADToGET(boolean translateHEADToGET) {
this.translateHEADToGET = translateHEADToGET;
}
/**
 * Invalidates the cache entry for {@code uri} if present and on the same host
 * as the original request. Cross-host Location/Content-Location headers are
 * deliberately ignored (an attacker must not be able to purge foreign hosts).
 */
private void invalidateIfSameHostAsRequest(Optional<URI> uri, URI requestUri) {
    if (!uri.isPresent()) {
        return;
    }
    final String host = uri.get().getHost();
    if (host != null && host.equals(requestUri.getHost())) {
        storage.invalidate(URIBuilder.fromURI(uri.get()).toNormalizedURI());
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.objectinspector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ListStructObjectInspector works on struct data that is stored as a Java List
* or Java Array object. Basically, the fields are stored sequentially in the
* List object.
*
* The names of the struct fields and the internal structure of the struct
* fields are specified in the ctor of the StructObjectInspector.
*
* Always use the ObjectInspectorFactory to create new ObjectInspector objects,
* instead of directly creating an instance of this class.
*/
public class StandardStructObjectInspector extends
    SettableStructObjectInspector {

  public static final Logger LOG = LoggerFactory
      .getLogger(StandardStructObjectInspector.class.getName());

  /**
   * Describes one struct field: its position (fieldID), its normalized
   * (lower-cased, interned) name, the inspector for its value, and an
   * optional comment.
   */
  protected static class MyField implements StructField {
    protected int fieldID;
    protected String fieldName;
    protected ObjectInspector fieldObjectInspector;
    protected String fieldComment;

    protected MyField() {
      super();
    }

    public MyField(int fieldID, String fieldName,
        ObjectInspector fieldObjectInspector) {
      this.fieldID = fieldID;
      // Field names are treated case-insensitively; intern() because the same
      // column names recur across many inspector instances.
      this.fieldName = fieldName.toLowerCase().intern();
      this.fieldObjectInspector = fieldObjectInspector;
    }

    public MyField(int fieldID, String fieldName,
        ObjectInspector fieldObjectInspector, String fieldComment) {
      this(fieldID, fieldName, fieldObjectInspector);
      this.fieldComment = fieldComment;
    }

    public int getFieldID() {
      return fieldID;
    }

    public String getFieldName() {
      return fieldName;
    }

    public ObjectInspector getFieldObjectInspector() {
      return fieldObjectInspector;
    }

    public String getFieldComment() {
      return fieldComment;
    }

    @Override
    public String toString() {
      return "" + fieldID + ":" + fieldName;
    }
  }

  /** Fields in declaration order; a field's fieldID is its index here. */
  protected List<MyField> fields;

  /** Field names as originally supplied (not lower-cased). */
  protected transient List<String> originalColumnNames;

  protected StandardStructObjectInspector() {
    super();
  }

  /**
   * Call ObjectInspectorFactory.getStandardListObjectInspector instead.
   */
  protected StandardStructObjectInspector(List<String> structFieldNames,
      List<ObjectInspector> structFieldObjectInspectors) {
    init(structFieldNames, structFieldObjectInspectors, null);
  }

  /**
   * Call ObjectInspectorFactory.getStandardListObjectInspector instead.
   */
  protected StandardStructObjectInspector(List<String> structFieldNames,
      List<ObjectInspector> structFieldObjectInspectors,
      List<String> structFieldComments) {
    init(structFieldNames, structFieldObjectInspectors, structFieldComments);
  }

  /**
   * Initializes fields/originalColumnNames from parallel lists of names and
   * inspectors; structFieldComments may be null (no comments).
   */
  protected void init(List<String> structFieldNames,
      List<ObjectInspector> structFieldObjectInspectors,
      List<String> structFieldComments) {
    fields = new ArrayList<MyField>(structFieldNames.size());
    originalColumnNames = new ArrayList<String>(structFieldNames.size());
    for (int i = 0; i < structFieldNames.size(); i++) {
      fields.add(new MyField(i, structFieldNames.get(i),
          structFieldObjectInspectors.get(i),
          structFieldComments == null ? null : structFieldComments.get(i)));
      originalColumnNames.add(structFieldNames.get(i));
    }
  }

  protected StandardStructObjectInspector(List<StructField> fields) {
    init(fields);
  }

  /** Initializes from pre-built StructFields (field comments are not copied). */
  protected void init(List<StructField> fields) {
    this.fields = new ArrayList<MyField>(fields.size());
    this.originalColumnNames = new ArrayList<String>(fields.size());
    for (int i = 0; i < fields.size(); i++) {
      this.fields.add(new MyField(i, fields.get(i).getFieldName(), fields
          .get(i).getFieldObjectInspector()));
      this.originalColumnNames.add(fields.get(i).getFieldName());
    }
  }

  @Override
  public String getTypeName() {
    return ObjectInspectorUtils.getStandardStructTypeName(this);
  }

  @Override
  public final Category getCategory() {
    return Category.STRUCT;
  }

  // Without Data
  @Override
  public StructField getStructFieldRef(String fieldName) {
    return ObjectInspectorUtils.getStandardStructFieldRef(fieldName, fields);
  }

  @Override
  public List<? extends StructField> getAllStructFieldRefs() {
    return fields;
  }

  // Set once either warning below has been logged, so the log is not flooded
  // with identical messages. NOTE(review): plain boolean, not synchronized —
  // worst case is a few duplicate warnings; confirm that is acceptable.
  boolean warned = false;

  // With Data
  /**
   * Returns the value of the given field from struct data stored either as a
   * List&lt;Object&gt; or an Object[]. Returns null for null data or when the
   * field index is beyond the actual element count; logs (once) and returns
   * the data itself when the data is of an unexpected type.
   */
  @Override
  @SuppressWarnings("unchecked")
  public Object getStructFieldData(Object data, StructField fieldRef) {
    if (data == null) {
      return null;
    }
    // We support both List<Object> and Object[]
    // so we have to do differently.
    boolean isArray = data.getClass().isArray();
    if (!isArray && !(data instanceof List)) {
      if (!warned) {
        // Parameterized logging (SLF4J idiom) instead of string concatenation.
        LOG.warn("Invalid type for struct {}", data.getClass());
        LOG.warn("ignoring similar errors.");
        warned = true;
      }
      return data;
    }
    int listSize = (isArray ? ((Object[]) data).length : ((List<Object>) data)
        .size());
    MyField f = (MyField) fieldRef;
    if (fields.size() != listSize && !warned) {
      // TODO: remove this
      warned = true;
      LOG.warn("Trying to access {} fields inside a list of {} elements: {}",
          fields.size(), listSize,
          (isArray ? Arrays.asList((Object[]) data) : (List<Object>) data));
      LOG.warn("ignoring similar errors.");
    }
    int fieldID = f.getFieldID();
    if (fieldID >= listSize) {
      // Field beyond the actual data size (e.g. schema evolution): no value.
      return null;
    } else if (isArray) {
      return ((Object[]) data)[fieldID];
    } else {
      return ((List<Object>) data).get(fieldID);
    }
  }

  /**
   * Returns the struct's fields as a List: the data itself if it already is a
   * List, otherwise a List view of the Object[] (no copy in either case).
   */
  @Override
  @SuppressWarnings("unchecked")
  public List<Object> getStructFieldsDataAsList(Object data) {
    if (data == null) {
      return null;
    }
    // We support both List<Object> and Object[]; avoid reassigning the
    // parameter (original mutated 'data' in place).
    if (data instanceof List) {
      return (List<Object>) data;
    }
    return Arrays.asList((Object[]) data);
  }

  /** @return the original (non-lower-cased) column names, in field order */
  public List<String> getOriginalColumnNames() {
    return originalColumnNames;
  }

  // /////////////////////////////
  // SettableStructObjectInspector
  /** Creates an empty struct instance: a list with one null slot per field. */
  @Override
  public Object create() {
    ArrayList<Object> a = new ArrayList<Object>(fields.size());
    for (int i = 0; i < fields.size(); i++) {
      a.add(null);
    }
    return a;
  }

  /** Sets one field of a struct created by {@link #create()} and returns it. */
  @Override
  public Object setStructFieldData(Object struct, StructField field,
      Object fieldValue) {
    ArrayList<Object> a = (ArrayList<Object>) struct;
    MyField myField = (MyField) field;
    a.set(myField.fieldID, fieldValue);
    return a;
  }
}
| |
/*
* Copyright (c) 2008 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.collections;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.SortedSet;
/**
 * A SortedSet backed by a singly linked list, optimized for workloads where
 * new elements tend to sort near the head: add() scans linearly from the
 * head, so head-area insertions are cheap while tail insertions are O(n).
 *
 * Equality of elements is decided by the comparator (or natural ordering)
 * returning 0, as add() never stores a compare()==0 duplicate. The iterator
 * and the headSet/subSet/tailSet views do not support modification.
 * Not thread-safe.
 */
public final class HeadBiasedSortedSet<E> extends AbstractSet<E> implements
        SortedSet<E> {

    /**
     * Singly linked list node. Declared static because it never touches the
     * enclosing instance — a non-static inner class would carry a useless
     * hidden reference to the set.
     */
    private static final class Node<F> {
        public final F value;
        public Node<F> next;

        /**
         * @param value element stored in this node (null only for the sentinel)
         * @param next  following node, or null at the tail
         */
        public Node(F value, Node<F> next) {
            this.value = value;
            this.next = next;
        }
    }

    /** Read-only iterator walking the linked list in sorted order. */
    private final class IteratorImpl implements Iterator<E> {
        private Node<E> next;

        /**
         * @param head first real node (i.e. the sentinel's successor)
         */
        IteratorImpl(Node<E> head) {
            this.next = head;
        }

        public boolean hasNext() {
            return next != null;
        }

        public E next() {
            if (next == null) {
                throw new NoSuchElementException();
            }
            E rv = next.value;
            next = next.next;
            return rv;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /** Ordering; null means natural ordering via Comparable. */
    private final Comparator<? super E> comparator;

    /** Sentinel node: head.next is the first real element. */
    private final Node<E> head = new Node<E>(null, null);

    private int size = 0;

    /**
     * @param comparator ordering to use for elements
     */
    public HeadBiasedSortedSet(Comparator<? super E> comparator) {
        this.comparator = comparator;
    }

    /** Creates a set using the elements' natural ordering. */
    public HeadBiasedSortedSet() {
        this.comparator = null;
    }

    /** Copies a SortedSet, keeping its comparator. */
    public HeadBiasedSortedSet(SortedSet<E> set) {
        this.comparator = set.comparator();
        // XXX this is very inefficient (O(n^2): each add scans from the head)
        for (E e : set) {
            this.add(e);
        }
    }

    /** Copies a collection, using natural ordering. */
    public HeadBiasedSortedSet(Collection<? extends E> collection) {
        this.comparator = null;
        // XXX this is very inefficient (O(n^2): each add scans from the head)
        for (E e : collection) {
            this.add(e);
        }
    }

    public Comparator<? super E> comparator() {
        return comparator;
    }

    @Override
    public Iterator<E> iterator() {
        return new IteratorImpl(head.next);
    }

    @Override
    public int size() {
        return size;
    }

    /** @throws NoSuchElementException if the set is empty */
    public E first() {
        Node<E> first = head.next;
        if (first == null) {
            throw new NoSuchElementException();
        } else {
            return first.value;
        }
    }

    /** Range views are not supported by this implementation. */
    public SortedSet<E> headSet(E toElement) {
        throw new UnsupportedOperationException();
    }

    /**
     * O(n): walks the list to the tail.
     *
     * @throws NoSuchElementException if the set is empty
     */
    public E last() {
        Node<E> first = head.next;
        if (first == null) {
            throw new NoSuchElementException();
        } else {
            Node<E> prev = first;
            while (prev.next != null) {
                prev = prev.next;
            }
            return prev.value;
        }
    }

    /** Range views are not supported by this implementation. */
    public SortedSet<E> subSet(E fromElement, E toElement) {
        throw new UnsupportedOperationException();
    }

    /** Range views are not supported by this implementation. */
    public SortedSet<E> tailSet(E fromElement) {
        throw new UnsupportedOperationException();
    }

    /**
     * Inserts o at its sorted position, scanning from the head.
     *
     * @return true if the set changed; false if an equal (compare()==0)
     *         element was already present
     * @see java.util.AbstractCollection#add(java.lang.Object)
     */
    @Override
    public boolean add(E o) {
        Node<E> prev = head;
        while (prev.next != null) {
            int comp = compare(o, prev.next.value);
            if (comp < 0) {
                prev.next = new Node<E>(o, prev.next);
                size++;
                return true;
            } else if (comp == 0) {
                // already present
                return false;
            }
            prev = prev.next;
        }
        // if we haven't returned yet, this is greater than every element
        prev.next = new Node<E>(o, null);
        size++;
        return true;
    }

    /**
     * Compares two elements via the comparator, falling back to natural
     * ordering when none was supplied.
     */
    @SuppressWarnings("unchecked") // natural-ordering path requires E to be Comparable
    private int compare(E one, E other) {
        if (comparator == null) {
            return ((Comparable<E>) one).compareTo(other);
        } else {
            return comparator.compare(one, other);
        }
    }

    /**
     * @see java.util.AbstractCollection#clear()
     */
    @Override
    public void clear() {
        size = 0;
        head.next = null;
    }
}
| |
package de.fau.cs.mad.kwikshop.android.model.synchronization;
import android.content.Context;
import android.util.Log;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.inject.Inject;
import de.fau.cs.mad.kwikshop.android.R;
import de.fau.cs.mad.kwikshop.android.model.interfaces.SimpleStorage;
import de.fau.cs.mad.kwikshop.android.restclient.LeaseResource;
import de.fau.cs.mad.kwikshop.android.restclient.RestClientFactory;
import de.fau.cs.mad.kwikshop.common.ArgumentNullException;
import de.fau.cs.mad.kwikshop.android.model.SessionHandler;
import de.fau.cs.mad.kwikshop.android.model.messages.SynchronizationEvent;
import de.fau.cs.mad.kwikshop.android.util.SharedPreferencesHelper;
import de.fau.cs.mad.kwikshop.android.viewmodel.common.ResourceProvider;
import de.fau.cs.mad.kwikshop.common.Recipe;
import de.fau.cs.mad.kwikshop.common.RecipeServer;
import de.fau.cs.mad.kwikshop.common.ShoppingList;
import de.fau.cs.mad.kwikshop.common.ShoppingListServer;
import de.fau.cs.mad.kwikshop.common.SynchronizationLease;
import de.fau.cs.mad.kwikshop.common.sorting.BoughtItem;
import de.fau.cs.mad.kwikshop.common.sorting.ItemOrderWrapper;
import de.greenrobot.event.EventBus;
import retrofit.RetrofitError;
/**
* Central class coordinating syncing of all synced data
*/
public class CompositeSynchronizer {

    // Resets locally stored server-sync metadata when the configured server
    // or the logged-in user has changed.
    private final ConditionalSyncDataResetter syncDataResetter;
    // Synchronizer for shopping lists (local <-> server representation).
    private final ListSynchronizer<ShoppingList, ShoppingListServer> shoppingListSynchronizer;
    // Synchronizer for recipes (local <-> server representation).
    private final ListSynchronizer<Recipe, RecipeServer> recipeSynchronizer;
    private final RestClientFactory restClientFactory;
    private final ResourceProvider resourceProvider;
    private final Context context;
    // Locally recorded bought items waiting to be uploaded (used for
    // server-side item-order learning).
    private final SimpleStorage<BoughtItem> boughtItemStorage;

    /**
     * Creates a new CompositeSynchronizer with all collaborators injected.
     *
     * @throws ArgumentNullException if any argument is null
     */
    @Inject
    public CompositeSynchronizer(ConditionalSyncDataResetter syncDataResetter,
                                 ListSynchronizer<ShoppingList, ShoppingListServer> shoppingListSynchronizer,
                                 ListSynchronizer<Recipe, RecipeServer> recipeSynchronizer,
                                 SimpleStorage<BoughtItem> boughtItemStorage,
                                 RestClientFactory restClientFactory,
                                 ResourceProvider resourceProvider,
                                 Context context) {
        if(syncDataResetter == null) {
            throw new ArgumentNullException("syncDataResetter");
        }
        if(shoppingListSynchronizer == null) {
            throw new ArgumentNullException("shoppingListSynchronizer");
        }
        if(recipeSynchronizer == null) {
            throw new ArgumentNullException("recipeSynchronizer");
        }
        if(boughtItemStorage == null) {
            throw new ArgumentNullException("boughtItemStorage");
        }
        if(restClientFactory == null) {
            throw new ArgumentNullException("restClientFactory");
        }
        if(resourceProvider == null) {
            throw new ArgumentNullException("resourceProvider");
        }
        if(context == null) {
            throw new ArgumentNullException("context");
        }
        this.syncDataResetter = syncDataResetter;
        this.shoppingListSynchronizer = shoppingListSynchronizer;
        this.recipeSynchronizer = recipeSynchronizer;
        this.boughtItemStorage = boughtItemStorage;
        this.restClientFactory = restClientFactory;
        this.resourceProvider = resourceProvider;
        this.context = context;
    }

    /**
     * Runs a full synchronization pass: acquires a server-side lease, keeps it
     * alive on a background thread, syncs shopping lists and then recipes,
     * uploads bought items, and finally releases the lease. Progress, failure
     * and completion are reported as SynchronizationEvents on the EventBus.
     * Returns immediately (without posting anything) when the user is not
     * authenticated or synchronization is disabled in the preferences.
     */
    public void synchronize() {
        //check if the user is logged in. otherwise we cannot sync
        Context applicationContext = context.getApplicationContext();
        if(!SessionHandler.isAuthenticated(applicationContext)) {
            return;
        }
        //check if synchronization is even enabled
        if(!SharedPreferencesHelper.loadBoolean(SharedPreferencesHelper.ENABLE_SYNCHRONIZATION, true, context)) {
            return;
        }
        // start synchronization
        post(SynchronizationEvent.CreateStartedMessage());
        // reset all local server data if the used server or user has changed
        syncDataResetter.resetSyncDataIfNecessary();
        // get a synchronization lease
        LeaseResource leaseCLient = restClientFactory.getLeaseClient();
        post(SynchronizationEvent.CreateProgressMessage(resourceProvider.getString(R.string.aquiring_lease)));
        // one-element array used as a mutable holder shared with the
        // lease-refresh thread (UpdateLeaseRunnable) started below
        SynchronizationLease[] lease = new SynchronizationLease[1];
        try {
            lease[0] = leaseCLient.getSynchronizationLeaseSynchronously();
        } catch (RetrofitError ex) {
            // without a lease we must not sync; report failure and abort
            post(SynchronizationEvent.CreateFailedMessage(resourceProvider.getString(R.string.error_aquiring_lease)));
            return;
        }
        // update the lease in the background
        UpdateLeaseRunnable updateLeaseRunnable = new UpdateLeaseRunnable(leaseCLient, lease);
        Thread updateLeaseThread = new Thread(updateLeaseRunnable);
        updateLeaseThread.start();
        // synchronize shopping lists and recipes; && short-circuits, so recipe
        // sync is skipped when shopping-list sync failed (each step posts its
        // own failure event)
        boolean success = synchronizeShoppingLists() && synchronizeRecipes();
        sendBoughtItems();
        // stop updating the lease
        updateLeaseRunnable.cancel();
        updateLeaseThread.interrupt();
        try {
            leaseCLient.removeSynchronizationLeaseSynchronously(lease[0].getId());
        } catch (RetrofitError ex) {
            // error while deleting the lease can be ignored
            // (NOTE(review): presumably the lease expires server-side — confirm)
        }
        if(success) {
            post(SynchronizationEvent.CreateCompletedMessage());
        }
    }

    /**
     * Uploads locally recorded bought items (for server-side item sorting) and
     * removes them from local storage.
     *
     * NOTE(review): items are deleted from local storage BEFORE the upload
     * request is made, and any exception is swallowed — a failed upload loses
     * those items. Confirm this best-effort behavior is intended.
     */
    private void sendBoughtItems() {
        try {
            List<BoughtItem> syncableBoughtItems = new ArrayList<>();
            for(BoughtItem boughtItem: boughtItemStorage.getItems()) {
                if(boughtItem.isSync()) {
                    syncableBoughtItems.add(boughtItem);
                    boughtItemStorage.deleteSingleItem(boughtItem);
                }
            }
            if(syncableBoughtItems.size() == 0)
                return;
            // Sort the List by Date
            Collections.sort(syncableBoughtItems);
            ItemOrderWrapper itemOrderWrapper = new ItemOrderWrapper(syncableBoughtItems);
            restClientFactory.getShoppingListClient().postItemOrder(itemOrderWrapper);
        } catch (Exception e) {
            // deliberately ignored: bought-item upload is best-effort
        }
    }

    /**
     * Synchronizes shopping lists, posting a progress event first and a
     * failure event (with the exception text) on error.
     *
     * @return true on success, false if synchronization threw
     */
    private boolean synchronizeShoppingLists() {
        post(SynchronizationEvent.CreateProgressMessage(resourceProvider.getString(R.string.synchronizing_shoppingLists)));
        try {
            shoppingListSynchronizer.synchronize();
        } catch (Exception ex) {
            Log.e("KwikShop-Sync", "Exception in ShoppingList synchronization", ex);
            String message = String.format("%s\n\n%s", resourceProvider.getString(R.string.error_synchronizing_shoppingLists), ex.toString());
            post(SynchronizationEvent.CreateFailedMessage(message));
            return false;
        }
        return true;
    }

    /**
     * Synchronizes recipes, posting a progress event first and a failure event
     * (with the exception text) on error.
     *
     * @return true on success, false if synchronization threw
     */
    private boolean synchronizeRecipes() {
        post(SynchronizationEvent.CreateProgressMessage(resourceProvider.getString(R.string.synchronizing_recipes)));
        try {
            recipeSynchronizer.synchronize();
        } catch (Exception ex) {
            Log.e("KwikShop-Sync", "Exception in Recipe synchronization", ex);
            String message = String.format("%s\n\n%s", resourceProvider.getString(R.string.error_synchronizing_recipes), ex.toString());
            post(SynchronizationEvent.CreateFailedMessage(message));
            return false;
        }
        return true;
    }

    /** Posts a synchronization event on the global EventBus. */
    private void post(SynchronizationEvent event) {
        EventBus.getDefault().post(event);
    }

    /**
     * Keeps the synchronization lease alive by periodically extending it until
     * cancelled or interrupted. The shared one-element lease array is written
     * under synchronized(lease).
     *
     * NOTE(review): getSleepTime() reads lease[0] without synchronizing on the
     * array, and 'cancelled' is not volatile (shutdown relies on interrupt());
     * confirm these races are acceptable.
     */
    private class UpdateLeaseRunnable implements Runnable {

        private LeaseResource leaseClient;
        private final SynchronizationLease[] lease;
        // set by cancel(); checked before and after each sleep
        private boolean cancelled = false;

        /**
         * @param leaseClient client used to extend the lease
         * @param lease       one-element holder containing the current lease
         * @throws ArgumentNullException    if leaseClient, lease or lease[0] is null
         * @throws IllegalArgumentException if lease is not of length 1
         */
        public UpdateLeaseRunnable(LeaseResource leaseClient, SynchronizationLease[] lease) {
            if(leaseClient == null) {
                throw new ArgumentNullException("leaseClient");
            }
            if(lease == null) {
                throw new ArgumentNullException("lease");
            }
            if(lease.length != 1) {
                throw new IllegalArgumentException("lease must be an array of size 1");
            }
            if(lease[0] == null) {
                throw new ArgumentNullException("lease[0]");
            }
            this.leaseClient = leaseClient;
            this.lease = lease;
        }

        @Override
        public void run() {
            while(!cancelled) {
                try {
                    // sleep about half the remaining lease lifetime
                    Thread.sleep(getSleepTime());
                } catch (InterruptedException e) {
                    // interrupt is the expected shutdown signal (see synchronize())
                    return;
                }
                if(cancelled) {
                    return;
                }
                try {
                    SynchronizationLease updatedLease = leaseClient.extendSynchronizationLeaseSynchronously(lease[0].getId());
                    synchronized (lease) {
                        lease[0] = updatedLease;
                    }
                } catch (RetrofitError ex) {
                    // extension failure ignored; the next iteration retries
                }
            }
        }

        /** Requests that the refresh loop stop after the current iteration. */
        public void cancel() {
            this.cancelled = true;
        }

        /** @return half the time until the current lease expires, never negative */
        private long getSleepTime() {
            return Math.max(0, (lease[0].getExpirationTime().getTime() - new Date().getTime()) / 2);
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.ui;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerInvocationUtil;
import com.intellij.debugger.DebuggerManagerEx;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.JVMName;
import com.intellij.debugger.engine.JVMNameUtil;
import com.intellij.debugger.engine.evaluation.*;
import com.intellij.debugger.engine.evaluation.expression.EvaluatorBuilderImpl;
import com.intellij.debugger.engine.evaluation.expression.ExpressionEvaluator;
import com.intellij.debugger.engine.events.DebuggerContextCommandImpl;
import com.intellij.debugger.impl.DebuggerContextImpl;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.ui.impl.DebuggerTreeRenderer;
import com.intellij.debugger.ui.impl.InspectDebuggerTree;
import com.intellij.debugger.ui.impl.watch.WatchItemDescriptor;
import com.intellij.debugger.ui.tree.render.DescriptorLabelListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.psi.*;
import com.intellij.ui.SimpleColoredText;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.IncorrectOperationException;
import com.intellij.xdebugger.impl.evaluate.quick.common.AbstractValueHint;
import com.intellij.xdebugger.impl.evaluate.quick.common.AbstractValueHintTreeComponent;
import com.intellij.xdebugger.impl.evaluate.quick.common.ValueHintType;
import com.sun.jdi.Method;
import com.sun.jdi.PrimitiveValue;
import com.sun.jdi.Value;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
/**
* User: lex
* Date: Nov 24, 2003
* Time: 7:31:26 PM
*/
public class ValueHint extends AbstractValueHint {
private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.ui.ValueHint");
private PsiExpression myCurrentExpression = null;
private Value myValueToShow = null;
/**
 * Private: use {@link #createValueHint} which also resolves the expression
 * under the cursor/selection and any pre-calculated value.
 */
private ValueHint(Project project, Editor editor, Point point, ValueHintType type, final PsiExpression selectedExpression, final TextRange textRange) {
  super(project, editor, point, type, textRange);
  myCurrentExpression = selectedExpression;
}
/**
 * Factory method: resolves the expression (and, when available, an already
 * computed value) at the given editor point and builds a hint for it.
 */
public static ValueHint createValueHint(Project project, Editor editor, Point point, ValueHintType type) {
  final Trinity<PsiExpression, TextRange, Value> selection = getSelectedExpression(project, editor, point, type);
  final ValueHint valueHint = new ValueHint(project, editor, point, type, selection.getFirst(), selection.getSecond());
  // A pre-calculated value (e.g. from the last executed method) skips re-evaluation.
  valueHint.myValueToShow = selection.getThird();
  return valueHint;
}
/** A hint can only be shown when an expression was found under the cursor/selection. */
protected boolean canShowHint() {
  return myCurrentExpression != null;
}
/**
 * Evaluates the current expression in the context of the paused debugger
 * session and shows the result: a flat label hint for simple values (or any
 * mouse-over hint), or an expandable inspection tree for object values when
 * invoked explicitly. Does nothing unless a session exists and is paused.
 * The evaluation itself is scheduled on the debugger manager thread.
 */
protected void evaluateAndShowHint() {
  final DebuggerContextImpl debuggerContext = DebuggerManagerEx.getInstanceEx(getProject()).getContext();
  final DebuggerSession debuggerSession = debuggerContext.getDebuggerSession();
  // Evaluation is only possible while the debuggee is suspended.
  if(debuggerSession == null || !debuggerSession.isPaused()) return;
  try {
    final ExpressionEvaluator evaluator = EvaluatorBuilderImpl.getInstance().build(myCurrentExpression, debuggerContext.getSourcePosition());
    debuggerContext.getDebugProcess().getManagerThread().schedule(new DebuggerContextCommandImpl(debuggerContext) {
      public Priority getPriority() {
        return Priority.HIGH;
      }
      public void threadAction() {
        try {
          final EvaluationContextImpl evaluationContext = debuggerContext.createEvaluationContext();
          // PSI text must be read inside a read action.
          final String expressionText = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
            public String compute() {
              return myCurrentExpression.getText();
            }
          });
          final TextWithImports text = new TextWithImportsImpl(CodeFragmentKind.EXPRESSION, expressionText);
          // Reuse a pre-calculated value (set by createValueHint) when available.
          final Value value = myValueToShow != null? myValueToShow : evaluator.evaluate(evaluationContext);
          final WatchItemDescriptor descriptor = new WatchItemDescriptor(getProject(), text, value);
          if (!isActiveTootlipApplicable(value) || getType() == ValueHintType.MOUSE_OVER_HINT) {
            if (getType() == ValueHintType.MOUSE_OVER_HINT) {
              // force using default renderer for mouse over hint in order to not to call accidentally methods while rendering
              // otherwise, if the hint is invoked explicitly, show it with the right "auto" renderer
              descriptor.setRenderer(debuggerContext.getDebugProcess().getDefaultRenderer(value));
            }
            descriptor.updateRepresentation(evaluationContext, new DescriptorLabelListener() {
              public void labelChanged() {
                if(getCurrentRange() != null) {
                  if(getType() != ValueHintType.MOUSE_OVER_HINT || descriptor.isValueValid()) {
                    final SimpleColoredText simpleColoredText = DebuggerTreeRenderer.getDescriptorText(debuggerContext, descriptor, true);
                    if (isActiveTootlipApplicable(value)){
                      // suggest the expandable ("active") tooltip for object values
                      simpleColoredText.append(" (" + DebuggerBundle.message("active.tooltip.suggestion") + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
                    }
                    showHint(simpleColoredText, descriptor);
                  }
                }
              }
            });
          } else {
            // Explicit invocation on an object value: show the inspection tree popup.
            final InspectDebuggerTree tree = getInspectTree(descriptor);
            showTreePopup(tree, debuggerContext, myCurrentExpression.getText(), new ValueHintTreeComponent(ValueHint.this, tree, myCurrentExpression.getText()));
          }
        }
        catch (EvaluateException e) {
          // evaluation failures are expected (invalid expression, VM state); just log
          LOG.debug(e);
        }
      }
    });
  }
  catch (EvaluateException e) {
    // evaluator could not be built for this expression; just log
    LOG.debug(e);
  }
}
// True when the value can back an "active" (expandable) tooltip: only
// non-null, non-primitive values are inspectable. (The misspelled name
// "Tootlip" is kept because other methods in this class call it.)
private static boolean isActiveTootlipApplicable(final Value value) {
  if (value == null) {
    return false;
  }
  return !(value instanceof PrimitiveValue);
}
/**
 * Rebuilds the inspect tree for the given debugger context and shows it as a
 * popup. Work is dispatched to the EDT via DebuggerInvocationUtil.
 * The inner showTreePopup(component, tree, title) call resolves to an
 * overload not defined in this class — presumably inherited from
 * AbstractValueHint; confirm.
 */
public void showTreePopup(final InspectDebuggerTree tree,
                          final DebuggerContextImpl debuggerContext,
                          final String title,
                          final AbstractValueHintTreeComponent<?> component) {
  DebuggerInvocationUtil.invokeLater(getProject(), new Runnable() {
    public void run() {
      tree.rebuild(debuggerContext);
      showTreePopup(component, tree, title);
    }
  });
}
/**
 * Shows the evaluated value as an editor hint, on the EDT. Simple values get
 * a plain informational label; object values get an expandable component
 * whose expansion re-renders with the "auto" renderer and opens the
 * inspection tree popup.
 */
private void showHint(final SimpleColoredText text, final WatchItemDescriptor descriptor) {
  DebuggerInvocationUtil.invokeLater(getProject(), new Runnable() {
    public void run() {
      // The hint may have been hidden while we were evaluating.
      if(!isHintHidden()) {
        JComponent component;
        if (!isActiveTootlipApplicable(descriptor.getValue())) {
          // Primitive/null value: nothing to expand.
          component = HintUtil.createInformationLabel(text);
        }
        else {
          // Expandable hint: the runnable fires when the user expands it.
          component = createExpandableHintComponent(text, new Runnable() {
            public void run() {
              final DebuggerContextImpl debuggerContext = DebuggerManagerEx.getInstanceEx(getProject()).getContext();
              final DebugProcessImpl debugProcess = debuggerContext.getDebugProcess();
              debugProcess.getManagerThread().schedule(new DebuggerContextCommandImpl(debuggerContext) {
                public void threadAction() {
                  // Switch to the auto renderer now that methods may be called.
                  descriptor.setRenderer(debugProcess.getAutoRenderer(descriptor));
                  final InspectDebuggerTree tree = getInspectTree(descriptor);
                  showTreePopup(tree, debuggerContext, myCurrentExpression.getText(),
                                new ValueHintTreeComponent(ValueHint.this, tree, myCurrentExpression.getText()));
                }
              });
            }
          });
        }
        if (!showHint(component)) return;
        if(getType() == ValueHintType.MOUSE_CLICK_HINT) {
          // NOTE(review): a fresh label is created here only to request focus —
          // confirm this is intentional rather than focusing 'component'.
          HintUtil.createInformationLabel(text).requestFocusInWindow();
        }
      }
    }
  });
}
/**
 * Builds an inspection tree for the given descriptor, wiring up the tree
 * model listener before the descriptor is attached.
 */
private InspectDebuggerTree getInspectTree(final WatchItemDescriptor descriptor) {
  final InspectDebuggerTree inspectTree = new InspectDebuggerTree(getProject());
  inspectTree.getModel().addTreeModelListener(createTreeListener(inspectTree));
  inspectTree.setInspectDescriptor(descriptor);
  return inspectTree;
}
/**
 * Tries to build an evaluatable PsiExpression for the leaf element under the
 * cursor. Accepts identifiers and keywords; handles variable declarations
 * (the name itself), reference expressions (widened to the enclosing method
 * call when present), and 'this' expressions.
 *
 * @param element          leaf PSI element under the cursor
 * @param allowMethodCalls whether expressions with side effects (per
 *                         DebuggerUtils.hasSideEffects) may be returned
 * @return the re-parsed expression and its original text range, or null if no
 *         suitable expression was found or parsing failed
 */
@Nullable
private static Pair<PsiExpression, TextRange> findExpression(PsiElement element, boolean allowMethodCalls) {
  if (!(element instanceof PsiIdentifier || element instanceof PsiKeyword)) {
    return null;
  }
  PsiElement expression = null;
  PsiElement parent = element.getParent();
  if (parent instanceof PsiVariable) {
    // variable declaration: evaluate just the variable name
    expression = element;
  }
  else if (parent instanceof PsiReferenceExpression) {
    final PsiElement pparent = parent.getParent();
    if (pparent instanceof PsiMethodCallExpression) {
      // widen "foo" to the whole call "foo(...)"
      parent = pparent;
    }
    if (allowMethodCalls || !DebuggerUtils.hasSideEffects(parent)) {
      expression = parent;
    }
  }
  else if (parent instanceof PsiThisExpression) {
    expression = parent;
  }
  if (expression != null) {
    try {
      // Choose a context element so references in the re-parsed expression
      // resolve: the declaring method's body for parameters, otherwise the
      // enclosing statement or class.
      PsiElement context = element;
      if(parent instanceof PsiParameter) {
        try {
          context = ((PsiMethod)((PsiParameter)parent).getDeclarationScope()).getBody();
        }
        catch (Throwable ignored) {
          // NOTE(review): presumably guards against declaration scopes that are
          // not a PsiMethod (cast would fail); falls back to 'element' — confirm
        }
      }
      else {
        while(context != null && !(context instanceof PsiStatement) && !(context instanceof PsiClass)) {
          context = context.getParent();
        }
      }
      TextRange textRange = expression.getTextRange();
      PsiExpression psiExpression = JavaPsiFacade.getInstance(expression.getProject()).getElementFactory().createExpressionFromText(expression.getText(), context);
      return Pair.create(psiExpression, textRange);
    }
    catch (IncorrectOperationException e) {
      // expression text could not be parsed in this context; treat as "no expression"
      LOG.debug(e);
    }
  }
  return null;
}
  /**
   * Determines which expression a value hint should be shown for at the given editor point.
   *
   * Resolution order:
   * <ol>
   *   <li>the editor selection, if the point is inside it and the hint was explicitly requested;</li>
   *   <li>the expression found at the caret via {@link #findExpression};</li>
   *   <li>for plain mouse-over hints, the last executed method call of a paused debugger session,
   *       whose already-computed value is returned as the pre-calculated value.</li>
   * </ol>
   *
   * @param project the current project
   * @param editor  the editor the mouse is over
   * @param point   the mouse position, converted to a document offset internally
   * @param type    the kind of hint being requested (hover, click, alt-hover)
   * @return expression, its text range, and an optional pre-calculated value; any component may be null
   */
  private static Trinity<PsiExpression, TextRange, Value> getSelectedExpression(final Project project, final Editor editor, final Point point, final ValueHintType type) {
    final Ref<PsiExpression> selectedExpression = Ref.create(null);
    final Ref<TextRange> currentRange = Ref.create(null);
    final Ref<Value> preCalculatedValue = Ref.create(null);
    // Must run inside a read action with committed documents to query PSI safely.
    PsiDocumentManager.getInstance(project).commitAndRunReadAction(new Runnable() {
      public void run() {
        // Point -> offset
        final int offset = calculateOffset(editor, point);
        PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
        if(psiFile == null || !psiFile.isValid()) return;
        int selectionStart = editor.getSelectionModel().getSelectionStart();
        int selectionEnd = editor.getSelectionModel().getSelectionEnd();
        // Case 1: explicit hint request inside an active selection — evaluate the selected text.
        if((type == ValueHintType.MOUSE_CLICK_HINT || type == ValueHintType.MOUSE_ALT_OVER_HINT) && (selectionStart <= offset && offset <= selectionEnd)) {
          // Resolve against the element just before the selection so names bind as in-place.
          PsiElement ctx = (selectionStart > 0) ? psiFile.findElementAt(selectionStart - 1) : psiFile.findElementAt(selectionStart);
          try {
            String text = editor.getSelectionModel().getSelectedText();
            if(text != null && ctx != null) {
              selectedExpression.set(JavaPsiFacade.getInstance(project).getElementFactory().createExpressionFromText(text, ctx));
              currentRange.set(new TextRange(editor.getSelectionModel().getSelectionStart(), editor.getSelectionModel().getSelectionEnd()));
            }
          } catch (IncorrectOperationException e) {
            // Selection is not a parseable expression; fall through to caret-based lookup below.
          }
        }
        // Case 2: no usable selection — look at the element under the caret.
        if(currentRange.get() == null) {
          PsiElement elementAtCursor = psiFile.findElementAt(offset);
          if (elementAtCursor == null) {
            return;
          }
          Pair<PsiExpression, TextRange> pair = findExpression(elementAtCursor, type == ValueHintType.MOUSE_CLICK_HINT || type == ValueHintType.MOUSE_ALT_OVER_HINT);
          if (pair == null) {
            // Case 3: hover hint over a method call — reuse the value of the last executed
            // method if the paused session's last call matches this call's name and JVM signature.
            if (type == ValueHintType.MOUSE_OVER_HINT) {
              final DebuggerSession debuggerSession = DebuggerManagerEx.getInstanceEx(project).getContext().getDebuggerSession();
              if(debuggerSession != null && debuggerSession.isPaused()) {
                final Pair<Method, Value> lastExecuted = debuggerSession.getProcess().getLastExecutedMethod();
                if (lastExecuted != null) {
                  final Method method = lastExecuted.getFirst();
                  if (method != null) {
                    // Re-run the lookup allowing method calls this time.
                    final Pair<PsiExpression, TextRange> expressionPair = findExpression(elementAtCursor, true);
                    if (expressionPair != null && expressionPair.getFirst() instanceof PsiMethodCallExpression) {
                      final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)expressionPair.getFirst();
                      final PsiMethod psiMethod = methodCallExpression.resolveMethod();
                      if (psiMethod != null) {
                        final JVMName jvmSignature = JVMNameUtil.getJVMSignature(psiMethod);
                        try {
                          // Compare both the simple name and the full JVM signature to avoid overload mixups.
                          if (method.name().equals(psiMethod.getName()) && method.signature().equals(jvmSignature.getName(debuggerSession.getProcess()))) {
                            pair = expressionPair;
                            preCalculatedValue.set(lastExecuted.getSecond());
                          }
                        }
                        catch (EvaluateException ignored) {
                          // Signature could not be resolved against the VM; no pre-calculated value then.
                        }
                      }
                    }
                  }
                }
              }
            }
          }
          if (pair == null) {
            return;
          }
          selectedExpression.set(pair.getFirst());
          currentRange.set(pair.getSecond());
        }
      }
    });
    return Trinity.create(selectedExpression.get(), currentRange.get(), preCalculatedValue.get());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.extractor;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.jmeter.processor.PostProcessor;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.AbstractScopedTestElement;
import org.apache.jmeter.testelement.property.IntegerProperty;
import org.apache.jmeter.threads.JMeterContext;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.Document;
import org.apache.jmeter.util.JMeterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extracts Strings from a text response between a start and end boundary.
*/
public class BoundaryExtractor extends AbstractScopedTestElement implements PostProcessor, Serializable {

    private static final Logger log = LoggerFactory.getLogger(BoundaryExtractor.class);

    private static final long serialVersionUID = 2L;

    // JMeter property keys. Do not rename: they are persisted in saved test plans.
    private static final String REFNAME = "BoundaryExtractor.refname"; // $NON-NLS-1$

    private static final String MATCH_NUMBER = "BoundaryExtractor.match_number"; // $NON-NLS-1$

    private static final String L_BOUNDARY = "BoundaryExtractor.lboundary"; // $NON-NLS-1$

    private static final String R_BOUNDARY = "BoundaryExtractor.rboundary"; // $NON-NLS-1$

    private static final String DEFAULT_EMPTY_VALUE = "BoundaryExtractor.default_empty_value"; // $NON-NLS-1$

    private static final String DEFAULT = "BoundaryExtractor.default"; // $NON-NLS-1$

    // Suffix of the helper variable holding the number of matches (refName_matchNr).
    private static final String REF_MATCH_NR = "_matchNr"; // $NON-NLS-1$

    // Separator used in numbered match variables (refName_1, refName_2, ...).
    private static final char UNDERSCORE = '_'; // $NON-NLS-1$

    // What to match against. N.B. do not change the string value or test plans will break!
    private static final String MATCH_AGAINST = "BoundaryExtractor.useHeaders"; // $NON-NLS-1$
    /*
     * Permissible values:
     * true - match against headers
     * false or absent - match against body (this was the original default)
     * URL - match against URL
     * These are passed to the setUseField() method
     *
     * Do not change these values!
     */
    private static final String USE_HDRS = "true"; // $NON-NLS-1$
    private static final String USE_REQUEST_HDRS = "request_headers"; // $NON-NLS-1$
    private static final String USE_BODY = "false"; // $NON-NLS-1$
    private static final String USE_BODY_UNESCAPED = "unescaped"; // $NON-NLS-1$
    private static final String USE_BODY_AS_DOCUMENT = "as_document"; // $NON-NLS-1$
    private static final String USE_URL = "URL"; // $NON-NLS-1$
    private static final String USE_CODE = "code"; // $NON-NLS-1$
    private static final String USE_MESSAGE = "message"; // $NON-NLS-1$

    /**
     * Parses the response data using Boundaries and saving the results
     * into variables for use later in the test.
     *
     * @see PostProcessor#process()
     */
    @Override
    public void process() {
        JMeterContext context = getThreadContext();
        SampleResult previousResult = context.getPreviousResult();
        if (previousResult == null) {
            return;
        }
        if (log.isDebugEnabled()) {
            log.debug("Boundary Extractor {}: processing result", getName());
        }
        if (StringUtils.isEmpty(getRefName())) {
            throw new IllegalArgumentException(
                    "One of the mandatory properties is missing in Boundary Extractor:" + getName());
        }
        JMeterVariables vars = context.getVariables();
        String refName = getRefName();
        final String defaultValue = getDefaultValue();
        // Pre-seed the variable with the default so a failed extraction leaves a defined value.
        if (StringUtils.isNotBlank(defaultValue) || isEmptyDefaultValue()) {
            vars.put(refName, defaultValue);
        }
        int matchNumber = getMatchNumber();
        int prevCount = 0;
        int matchCount = 0;
        try {
            prevCount = removePrevCount(vars, refName);
            List<String> matches = extractMatches(previousResult, vars, matchNumber);
            matchCount = saveMatches(vars, refName, matchNumber, matches);
        } catch (RuntimeException e) { // NOSONAR
            if (log.isWarnEnabled()) {
                log.warn("{}: Error while generating result. {}", getName(), e.toString()); // NOSONAR We don't want to be too verbose
            }
        } finally {
            // Remove any left-over variables
            for (int i = matchCount + 1; i <= prevCount; i++) {
                vars.remove(refName + UNDERSCORE + i);
            }
        }
    }

    /**
     * Removes the {@code refName_matchNr} variable left over from a previous iteration
     * and returns its numeric value (0 when absent or unparseable).
     *
     * @param vars    thread variables
     * @param refName base variable name
     * @return the previous match count, used later to delete stale numbered variables
     */
    private int removePrevCount(JMeterVariables vars, String refName) {
        int prevCount = 0;
        String prevString = vars.get(refName + REF_MATCH_NR);
        if (prevString != null) {
            // ensure old value is not left defined
            vars.remove(refName + REF_MATCH_NR);
            try {
                prevCount = Integer.parseInt(prevString);
            } catch (NumberFormatException nfe) {
                if (log.isWarnEnabled()) {
                    log.warn("{}: Could not parse number: '{}'.", getName(), prevString);
                }
            }
        }
        return prevCount;
    }

    /**
     * Extracts matches either from a single scoped variable or from all in-scope sample results.
     *
     * NOTE(review): the variable path delegates to the String overload of extract(), while the
     * sample path goes through the Stream overload; their handling of empty boundaries with
     * matchNumber &gt; 1 appears to differ — confirm before relying on identical semantics.
     */
    private List<String> extractMatches(SampleResult previousResult, JMeterVariables vars, int matchNumber) {
        if (isScopeVariable()) {
            String inputString = vars.get(getVariableName());
            if (inputString == null && log.isWarnEnabled()) {
                log.warn("No variable '{}' found to process by Boundary Extractor '{}', skipping processing",
                        getVariableName(), getName());
            }
            return extract(getLeftBoundary(), getRightBoundary(), matchNumber, inputString);
        } else {
            Stream<String> inputs = getSampleList(previousResult).stream().map(this::getInputString);
            return extract(getLeftBoundary(), getRightBoundary(), matchNumber, inputs);
        }
    }

    /**
     * Stores the extracted matches into variables according to {@code matchNumber}
     * (0 = random single match, &gt;0 = that specific match, &lt;0 = all matches).
     *
     * @param vars {@link JMeterVariables}
     * @param refName Var name
     * @param matchNumber number of matches
     * @param matches List of String
     * @return 0 if at most one match was stored, else the number of matches; the caller
     *         uses this count to remove left-over numbered variables from earlier runs
     */
    private int saveMatches(JMeterVariables vars, String refName, int matchNumber, List<String> matches) {
        if (matchNumber >=0 && matches.isEmpty()) {
            return 0;
        }
        int matchCount = 0;
        if (matchNumber == 0) {
            saveRandomMatch(vars, refName, matches);
        } else if (matchNumber > 0) {
            saveOneMatch(vars, refName, matches);
        } else {
            matchCount = matches.size();
            saveAllMatches(vars, refName, matches);
        }
        return matchCount;
    }

    // Stores one randomly chosen match under refName (matchNumber == 0).
    private void saveRandomMatch(JMeterVariables vars, String refName, List<String> matches) {
        String match = matches.get(JMeterUtils.getRandomInt(matches.size()));
        if (match != null) {
            vars.put(refName, match);
        }
    }

    // Stores the single requested match under refName (matchNumber > 0).
    private void saveOneMatch(JMeterVariables vars, String refName, List<String> matches) {
        if (matches.size() == 1) { // if not then invalid matchNum was likely supplied
            String match = matches.get(0);
            if (match != null) {
                vars.put(refName, match);
            }
        }
    }

    // Stores every match as refName_1..refName_N plus refName_matchNr (matchNumber < 0).
    private void saveAllMatches(JMeterVariables vars, String refName, List<String> matches) {
        vars.put(refName + REF_MATCH_NR, Integer.toString(matches.size()));
        for (int i = 0; i < matches.size(); i++) {
            String match = matches.get(i);
            if (match != null) {
                int varNum = i + 1;
                vars.put(refName + UNDERSCORE + varNum, match);
            }
        }
    }

    // Resolves the text to search in for one sample result and logs it at debug level.
    private String getInputString(SampleResult result) {
        String inputString = chosenInput(result);
        log.debug("Input = '{}'", inputString);
        return inputString;
    }

    // Selects the part of the sample result to match against, per the MATCH_AGAINST property.
    private String chosenInput(SampleResult result) {
        if (useUrl()) {
            return result.getUrlAsString(); // Bug 39707;
        }
        if (useHeaders()) {
            return result.getResponseHeaders();
        }
        if (useRequestHeaders()) {
            return result.getRequestHeaders();
        }
        if (useCode()) {
            return result.getResponseCode(); // Bug 43451
        }
        if (useMessage()) {
            return result.getResponseMessage(); // Bug 43451
        }
        if (useUnescapedBody()) {
            return StringEscapeUtils.unescapeHtml4(result.getResponseDataAsString());
        }
        if (useBodyAsDocument()) {
            return Document.getTextFromDocument(result.getResponseData());
        }
        return result.getResponseDataAsString(); // Bug 36898
    }

    /**
     * Extracts matches across multiple inputs: all fragments from every input are
     * concatenated in order, then matchNumber selects one of them (or all when &lt;= 0).
     */
    private List<String> extract(
            String leftBoundary, String rightBoundary, int matchNumber, Stream<String> previousResults) {
        boolean allItems = matchNumber <= 0;
        return previousResults
                .flatMap(input -> extractAll(leftBoundary, rightBoundary, input).stream())
                .skip(allItems ? 0L : matchNumber - 1)
                .limit(allItems ? Long.MAX_VALUE : 1L)
                .collect(Collectors.toList());
    }

    /**
     * Extracts text fragments, that are between the boundaries, into {@code result}.
     * The number of extracted fragments can be controlled by {@code matchNumber}
     *
     * NOTE(review): when exactly one boundary is empty, the first fragment is returned
     * regardless of {@code matchNumber} — verify this asymmetry with the Stream overload
     * is intentional.
     *
     * @param leftBoundary fragment representing the left boundary of the searched text
     * @param rightBoundary fragment representing the right boundary of the searched text
     * @param matchNumber if {@code <=0}, all found matches will be returned, else only
     *                    up to {@code matchNumber} matches
     * @param inputString text in which to look for the fragments
     * @return list where the found text fragments will be placed
     */
    private List<String> extract(String leftBoundary, String rightBoundary, int matchNumber, String inputString) {
        if (StringUtils.isBlank(inputString)) {
            return Collections.emptyList();
        }
        boolean isEmptyLeftBoundary = StringUtils.isEmpty(leftBoundary);
        boolean isEmptyRightBoundary = StringUtils.isEmpty(rightBoundary);
        // Both boundaries empty: the whole input is the single match.
        if (isEmptyLeftBoundary && isEmptyRightBoundary) {
            return Collections.singletonList(inputString);
        }
        if (isEmptyLeftBoundary) {
            int rightBoundaryIndex = inputString.indexOf(rightBoundary);
            if (rightBoundaryIndex != -1) {
                return Collections.singletonList(inputString.substring(0, rightBoundaryIndex));
            }
        }
        if (isEmptyRightBoundary) {
            int leftBoundaryIndex = inputString.indexOf(leftBoundary);
            if (leftBoundaryIndex != -1) {
                return Collections.singletonList(inputString.substring(leftBoundaryIndex + leftBoundary.length()));
            }
        }
        List<String> matches = new ArrayList<>();
        int leftBoundaryLen = leftBoundary.length();
        boolean collectAll = matchNumber <= 0;
        int found = 0;
        // Scan for each left boundary, then look for the closest right boundary after it.
        for (int startIndex = 0;
             (startIndex = inputString.indexOf(leftBoundary, startIndex)) != -1;
             startIndex += leftBoundaryLen) {
            int endIndex = inputString.indexOf(rightBoundary, startIndex + leftBoundaryLen);
            if (endIndex >= 0) {
                found++;
                if (collectAll) {
                    matches.add(inputString.substring(startIndex + leftBoundaryLen, endIndex));
                } else if (found == matchNumber) {
                    // Early exit once the requested match is located.
                    return Collections.singletonList(inputString.substring(startIndex + leftBoundaryLen, endIndex));
                }
            } else {
                // No right boundary remains; no further matches are possible.
                break;
            }
        }
        return Collections.unmodifiableList(matches);
    }

    /**
     * Extracts every fragment between the two boundaries from {@code textToParse}.
     *
     * @param leftBoundary  left boundary text
     * @param rightBoundary right boundary text
     * @param textToParse   text to search
     * @return all fragments found, possibly empty, unmodifiable
     */
    public List<String> extractAll(
            String leftBoundary, String rightBoundary, String textToParse) {
        return extract(leftBoundary, rightBoundary, -1, textToParse);
    }

    // Sets the name of the variable the result is stored under.
    public void setRefName(String refName) {
        setProperty(REFNAME, refName);
    }

    public String getRefName() {
        return getPropertyAsString(REFNAME);
    }

    /**
     * Set which Match to use. This can be any positive number, indicating the
     * exact match to use, or <code>0</code>, which is interpreted as meaning random.
     *
     * @param matchNumber The number of the match to be used
     */
    public void setMatchNumber(int matchNumber) {
        setProperty(new IntegerProperty(MATCH_NUMBER, matchNumber));
    }

    // String variant used by the GUI; the value may contain a variable reference.
    public void setMatchNumber(String matchNumber) {
        setProperty(MATCH_NUMBER, matchNumber);
    }

    public int getMatchNumber() {
        return getPropertyAsInt(MATCH_NUMBER);
    }

    public String getMatchNumberAsString() {
        return getPropertyAsString(MATCH_NUMBER);
    }

    public void setLeftBoundary(String leftBoundary) {
        setProperty(L_BOUNDARY, leftBoundary);
    }

    public String getLeftBoundary() {
        return getPropertyAsString(L_BOUNDARY);
    }

    public void setRightBoundary(String rightBoundary) {
        setProperty(R_BOUNDARY, rightBoundary);
    }

    public String getRightBoundary() {
        return getPropertyAsString(R_BOUNDARY);
    }

    /**
     * Sets the value of the variable if no matches are found
     *
     * @param defaultValue The default value for the variable
     */
    public void setDefaultValue(String defaultValue) {
        setProperty(DEFAULT, defaultValue);
    }

    /**
     * @param defaultEmptyValue boolean set value to "" if not found
     */
    public void setDefaultEmptyValue(boolean defaultEmptyValue) {
        setProperty(DEFAULT_EMPTY_VALUE, defaultEmptyValue);
    }

    /**
     * Get the default value for the variable if no matches are found
     *
     * @return The default value for the variable
     */
    public String getDefaultValue() {
        return getPropertyAsString(DEFAULT);
    }

    /**
     * @return boolean set value to "" if not found
     */
    public boolean isEmptyDefaultValue() {
        return getPropertyAsBoolean(DEFAULT_EMPTY_VALUE);
    }

    // The use*() predicates below decode the MATCH_AGAINST property (see constants above).
    public boolean useHeaders() {
        return USE_HDRS.equalsIgnoreCase(getPropertyAsString(MATCH_AGAINST));
    }

    public boolean useRequestHeaders() {
        return USE_REQUEST_HDRS.equalsIgnoreCase(getPropertyAsString(MATCH_AGAINST));
    }

    // An absent/empty property means "body" — the historical default.
    public boolean useBody() {
        String prop = getPropertyAsString(MATCH_AGAINST);
        return prop.length() == 0 || USE_BODY.equalsIgnoreCase(prop);
    }

    public boolean useUnescapedBody() {
        String prop = getPropertyAsString(MATCH_AGAINST);
        return USE_BODY_UNESCAPED.equalsIgnoreCase(prop);
    }

    public boolean useBodyAsDocument() {
        String prop = getPropertyAsString(MATCH_AGAINST);
        return USE_BODY_AS_DOCUMENT.equalsIgnoreCase(prop);
    }

    public boolean useUrl() {
        String prop = getPropertyAsString(MATCH_AGAINST);
        return USE_URL.equalsIgnoreCase(prop);
    }

    public boolean useCode() {
        String prop = getPropertyAsString(MATCH_AGAINST);
        return USE_CODE.equalsIgnoreCase(prop);
    }

    public boolean useMessage() {
        String prop = getPropertyAsString(MATCH_AGAINST);
        return USE_MESSAGE.equalsIgnoreCase(prop);
    }

    /**
     * Selects what to match against; see the USE_* constants for permissible values.
     *
     * @param actionCommand one of the documented field selector values
     */
    public void setUseField(String actionCommand) {
        setProperty(MATCH_AGAINST, actionCommand);
    }
}
| |
/**
* Copyright (C) 2015 DataTorrent, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datatorrent.stram.client;
import java.io.*;
import java.net.*;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import org.mozilla.javascript.Scriptable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.DTLoggerFactory;
import com.datatorrent.api.StreamingApplication;
import com.datatorrent.stram.StramClient;
import com.datatorrent.stram.security.StramUserLogin;
import com.datatorrent.stram.util.ConfigUtils;
import com.datatorrent.stram.util.ConfigValidator;
/**
* Collection of utility classes for command line interface package<p>
* <br>
* List includes<br>
* Yarn Client Helper<br>
* Resource Mgr Client Helper<br>
* <br>
*
* @since 0.3.2
*/
public class StramClientUtils
{
public static final String DT_VERSION = StreamingApplication.DT_PREFIX + "version";
public static final String DT_DFS_ROOT_DIR = StreamingApplication.DT_PREFIX + "dfsRootDirectory";
public static final String DT_DFS_USER_NAME = "%USER_NAME%";
public static final String DT_CONFIG_STATUS = StreamingApplication.DT_PREFIX + "configStatus";
public static final String SUBDIR_APPS = "apps";
public static final String SUBDIR_PROFILES = "profiles";
public static final String SUBDIR_CONF = "conf";
public static final int RESOURCEMANAGER_CONNECT_MAX_WAIT_MS_OVERRIDE = 10 * 1000;
public static final String HDFS_TOKEN_MAX_LIFE_TIME = "dfs.namenode.delegation.token.max-lifetime";
public static final String RM_TOKEN_MAX_LIFE_TIME = YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_KEY;
public static final String KEY_TAB_FILE = StramUserLogin.DT_AUTH_PREFIX + "store.keytab";
public static final String TOKEN_ANTICIPATORY_REFRESH_FACTOR = StramUserLogin.DT_AUTH_PREFIX + "token.refresh.factor";
/**
* TBD<p>
* <br>
*/
public static class YarnClientHelper
{
private static final Logger LOG = LoggerFactory.getLogger(YarnClientHelper.class);
// Configuration
private final Configuration conf;
// RPC to communicate to RM
private final YarnRPC rpc;
public YarnClientHelper(Configuration conf)
{
// Set up the configuration and RPC
this.conf = conf;
this.rpc = YarnRPC.create(conf);
}
public Configuration getConf()
{
return this.conf;
}
public YarnRPC getYarnRPC()
{
return rpc;
}
/**
* Connect to the Resource Manager/Applications Manager<p>
*
* @return Handle to communicate with the ASM
* @throws IOException
*/
public ApplicationClientProtocol connectToASM() throws IOException
{
YarnConfiguration yarnConf = new YarnConfiguration(conf);
InetSocketAddress rmAddress = yarnConf.getSocketAddr(
YarnConfiguration.RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_PORT);
LOG.debug("Connecting to ResourceManager at " + rmAddress);
return ((ApplicationClientProtocol) rpc.getProxy(
ApplicationClientProtocol.class, rmAddress, conf));
}
/**
* Connect to the Resource Manager<p>
*
* @return Handle to communicate with the RM
*/
public ApplicationMasterProtocol connectToRM()
{
InetSocketAddress rmAddress = conf.getSocketAddr(
YarnConfiguration.RM_SCHEDULER_ADDRESS,
YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS,
YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT);
LOG.debug("Connecting to ResourceManager at " + rmAddress);
return ((ApplicationMasterProtocol) rpc.getProxy(ApplicationMasterProtocol.class, rmAddress, conf));
}
}
/**
* Bunch of utilities that ease repeating interactions with {@link ClientRMProxy}<p>
*/
public static class ClientRMHelper
{
private static final Logger LOG = LoggerFactory.getLogger(ClientRMHelper.class);
private static final String RM_HOSTNAME_PREFIX = YarnConfiguration.RM_PREFIX + "hostname.";
private final YarnClient clientRM;
private final Configuration conf;
public ClientRMHelper(YarnClient yarnClient, Configuration conf) throws IOException
{
this.clientRM = yarnClient;
this.conf = conf;
}
public static interface AppStatusCallback
{
boolean exitLoop(ApplicationReport report);
}
/**
* Monitor the submitted application for completion. Kill application if time expires.
*
* @param appId Application Id of application to be monitored
* @param callback
* @param timeoutMillis
* @return true if application completed successfully
* @throws YarnException
* @throws IOException
*/
@SuppressWarnings("SleepWhileInLoop")
public boolean waitForCompletion(ApplicationId appId, AppStatusCallback callback, long timeoutMillis) throws YarnException, IOException
{
long startMillis = System.currentTimeMillis();
while (true) {
// Check app status every 1 second.
try {
Thread.sleep(1000);
}
catch (InterruptedException e) {
LOG.debug("Thread sleep in monitoring loop interrupted");
}
ApplicationReport report = clientRM.getApplicationReport(appId);
if (callback.exitLoop(report) == true) {
return true;
}
YarnApplicationState state = report.getYarnApplicationState();
FinalApplicationStatus dsStatus = report.getFinalApplicationStatus();
if (YarnApplicationState.FINISHED == state) {
if (FinalApplicationStatus.SUCCEEDED == dsStatus) {
LOG.info("Application has completed successfully. Breaking monitoring loop");
return true;
}
else {
LOG.info("Application finished unsuccessfully."
+ " YarnState=" + state.toString() + ", DSFinalStatus=" + dsStatus.toString()
+ ". Breaking monitoring loop");
return false;
}
}
else if (YarnApplicationState.KILLED == state
|| YarnApplicationState.FAILED == state) {
LOG.info("Application did not finish."
+ " YarnState=" + state.toString() + ", DSFinalStatus=" + dsStatus.toString()
+ ". Breaking monitoring loop");
return false;
}
if (System.currentTimeMillis() - startMillis > timeoutMillis) {
LOG.info("Reached specified timeout. Killing application");
clientRM.killApplication(appId);
return false;
}
}
}
// TODO: HADOOP UPGRADE - replace with YarnConfiguration constants
private Token<RMDelegationTokenIdentifier> getRMHAToken(org.apache.hadoop.yarn.api.records.Token rmDelegationToken) {
// Build a list of service addresses to form the service name
ArrayList<String> services = new ArrayList<String>();
for (String rmId : ConfigUtils.getRMHAIds(conf)) {
LOG.info("Yarn Resource Manager id: {}", rmId);
// Set RM_ID to get the corresponding RM_ADDRESS
services.add(SecurityUtil.buildTokenService(NetUtils.createSocketAddr(
conf.get(RM_HOSTNAME_PREFIX + rmId),
YarnConfiguration.DEFAULT_RM_PORT,
RM_HOSTNAME_PREFIX + rmId)).toString());
}
Text rmTokenService = new Text(Joiner.on(',').join(services));
return new Token<RMDelegationTokenIdentifier>(
rmDelegationToken.getIdentifier().array(),
rmDelegationToken.getPassword().array(),
new Text(rmDelegationToken.getKind()),
rmTokenService);
}
public void addRMDelegationToken(final String renewer, final Credentials credentials) throws IOException, YarnException {
// Get the ResourceManager delegation rmToken
final org.apache.hadoop.yarn.api.records.Token rmDelegationToken = clientRM.getRMDelegationToken(new Text(renewer));
Token<RMDelegationTokenIdentifier> token;
// TODO: Use the utility method getRMDelegationTokenService in ClientRMProxy to remove the separate handling of
// TODO: HA and non-HA cases when hadoop dependency is changed to hadoop 2.4 or above
if (ConfigUtils.isRMHAEnabled(conf)) {
LOG.info("Yarn Resource Manager HA is enabled");
token = getRMHAToken(rmDelegationToken);
} else {
LOG.info("Yarn Resource Manager HA is not enabled");
InetSocketAddress rmAddress = conf.getSocketAddr(YarnConfiguration.RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_PORT);
token = ConverterUtils.convertFromYarn(rmDelegationToken, rmAddress);
}
LOG.info("RM dt {}", token);
credentials.addToken(token.getService(), token);
}
}
private static final Logger LOG = LoggerFactory.getLogger(StramClientUtils.class);
public static String getHostName()
{
try {
return java.net.InetAddress.getLocalHost().getHostName();
}
catch (UnknownHostException ex) {
return null;
}
}
public static File getUserDTDirectory()
{
String envHome = System.getenv("HOME");
if (StringUtils.isEmpty(envHome)) {
return new File(FileUtils.getUserDirectory(), ".dt");
}
else {
return new File(envHome, ".dt");
}
}
public static File getConfigDir()
{
URL resource = StramClientUtils.class.getClassLoader().getResource(DT_ENV_SH_FILE);
try {
if (resource == null) {
return getUserDTDirectory();
}
return new File(resource.toURI()).getParentFile();
}
catch (URISyntaxException ex) {
throw new RuntimeException(ex);
}
}
public static File getInstallationDir()
{
URL resource = StramClientUtils.class.getClassLoader().getResource(DT_ENV_SH_FILE);
try {
if (resource == null) {
return null;
}
return new File(resource.toURI()).getParentFile().getParentFile();
}
catch (URISyntaxException ex) {
throw new RuntimeException(ex);
}
}
public static boolean isDevelopmentMode()
{
return getUserDTDirectory().equals(getConfigDir());
}
public static File getBackupsDirectory()
{
return new File(getConfigDir(), BACKUPS_DIRECTORY);
}
public static final String DT_DEFAULT_XML_FILE = "dt-default.xml";
public static final String DT_SITE_XML_FILE = "dt-site.xml";
public static final String DT_SITE_GLOBAL_XML_FILE = "dt-site-global.xml";
public static final String DT_ENV_SH_FILE = "dt-env.sh";
public static final String CUSTOM_ENV_SH_FILE = "custom-env.sh";
public static final String BACKUPS_DIRECTORY = "backups";
  /**
   * Adds the dt-default.xml classpath resource to the given configuration.
   *
   * @param conf the configuration to augment
   * @return the same configuration instance, for call chaining
   */
  public static Configuration addDTDefaultResources(Configuration conf)
  {
    conf.addResource(DT_DEFAULT_XML_FILE);
    return conf;
  }
  /**
   * Loads the full layered DT configuration into {@code conf}:
   * local defaults, then the global dt-site-global.xml copied from DFS,
   * then the node-local and per-user dt-site.xml files (later wins).
   * Also validates logger-level settings, migrates deprecated "stram." keys,
   * and caps the RM connect wait/retry intervals.
   *
   * @param conf the configuration to populate; it is modified in place
   * @return the same configuration instance, for call chaining
   */
  public static Configuration addDTSiteResources(Configuration conf)
  {
    addDTLocalResources(conf);
    FileSystem fs = null;
    File targetGlobalFile;
    try {
      fs = newFileSystemInstance(conf);
      // after getting the dfsRootDirectory config parameter, redo the entire process with the global config
      // load global settings from DFS
      targetGlobalFile = new File(String.format("/tmp/dt-site-global-%s.xml", UserGroupInformation.getLoginUser().getShortUserName()));
      org.apache.hadoop.fs.Path hdfsGlobalPath = new org.apache.hadoop.fs.Path(StramClientUtils.getDTDFSConfigDir(fs, conf), StramClientUtils.DT_SITE_GLOBAL_XML_FILE);
      LOG.debug("Copying global dt-site.xml from {} to {}", hdfsGlobalPath, targetGlobalFile.getAbsolutePath());
      fs.copyToLocalFile(hdfsGlobalPath, new org.apache.hadoop.fs.Path(targetGlobalFile.toURI()));
      addDTSiteResources(conf, targetGlobalFile);
      if (!isDevelopmentMode()) {
        // load node local config file
        addDTSiteResources(conf, new File(StramClientUtils.getConfigDir(), StramClientUtils.DT_SITE_XML_FILE));
      }
      // load user config file
      addDTSiteResources(conf, new File(StramClientUtils.getUserDTDirectory(), StramClientUtils.DT_SITE_XML_FILE));
    }
    catch (IOException ex) {
      // ignore: DFS may be unreachable; proceed with whatever local configuration was loaded
      LOG.debug("Caught exception when loading configuration: {}: moving on...", ex.getMessage());
    }
    finally {
      // Cannot delete the file here because addDTSiteResource which eventually calls Configuration.reloadConfiguration
      // does not actually reload the configuration. The file is actually read later and it needs to exist.
      //
      //if (targetGlobalFile != null) {
      //targetGlobalFile.delete();
      //}
      IOUtils.closeQuietly(fs);
    }

    //Validate loggers-level settings
    String loggersLevel = conf.get(DTLoggerFactory.DT_LOGGERS_LEVEL);
    if (loggersLevel != null) {
      String targets[] = loggersLevel.split(",");
      Preconditions.checkArgument(targets.length > 0, "zero loggers level");
      for (String target : targets) {
        // each entry must be of the form <logger-pattern>:<level>
        String parts[] = target.split(":");
        Preconditions.checkArgument(parts.length == 2, "incorrect " + target);
        Preconditions.checkArgument(ConfigValidator.validateLoggersLevel(parts[0], parts[1]), "incorrect " + target);
      }
    }
    convertDeprecatedProperties(conf);

    //
    // The ridiculous default RESOURCEMANAGER_CONNECT_MAX_WAIT_MS from hadoop is 15 minutes (!!!!), which actually translates to 20 minutes with the connect interval.
    // That means if there is anything wrong with YARN or if YARN is not running, the caller has to wait for up to 20 minutes until it gets an error.
    // We are overriding this to be 10 seconds maximum.
    //
    int rmConnectMaxWait = conf.getInt(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS, YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_MS);
    if (rmConnectMaxWait > RESOURCEMANAGER_CONNECT_MAX_WAIT_MS_OVERRIDE) {
      LOG.info("Overriding {} assigned value of {} to {} because the assigned value is too big.", YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS, rmConnectMaxWait, RESOURCEMANAGER_CONNECT_MAX_WAIT_MS_OVERRIDE);
      conf.setInt(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS, RESOURCEMANAGER_CONNECT_MAX_WAIT_MS_OVERRIDE);
      int rmConnectRetryInterval = conf.getInt(YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS, YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_MS);
      int defaultRetryInterval = Math.max(500, RESOURCEMANAGER_CONNECT_MAX_WAIT_MS_OVERRIDE / 5);
      if (rmConnectRetryInterval > defaultRetryInterval) {
        LOG.info("Overriding {} assigned value of {} to {} because the assigned value is too big.", YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS, rmConnectRetryInterval, defaultRetryInterval);
        conf.setInt(YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS, defaultRetryInterval);
      }
    }
    LOG.info(" conf object in stramclient {}", conf);
    return conf;
  }
  /**
   * Adds only the locally available DT configuration resources:
   * dt-default.xml, then the node-local dt-site.xml (skipped in development mode),
   * then the per-user ~/.dt/dt-site.xml. Later resources override earlier ones.
   *
   * @param conf the configuration to populate; it is modified in place
   */
  public static void addDTLocalResources(Configuration conf)
  {
    conf.addResource(DT_DEFAULT_XML_FILE);
    if (!isDevelopmentMode()) {
      addDTSiteResources(conf, new File(StramClientUtils.getConfigDir(), StramClientUtils.DT_SITE_XML_FILE));
    }
    addDTSiteResources(conf, new File(StramClientUtils.getUserDTDirectory(), StramClientUtils.DT_SITE_XML_FILE));
  }
private static Configuration addDTSiteResources(Configuration conf, File confFile)
{
if (confFile.exists()) {
LOG.info("Loading settings: " + confFile.toURI());
conf.addResource(new Path(confFile.toURI()));
}
else {
LOG.info("Configuration file {} is not found. Skipping...", confFile.toURI());
}
return conf;
}
  @SuppressWarnings("deprecation")
  /**
   * Migrates deprecated "stram."-prefixed properties to their
   * StreamingApplication.DT_PREFIX equivalents, warning about each one.
   * The values themselves are preserved unchanged.
   *
   * @param conf the configuration to migrate; it is modified in place
   */
  private static void convertDeprecatedProperties(Configuration conf)
  {
    Iterator<Map.Entry<String, String>> iterator = conf.iterator();
    // Collect replacements first: mutating conf while iterating it is only safe
    // through iterator.remove(), so the new keys are added in a second pass.
    Map<String, String> newEntries = new HashMap<String, String>();
    while (iterator.hasNext()) {
      Map.Entry<String, String> entry = iterator.next();
      if (entry.getKey().startsWith("stram.")) {
        // "stram." is 6 characters long; keep the rest of the key.
        String newKey = StreamingApplication.DT_PREFIX + entry.getKey().substring(6);
        LOG.warn("Configuration property {} is deprecated. Please use {} instead.", entry.getKey(), newKey);
        newEntries.put(newKey, entry.getValue());
        iterator.remove();
      }
    }
    for (Map.Entry<String, String> entry : newEntries.entrySet()) {
      conf.set(entry.getKey(), entry.getValue());
    }
  }
/**
 * Returns the installation-wide dt-site.xml location as a URL.
 *
 * @return URL of the site configuration file (the file may not exist)
 */
public static URL getDTSiteXmlFile()
{
  File siteFile = new File(StramClientUtils.getConfigDir(), StramClientUtils.DT_SITE_XML_FILE);
  try {
    return siteFile.toURI().toURL();
  }
  catch (MalformedURLException ex) {
    // A URI derived from a File should always convert to a URL;
    // treat a failure as a programming error.
    throw new RuntimeException(ex);
  }
}
/**
 * Creates a new FileSystem instance for the configured DT DFS root directory,
 * falling back to the default filesystem when no root is configured or the
 * configured root is not a valid URI.
 *
 * Side effect: when the root directory contains the user-name placeholder,
 * it is expanded with the login user's short name and written back into conf.
 *
 * @param conf configuration to read the DT DFS root from
 * @return a new FileSystem instance (caller is responsible for closing it)
 * @throws IOException if the filesystem cannot be created
 */
public static FileSystem newFileSystemInstance(Configuration conf) throws IOException
{
  String dfsRootDir = conf.get(DT_DFS_ROOT_DIR);
  if (StringUtils.isBlank(dfsRootDir)) {
    // No explicit root configured: use the default filesystem from conf.
    return FileSystem.newInstance(conf);
  }
  else {
    if (dfsRootDir.contains(DT_DFS_USER_NAME)) {
      // Expand the %USER_NAME% style placeholder and persist the expansion.
      dfsRootDir = dfsRootDir.replace(DT_DFS_USER_NAME, UserGroupInformation.getLoginUser().getShortUserName());
      conf.set(DT_DFS_ROOT_DIR, dfsRootDir);
    }
    try {
      return FileSystem.newInstance(new URI(dfsRootDir), conf);
    }
    catch (URISyntaxException ex) {
      // Invalid URI: degrade gracefully to the default filesystem.
      LOG.warn("{} is not a valid URI. Returning the default filesystem", dfsRootDir, ex);
      return FileSystem.newInstance(conf);
    }
  }
}
/**
 * Resolves the DT root directory on the given filesystem. When no root is
 * configured, defaults to "datatorrent" under the user's home directory.
 * A configured absolute URI is used as-is; a relative or invalid value is
 * anchored at the filesystem's scheme/authority.
 *
 * Side effect: expands the user-name placeholder in the configured root and
 * writes the expansion back into conf.
 *
 * @param fs filesystem used for defaults and scheme/authority
 * @param conf configuration holding the optional DT DFS root
 * @return resolved root path
 */
public static Path getDTDFSRootDir(FileSystem fs, Configuration conf)
{
  String dfsRootDir = conf.get(DT_DFS_ROOT_DIR);
  if (StringUtils.isBlank(dfsRootDir)) {
    return new Path(fs.getHomeDirectory(), "datatorrent");
  }
  else {
    try {
      if (dfsRootDir.contains(DT_DFS_USER_NAME)) {
        dfsRootDir = dfsRootDir.replace(DT_DFS_USER_NAME, UserGroupInformation.getLoginUser().getShortUserName());
        conf.set(DT_DFS_ROOT_DIR, dfsRootDir);
      }
      URI uri = new URI(dfsRootDir);
      if (uri.isAbsolute()) {
        return new Path(uri);
      }
      // Non-absolute URIs fall through to the scheme/authority-anchored path.
    }
    catch (IOException ex) {
      // getLoginUser() failed; proceed with the unexpanded value.
      LOG.warn("Error getting user login name {}", dfsRootDir, ex);
    }
    catch (URISyntaxException ex) {
      LOG.warn("{} is not a valid URI. Using the default filesystem to construct the path", dfsRootDir, ex);
    }
    return new Path(fs.getUri().getScheme(), fs.getUri().getAuthority(), dfsRootDir);
  }
}
/**
 * Returns the DT configuration directory, located directly under the DT DFS root.
 *
 * @param fs filesystem used to resolve the root
 * @param conf configuration holding the optional DT DFS root
 * @return path of the config subdirectory
 */
public static Path getDTDFSConfigDir(FileSystem fs, Configuration conf)
{
  Path root = getDTDFSRootDir(fs, conf);
  return new Path(root, SUBDIR_CONF);
}
/**
 * Returns the DT profiles directory, located directly under the DT DFS root.
 *
 * @param fs filesystem used to resolve the root
 * @param conf configuration holding the optional DT DFS root
 * @return path of the profiles subdirectory
 */
public static Path getDTDFSProfilesDir(FileSystem fs, Configuration conf)
{
  Path root = getDTDFSRootDir(fs, conf);
  return new Path(root, SUBDIR_PROFILES);
}
/**
 * Changes (or adds) a DT environment variable in the custom env shell file.
 * A restart is required for the new setting to take effect.
 *
 * If no env file exists on the classpath, a new per-user file is created with
 * just the export line. Otherwise the existing file is rewritten in place:
 * a matching "export KEY=..." line is replaced, and the export is appended
 * when no match was found.
 *
 * @param key environment variable name
 * @param value value to export
 * @throws IOException if the env file cannot be read or written
 * @throws IllegalStateException in development mode, where env files don't apply
 */
public static void changeDTEnvironment(String key, String value) throws IOException
{
  if (isDevelopmentMode()) {
    throw new IllegalStateException("Cannot change DT environment in development mode.");
  }
  URL resource = StramClientUtils.class.getClassLoader().getResource(CUSTOM_ENV_SH_FILE);
  if (resource == null) {
    // No env file yet: create one containing only this export.
    File envFile = new File(StramClientUtils.getUserDTDirectory(), StramClientUtils.CUSTOM_ENV_SH_FILE);
    FileOutputStream out = new FileOutputStream(envFile);
    try {
      // NOTE(review): uses the platform default charset — presumably fine for
      // shell files, but confirm if non-ASCII values are possible.
      out.write(("export " + key + "=\"" + value + "\"\n").getBytes());
    }
    finally {
      out.close();
    }
  }
  else {
    try {
      File cfgResource = new File(resource.toURI());
      // Serialize concurrent rewrites of the shared env file.
      synchronized (StramClientUtils.class) {
        BufferedReader br = new BufferedReader(new FileReader(cfgResource));
        StringBuilder sb = new StringBuilder(1024);
        try {
          String line;
          boolean changed = false;
          while ((line = br.readLine()) != null) {
            try {
              line = line.trim();
              if (line.startsWith("#")) {
                // Comment lines are kept verbatim; the continue still runs the
                // finally block below, which appends the line to the output.
                continue;
              }
              if (line.matches("export\\s+" + key + "=.*")) {
                // Replace an existing export of this key with the new value.
                line = "export " + key + "=\"" + value + "\"";
                changed = true;
              }
            }
            finally {
              // Every line — comment, replaced, or untouched — is emitted here.
              sb.append(line).append("\n");
            }
          }
          if (!changed) {
            // Key was not present: append a fresh export at the end.
            sb.append("export ").append(key).append("=\"").append(value).append("\"\n");
          }
        }
        finally {
          br.close();
        }
        if (sb.length() > 0) {
          // Rewrite the whole file with the accumulated contents.
          FileOutputStream out = new FileOutputStream(cfgResource);
          try {
            out.write(sb.toString().getBytes());
          }
          finally {
            out.close();
          }
        }
      }
    }
    catch (URISyntaxException ex) {
      // Classpath resource URL did not map to a file URI; nothing to rewrite.
      LOG.error("Caught exception when getting env resource:", ex);
    }
  }
}
/**
 * Copies a local file to DFS, first removing any stale local checksum file.
 *
 * Hadoop's local filesystem verifies a sibling ".&lt;name&gt;.crc" checksum when
 * copying. A file that was pulled from HDFS and then modified locally would
 * fail that check, so the checksum file is deleted (best effort) beforehand.
 *
 * @param fs destination filesystem
 * @param fromLocal local source file
 * @param toDFS destination path on DFS
 * @throws IOException if the copy itself fails
 */
public static void copyFromLocalFileNoChecksum(FileSystem fs, File fromLocal, Path toDFS) throws IOException
{
  File crcFile = new File(fromLocal.getParentFile(), "." + fromLocal.getName() + ".crc");
  try {
    crcFile.delete();
  }
  catch (Exception ignored) {
    // best effort — a missing or undeletable crc file is not fatal
  }
  fs.copyFromLocalFile(new Path(fromLocal.toURI()), toDFS);
}
/**
 * Reports whether initial configuration has been completed, as recorded by
 * the DT config status property.
 *
 * @param conf configuration to inspect
 * @return true when the status property equals "complete"
 */
public static boolean configComplete(Configuration conf)
{
  return "complete".equals(conf.get(StramClientUtils.DT_CONFIG_STATUS));
}
/**
 * Expands launch properties in place. Two transformations are applied to each
 * value, in order:
 * <ol>
 *   <li>{@code ${name}} is replaced with the matching var value (or removed
 *       when the var is absent);</li>
 *   <li>{@code {% expr %}} is evaluated as JavaScript, with all vars bound
 *       under the {@code _prop} object.</li>
 * </ol>
 *
 * BUGFIX: previously the eval pass re-read the ORIGINAL value, so when a
 * property contained both patterns the eval pass's put() silently discarded
 * the substitution pass's result. The eval pass now operates on the already
 * substituted value.
 *
 * @param target properties whose values are expanded in place
 * @param vars variables available for substitution and evaluation
 */
public static void evalProperties(Properties target, Configuration vars)
{
  Pattern substitionPattern = Pattern.compile("\\$\\{(.+?)\\}");
  Pattern evalPattern = Pattern.compile("\\{% (.+?) %\\}");
  org.mozilla.javascript.Context context = org.mozilla.javascript.Context.enter();
  // Interpreted mode: avoids class generation, works in restricted environments.
  context.setOptimizationLevel(-1);
  Scriptable scope = context.initStandardObjects();
  try {
    context.evaluateString(scope, "var _prop = {}", "EvalLaunchProperties", 0, null);
    for (Map.Entry<String, String> entry : vars) {
      LOG.info("Evaluating: {}", "_prop[\"" + entry.getKey() + "\"] = " + entry.getValue());
      context.evaluateString(scope, "_prop[\"" + entry.getKey() + "\"] = \"" + StringEscapeUtils.escapeJava(entry.getValue()) + "\"", "EvalLaunchProperties", 0, null);
    }
    for (Map.Entry<Object, Object> entry : target.entrySet()) {
      String value = entry.getValue().toString();
      // Pass 1: ${var} substitution.
      Matcher matcher = substitionPattern.matcher(value);
      if (matcher.find()) {
        StringBuilder newValue = new StringBuilder();
        int cursor = 0;
        do {
          newValue.append(value.substring(cursor, matcher.start()));
          String subst = vars.get(matcher.group(1));
          if (subst != null) {
            newValue.append(subst);
          }
          cursor = matcher.end();
        } while (matcher.find());
        newValue.append(value.substring(cursor));
        // Feed the substituted text into pass 2 instead of the original value.
        value = newValue.toString();
        target.put(entry.getKey(), value);
      }
      // Pass 2: {% script %} evaluation on the (possibly substituted) value.
      matcher = evalPattern.matcher(value);
      if (matcher.find()) {
        StringBuilder newValue = new StringBuilder();
        int cursor = 0;
        do {
          newValue.append(value.substring(cursor, matcher.start()));
          String eval = context.evaluateString(scope, matcher.group(1), "EvalLaunchProperties", 0, null).toString();
          if (eval != null) {
            newValue.append(eval);
          }
          cursor = matcher.end();
        } while (matcher.find());
        newValue.append(value.substring(cursor));
        target.put(entry.getKey(), newValue.toString());
      }
    }
  }
  finally {
    // Always release the Rhino context associated with this thread.
    org.mozilla.javascript.Context.exit();
  }
}
/**
 * Runs the given action, impersonating the named user when it is non-blank
 * and differs from the current login user; otherwise runs it directly.
 *
 * @param userName user to impersonate, may be blank
 * @param action privileged action to execute
 * @return the action's result
 * @throws Exception whatever the action throws
 */
public static <T> T doAs(String userName, PrivilegedExceptionAction<T> action) throws Exception
{
  // Note: getLoginUser() is only consulted when a user name was supplied,
  // preserving short-circuit behavior for blank names.
  boolean impersonate = StringUtils.isNotBlank(userName)
      && !userName.equals(UserGroupInformation.getLoginUser().getShortUserName());
  if (!impersonate) {
    LOG.info("Executing command as if there is no login info: {}", userName);
    return action.run();
  }
  LOG.info("Executing command as {}", userName);
  UserGroupInformation proxy = UserGroupInformation.createProxyUser(userName, UserGroupInformation.getLoginUser());
  return proxy.doAs(action);
}
/**
 * Finds a YARN application of the DT application type that matches the given
 * name and user and is running or progressing toward running, optionally
 * excluding one application id.
 *
 * @param clientRMService YARN client
 * @param appName application name to match
 * @param user submitting user to match
 * @param excludeAppId application id to skip (e.g. the caller itself), may be null
 * @return the matching report, or null when none exists
 * @throws YarnException on RM errors
 * @throws IOException on communication errors
 */
public static ApplicationReport getStartedAppInstanceByName(YarnClient clientRMService, String appName, String user, String excludeAppId) throws YarnException, IOException
{
  // Consider every app that is running or still working through submission.
  EnumSet<YarnApplicationState> activeStates = EnumSet.of(
      YarnApplicationState.RUNNING,
      YarnApplicationState.ACCEPTED,
      YarnApplicationState.NEW,
      YarnApplicationState.NEW_SAVING,
      YarnApplicationState.SUBMITTED);
  List<ApplicationReport> applications = clientRMService.getApplications(Sets.newHashSet(StramClient.YARN_APPLICATION_TYPE), activeStates);
  for (ApplicationReport app : applications) {
    boolean excluded = app.getApplicationId().toString().equals(excludeAppId);
    if (!excluded && app.getName().equals(appName) && app.getUser().equals(user)) {
      return app;
    }
  }
  return null;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.management.mbean;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.management.AttributeValueExp;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.Query;
import javax.management.QueryExp;
import javax.management.StringValueExp;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeDataSupport;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
import javax.management.openmbean.TabularDataSupport;
import org.w3c.dom.Document;
import org.apache.camel.CamelContext;
import org.apache.camel.ManagementStatisticsLevel;
import org.apache.camel.Route;
import org.apache.camel.ServiceStatus;
import org.apache.camel.TimerListener;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.api.management.mbean.CamelOpenMBeanTypes;
import org.apache.camel.api.management.mbean.ManagedProcessorMBean;
import org.apache.camel.api.management.mbean.ManagedRouteMBean;
import org.apache.camel.model.ModelCamelContext;
import org.apache.camel.model.ModelHelper;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.spi.InflightRepository;
import org.apache.camel.spi.ManagementStrategy;
import org.apache.camel.spi.RouteError;
import org.apache.camel.spi.RoutePolicy;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.XmlLineNumberParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ManagedResource(description = "Managed Route")
public class ManagedRoute extends ManagedPerformanceCounter implements TimerListener, ManagedRouteMBean {

    public static final String VALUE_UNKNOWN = "Unknown";

    private static final Logger LOG = LoggerFactory.getLogger(ManagedRoute.class);

    protected final Route route;
    protected final String description;
    protected final ModelCamelContext context;
    // rolling 1/5/15-minute inflight load averages, updated from onTimer()
    private final LoadTriplet load = new LoadTriplet();
    // JMX domain of the management agent, used to build processor queries
    private final String jmxDomain;

    public ManagedRoute(ModelCamelContext context, Route route) {
        this.route = route;
        this.context = context;
        this.description = route.getDescription();
        this.jmxDomain = context.getManagementStrategy().getManagementAgent().getMBeanObjectDomainName();
    }

    @Override
    public void init(ManagementStrategy strategy) {
        super.init(strategy);
        // statistics collection follows the agent's configured level
        boolean enabled = context.getManagementStrategy().getManagementAgent().getStatisticsLevel() != ManagementStatisticsLevel.Off;
        setStatisticsEnabled(enabled);
    }

    public Route getRoute() {
        return route;
    }

    public CamelContext getContext() {
        return context;
    }

    public String getRouteId() {
        String id = route.getId();
        if (id == null) {
            id = VALUE_UNKNOWN;
        }
        return id;
    }

    public String getRouteGroup() {
        return route.getGroup();
    }

    @Override
    public TabularData getRouteProperties() {
        try {
            final Map<String, Object> properties = route.getProperties();
            final TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.camelRoutePropertiesTabularType());
            final CompositeType ct = CamelOpenMBeanTypes.camelRoutePropertiesCompositeType();
            // gather route properties
            for (Map.Entry<String, Object> entry : properties.entrySet()) {
                final String key = entry.getKey();
                final String val = context.getTypeConverter().convertTo(String.class, entry.getValue());
                CompositeData data = new CompositeDataSupport(
                        ct,
                        new String[]{"key", "value"},
                        new Object[]{key, val}
                );
                answer.put(data);
            }
            return answer;
        } catch (Exception e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }
    }

    public String getDescription() {
        return description;
    }

    @Override
    public String getEndpointUri() {
        if (route.getEndpoint() != null) {
            return route.getEndpoint().getEndpointUri();
        }
        return VALUE_UNKNOWN;
    }

    public String getState() {
        // must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
        ServiceStatus status = context.getRouteStatus(route.getId());
        // if no status exists then its stopped
        if (status == null) {
            status = ServiceStatus.Stopped;
        }
        return status.name();
    }

    public String getUptime() {
        return route.getUptime();
    }

    public long getUptimeMillis() {
        return route.getUptimeMillis();
    }

    public Integer getInflightExchanges() {
        return (int) super.getExchangesInflight();
    }

    public String getCamelId() {
        return context.getName();
    }

    public String getCamelManagementName() {
        return context.getManagementName();
    }

    public Boolean getTracing() {
        return route.getRouteContext().isTracing();
    }

    public void setTracing(Boolean tracing) {
        route.getRouteContext().setTracing(tracing);
    }

    public Boolean getMessageHistory() {
        return route.getRouteContext().isMessageHistory();
    }

    public Boolean getLogMask() {
        return route.getRouteContext().isLogMask();
    }

    public String getRoutePolicyList() {
        List<RoutePolicy> policyList = route.getRouteContext().getRoutePolicyList();
        if (policyList == null || policyList.isEmpty()) {
            // return an empty string to have it displayed nicely in JMX consoles
            return "";
        }
        // render as "SimpleName(identityHash), SimpleName(identityHash), ..."
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < policyList.size(); i++) {
            RoutePolicy policy = policyList.get(i);
            sb.append(policy.getClass().getSimpleName());
            sb.append("(").append(ObjectHelper.getIdentityHashCode(policy)).append(")");
            if (i < policyList.size() - 1) {
                sb.append(", ");
            }
        }
        return sb.toString();
    }

    public String getLoad01() {
        double load1 = load.getLoad1();
        if (Double.isNaN(load1)) {
            // empty string if load statistics is disabled
            return "";
        } else {
            return String.format("%.2f", load1);
        }
    }

    public String getLoad05() {
        double load5 = load.getLoad5();
        if (Double.isNaN(load5)) {
            // empty string if load statistics is disabled
            return "";
        } else {
            return String.format("%.2f", load5);
        }
    }

    public String getLoad15() {
        double load15 = load.getLoad15();
        if (Double.isNaN(load15)) {
            // empty string if load statistics is disabled
            return "";
        } else {
            return String.format("%.2f", load15);
        }
    }

    @Override
    public void onTimer() {
        // feed the current inflight count into the load averages
        load.update(getInflightExchanges());
    }

    public void start() throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        context.getRouteController().startRoute(getRouteId());
    }

    public void stop() throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        context.getRouteController().stopRoute(getRouteId());
    }

    public void stop(long timeout) throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        context.getRouteController().stopRoute(getRouteId(), timeout, TimeUnit.SECONDS);
    }

    public boolean stop(Long timeout, Boolean abortAfterTimeout) throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        return context.getRouteController().stopRoute(getRouteId(), timeout, TimeUnit.SECONDS, abortAfterTimeout);
    }

    public void shutdown() throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        String routeId = getRouteId();
        context.stopRoute(routeId);
        context.removeRoute(routeId);
    }

    public void shutdown(long timeout) throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        String routeId = getRouteId();
        context.stopRoute(routeId, timeout, TimeUnit.SECONDS);
        context.removeRoute(routeId);
    }

    public boolean remove() throws Exception {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
        return context.removeRoute(getRouteId());
    }

    @Override
    public void restart() throws Exception {
        restart(1);
    }

    @Override
    public void restart(long delay) throws Exception {
        stop();
        if (delay > 0) {
            try {
                LOG.debug("Sleeping {} seconds before starting route: {}", delay, getRouteId());
                Thread.sleep(delay * 1000);
            } catch (InterruptedException e) {
                // BUGFIX: restore the interrupt flag instead of swallowing it,
                // so callers can observe that this thread was interrupted
                Thread.currentThread().interrupt();
            }
        }
        start();
    }

    public String dumpRouteAsXml() throws Exception {
        return dumpRouteAsXml(false);
    }

    @Override
    public String dumpRouteAsXml(boolean resolvePlaceholders) throws Exception {
        String id = route.getId();
        RouteDefinition def = context.getRouteDefinition(id);
        if (def != null) {
            String xml = ModelHelper.dumpModelAsXml(context, def);
            // if resolving placeholders we parse the xml, and resolve the property placeholders during parsing
            if (resolvePlaceholders) {
                final AtomicBoolean changed = new AtomicBoolean();
                InputStream is = new ByteArrayInputStream(xml.getBytes("UTF-8"));
                Document dom = XmlLineNumberParser.parseXml(is, new XmlLineNumberParser.XmlTextTransformer() {
                    @Override
                    public String transform(String text) {
                        try {
                            String after = getContext().resolvePropertyPlaceholders(text);
                            if (!changed.get()) {
                                changed.set(!text.equals(after));
                            }
                            return after;
                        } catch (Exception e) {
                            // ignore
                            return text;
                        }
                    }
                });
                // okay there were some property placeholder replaced so re-create the model
                if (changed.get()) {
                    xml = context.getTypeConverter().mandatoryConvertTo(String.class, dom);
                    RouteDefinition copy = ModelHelper.createModelFromXml(context, xml, RouteDefinition.class);
                    xml = ModelHelper.dumpModelAsXml(context, copy);
                }
            }
            return xml;
        }
        return null;
    }

    public void updateRouteFromXml(String xml) throws Exception {
        // convert to model from xml
        RouteDefinition def = ModelHelper.createModelFromXml(context, xml, RouteDefinition.class);
        if (def == null) {
            return;
        }
        // if the xml does not contain the route-id then we fix this by adding the actual route id
        // this may be needed if the route-id was auto-generated, as the intend is to update this route
        // and not add a new route, adding a new route, use the MBean operation on ManagedCamelContext instead.
        if (ObjectHelper.isEmpty(def.getId())) {
            def.setId(getRouteId());
        } else if (!def.getId().equals(getRouteId())) {
            throw new IllegalArgumentException("Cannot update route from XML as routeIds does not match. routeId: "
                    + getRouteId() + ", routeId from XML: " + def.getId());
        }
        LOG.debug("Updating route: {} from xml: {}", def.getId(), xml);
        try {
            // add will remove existing route first
            context.addRouteDefinition(def);
        } catch (Exception e) {
            // log the error as warn as the management api may be invoked remotely over JMX which does not propagate such exception
            String msg = "Error updating route: " + def.getId() + " from xml: " + xml + " due: " + e.getMessage();
            LOG.warn(msg, e);
            throw e;
        }
    }

    public String dumpRouteStatsAsXml(boolean fullStats, boolean includeProcessors) throws Exception {
        // in this logic we need to calculate the accumulated processing time for the processor in the route
        // and hence why the logic is a bit more complicated to do this, as we need to calculate that from
        // the bottom -> top of the route but this information is valuable for profiling routes
        StringBuilder sb = new StringBuilder();

        // need to calculate this value first, as we need that value for the route stat
        Long processorAccumulatedTime = 0L;

        // gather all the processors for this route, which requires JMX
        if (includeProcessors) {
            sb.append("  <processorStats>\n");
            MBeanServer server = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
            if (server != null) {
                // get all the processor mbeans and sort them accordingly to their index
                String prefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
                ObjectName query = ObjectName.getInstance(jmxDomain + ":context=" + prefix + getContext().getManagementName() + ",type=processors,*");
                Set<ObjectName> names = server.queryNames(query, null);
                List<ManagedProcessorMBean> mps = new ArrayList<>();
                for (ObjectName on : names) {
                    ManagedProcessorMBean processor = context.getManagementStrategy().getManagementAgent().newProxyClient(on, ManagedProcessorMBean.class);
                    // the processor must belong to this route
                    if (getRouteId().equals(processor.getRouteId())) {
                        mps.add(processor);
                    }
                }
                mps.sort(new OrderProcessorMBeans());

                // walk the processors in reverse order, and calculate the accumulated total time
                Map<String, Long> accumulatedTimes = new HashMap<>();
                Collections.reverse(mps);
                for (ManagedProcessorMBean processor : mps) {
                    processorAccumulatedTime += processor.getTotalProcessingTime();
                    accumulatedTimes.put(processor.getProcessorId(), processorAccumulatedTime);
                }
                // and reverse back again
                Collections.reverse(mps);

                // and now add the sorted list of processors to the xml output
                for (ManagedProcessorMBean processor : mps) {
                    sb.append("    <processorStat").append(String.format(" id=\"%s\" index=\"%s\" state=\"%s\"", processor.getProcessorId(), processor.getIndex(), processor.getState()));
                    // do we have an accumulated time then append that
                    Long accTime = accumulatedTimes.get(processor.getProcessorId());
                    if (accTime != null) {
                        sb.append(" accumulatedProcessingTime=\"").append(accTime).append("\"");
                    }
                    // use substring as we only want the attributes
                    sb.append(" ").append(processor.dumpStatsAsXml(fullStats).substring(7)).append("\n");
                }
            }
            sb.append("  </processorStats>\n");
        }

        // route self time is route total - processor accumulated total)
        long routeSelfTime = getTotalProcessingTime() - processorAccumulatedTime;
        if (routeSelfTime < 0) {
            // ensure we don't calculate that as negative
            routeSelfTime = 0;
        }

        StringBuilder answer = new StringBuilder();
        answer.append("<routeStat").append(String.format(" id=\"%s\"", route.getId())).append(String.format(" state=\"%s\"", getState()));
        // use substring as we only want the attributes
        String stat = dumpStatsAsXml(fullStats);
        answer.append(" exchangesInflight=\"").append(getInflightExchanges()).append("\"");
        answer.append(" selfProcessingTime=\"").append(routeSelfTime).append("\"");
        InflightRepository.InflightExchange oldest = getOldestInflightEntry();
        if (oldest == null) {
            answer.append(" oldestInflightExchangeId=\"\"");
            answer.append(" oldestInflightDuration=\"\"");
        } else {
            answer.append(" oldestInflightExchangeId=\"").append(oldest.getExchange().getExchangeId()).append("\"");
            answer.append(" oldestInflightDuration=\"").append(oldest.getDuration()).append("\"");
        }
        answer.append(" ").append(stat.substring(7, stat.length() - 2)).append(">\n");

        if (includeProcessors) {
            answer.append(sb);
        }

        answer.append("</routeStat>");
        return answer.toString();
    }

    public void reset(boolean includeProcessors) throws Exception {
        reset();
        // and now reset all processors for this route
        if (includeProcessors) {
            MBeanServer server = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
            if (server != null) {
                // get all the processor mbeans and sort them accordingly to their index
                String prefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
                ObjectName query = ObjectName.getInstance(jmxDomain + ":context=" + prefix + getContext().getManagementName() + ",type=processors,*");
                QueryExp queryExp = Query.match(new AttributeValueExp("RouteId"), new StringValueExp(getRouteId()));
                Set<ObjectName> names = server.queryNames(query, queryExp);
                for (ObjectName name : names) {
                    server.invoke(name, "reset", null, null);
                }
            }
        }
    }

    public String createRouteStaticEndpointJson() {
        return getContext().createRouteStaticEndpointJson(getRouteId());
    }

    @Override
    public String createRouteStaticEndpointJson(boolean includeDynamic) {
        return getContext().createRouteStaticEndpointJson(getRouteId(), includeDynamic);
    }

    @Override
    public boolean equals(Object o) {
        return this == o || (o != null && getClass() == o.getClass() && route.equals(((ManagedRoute) o).route));
    }

    @Override
    public int hashCode() {
        return route.hashCode();
    }

    private InflightRepository.InflightExchange getOldestInflightEntry() {
        return getContext().getInflightRepository().oldest(getRouteId());
    }

    public Long getOldestInflightDuration() {
        InflightRepository.InflightExchange oldest = getOldestInflightEntry();
        if (oldest == null) {
            return null;
        } else {
            return oldest.getDuration();
        }
    }

    public String getOldestInflightExchangeId() {
        InflightRepository.InflightExchange oldest = getOldestInflightEntry();
        if (oldest == null) {
            return null;
        } else {
            return oldest.getExchange().getExchangeId();
        }
    }

    @Override
    public Boolean getHasRouteController() {
        return route.getRouteContext().getRouteController() != null;
    }

    @Override
    public RouteError getLastError() {
        return route.getRouteContext().getLastError();
    }

    /**
     * Used for sorting the processor mbeans accordingly to their index.
     */
    private static final class OrderProcessorMBeans implements Comparator<ManagedProcessorMBean> {

        @Override
        public int compare(ManagedProcessorMBean o1, ManagedProcessorMBean o2) {
            return o1.getIndex().compareTo(o2.getIndex());
        }
    }
}
| |
/*
* CombineGenotypeTable
*/
package net.maizegenetics.dna.snp;
import net.maizegenetics.dna.snp.bit.BitStorage;
import net.maizegenetics.dna.snp.depth.AlleleDepth;
import net.maizegenetics.dna.snp.genotypecall.GenotypeCallTable;
import net.maizegenetics.dna.map.Chromosome;
import net.maizegenetics.dna.map.PositionList;
import net.maizegenetics.taxa.TaxaList;
import net.maizegenetics.taxa.TaxaListUtils;
import net.maizegenetics.util.BitSet;
import java.util.*;
/**
* Combines multiple GenotypeTables together.
*
* @author Terry Casstevens
*/
public class CombineGenotypeTable implements GenotypeTable {
private static final long serialVersionUID = -5197800047652332969L;
// Underlying tables, in concatenation order.
private final GenotypeTable[] myAlignments;
// mySiteOffsets[i] is the first global site index of table i;
// the last element is the total number of sites.
private final int[] mySiteOffsets;
// Maps each chromosome to the single underlying table that owns it.
private final Map<Chromosome, GenotypeTable> myChromosomes = new HashMap<>();
// All chromosomes across the combined tables, populated by initChromosomes().
private Chromosome[] myChromosomesList;
// Global start-site offset of each chromosome, parallel to myChromosomesList.
private int[] myChromosomesOffsets;
// Shared taxa list (identical across all combined tables).
private final TaxaList myTaxaList;
// Lazily-built allele state strings (see alleleDefinitions, outside this view).
private String[][] myAlleleStates;
// Builds the combined view: records cumulative site offsets per table and
// registers each chromosome against the table that owns it. Callers must
// have verified that all tables share the given taxa list.
private CombineGenotypeTable(TaxaList taxaList, GenotypeTable[] genoTables) {
    myTaxaList = taxaList;
    myAlignments = genoTables;
    mySiteOffsets = new int[genoTables.length + 1];
    mySiteOffsets[0] = 0;
    int count = 0;
    for (int i = 0; i < genoTables.length; i++) {
        // cumulative sum of site counts; mySiteOffsets[i + 1] is one past
        // the last global site of table i
        count = genoTables[i].numberOfSites() + count;
        mySiteOffsets[i + 1] = count;
        Chromosome[] chromosomes = genoTables[i].chromosomes();
        for (int j = 0; j < chromosomes.length; j++) {
            // NOTE(review): a chromosome appearing in more than one table
            // would silently be mapped to the last table only — confirm
            // inputs never share chromosomes.
            myChromosomes.put(chromosomes[j], genoTables[i]);
        }
    }
    initChromosomes();
}
/**
 * Combines the given genotype tables into a single table. A single-element
 * array is returned unchanged. All tables must have identical taxa lists
 * (same taxa, same order).
 *
 * @param genoTables tables to combine
 * @return combined genotype table
 * @throws IllegalArgumentException when no tables are given or taxa differ
 */
public static GenotypeTable getInstance(GenotypeTable[] genoTables) {
    if ((genoTables == null) || (genoTables.length == 0)) {
        throw new IllegalArgumentException("CombineAlignment: getInstance: must provide genoTables.");
    }
    if (genoTables.length == 1) {
        return genoTables[0];
    }
    TaxaList reference = genoTables[0].taxa();
    for (int i = 1; i < genoTables.length; i++) {
        if (!areTaxaListsEqual(reference, genoTables[i].taxa())) {
            throw new IllegalArgumentException("CombineAlignment: getInstance: TaxaLists do not match.");
        }
    }
    return new CombineGenotypeTable(reference, genoTables);
}
/**
 * Combines the given genotype tables into a single table. A single-element
 * array is returned unchanged. With {@code isUnion} true, the union of all
 * taxa is used and tables lacking a taxon report unknown values for it;
 * otherwise only taxa common to every table are kept (intersect join).
 *
 * @param genoTables genoTables to combine
 * @param isUnion whether to union or intersect join
 * @return combined genotype table
 * @throws IllegalArgumentException when no tables are given
 */
public static GenotypeTable getInstance(GenotypeTable[] genoTables, boolean isUnion) {
    if ((genoTables == null) || (genoTables.length == 0)) {
        throw new IllegalArgumentException("CombineAlignment: getInstance: must provide genoTables.");
    }
    if (genoTables.length == 1) {
        return genoTables[0];
    }
    // Collect each table's taxa, then build the joined taxa list.
    TaxaList[] groups = new TaxaList[genoTables.length];
    for (int i = 0; i < genoTables.length; i++) {
        groups[i] = genoTables[i].taxa();
    }
    TaxaList newTaxa = isUnion ? TaxaListUtils.getAllTaxa(groups) : TaxaListUtils.getCommonTaxa(groups);
    // Filter every table down to the joined taxa so they align row-for-row.
    GenotypeTable[] filtered = new GenotypeTable[genoTables.length];
    for (int i = 0; i < genoTables.length; i++) {
        filtered[i] = FilterGenotypeTable.getInstance(genoTables[i], newTaxa);
    }
    return new CombineGenotypeTable(newTaxa, filtered);
}
/**
 * Returns true when both taxa lists contain equal taxa in the same order.
 */
private static boolean areTaxaListsEqual(TaxaList first, TaxaList second) {
    int n = first.numberOfTaxa();
    if (n != second.numberOfTaxa()) {
        return false;
    }
    for (int i = 0; i < n; i++) {
        if (!first.get(i).equals(second.get(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Builds the combined chromosome list and the parallel array of global
 * chromosome start offsets, by shifting each table's local chromosome
 * offsets by that table's global site offset.
 */
private void initChromosomes() {
    List<Integer> offsets = new ArrayList<>();
    List<Chromosome> chromosomes = new ArrayList<>();
    for (int i = 0; i < myAlignments.length; i++) {
        chromosomes.addAll(Arrays.asList(myAlignments[i].chromosomes()));
        for (int localOffset : myAlignments[i].chromosomesOffsets()) {
            offsets.add(localOffset + mySiteOffsets[i]);
        }
    }
    myChromosomesList = chromosomes.toArray(new Chromosome[chromosomes.size()]);
    myChromosomesOffsets = new int[offsets.size()];
    for (int i = 0; i < offsets.size(); i++) {
        myChromosomesOffsets[i] = offsets.get(i);
    }
    // Sanity check: the two parallel arrays must line up.
    if (myChromosomesOffsets.length != myChromosomesList.length) {
        throw new IllegalStateException("CombineAlignment: initChromosomes: number chromosomes offsets should equal number of chromosomes.");
    }
}
/**
 * Returns the genotype for the given taxon at the given global site, by
 * delegating to the underlying table that owns the site.
 */
public byte genotype(int taxon, int site) {
    int table = translateSite(site);
    return myAlignments[table].genotype(taxon, site - mySiteOffsets[table]);
}
@Override
// Copies genotypes for the half-open global range [startSite, endSite),
// stitching together spans from each underlying table the range crosses.
// NOTE(review): endSite is used exclusively for sizing/copying but is passed
// to translateSite(), which throws for endSite == numberOfSites() — confirm
// callers never request a range ending at the last site + 1.
public byte[] genotypeRange(int taxon, int startSite, int endSite) {
    byte[] result = new byte[endSite - startSite];
    int count = 0;
    int firstAlign = translateSite(startSite);
    int secondAlign = translateSite(endSite);
    for (int i = firstAlign; i <= secondAlign; i++) {
        // local start within table i: offset only for the first table,
        // otherwise start at its first site
        int firstSite = 0;
        if (i == firstAlign) {
            firstSite = startSite - mySiteOffsets[firstAlign];
        }
        // local end within table i: whole table for middle tables, the
        // translated endSite for the last (or only) table
        int secondSite = 0;
        if (firstAlign == secondAlign) {
            secondSite = endSite - mySiteOffsets[firstAlign];
        } else if (i != secondAlign) {
            secondSite = myAlignments[i].numberOfSites();
        } else {
            secondSite = endSite - mySiteOffsets[secondAlign];
        }
        for (int s = firstSite; s < secondSite; s++) {
            result[count++] = myAlignments[i].genotype(taxon, s);
        }
    }
    return result;
}
@Override
// Looks up the genotype by chromosome and physical position: first resolves
// the global site index, then delegates to the owning table.
// NOTE(review): siteOfPhysicalPosition can return -1 (not found), which
// translateSite maps to table 0 with a negative local site — likely an
// out-of-bounds downstream; confirm callers guarantee the position exists.
public byte genotype(int taxon, Chromosome locus, int physicalPosition) {
    int site = siteOfPhysicalPosition(physicalPosition, locus);
    int translate = translateSite(site);
    return myAlignments[translate].genotype(taxon, site - mySiteOffsets[translate]);
}
/**
 * Maps a global site index to the index of the underlying genotype table
 * that owns it.
 *
 * @param site global site index
 * @return index of the owning genotype table
 * @throws IndexOutOfBoundsException when the site is past the last table
 */
public int translateSite(int site) {
    // mySiteOffsets[i] is the first global site of table i; the first offset
    // strictly greater than the requested site closes the owning range.
    for (int i = 1; i < mySiteOffsets.length; i++) {
        if (site < mySiteOffsets[i]) {
            return i - 1;
        }
    }
    throw new IndexOutOfBoundsException("CombineAlignment: translateSite: index out of range: " + site);
}
@Override
// A reference sequence is available only when every underlying table has one.
public boolean hasReference() {
    for (GenotypeTable table : myAlignments) {
        if (!table.hasReference()) {
            return false;
        }
    }
    return true;
}
/** Name of the combined site, delegated to the alignment that owns it. */
@Override
public String siteName(int site) {
    final int owner = translateSite(site);
    final GenotypeTable table = myAlignments[owner];
    return table.siteName(site - mySiteOffsets[owner]);
}
/**
 * Total number of sites across all combined alignments; this is the final
 * cumulative offset in mySiteOffsets.
 */
@Override
public int numberOfSites() {
    final int[] offsets = mySiteOffsets;
    return offsets[offsets.length - 1];
}
/**
 * Number of sites on the given chromosome, delegated to the underlying
 * alignment that owns that chromosome.
 *
 * NOTE(review): myChromosomes.get(locus) is cast without a null check, so an
 * unknown locus fails with a NullPointerException — confirm callers only
 * pass chromosomes obtained from this table.
 */
@Override
public int chromosomeSiteCount(Chromosome locus) {
return ((GenotypeTable) myChromosomes.get(locus)).chromosomeSiteCount(locus);
}
/** Chromosomal position of the combined site, delegated to the owning alignment. */
@Override
public int chromosomalPosition(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].chromosomalPosition(site - mySiteOffsets[owner]);
}
/**
 * Combined site index of the given physical position on {@code locus}, or
 * -1 when the locus is not part of this combined table.
 */
@Override
public int siteOfPhysicalPosition(int physicalPosition, Chromosome locus) {
    final GenotypeTable owner = (GenotypeTable) myChromosomes.get(locus);
    int ownerIndex = -1;
    for (int idx = 0; idx < myAlignments.length; idx++) {
        if (myAlignments[idx] == owner) {
            ownerIndex = idx;
            break;
        }
    }
    if (ownerIndex == -1) {
        return -1;
    }
    // Shift the alignment-local site index into combined coordinates.
    return mySiteOffsets[ownerIndex] + owner.siteOfPhysicalPosition(physicalPosition, locus);
}
/**
 * Combined site index of the physical position on {@code locus} matching the
 * given SNP name, or -1 when the locus is not part of this combined table.
 */
@Override
public int siteOfPhysicalPosition(int physicalPosition, Chromosome locus, String snpName) {
    final GenotypeTable owner = (GenotypeTable) myChromosomes.get(locus);
    int ownerIndex = -1;
    int idx = 0;
    while (idx < myAlignments.length) {
        if (myAlignments[idx] == owner) {
            ownerIndex = idx;
            break;
        }
        idx++;
    }
    if (ownerIndex < 0) {
        return -1;
    }
    // Shift the alignment-local site index into combined coordinates.
    return mySiteOffsets[ownerIndex] + owner.siteOfPhysicalPosition(physicalPosition, locus, snpName);
}
/** Chromosome containing the combined site, delegated to the owning alignment. */
@Override
public Chromosome chromosome(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].chromosome(site - mySiteOffsets[owner]);
}
/**
 * All chromosomes represented by this combined table.
 *
 * NOTE(review): returns the internal array directly, so callers can mutate
 * this table's state — consider a defensive copy if the API allows it.
 */
@Override
public Chromosome[] chromosomes() {
return myChromosomesList;
}
/** Number of distinct chromosomes, or 0 when the list was never initialized. */
@Override
public int numChromosomes() {
    return (myChromosomesList == null) ? 0 : myChromosomesList.length;
}
/**
 * Site scores for every taxon across all combined sites, or null when no
 * underlying alignment has scores. Sites whose alignment lacks scores keep
 * the default 0.0f.
 */
@Override
public float[][] siteScores() {
    if (!hasSiteScores()) {
        return null;
    }
    final int taxaCount = numberOfTaxa();
    final float[][] scores = new float[taxaCount][numberOfSites()];
    for (int alignIdx = 0; alignIdx < myAlignments.length; alignIdx++) {
        final GenotypeTable table = myAlignments[alignIdx];
        if (!table.hasSiteScores()) {
            continue;
        }
        final int base = mySiteOffsets[alignIdx];
        for (int localSite = 0, siteCount = table.numberOfSites(); localSite < siteCount; localSite++) {
            for (int taxon = 0; taxon < taxaCount; taxon++) {
                scores[taxon][base + localSite] = table.siteScore(taxon, localSite);
            }
        }
    }
    return scores;
}
/** Site score for the taxon at the combined site, delegated to the owning alignment. */
@Override
public float siteScore(int taxon, int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].siteScore(taxon, site - mySiteOffsets[owner]);
}
/** True when at least one underlying alignment provides site scores. */
@Override
public boolean hasSiteScores() {
    for (int idx = 0; idx < myAlignments.length; idx++) {
        if (myAlignments[idx].hasSiteScores()) {
            return true;
        }
    }
    return false;
}
/** Indel size at the combined site, delegated to the owning alignment. */
@Override
public int indelSize(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].indelSize(site - mySiteOffsets[owner]);
}

/** Whether the combined site is an indel, delegated to the owning alignment. */
@Override
public boolean isIndel(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].isIndel(site - mySiteOffsets[owner]);
}

/** Reference allele at the combined site, delegated to the owning alignment. */
@Override
public byte referenceAllele(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].referenceAllele(site - mySiteOffsets[owner]);
}
/**
 * The underlying GenotypeTables that make up this combined table, in site
 * order.
 *
 * NOTE(review): returns the internal array directly — mutation by callers
 * would corrupt this table; consider a defensive copy.
 */
@Override
public GenotypeTable[] compositeAlignments() {
return myAlignments;
}
/** Major allele at the combined site, delegated to the owning alignment. */
@Override
public byte majorAllele(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].majorAllele(site - mySiteOffsets[owner]);
}

/** Minor allele at the combined site, delegated to the owning alignment. */
@Override
public byte minorAllele(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].minorAllele(site - mySiteOffsets[owner]);
}

/** All minor alleles at the combined site, delegated to the owning alignment. */
@Override
public byte[] minorAlleles(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].minorAlleles(site - mySiteOffsets[owner]);
}

/** All alleles at the combined site, delegated to the owning alignment. */
@Override
public byte[] alleles(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].alleles(site - mySiteOffsets[owner]);
}

/** Minor allele frequency at the combined site, delegated to the owning alignment. */
@Override
public double minorAlleleFrequency(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].minorAlleleFrequency(site - mySiteOffsets[owner]);
}

/** Alleles sorted by frequency at the combined site, delegated to the owning alignment. */
@Override
public int[][] allelesSortedByFrequency(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].allelesSortedByFrequency(site - mySiteOffsets[owner]);
}

/** Genotype byte pair for the taxon at the combined site, delegated to the owning alignment. */
@Override
public byte[] genotypeArray(int taxon, int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].genotypeArray(taxon, site - mySiteOffsets[owner]);
}
/**
 * Genotype bytes of every taxon at the given combined site.
 *
 * BUG FIX: the previous implementation concatenated each underlying
 * alignment's genotypeAllTaxa(site) result as if taxa were stacked across
 * alignments, which contradicts every other accessor in this class (sites
 * are concatenated, taxa are shared) and passed the untranslated combined
 * site index to each alignment. It also sized the result with
 * numberOfTaxa(), which this class declares as unsupported, so the method
 * always threw. Translate the site to its owning alignment and delegate,
 * matching genotype(), genotypeArray(), and the other site accessors.
 */
@Override
public byte[] genotypeAllTaxa(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].genotypeAllTaxa(site - mySiteOffsets[owner]);
}
/**
 * All genotype bytes of one taxon across every combined site, assembled by
 * copying each underlying alignment's row at its site offset.
 */
@Override
public byte[] genotypeAllSites(int taxon) {
    final byte[] combined = new byte[numberOfSites()];
    for (int idx = 0; idx < myAlignments.length; idx++) {
        final byte[] row = myAlignments[idx].genotypeAllSites(taxon);
        System.arraycopy(row, 0, combined, myChromosomesOffsets[idx], row.length);
    }
    return combined;
}
/**
 * Not supported: per-taxon allele presence would have to be stitched across
 * several underlying GenotypeTables, which this combined view cannot do.
 */
@Override
public BitSet allelePresenceForAllSites(int taxon, WHICH_ALLELE allele) {
throw new UnsupportedOperationException("CombineAlignment: getAllelePresenceForAllSites: This operation isn't possible as it spans multiple GenotypeTables.");
}
/**
 * Not supported for the same reason as allelePresenceForAllSites: a site
 * block may span multiple underlying GenotypeTables.
 */
@Override
public long[] allelePresenceForSitesBlock(int taxon, WHICH_ALLELE allele, int startBlock, int endBlock) {
throw new UnsupportedOperationException("CombineAlignment: getAllelePresenceForSitesBlock: This operation isn't possible as it spans multiple GenotypeTables.");
}
/** String form of the taxon's genotype at the combined site, delegated to the owning alignment. */
@Override
public String genotypeAsString(int taxon, int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].genotypeAsString(taxon, site - mySiteOffsets[owner]);
}

/** Per-allele string forms of the taxon's genotype at the combined site. */
@Override
public String[] genotypeAsStringArray(int taxon, int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].genotypeAsStringArray(taxon, site - mySiteOffsets[owner]);
}
/**
 * Reference alleles for the half-open range [startSite, endSite), resolved
 * one combined site at a time.
 */
@Override
public byte[] referenceAlleles(int startSite, int endSite) {
    final byte[] refs = new byte[endSite - startSite];
    for (int offset = 0; offset < refs.length; offset++) {
        refs[offset] = referenceAllele(startSite + offset);
    }
    return refs;
}
/**
 * Reference alleles across every combined site, or null when any underlying
 * alignment lacks a reference (so no complete combined reference exists).
 */
@Override
public byte[] referenceAlleleForAllSites() {
    // Every part must carry a reference for the combined sequence to exist.
    for (GenotypeTable table : myAlignments) {
        if (!table.hasReference()) {
            return null;
        }
    }
    final byte[] combined = new byte[numberOfSites()];
    int filled = 0;
    for (GenotypeTable table : myAlignments) {
        final byte[] part = table.referenceAlleleForAllSites();
        System.arraycopy(part, 0, combined, filled, part.length);
        filled += part.length;
    }
    return combined;
}
/** Whether the taxon is heterozygous at the combined site, delegated to the owning alignment. */
@Override
public boolean isHeterozygous(int taxon, int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].isHeterozygous(taxon, site - mySiteOffsets[owner]);
}
/**
 * Physical positions for all combined sites, or null when no underlying
 * alignment provides any.
 *
 * BUG FIX: the all-null probe loop previously inspected myAlignments[0] on
 * every iteration instead of myAlignments[i], so whenever the FIRST
 * alignment had no positions this method returned null even if a later
 * alignment had them.
 *
 * NOTE(review): the copy phase assumes every alignment returns a non-null
 * array once any alignment has positions — confirm that invariant holds for
 * all GenotypeTable implementations (present in the original code too).
 */
@Override
public int[] physicalPositions() {
    boolean allNull = true;
    for (int i = 0; i < myAlignments.length; i++) {
        int[] current = myAlignments[i].physicalPositions();
        if ((current != null) && (current.length != 0)) {
            allNull = false;
            break;
        }
    }
    if (allNull) {
        return null;
    }
    int[] result = new int[numberOfSites()];
    int count = 0;
    for (int i = 0; i < myAlignments.length; i++) {
        int[] current = myAlignments[i].physicalPositions();
        for (int j = 0; j < current.length; j++) {
            result[count++] = current[j];
        }
    }
    return result;
}
/** Name of the chromosome containing the combined site, delegated to the owning alignment. */
@Override
public String chromosomeName(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].chromosomeName(site - mySiteOffsets[owner]);
}
/**
 * Starting combined-site index of each chromosome, parallel to
 * chromosomes().
 *
 * NOTE(review): returns the internal array directly — mutation by callers
 * would corrupt this table; consider a defensive copy.
 */
@Override
public int[] chromosomesOffsets() {
return myChromosomesOffsets;
}
/**
 * The shared score type of all underlying alignments, or MixedScoreTypes
 * when they disagree.
 */
@Override
public SITE_SCORE_TYPE siteScoreType() {
    final SITE_SCORE_TYPE reference = myAlignments[0].siteScoreType();
    for (int idx = 1; idx < myAlignments.length; idx++) {
        if (myAlignments[idx].siteScoreType() != reference) {
            return SITE_SCORE_TYPE.MixedScoreTypes;
        }
    }
    return reference;
}
/** True only when every underlying alignment is entirely polymorphic. */
@Override
public boolean isAllPolymorphic() {
    for (GenotypeTable table : myAlignments) {
        if (!table.isAllPolymorphic()) {
            return false;
        }
    }
    return true;
}
/** Whether the combined site is polymorphic, delegated to the owning alignment. */
@Override
public boolean isPolymorphic(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].isPolymorphic(site - mySiteOffsets[owner]);
}

/** Major allele frequency at the combined site, delegated to the owning alignment. */
@Override
public double majorAlleleFrequency(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].majorAlleleFrequency(site - mySiteOffsets[owner]);
}
/**
 * The common genome version of all underlying alignments, or null when the
 * first alignment reports none or any alignment disagrees with it.
 */
@Override
public String genomeVersion() {
    final String candidate = myAlignments[0].genomeVersion();
    if (candidate == null) {
        return null;
    }
    for (int idx = 1; idx < myAlignments.length; idx++) {
        final String other = myAlignments[idx].genomeVersion();
        if ((other != null) && (!candidate.equals(other))) {
            return null;
        }
    }
    return candidate;
}
/** Strand orientation of the combined site, delegated to the owning alignment. */
@Override
public boolean isPositiveStrand(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].isPositiveStrand(site - mySiteOffsets[owner]);
}
/** True only when every underlying alignment is phased. */
@Override
public boolean isPhased() {
    for (GenotypeTable table : myAlignments) {
        if (!table.isPhased()) {
            return false;
        }
    }
    return true;
}
/** True only when every underlying alignment retains rare alleles. */
@Override
public boolean retainsRareAlleles() {
    for (GenotypeTable table : myAlignments) {
        if (!table.retainsRareAlleles()) {
            return false;
        }
    }
    return true;
}
/**
 * Allele state encodings for this combined table, computed lazily and
 * cached in myAlleleStates. If every underlying alignment shares one
 * identical global encoding, that single encoding is cached; otherwise a
 * per-site table is built by concatenating each alignment's per-site
 * definitions in combined-site order.
 *
 * NOTE(review): the lazy check-then-assign on myAlleleStates is not
 * synchronized — confirm this table is confined to one thread or that a
 * redundant recompute is acceptable.
 */
@Override
public String[][] alleleDefinitions() {
// Return the cached value when a previous call already resolved it.
if (myAlleleStates != null) {
return myAlleleStates;
}
boolean allTheSame = true;
String[][] encodings = myAlignments[0].alleleDefinitions();
// A single global encoding (length 1) can potentially be shared; compare
// it element-by-element against every other alignment's encoding.
if (encodings.length == 1) {
for (int i = 1; i < myAlignments.length; i++) {
String[][] current = myAlignments[i].alleleDefinitions();
if ((current.length == 1) && (encodings[0].length == current[0].length)) {
for (int j = 0; j < encodings[0].length; j++) {
if (!current[0][j].equals(encodings[0][j])) {
allTheSame = false;
break;
}
}
} else {
allTheSame = false;
break;
}
if (!allTheSame) {
break;
}
}
} else {
// Per-site encodings in the first alignment: cannot share a global one.
allTheSame = false;
}
if (allTheSame) {
myAlleleStates = encodings;
} else {
// Fall back to one definition row per combined site.
String[][] result = new String[numberOfSites()][];
int count = 0;
for (int i = 0; i < myAlignments.length; i++) {
for (int j = 0, n = myAlignments[i].numberOfSites(); j < n; j++) {
result[count++] = myAlignments[i].alleleDefinitions(j);
}
}
myAlleleStates = result;
}
return myAlleleStates;
}
/** Allele state encoding for one combined site, delegated to the owning alignment. */
@Override
public String[] alleleDefinitions(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].alleleDefinitions(site - mySiteOffsets[owner]);
}

/** String form of a raw genotype value at the combined site, delegated to the owning alignment. */
@Override
public String genotypeAsString(int site, byte value) {
    final int owner = translateSite(site);
    return myAlignments[owner].genotypeAsString(site - mySiteOffsets[owner], value);
}
/**
 * Smallest maxNumAlleles over all underlying alignments — the combined
 * table can only guarantee what every part supports.
 */
@Override
public int maxNumAlleles() {
    int smallest = 999999; // sentinel preserved from the original implementation
    for (GenotypeTable table : myAlignments) {
        smallest = Math.min(smallest, table.maxNumAlleles());
    }
    return smallest;
}
/** Non-missing gamete count at the combined site, delegated to the owning alignment. */
@Override
public int totalGametesNonMissingForSite(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].totalGametesNonMissingForSite(site - mySiteOffsets[owner]);
}

/** Heterozygote count at the combined site, delegated to the owning alignment. */
@Override
public int heterozygousCount(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].heterozygousCount(site - mySiteOffsets[owner]);
}

/** Minor allele count at the combined site, delegated to the owning alignment. */
@Override
public int minorAlleleCount(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].minorAlleleCount(site - mySiteOffsets[owner]);
}

/** Major allele count at the combined site, delegated to the owning alignment. */
@Override
public int majorAlleleCount(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].majorAlleleCount(site - mySiteOffsets[owner]);
}

/** Genotypes sorted by frequency at the combined site, delegated to the owning alignment. */
@Override
public Object[][] genosSortedByFrequency(int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].genosSortedByFrequency(site - mySiteOffsets[owner]);
}
/** Alleles under the given sort type at the combined site, delegated to the owning alignment. */
@Override
public byte[] allelesBySortType(ALLELE_SORT_TYPE scope, int site) {
    final int owner = translateSite(site);
    return myAlignments[owner].allelesBySortType(scope, site - mySiteOffsets[owner]);
}

/** Allele presence across all taxa at the combined site, delegated to the owning alignment. */
@Override
public BitSet allelePresenceForAllTaxa(int site, WHICH_ALLELE allele) {
    final int owner = translateSite(site);
    return myAlignments[owner].allelePresenceForAllTaxa(site - mySiteOffsets[owner], allele);
}
// The remaining GenotypeTable operations are either not meaningful for a
// view that spans multiple underlying GenotypeTables or are simply not yet
// implemented; each one fails fast with UnsupportedOperationException.
@Override
public BitSet haplotypeAllelePresenceForAllSites(int taxon, boolean firstParent, WHICH_ALLELE allele) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public BitSet haplotypeAllelePresenceForAllTaxa(int site, boolean firstParent, WHICH_ALLELE allele) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public long[] haplotypeAllelePresenceForSitesBlock(int taxon, boolean firstParent, WHICH_ALLELE allele, int startBlock, int endBlock) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String genotypeAsStringRange(int taxon, int startSite, int endSite) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String genotypeAsStringRow(int taxon) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int[] firstLastSiteOfChromosome(Chromosome chromosome) {
throw new UnsupportedOperationException("Not supported yet.");
}
// NOTE(review): numberOfTaxa() is unsupported here, which makes any sibling
// method that calls it (e.g. siteScores()) throw as well — confirm intended.
@Override
public int numberOfTaxa() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Chromosome chromosome(String name) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String majorAlleleAsString(int site) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String minorAlleleAsString(int site) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public TaxaList taxa() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String taxaName(int index) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String diploidAsString(int site, byte value) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int totalNonMissingForSite(int site) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Object[][] genoCounts() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Object[][] majorMinorCounts() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int totalGametesNonMissingForTaxon(int taxon) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int heterozygousCountForTaxon(int taxon) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int totalNonMissingForTaxon(int taxon) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public boolean hasDepth() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public AlleleDepth depth() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int[] depthForAlleles(int taxon, int site) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public BitStorage bitStorage(WHICH_ALLELE allele) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public PositionList positions() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public GenotypeCallTable genotypeMatrix() {
throw new UnsupportedOperationException("Not supported yet.");
}
}
| |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.uikit;
import apple.NSObject;
import apple.coregraphics.struct.CGRect;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSDate;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.uikit.protocol.UIAppearanceContainer;
import apple.uikit.protocol.UIContentConfiguration;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.c.ann.Variadic;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NFloat;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class UICollectionViewCell extends UICollectionReusableView {
static {
NatJ.register();
}
@Generated
protected UICollectionViewCell(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
@Generated
@Selector("addKeyframeWithRelativeStartTime:relativeDuration:animations:")
public static native void addKeyframeWithRelativeStartTimeRelativeDurationAnimations(double frameStartTime,
double frameDuration,
@ObjCBlock(name = "call_addKeyframeWithRelativeStartTimeRelativeDurationAnimations") UIView.Block_addKeyframeWithRelativeStartTimeRelativeDurationAnimations animations);
@Generated
@Owned
@Selector("alloc")
public static native UICollectionViewCell alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native UICollectionViewCell allocWithZone(VoidPtr zone);
@Generated
@Selector("animateKeyframesWithDuration:delay:options:animations:completion:")
public static native void animateKeyframesWithDurationDelayOptionsAnimationsCompletion(double duration,
double delay, @NUInt long options,
@ObjCBlock(name = "call_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_3") UIView.Block_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_3 animations,
@ObjCBlock(name = "call_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_4") UIView.Block_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_4 completion);
@Generated
@Selector("animateWithDuration:animations:")
public static native void animateWithDurationAnimations(double duration,
@ObjCBlock(name = "call_animateWithDurationAnimations") UIView.Block_animateWithDurationAnimations animations);
@Generated
@Selector("animateWithDuration:animations:completion:")
public static native void animateWithDurationAnimationsCompletion(double duration,
@ObjCBlock(name = "call_animateWithDurationAnimationsCompletion_1") UIView.Block_animateWithDurationAnimationsCompletion_1 animations,
@ObjCBlock(name = "call_animateWithDurationAnimationsCompletion_2") UIView.Block_animateWithDurationAnimationsCompletion_2 completion);
@Generated
@Selector("animateWithDuration:delay:options:animations:completion:")
public static native void animateWithDurationDelayOptionsAnimationsCompletion(double duration, double delay,
@NUInt long options,
@ObjCBlock(name = "call_animateWithDurationDelayOptionsAnimationsCompletion_3") UIView.Block_animateWithDurationDelayOptionsAnimationsCompletion_3 animations,
@ObjCBlock(name = "call_animateWithDurationDelayOptionsAnimationsCompletion_4") UIView.Block_animateWithDurationDelayOptionsAnimationsCompletion_4 completion);
@Generated
@Selector("animateWithDuration:delay:usingSpringWithDamping:initialSpringVelocity:options:animations:completion:")
public static native void animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion(
double duration, double delay, @NFloat double dampingRatio, @NFloat double velocity, @NUInt long options,
@ObjCBlock(name = "call_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_5") UIView.Block_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_5 animations,
@ObjCBlock(name = "call_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_6") UIView.Block_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_6 completion);
@Generated
@Selector("appearance")
public static native UICollectionViewCell appearance();
@Generated
@Selector("appearanceForTraitCollection:")
public static native UICollectionViewCell appearanceForTraitCollection(UITraitCollection trait);
@Generated
@Variadic()
@Deprecated
@Selector("appearanceForTraitCollection:whenContainedIn:")
public static native UICollectionViewCell appearanceForTraitCollectionWhenContainedIn(UITraitCollection trait,
@Mapped(ObjCObjectMapper.class) UIAppearanceContainer ContainerClass, Object... varargs);
@Generated
@Selector("appearanceForTraitCollection:whenContainedInInstancesOfClasses:")
public static native UICollectionViewCell appearanceForTraitCollectionWhenContainedInInstancesOfClasses(
UITraitCollection trait, NSArray<?> containerTypes);
@Generated
@Variadic()
@Deprecated
@Selector("appearanceWhenContainedIn:")
public static native UICollectionViewCell appearanceWhenContainedIn(
@Mapped(ObjCObjectMapper.class) UIAppearanceContainer ContainerClass, Object... varargs);
@Generated
@Selector("appearanceWhenContainedInInstancesOfClasses:")
public static native UICollectionViewCell appearanceWhenContainedInInstancesOfClasses(NSArray<?> containerTypes);
@Generated
@Selector("areAnimationsEnabled")
public static native boolean areAnimationsEnabled();
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("beginAnimations:context:")
public static native void beginAnimationsContext(String animationID, VoidPtr context);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("clearTextInputContextIdentifier:")
public static native void clearTextInputContextIdentifier(String identifier);
@Generated
@Selector("commitAnimations")
public static native void commitAnimations();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("inheritedAnimationDuration")
public static native double inheritedAnimationDuration();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
@Generated
@Selector("layerClass")
public static native Class layerClass();
@Generated
@Owned
@Selector("new")
public static native UICollectionViewCell new_objc();
@Generated
@Selector("performSystemAnimation:onViews:options:animations:completion:")
public static native void performSystemAnimationOnViewsOptionsAnimationsCompletion(@NUInt long animation,
NSArray<? extends UIView> views, @NUInt long options,
@ObjCBlock(name = "call_performSystemAnimationOnViewsOptionsAnimationsCompletion_3") UIView.Block_performSystemAnimationOnViewsOptionsAnimationsCompletion_3 parallelAnimations,
@ObjCBlock(name = "call_performSystemAnimationOnViewsOptionsAnimationsCompletion_4") UIView.Block_performSystemAnimationOnViewsOptionsAnimationsCompletion_4 completion);
@Generated
@Selector("performWithoutAnimation:")
public static native void performWithoutAnimation(
@ObjCBlock(name = "call_performWithoutAnimation") UIView.Block_performWithoutAnimation actionsWithoutAnimation);
@Generated
@Selector("requiresConstraintBasedLayout")
public static native boolean requiresConstraintBasedLayout();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
@Generated
@Selector("setAnimationBeginsFromCurrentState:")
public static native void setAnimationBeginsFromCurrentState(boolean fromCurrentState);
@Generated
@Selector("setAnimationCurve:")
public static native void setAnimationCurve(@NInt long curve);
@Generated
@Selector("setAnimationDelay:")
public static native void setAnimationDelay(double delay);
@Generated
@Selector("setAnimationDelegate:")
public static native void setAnimationDelegate(@Mapped(ObjCObjectMapper.class) Object delegate);
@Generated
@Selector("setAnimationDidStopSelector:")
public static native void setAnimationDidStopSelector(SEL selector);
@Generated
@Selector("setAnimationDuration:")
public static native void setAnimationDuration_static(double duration);
@Generated
@Selector("setAnimationRepeatAutoreverses:")
public static native void setAnimationRepeatAutoreverses(boolean repeatAutoreverses);
@Generated
@Selector("setAnimationRepeatCount:")
public static native void setAnimationRepeatCount_static(float repeatCount);
@Generated
@Selector("setAnimationStartDate:")
public static native void setAnimationStartDate(NSDate startDate);
@Generated
@Selector("setAnimationTransition:forView:cache:")
public static native void setAnimationTransitionForViewCache(@NInt long transition, UIView view, boolean cache);
@Generated
@Selector("setAnimationWillStartSelector:")
public static native void setAnimationWillStartSelector(SEL selector);
@Generated
@Selector("setAnimationsEnabled:")
public static native void setAnimationsEnabled(boolean enabled);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("transitionFromView:toView:duration:options:completion:")
public static native void transitionFromViewToViewDurationOptionsCompletion(UIView fromView, UIView toView,
double duration, @NUInt long options,
@ObjCBlock(name = "call_transitionFromViewToViewDurationOptionsCompletion") UIView.Block_transitionFromViewToViewDurationOptionsCompletion completion);
@Generated
@Selector("transitionWithView:duration:options:animations:completion:")
public static native void transitionWithViewDurationOptionsAnimationsCompletion(UIView view, double duration,
@NUInt long options,
@ObjCBlock(name = "call_transitionWithViewDurationOptionsAnimationsCompletion_3") UIView.Block_transitionWithViewDurationOptionsAnimationsCompletion_3 animations,
@ObjCBlock(name = "call_transitionWithViewDurationOptionsAnimationsCompletion_4") UIView.Block_transitionWithViewDurationOptionsAnimationsCompletion_4 completion);
@Generated
@Selector("userInterfaceLayoutDirectionForSemanticContentAttribute:")
@NInt
public static native long userInterfaceLayoutDirectionForSemanticContentAttribute(@NInt long attribute);
@Generated
@Selector("userInterfaceLayoutDirectionForSemanticContentAttribute:relativeToLayoutDirection:")
@NInt
public static native long userInterfaceLayoutDirectionForSemanticContentAttributeRelativeToLayoutDirection(
@NInt long semanticContentAttribute, @NInt long layoutDirection);
@Generated
@Selector("version")
@NInt
public static native long version_static();
@Generated
@ProtocolClassMethod("appearance")
public UICollectionViewCell _appearance() {
return appearance();
}
@Generated
@ProtocolClassMethod("appearanceForTraitCollection")
public UICollectionViewCell _appearanceForTraitCollection(UITraitCollection trait) {
return appearanceForTraitCollection(trait);
}
@Generated
@Deprecated
@ProtocolClassMethod("appearanceForTraitCollectionWhenContainedIn")
public UICollectionViewCell _appearanceForTraitCollectionWhenContainedIn(UITraitCollection trait,
@Mapped(ObjCObjectMapper.class) UIAppearanceContainer ContainerClass, Object... varargs) {
return appearanceForTraitCollectionWhenContainedIn(trait, ContainerClass, varargs);
}
@Generated
@ProtocolClassMethod("appearanceForTraitCollectionWhenContainedInInstancesOfClasses")
public UICollectionViewCell _appearanceForTraitCollectionWhenContainedInInstancesOfClasses(UITraitCollection trait,
NSArray<?> containerTypes) {
return appearanceForTraitCollectionWhenContainedInInstancesOfClasses(trait, containerTypes);
}
@Generated
@Deprecated
@ProtocolClassMethod("appearanceWhenContainedIn")
public UICollectionViewCell _appearanceWhenContainedIn(
@Mapped(ObjCObjectMapper.class) UIAppearanceContainer ContainerClass, Object... varargs) {
return appearanceWhenContainedIn(ContainerClass, varargs);
}
@Generated
@ProtocolClassMethod("appearanceWhenContainedInInstancesOfClasses")
public UICollectionViewCell _appearanceWhenContainedInInstancesOfClasses(NSArray<?> containerTypes) {
return appearanceWhenContainedInInstancesOfClasses(containerTypes);
}
/**
* These properties are always nil when a non-nil `backgroundConfiguration` is set.
* The background view is a subview behind all other views.
* If selectedBackgroundView is different than backgroundView, it will be placed above the background view and animated in on selection.
*/
@Generated
@Selector("backgroundView")
public native UIView backgroundView();
/**
* add custom subviews to the cell's contentView
*/
@Generated
@Selector("contentView")
public native UIView contentView();
@Generated
@Selector("init")
public native UICollectionViewCell init();
@Generated
@Selector("initWithCoder:")
public native UICollectionViewCell initWithCoder(NSCoder coder);
@Generated
@Selector("initWithFrame:")
public native UICollectionViewCell initWithFrame(@ByValue CGRect frame);
@Generated
@Selector("isHighlighted")
public native boolean isHighlighted();
@Generated
@Selector("setHighlighted:")
public native void setHighlighted(boolean value);
/**
* Cells become highlighted when the user touches them.
* The selected state is toggled when the user lifts up from a highlighted cell.
* Override these methods to provide custom UI for a selected or highlighted state.
* The collection view may call the setters inside an animation block.
*/
@Generated
@Selector("isSelected")
public native boolean isSelected();
/**
* Cells become highlighted when the user touches them.
* The selected state is toggled when the user lifts up from a highlighted cell.
* Override these methods to provide custom UI for a selected or highlighted state.
* The collection view may call the setters inside an animation block.
*/
@Generated
@Selector("setSelected:")
public native void setSelected(boolean value);
@Generated
@Selector("selectedBackgroundView")
public native UIView selectedBackgroundView();
/**
* These properties are always nil when a non-nil `backgroundConfiguration` is set.
* The background view is a subview behind all other views.
* If selectedBackgroundView is different than backgroundView, it will be placed above the background view and animated in on selection.
*/
@Generated
@Selector("setBackgroundView:")
public native void setBackgroundView(UIView value);
@Generated
@Selector("setSelectedBackgroundView:")
public native void setSelectedBackgroundView(UIView value);
/**
* Override this method to modify the visual appearance for a particular
* dragState.
* <p>
* Call super if you want to add to the existing default implementation.
*/
@Generated
@Selector("dragStateDidChange:")
public native void dragStateDidChange(@NInt long dragState);
@Generated
@Selector("modifyAnimationsWithRepeatCount:autoreverses:animations:")
public static native void modifyAnimationsWithRepeatCountAutoreversesAnimations(@NFloat double count,
boolean autoreverses,
@ObjCBlock(name = "call_modifyAnimationsWithRepeatCountAutoreversesAnimations") UIView.Block_modifyAnimationsWithRepeatCountAutoreversesAnimations animations);
/**
* When YES, the cell will automatically call -updatedConfigurationForState: on its `backgroundConfiguration` when the cell's
* configuration state changes, and apply the updated configuration back to the cell. The default value is YES.
*/
@Generated
@Selector("automaticallyUpdatesBackgroundConfiguration")
public native boolean automaticallyUpdatesBackgroundConfiguration();
/**
* When YES, the cell will automatically call -updatedConfigurationForState: on its `contentConfiguration` when the cell's
* configuration state changes, and apply the updated configuration back to the cell. The default value is YES.
*/
@Generated
@Selector("automaticallyUpdatesContentConfiguration")
public native boolean automaticallyUpdatesContentConfiguration();
/**
* Setting a background configuration supersedes the cell's backgroundView and selectedBackgroundView. The default value is nil.
*/
@Generated
@Selector("backgroundConfiguration")
public native UIBackgroundConfiguration backgroundConfiguration();
/**
* Returns the current configuration state for the cell.
* To add your own custom state(s), override the getter and call super to obtain an instance with the
* system properties set, then set your own custom states as desired.
*/
@Generated
@Selector("configurationState")
public native UICellConfigurationState configurationState();
/**
* Setting a content configuration replaces the existing contentView of the cell with a new content view instance from the configuration,
* or directly applies the configuration to the existing content view if the configuration is compatible with the existing content view type.
* The default value is nil. After a configuration has been set, setting this property to nil will replace the current content view with a new content view.
*/
@Generated
@Selector("contentConfiguration")
@MappedReturn(ObjCObjectMapper.class)
public native UIContentConfiguration contentConfiguration();
/**
* When YES, the cell will automatically call -updatedConfigurationForState: on its `backgroundConfiguration` when the cell's
* configuration state changes, and apply the updated configuration back to the cell. The default value is YES.
*/
@Generated
@Selector("setAutomaticallyUpdatesBackgroundConfiguration:")
public native void setAutomaticallyUpdatesBackgroundConfiguration(boolean value);
/**
* When YES, the cell will automatically call -updatedConfigurationForState: on its `contentConfiguration` when the cell's
* configuration state changes, and apply the updated configuration back to the cell. The default value is YES.
*/
@Generated
@Selector("setAutomaticallyUpdatesContentConfiguration:")
public native void setAutomaticallyUpdatesContentConfiguration(boolean value);
/**
* Setting a background configuration supersedes the cell's backgroundView and selectedBackgroundView. The default value is nil.
*/
@Generated
@Selector("setBackgroundConfiguration:")
public native void setBackgroundConfiguration(UIBackgroundConfiguration value);
/**
* Setting a content configuration replaces the existing contentView of the cell with a new content view instance from the configuration,
* or directly applies the configuration to the existing content view if the configuration is compatible with the existing content view type.
* The default value is nil. After a configuration has been set, setting this property to nil will replace the current content view with a new content view.
*/
@Generated
@Selector("setContentConfiguration:")
public native void setContentConfiguration(@Mapped(ObjCObjectMapper.class) UIContentConfiguration value);
/**
* Requests the cell update its configuration for its current state. This method is called automatically
* when the cell's `configurationState` may have changed, as well as in other circumstances where an
* update may be required. Multiple requests may be coalesced into a single update at the appropriate time.
*/
@Generated
@Selector("setNeedsUpdateConfiguration")
public native void setNeedsUpdateConfiguration();
/**
* Subclasses should override this method and update the cell's configuration using the state provided.
* This method should not be called directly, use `setNeedsUpdateConfiguration` to request an update.
*/
@Generated
@Selector("updateConfigurationUsingState:")
public native void updateConfigurationUsingState(UICellConfigurationState state);
/**
* Optional block-based alternative to overriding `-updateConfigurationUsingState:` in a subclass. This handler
* is called after `-updateConfigurationUsingState:`. Setting a new handler triggers `setNeedsUpdateConfiguration`.
*/
@Generated
@Selector("configurationUpdateHandler")
@ObjCBlock(name = "call_configurationUpdateHandler_ret")
public native Block_configurationUpdateHandler_ret configurationUpdateHandler();
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_configurationUpdateHandler_ret {
@Generated
void call_configurationUpdateHandler_ret(UICollectionViewCell cell, UICellConfigurationState state);
}
/**
* Optional block-based alternative to overriding `-updateConfigurationUsingState:` in a subclass. This handler
* is called after `-updateConfigurationUsingState:`. Setting a new handler triggers `setNeedsUpdateConfiguration`.
*/
@Generated
@Selector("setConfigurationUpdateHandler:")
public native void setConfigurationUpdateHandler(
@ObjCBlock(name = "call_setConfigurationUpdateHandler") Block_setConfigurationUpdateHandler value);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_setConfigurationUpdateHandler {
@Generated
void call_setConfigurationUpdateHandler(UICollectionViewCell cell, UICellConfigurationState state);
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Gareth Jon Lynch
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.gazbert.java8.aggregate;
import static org.junit.Assert.*;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.IntConsumer;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import com.gazbert.java8.common.Order;
import com.gazbert.java8.common.Order.Market;
import com.gazbert.java8.common.Order.Type;
/**
* Test class for demonstrating use of reduction aggregate operations in Java 8.
* <p>
* Use case is for querying orders in a trading exchange order book.
* <p>
*
* @author gazbert
*/
public class TestReductionAggregateOperations {

    private Order order1;
    private Order order2;
    private Order order3;
    private List<Order> orderBook;

    /**
     * Builds the order book up for each test.
     */
    @Before
    public void setupForEachTest() {
        order1 = new Order(
                Market.EUR, Type.BUY, new BigDecimal("100.00"), new BigDecimal("1.69"), new BigDecimal("0.01"));
        order2 = new Order(
                Market.USD, Type.SELL, new BigDecimal("201.00"), new BigDecimal("1.70"), new BigDecimal("0.01"));
        order3 = new Order(
                Market.CNY, Type.SELL, new BigDecimal("250.00"), new BigDecimal("10.58"), new BigDecimal("0.01"));

        orderBook = new ArrayList<>();
        orderBook.add(order1);
        orderBook.add(order2);
        orderBook.add(order3);
    }

    /**
     * Gets the average number of trades it took to fill all SELL orders.
     * <p>
     * Nice example of aggregate ops in action.
     * <p>
     * JDK contains many terminal operations (such as average, sum, min, max, and count) that return one value by
     * combining the contents of a stream. These operations are called <em>reduction operations</em>.
     * <p>
     * This example uses the average reduction operation.
     */
    @Test
    public void getAverageNumberOfTradesToFillASellOrderUsingJava8PipelineAggregateOperation() {

        order1.setTradeCountToFill(3);
        order2.setTradeCountToFill(4);
        order3.setTradeCountToFill(2);

        final double averageTradesToFillSellOrder =
                orderBook
                    .stream()
                    .filter(e -> e.getType() == Type.SELL) // intermediate operation #1 - filter
                    .mapToInt(Order::getTradeCountToFill) // intermediate operation #2 - creates new int stream for all values from getTradeCountToFill
                    .average() // terminal operation - averages them (reduction operation)
                    .getAsDouble(); // returns result as double

        assertEquals(3, averageTradesToFillSellOrder, 0);
    }

    /**
     * Sums the average number of trades it took to fill all orders of any type.
     * <p>
     * Uses <code>Stream.reduce</code> operation to sum up the number of trades.
     * <p>
     * The reduce operation in this example takes two arguments:
     * <ol>
     * <li>identity - the initial value of the reduction and the default result if there are no elements in the stream</li>
     * <li>accumulator function - The accumulator function takes two parameters: a partial result of the reduction
     *     (the sum of all processed trades so far) and the next element of the stream (next trade integer). It returns
     *     a new partial result. This accumulator function is a lambda expression that adds two integer values and
     *     returns an integer value</li>
     * </ol>
     */
    @Test
    public void getSumOfTradesUsingJava8StreamReduce() {

        order1.setTradeCountToFill(3);
        order2.setTradeCountToFill(4);
        order3.setTradeCountToFill(2);

        // First way uses JDK sum() reduction op
        int sumOfAllTrades =
                orderBook
                    .stream()
                    .mapToInt(Order::getTradeCountToFill)
                    .sum();

        assertEquals(9, sumOfAllTrades);

        // Second way uses Stream.reduce
        sumOfAllTrades =
                orderBook
                    .stream()
                    .map(Order::getTradeCountToFill)
                    .reduce(
                        0, // identity arg
                        (a, b) -> a + b); // lambda accumulator function. args: a is partial sum so far, b is next trade to add,
                                          // actual function adds them up.
        assertEquals(9, sumOfAllTrades);
    }

    /**
     * Sums the average number of trades it took to fill SELL orders.
     * <p>
     * The <code>Stream.reduce</code> operation always returns a new value. The accumulator function in the previous
     * example also returns a new int every time it processes an element of a stream. This is not very efficient if you
     * wanted to reduce the elements of a stream to a more complex object like a collection. If your reduce operation
     * involves adding elements to a collection, then every time your accumulator function processes an element, it
     * creates a new collection that includes the element... not good.
     * <p>
     * It would be more efficient to update an existing collection instead. You can do this with the
     * <code>Stream.collect</code> method; the collect method modifies, or mutates, an existing value. Also known as
     * mutable reduction.
     * <p>
     * Example below uses an Averager helper class to do this. The collect line can be broken down as follows:
     * <ul>
     * <li>supplier - factory function. For our collect operation, it creates instances of the result container.
     *     In this example, it is a new instance of the Averager class.</li>
     * <li>accumulator - accumulator function incorporates a stream element into a result container. In this example,
     *     accept() modifies the Averager result container by incrementing the count variable by one and
     *     adding to the total member variable the value of the stream element - the trade count.
     * <li>combiner - combiner function takes two result containers and merges their contents. In this example, it
     *     modifies an Averager result container by incrementing the count variable by the count member
     *     variable of the other Averager instance and adding to the total member variable the value of
     *     the other Averager instance's total member variable.</li>
     * </ul>
     * I could have used the JDK average reduction method here (see earlier demo), but I wanted a simple demo of using
     * a collector. You would use the collect operation and a custom class if you needed to calculate several values
     * from the elements of a stream.
     */
    @Test
    public void getAverageNumberOfTradesToFillASellOrderUsingJava8StreamCollect() {

        order1.setTradeCountToFill(3);
        order2.setTradeCountToFill(4);
        order3.setTradeCountToFill(2);

        final Averager averagerHelper = orderBook
                .stream()
                .filter(p -> p.getType() == Type.SELL)
                .map(Order::getTradeCountToFill) // the thing we want to call to get the average
                .collect(Averager::new, Averager::accept, Averager::combine); // collect syntax

        assertEquals(3, averagerHelper.average(), 0);
    }

    /**
     * The collect operation is best suited for collections.
     * <p>
     * The following example puts the amount of all SELL orders in a collection with the collect operation.
     * <p>
     * This version of the collect operation takes one parameter of type Collector.
     * This class encapsulates the functions used as arguments in the collect operation that requires three arguments
     * (supplier, accumulator, and combiner functions).
     * <p>
     * The {@link Collectors} class contains many useful reduction operations, such as accumulating elements into collections
     * and summarizing elements according to various criteria. These reduction operations return instances of the
     * class Collector, so you can use them as a parameter for the collect operation.
     * <p>
     * This example uses the Collectors.toList operation, which accumulates the stream elements into a new instance
     * of List. As with most operations in the Collectors class, the toList operator returns an instance of Collector,
     * not a collection.
     */
    @Test
    public void getListOfAllSellOrderAmountsUsingJava8StreamCollectors() {

        final List<BigDecimal> listOfSellOrderAmounts = orderBook
                .stream()
                .filter(p -> p.getType() == Type.SELL)
                .map(p -> p.getAmount())
                .collect(Collectors.toList());

        assertEquals(2, listOfSellOrderAmounts.size());
    }

    /**
     * Demonstrates use of {@link Collectors} groupingBy functionality to aggregate total trade counts
     * for buy and sell orders.
     * <p>
     * The nested {@link Collectors#reducing} collector takes 3 params:
     * <ol>
     * <li>identity: Like the <code>Stream.reduce</code> operation, the identity element is both the initial value of
     *     the reduction and the default result if there are no elements in the stream.</li>
     * <li>mapper: The reducing operation applies this mapper function to all stream elements. In this example,
     *     the mapper retrieves the trade count of each member.</li>
     * <li>operation: The operation function is used to reduce the mapped values. In this example, the operation
     *     function adds Integer values.</li>
     * </ol>
     */
    @Test
    public void getMapOfSellAndBuyTradeCountsUsingJava8CollectorsGroupingBy() {

        order1.setTradeCountToFill(3);
        order2.setTradeCountToFill(4);
        order3.setTradeCountToFill(2);

        final Map<Order.Type, Integer> totalTradeCountMap = orderBook
                .stream()
                .collect(
                    Collectors.groupingBy(
                        Order::getType,
                        Collectors.reducing(
                            0,
                            Order::getTradeCountToFill,
                            Integer::sum)));

        final int totalNumberOfSellTrades = totalTradeCountMap.get(Type.SELL);
        assertEquals(6, totalNumberOfSellTrades);

        final int totalNumberOfBuyTrades = totalTradeCountMap.get(Type.BUY);
        assertEquals(3, totalNumberOfBuyTrades);
    }

    /**
     * Another example of using groupingBy returns average trade count for Buy and Sell orders.
     */
    @Test
    public void getMapOfSellAndBuyTradeAveragesUsingJava8CollectorsGroupingBy() {

        order1.setTradeCountToFill(3);
        order2.setTradeCountToFill(4);
        order3.setTradeCountToFill(2);

        final Map<Order.Type, Double> averageTradeCountByOrderType = orderBook
                .stream()
                .collect(
                    Collectors.groupingBy(
                        Order::getType,
                        Collectors.averagingInt(Order::getTradeCountToFill)));

        final double averageNumberOfSellTrades = averageTradeCountByOrderType.get(Type.SELL);
        assertEquals(3, averageNumberOfSellTrades, 0);

        final double averageNumberOfBuyTrades = averageTradeCountByOrderType.get(Type.BUY);
        assertEquals(3, averageNumberOfBuyTrades, 0);
    }

    /**
     * Helper result container used in demoing the 3-arg <code>Stream.collect</code> method.
     * <p>
     * Declared static so it carries no hidden reference to the enclosing test instance
     * (it never touches outer state).
     *
     * @author gazbert
     */
    private static final class Averager implements IntConsumer {

        private int total = 0;
        private int count = 0;

        /** Returns the running average, or 0 if no elements have been accepted yet. */
        public double average() {
            return count > 0 ? ((double) total) / count : 0;
        }

        /*
         * We override this to get the next stream element so we can update the running totals.
         */
        @Override
        public void accept(int i) {
            total += i;
            count++;
        }

        /*
         * Combines another Averager's partial results into this one (used when the stream
         * framework merges result containers, e.g. during parallel execution).
         */
        public void combine(Averager other) {
            total += other.total;
            count += other.count;
        }
    }
}
| |
/**
* Copyright 2010 The ForPlay Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package forplay.java;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.io.IOException;
import javax.swing.JComponent;
import javax.swing.JFrame;
import forplay.core.Analytics;
import forplay.core.Audio;
import forplay.core.ForPlay;
import forplay.core.Game;
import forplay.core.Json;
import forplay.core.Keyboard;
import forplay.core.Log;
import forplay.core.Net;
import forplay.core.Platform;
import forplay.core.Pointer;
import forplay.core.Mouse;
import forplay.core.Touch;
import forplay.core.Storage;
import forplay.core.RegularExpression;
public class JavaPlatform implements Platform {

    /** Largest frame delta (in ms) fed to a single update pass; clamps huge jumps after a stall. */
    private static final float MAX_DELTA = 100;

    /** Delay (in ms) between scheduled repaints — roughly 20 frames per second. */
    private static final float FRAME_TIME = 50;

    /**
     * Creates the Java desktop platform and registers it as the active ForPlay platform.
     *
     * @return the newly registered platform instance
     */
    public static JavaPlatform register() {
        JavaPlatform platform = new JavaPlatform();
        ForPlay.setPlatform(platform);
        return platform;
    }

    private JComponent component;
    private JFrame frame;
    private Game game;

    private JavaRegularExpression regularExpression = new JavaRegularExpression();
    private JavaAudio audio;
    private JavaGraphics graphics;
    private JavaJson json = new JavaJson();
    private JavaKeyboard keyboard;
    private JavaLog log = new JavaLog();
    private JavaNet net = new JavaNet();
    private JavaPointer pointer;
    private JavaMouse mouse;
    private JavaStorage storage;
    private JavaAssetManager assetManager = new JavaAssetManager();
    private int updateRate = 0;
    private Analytics analytics = new JavaAnalytics();

    // Private: instances must be created via register() so the platform is always registered.
    private JavaPlatform() {
    }

    @Override
    public Audio audio() {
        return audio;
    }

    @Override
    public JavaGraphics graphics() {
        return graphics;
    }

    @Override
    public Json json() {
        return json;
    }

    @Override
    public Keyboard keyboard() {
        return keyboard;
    }

    @Override
    public Log log() {
        return log;
    }

    @Override
    public Net net() {
        return net;
    }

    @Override
    public Pointer pointer() {
        return pointer;
    }

    @Override
    public Mouse mouse() {
        return mouse;
    }

    @Override
    public Touch touch() {
        // TODO(pdr): need to implement this.
        return null;
    }

    @Override
    public Storage storage() {
        return storage;
    }

    @Override
    public Analytics analytics() {
        return analytics;
    }

    @Override
    public JavaAssetManager assetManager() {
        return assetManager;
    }

    @Override
    public float random() {
        return (float) Math.random();
    }

    /**
     * Creates the Swing frame, wires up the platform services, initializes the game, and
     * starts the paint loop.
     */
    @Override
    public void run(final Game game) {
        this.updateRate = game.updateRate();

        ensureFrame();

        audio = new JavaAudio();
        graphics = new JavaGraphics(frame, component);
        keyboard = new JavaKeyboard(frame);
        pointer = new JavaPointer(component);
        mouse = new JavaMouse(component);
        storage = new JavaStorage();

        game.init();

        // Don't set the game until after ensureFrame(). This keeps paint() from
        // being called early.
        this.game = game;
    }

    /** Current time in milliseconds since the epoch (wall clock). */
    @Override
    public double time() {
        return System.currentTimeMillis();
    }

    /**
     * Builds the JFrame and the custom component whose paint() drives the game loop
     * (update with fixed or variable timestep, then paint, then schedule the next frame).
     */
    private void ensureFrame() {
        frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

        component = new JComponent() {
            private float accum = updateRate;
            private double lastTime;

            @Override
            public void paint(Graphics g) {
                if (game != null) {
                    double now = time();
                    float delta = (float) (now - lastTime);
                    // Clamp the elapsed time so a long pause doesn't flood the game with updates.
                    if (delta > MAX_DELTA) {
                        delta = MAX_DELTA;
                    }
                    lastTime = now;

                    float paintAlpha;
                    if (updateRate == 0) {
                        // Variable timestep: one update per frame, no interpolation.
                        game.update(delta);
                        accum = 0;
                        // BUGFIX: the old code passed accum / updateRate here, which is
                        // 0f / 0 == NaN when updateRate == 0. Pass 0 explicitly instead.
                        paintAlpha = 0;
                    } else {
                        // Fixed timestep: run as many whole steps as fit the accumulated time,
                        // then interpolate the remainder.
                        accum += delta;
                        while (accum > updateRate) {
                            game.update(updateRate);
                            accum -= updateRate;
                        }
                        paintAlpha = accum / updateRate;
                    }
                    game.paint(paintAlpha);

                    int width = component.getWidth();
                    int height = component.getHeight();
                    JavaCanvas canvas = new JavaCanvas((Graphics2D) g, width, height);
                    graphics.rootLayer().paint(canvas);
                }

                // Schedule the next frame.
                repaint((long) FRAME_TIME);
            }
        };
        frame.add(component);
        frame.setResizable(false);
        component.setPreferredSize(new Dimension(640, 480));
        frame.pack();
        frame.setVisible(true);
    }

    @Override
    public RegularExpression regularExpression() {
        return regularExpression;
    }

    /**
     * Opens {@code url} in a browser: Chrome on non-Windows systems, the default handler on Windows.
     */
    @Override
    public void openURL(String url) {
        System.out.println("Opening url: " + url);
        String browser = "chrome ";
        if (System.getProperty("os.name", "-").contains("indows")) {
            browser = "rundll32 url.dll,FileProtocolHandler ";
        }
        try {
            // NOTE(review): Runtime.exec(String) tokenizes on whitespace, so a URL containing
            // spaces would break; java.awt.Desktop.browse() would be a more robust alternative.
            Runtime.getRuntime().exec(browser + url);
        } catch (IOException e) {
            // Previously this exception was silently swallowed; report it so failures to
            // launch the browser can actually be diagnosed.
            System.err.println("Failed to open URL '" + url + "': " + e);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.InternalClusterInfoService;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.FromXContentBuilder;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.ttl.IndicesTTLService;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.ScriptMetaData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, FromXContentBuilder<MetaData>, ToXContent {
public static final MetaData PROTO = builder().build();
public static final String ALL = "_all";
public enum XContentContext {
/* Custom metadata should be returns as part of API call */
API,
/* Custom metadata should be stored as part of the persistent cluster state */
GATEWAY,
/* Custom metadata should be stored as part of a snapshot */
SNAPSHOT
}
public static EnumSet<XContentContext> API_ONLY = EnumSet.of(XContentContext.API);
public static EnumSet<XContentContext> API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY);
public static EnumSet<XContentContext> API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT);
public interface Custom extends Diffable<Custom>, ToXContent {
String type();
Custom fromXContent(XContentParser parser) throws IOException;
EnumSet<XContentContext> context();
}
public static Map<String, Custom> customPrototypes = new HashMap<>();
static {
// register non plugin custom metadata
registerPrototype(RepositoriesMetaData.TYPE, RepositoriesMetaData.PROTO);
registerPrototype(IngestMetadata.TYPE, IngestMetadata.PROTO);
registerPrototype(ScriptMetaData.TYPE, ScriptMetaData.PROTO);
registerPrototype(IndexGraveyard.TYPE, IndexGraveyard.PROTO);
}
/**
* Register a custom index meta data factory. Make sure to call it from a static block.
*/
public static void registerPrototype(String type, Custom proto) {
customPrototypes.put(type, proto);
}
@Nullable
public static <T extends Custom> T lookupPrototype(String type) {
//noinspection unchecked
return (T) customPrototypes.get(type);
}
public static <T extends Custom> T lookupPrototypeSafe(String type) {
//noinspection unchecked
T proto = (T) customPrototypes.get(type);
if (proto == null) {
throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "], node likely missing plugins");
}
return proto;
}
public static final Setting<Boolean> SETTING_READ_ONLY_SETTING =
Setting.boolSetting("cluster.blocks.read_only", false, Property.Dynamic, Property.NodeScope);
public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE));
public static final MetaData EMPTY_META_DATA = builder().build();
public static final String CONTEXT_MODE_PARAM = "context_mode";
public static final String CONTEXT_MODE_SNAPSHOT = XContentContext.SNAPSHOT.toString();
public static final String CONTEXT_MODE_GATEWAY = XContentContext.GATEWAY.toString();
public static final String GLOBAL_STATE_FILE_PREFIX = "global-";
private final String clusterUUID;
private final long version;
private final Settings transientSettings;
private final Settings persistentSettings;
private final Settings settings;
private final ImmutableOpenMap<String, IndexMetaData> indices;
private final ImmutableOpenMap<String, IndexTemplateMetaData> templates;
private final ImmutableOpenMap<String, Custom> customs;
private final transient int totalNumberOfShards; // Transient ? not serializable anyway?
private final int numberOfShards;
private final String[] allIndices;
private final String[] allOpenIndices;
private final String[] allClosedIndices;
private final SortedMap<String, AliasOrIndex> aliasAndIndexLookup;
@SuppressWarnings("unchecked")
MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings,
ImmutableOpenMap<String, IndexMetaData> indices, ImmutableOpenMap<String, IndexTemplateMetaData> templates,
ImmutableOpenMap<String, Custom> customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices,
SortedMap<String, AliasOrIndex> aliasAndIndexLookup) {
this.clusterUUID = clusterUUID;
this.version = version;
this.transientSettings = transientSettings;
this.persistentSettings = persistentSettings;
this.settings = Settings.builder().put(persistentSettings).put(transientSettings).build();
this.indices = indices;
this.customs = customs;
this.templates = templates;
int totalNumberOfShards = 0;
int numberOfShards = 0;
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
totalNumberOfShards += cursor.value.getTotalNumberOfShards();
numberOfShards += cursor.value.getNumberOfShards();
}
this.totalNumberOfShards = totalNumberOfShards;
this.numberOfShards = numberOfShards;
this.allIndices = allIndices;
this.allOpenIndices = allOpenIndices;
this.allClosedIndices = allClosedIndices;
this.aliasAndIndexLookup = aliasAndIndexLookup;
}
public long version() {
return this.version;
}
public String clusterUUID() {
return this.clusterUUID;
}
/**
* Returns the merged transient and persistent settings.
*/
public Settings settings() {
return this.settings;
}
public Settings transientSettings() {
return this.transientSettings;
}
public Settings persistentSettings() {
return this.persistentSettings;
}
/**
 * Returns {@code true} when {@code alias} resolves to an alias (rather than a concrete index,
 * or nothing at all) in this metadata.
 */
public boolean hasAlias(String alias) {
    final AliasOrIndex lookup = getAliasAndIndexLookup().get(alias);
    return lookup != null && lookup.isAlias();
}
/**
 * Returns {@code true} when every index present in {@code other} also exists in this metadata
 * with an identical set of aliases.
 */
public boolean equalsAliases(MetaData other) {
    for (ObjectCursor<IndexMetaData> cursor : other.indices().values()) {
        final IndexMetaData theirIndex = cursor.value;
        final IndexMetaData ourIndex = index(theirIndex.getIndex());
        // Missing index or any alias difference means the alias sets diverge.
        if (ourIndex == null || !theirIndex.getAliases().equals(ourIndex.getAliases())) {
            return false;
        }
    }
    return true;
}
public SortedMap<String, AliasOrIndex> getAliasAndIndexLookup() {
return aliasAndIndexLookup;
}
/**
 * Finds the index aliases that match the given alias names (directly or via wildcards) and that
 * point to one of the given concrete indices (directly or via wildcards).
 *
 * @param aliases         the alias names (or patterns) to look for; empty means "all aliases"
 * @param concreteIndices the concrete indices whose aliases are inspected
 * @return the matching aliases, grouped by index name, each list sorted by alias name
 */
public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, String[] concreteIndices) {
    assert aliases != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
        return ImmutableOpenMap.of();
    }

    final boolean includeAllAliases = matchAllAliases(aliases);
    final ImmutableOpenMap.Builder<String, List<AliasMetaData>> result = ImmutableOpenMap.builder();
    // Only consider requested indices that actually exist in this metadata.
    final Iterable<String> existingRequested = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (final String indexName : existingRequested) {
        final IndexMetaData indexMetaData = indices.get(indexName);
        final List<AliasMetaData> matched = new ArrayList<>();
        for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
            final AliasMetaData aliasMetaData = cursor.value;
            if (includeAllAliases || Regex.simpleMatch(aliases, aliasMetaData.alias())) {
                matched.add(aliasMetaData);
            }
        }
        if (matched.isEmpty()) {
            continue;  // indices without any matching alias are omitted from the result
        }
        // Sort by alias name so the per-index list order is deterministic.
        CollectionUtil.timSort(matched, new Comparator<AliasMetaData>() {
            @Override
            public int compare(AliasMetaData o1, AliasMetaData o2) {
                return o1.alias().compareTo(o2.alias());
            }
        });
        result.put(indexName, Collections.unmodifiableList(matched));
    }
    return result.build();
}
/** Returns true when the selection means "all aliases": an empty array, or any entry equal to ALL. */
private static boolean matchAllAliases(final String[] aliases) {
    if (aliases.length == 0) {
        return true;
    }
    for (String alias : aliases) {
        if (alias.equals(ALL)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks if at least one of the specified aliases exists in the specified concrete indices. Wildcards are supported in the
 * alias names for partial matches.
 *
 * @param aliases The names of the index aliases to find
 * @param concreteIndices The concrete indexes the index aliases must point to in order to be returned.
 * @return whether at least one of the specified aliases exists in one of the specified concrete indices.
 */
public boolean hasAliases(final String[] aliases, String[] concreteIndices) {
    assert aliases != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
        return false;
    }
    Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String index : intersection) {
        IndexMetaData indexMetaData = indices.get(index);
        for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
            // One match anywhere is enough: return immediately instead of collecting
            // every matching alias into a throwaway list (as the previous version did).
            if (Regex.simpleMatch(aliases, cursor.value.alias())) {
                return true;
            }
        }
    }
    return false;
}
/*
 * Finds all mappings for types and concrete indices. Types are expanded to
 * include all types that match the glob patterns in the types array. Empty
 * types array, null or {"_all"} will be expanded to all types available for
 * the given indices.
 */
public ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> findMappings(String[] concreteIndices, final String[] types) {
    assert types != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
        return ImmutableOpenMap.of();
    }
    ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexMapBuilder = ImmutableOpenMap.builder();
    Iterable<String> existingIndices = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String indexName : existingIndices) {
        IndexMetaData indexMetaData = indices.get(indexName);
        if (isAllTypes(types)) {
            // No types specified means get it all
            indexMapBuilder.put(indexName, indexMetaData.getMappings());
            continue;
        }
        ImmutableOpenMap.Builder<String, MappingMetaData> filtered = ImmutableOpenMap.builder();
        for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
            if (Regex.simpleMatch(types, cursor.key)) {
                filtered.put(cursor.key, cursor.value);
            }
        }
        // Indices where nothing matched are omitted from the result entirely.
        if (!filtered.isEmpty()) {
            indexMapBuilder.put(indexName, filtered.build());
        }
    }
    return indexMapBuilder.build();
}
/**
 * Returns all the concrete indices.
 */
public String[] getConcreteAllIndices() {
    return allIndices;
}
/** Returns the names of all concrete indices in state OPEN. */
public String[] getConcreteAllOpenIndices() {
    return allOpenIndices;
}
/** Returns the names of all concrete indices in state CLOSE. */
public String[] getConcreteAllClosedIndices() {
    return allClosedIndices;
}
/**
 * Returns indexing routing for the given index or alias: the alias' configured index
 * routing when one applies, otherwise the explicit routing, otherwise the parent id.
 *
 * @throws IllegalArgumentException when the alias spans multiple indices, carries a
 *         comma-separated (multi-valued) routing, or conflicts with the explicit routing
 */
// TODO: This can be moved to IndexNameExpressionResolver too, but this means that we will support wildcards and other expressions
// in the index,bulk,update and delete apis.
public String resolveIndexRouting(@Nullable String parent, @Nullable String routing, String aliasOrIndex) {
    if (aliasOrIndex == null) {
        return routingOrParent(parent, routing);
    }
    AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex);
    if (result == null || result.isAlias() == false) {
        // Unknown name or a concrete index: no alias routing can apply.
        return routingOrParent(parent, routing);
    }
    AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result;
    if (result.getIndices().size() > 1) {
        // Single-document operations may not write through an alias that spans several indices.
        rejectSingleIndexOperation(aliasOrIndex, result);
    }
    AliasMetaData aliasMd = alias.getFirstAliasMetaData();
    if (aliasMd.indexRouting() != null) {
        // Use indexRouting() consistently (the original mixed it with the equivalent
        // bean-style getIndexRouting() in the message below).
        if (aliasMd.indexRouting().indexOf(',') != -1) {
            throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + aliasMd.indexRouting() + "] that resolved to several routing values, rejecting operation");
        }
        if (routing != null) {
            if (!routing.equals(aliasMd.indexRouting())) {
                throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation");
            }
        }
        // Alias routing overrides the parent routing (if any).
        return aliasMd.indexRouting();
    }
    return routingOrParent(parent, routing);
}
/**
 * Always throws: called when a single-index operation targets an alias resolving to
 * more than one index. The exception message lists every index behind the alias.
 */
private void rejectSingleIndexOperation(String aliasOrIndex, AliasOrIndex result) {
    List<String> indexNames = new ArrayList<>();
    for (IndexMetaData indexMetaData : result.getIndices()) {
        indexNames.add(indexMetaData.getIndex().getName());
    }
    // List.toString() renders "[a, b]", matching Arrays.toString() of the old array form.
    throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has more than one index associated with it [" + indexNames + "], can't execute a single index op");
}
/** Prefers the explicit routing value; falls back to the parent id when routing is null. */
private String routingOrParent(@Nullable String parent, @Nullable String routing) {
    return routing != null ? routing : parent;
}
/** Returns true if a concrete index with the given name exists (aliases are not considered). */
public boolean hasIndex(String index) {
    return indices.containsKey(index);
}
/** Returns true if the name resolves to either a concrete index or an alias. */
public boolean hasConcreteIndex(String index) {
    return getAliasAndIndexLookup().containsKey(index);
}
/** Returns the metadata for the named index, or null if it does not exist. */
public IndexMetaData index(String index) {
    return indices.get(index);
}
/** Returns the metadata for the given index only when both name and UUID match, otherwise null. */
public IndexMetaData index(Index index) {
    IndexMetaData metaData = index(index.getName());
    if (metaData != null && metaData.getIndexUUID().equals(index.getUUID())) {
        return metaData;
    }
    return null;
}
/** Returns true iff existing index has the same {@link IndexMetaData} instance */
public boolean hasIndexMetaData(final IndexMetaData indexMetaData) {
    // Reference equality on purpose: detects whether this exact instance is registered.
    return indices.get(indexMetaData.getIndex().getName()) == indexMetaData;
}
/**
 * Returns the {@link IndexMetaData} for this index.
 * @throws IndexNotFoundException if no metadata for this index is found, or the stored
 *         metadata's UUID does not match the requested index's UUID
 */
public IndexMetaData getIndexSafe(Index index) {
    IndexMetaData metaData = index(index.getName());
    if (metaData == null) {
        throw new IndexNotFoundException(index);
    }
    if (metaData.getIndexUUID().equals(index.getUUID()) == false) {
        // Same name but different UUID: the index was deleted and re-created.
        throw new IndexNotFoundException(index,
            new IllegalStateException("index uuid doesn't match expected: [" + index.getUUID()
                + "] but got: [" + metaData.getIndexUUID() +"]"));
    }
    return metaData;
}
/** Returns the immutable map of index name to {@link IndexMetaData}. */
public ImmutableOpenMap<String, IndexMetaData> indices() {
    return this.indices;
}
/** Bean-style alias of {@link #indices()}. */
public ImmutableOpenMap<String, IndexMetaData> getIndices() {
    return indices();
}
/** Returns the immutable map of template name to {@link IndexTemplateMetaData}. */
public ImmutableOpenMap<String, IndexTemplateMetaData> templates() {
    return this.templates;
}
/** Bean-style alias of {@link #templates()}. */
public ImmutableOpenMap<String, IndexTemplateMetaData> getTemplates() {
    return this.templates;
}
/** Returns the immutable map of custom metadata, keyed by custom type. */
public ImmutableOpenMap<String, Custom> customs() {
    return this.customs;
}
/** Bean-style alias of {@link #customs()}. */
public ImmutableOpenMap<String, Custom> getCustoms() {
    return this.customs;
}
/**
 * The collection of index deletions in the cluster.
 */
public IndexGraveyard indexGraveyard() {
    return custom(IndexGraveyard.TYPE);
}
/**
 * Returns the custom metadata registered under {@code type}, or null when absent.
 * The unchecked cast is unavoidable: the customs map stores heterogeneous values keyed
 * by their type string, so the caller's target type cannot be verified at runtime.
 */
@SuppressWarnings("unchecked")
public <T extends Custom> T custom(String type) {
    return (T) customs.get(type);
}
/** Sum of {@code IndexMetaData#getTotalNumberOfShards()} across all indices (precomputed in the constructor). */
public int getTotalNumberOfShards() {
    return this.totalNumberOfShards;
}
/** Sum of {@code IndexMetaData#getNumberOfShards()} across all indices (precomputed in the constructor). */
public int getNumberOfShards() {
    return this.numberOfShards;
}
/**
 * Identifies whether the array containing type names given as argument refers to all types
 * The empty or null array identifies all types
 *
 * @param types the array containing types
 * @return true if the provided array maps to all types, false otherwise
 */
public static boolean isAllTypes(String[] types) {
    // Implicit (null/empty) and explicit ({"_all"}) forms both count as "all types".
    return types == null || types.length == 0 || isExplicitAllType(types);
}
/**
 * Identifies whether the array containing type names given as argument explicitly refers to all types
 * The empty or null array doesn't explicitly map to all types
 *
 * @param types the array containing index names
 * @return true if the provided array explicitly maps to all types, false otherwise
 */
public static boolean isExplicitAllType(String[] types) {
    // Only the exact single-element form {ALL} is an explicit "all"; {"a", "_all"} is not.
    return types != null && types.length == 1 && ALL.equals(types[0]);
}
/**
 * @param concreteIndex The concrete index to check if routing is required
 * @param type The type to check if routing is required
 * @return Whether routing is required according to the mapping for the specified index and type
 */
public boolean routingRequired(String concreteIndex, String type) {
    IndexMetaData indexMetaData = indices.get(concreteIndex);
    if (indexMetaData == null) {
        // Unknown index: nothing mandates routing.
        return false;
    }
    MappingMetaData mapping = indexMetaData.getMappings().get(type);
    return mapping != null && mapping.routing().required();
}
/** Iterates over the {@link IndexMetaData} of all indices. */
@Override
public Iterator<IndexMetaData> iterator() {
    return indices.valuesIt();
}
/**
 * Returns true when the "global" (gateway-persisted) parts of the two metadata instances
 * are equal: persistent settings, templates, and every custom whose context includes
 * GATEWAY. Transient settings and per-index metadata are deliberately not compared.
 */
public static boolean isGlobalStateEquals(MetaData metaData1, MetaData metaData2) {
    if (!metaData1.persistentSettings.equals(metaData2.persistentSettings)) {
        return false;
    }
    if (!metaData1.templates.equals(metaData2.templates())) {
        return false;
    }
    // Check if any persistent metadata needs to be saved
    int customCount1 = 0;
    for (ObjectObjectCursor<String, Custom> cursor : metaData1.customs) {
        if (customPrototypes.get(cursor.key).context().contains(XContentContext.GATEWAY)) {
            if (!cursor.value.equals(metaData2.custom(cursor.key))) return false;
            customCount1++;
        }
    }
    // Counting both sides catches gateway customs present in metaData2 but absent from
    // metaData1 (the loop above only checks metaData1's keys).
    int customCount2 = 0;
    for (ObjectObjectCursor<String, Custom> cursor : metaData2.customs) {
        if (customPrototypes.get(cursor.key).context().contains(XContentContext.GATEWAY)) {
            customCount2++;
        }
    }
    if (customCount1 != customCount2) return false;
    return true;
}
/** Computes a diff that transforms {@code previousState} into this metadata. */
@Override
public Diff<MetaData> diff(MetaData previousState) {
    return new MetaDataDiff(previousState, this);
}
/** Reads a previously serialized {@link MetaDataDiff} from the stream. */
@Override
public Diff<MetaData> readDiffFrom(StreamInput in) throws IOException {
    return new MetaDataDiff(in);
}
/** Parses metadata from x-content; delegates to {@link Builder#fromXContent}. */
@Override
public MetaData fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
    return Builder.fromXContent(parser);
}
/** Renders this metadata as x-content; delegates to {@link Builder#toXContent}. */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    Builder.toXContent(this, builder, params);
    return builder;
}
/**
 * Diff between two {@link MetaData} instances, used to ship incremental cluster state
 * updates. Scalar fields (uuid, version, settings) are taken wholesale from the "after"
 * state; the three maps are diffed entry-by-entry via {@link DiffableUtils}.
 */
private static class MetaDataDiff implements Diff<MetaData> {
    private long version;
    private String clusterUUID;
    private Settings transientSettings;
    private Settings persistentSettings;
    private Diff<ImmutableOpenMap<String, IndexMetaData>> indices;
    private Diff<ImmutableOpenMap<String, IndexTemplateMetaData>> templates;
    private Diff<ImmutableOpenMap<String, Custom>> customs;
    /** Computes the diff that turns {@code before} into {@code after}. */
    public MetaDataDiff(MetaData before, MetaData after) {
        clusterUUID = after.clusterUUID;
        version = after.version;
        transientSettings = after.transientSettings;
        persistentSettings = after.persistentSettings;
        indices = DiffableUtils.diff(before.indices, after.indices, DiffableUtils.getStringKeySerializer());
        templates = DiffableUtils.diff(before.templates, after.templates, DiffableUtils.getStringKeySerializer());
        customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer());
    }
    /** Deserializes a diff; the read order must mirror {@link #writeTo(StreamOutput)} exactly. */
    public MetaDataDiff(StreamInput in) throws IOException {
        clusterUUID = in.readString();
        version = in.readLong();
        transientSettings = Settings.readSettingsFromStream(in);
        persistentSettings = Settings.readSettingsFromStream(in);
        indices = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexMetaData.PROTO);
        templates = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexTemplateMetaData.PROTO);
        // Custom values are read through their registered prototypes, so unknown types fail fast.
        customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(),
                new DiffableUtils.DiffableValueSerializer<String, Custom>() {
            @Override
            public Custom read(StreamInput in, String key) throws IOException {
                return lookupPrototypeSafe(key).readFrom(in);
            }
            @Override
            public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
                return lookupPrototypeSafe(key).readDiffFrom(in);
            }
        });
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(clusterUUID);
        out.writeLong(version);
        Settings.writeSettingsToStream(transientSettings, out);
        Settings.writeSettingsToStream(persistentSettings, out);
        indices.writeTo(out);
        templates.writeTo(out);
        customs.writeTo(out);
    }
    /** Applies this diff on top of {@code part}, producing the "after" metadata. */
    @Override
    public MetaData apply(MetaData part) {
        Builder builder = builder();
        builder.clusterUUID(clusterUUID);
        builder.version(version);
        builder.transientSettings(transientSettings);
        builder.persistentSettings(persistentSettings);
        builder.indices(indices.apply(part.indices));
        builder.templates(templates.apply(part.templates));
        builder.customs(customs.apply(part.customs));
        return builder.build();
    }
}
/**
 * Deserializes a complete {@link MetaData}. Read order must mirror
 * {@link #writeTo(StreamOutput)}: version, uuid, transient then persistent settings,
 * indices, templates, then (type, payload) custom pairs.
 */
@Override
public MetaData readFrom(StreamInput in) throws IOException {
    Builder builder = new Builder();
    builder.version = in.readLong();
    builder.clusterUUID = in.readString();
    builder.transientSettings(readSettingsFromStream(in));
    builder.persistentSettings(readSettingsFromStream(in));
    int size = in.readVInt();
    for (int i = 0; i < size; i++) {
        // false: do not bump the index metadata version when re-hydrating from the wire.
        builder.put(IndexMetaData.Builder.readFrom(in), false);
    }
    size = in.readVInt();
    for (int i = 0; i < size; i++) {
        builder.put(IndexTemplateMetaData.Builder.readFrom(in));
    }
    int customSize = in.readVInt();
    for (int i = 0; i < customSize; i++) {
        String type = in.readString();
        Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in);
        builder.putCustom(type, customIndexMetaData);
    }
    return builder.build();
}
/**
 * Serializes this metadata. Write order must mirror {@link #readFrom(StreamInput)}:
 * version, uuid, transient then persistent settings, indices, templates, then customs.
 */
@Override
public void writeTo(StreamOutput out) throws IOException {
    out.writeLong(version);
    out.writeString(clusterUUID);
    writeSettingsToStream(transientSettings, out);
    writeSettingsToStream(persistentSettings, out);
    out.writeVInt(indices.size());
    for (IndexMetaData indexMetaData : this) {
        indexMetaData.writeTo(out);
    }
    out.writeVInt(templates.size());
    for (ObjectCursor<IndexTemplateMetaData> cursor : templates.values()) {
        cursor.value.writeTo(out);
    }
    out.writeVInt(customs.size());
    // Customs are written as (type, payload) pairs so the reader can resolve the prototype.
    for (ObjectObjectCursor<String, Custom> cursor : customs) {
        out.writeString(cursor.key);
        cursor.value.writeTo(out);
    }
}
/** Returns a builder for a new, empty metadata instance. */
public static Builder builder() {
    return new Builder();
}
/** Returns a builder pre-populated from the given metadata. */
public static Builder builder(MetaData metaData) {
    return new Builder(metaData);
}
/** All known byte-sized cluster settings. Consulted by {@link #addDefaultUnitsIfNeeded} to append a default "b" unit. */
public static final Set<String> CLUSTER_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet(
    IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(),
    RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()));
/** All known time cluster settings. Consulted by {@link #addDefaultUnitsIfNeeded} to append a default "ms" unit. */
public static final Set<String> CLUSTER_TIME_SETTINGS = unmodifiableSet(newHashSet(
    IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(),
    RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(),
    RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(),
    RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(),
    RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(),
    RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getKey(),
    DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(),
    InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(),
    InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(),
    DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(),
    ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey()));
/** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't
 * specify a unit. Returns the original instance unchanged when no setting needed fixing. */
public static MetaData addDefaultUnitsIfNeeded(Logger logger, MetaData metaData) {
    Settings.Builder newPersistentSettings = null;
    for (Map.Entry<String, String> ent : metaData.persistentSettings().getAsMap().entrySet()) {
        String settingName = ent.getKey();
        String settingValue = ent.getValue();
        if (CLUSTER_BYTES_SIZE_SETTINGS.contains(settingName) && isUnitlessNumber(settingValue)) {
            // It's a naked number that previously would be interpreted as default unit (bytes); now we add it:
            logger.warn("byte-sized cluster setting [{}] with value [{}] is missing units; assuming default units (b) but in future versions this will be a hard error", settingName, settingValue);
            if (newPersistentSettings == null) {
                newPersistentSettings = Settings.builder();
                newPersistentSettings.put(metaData.persistentSettings());
            }
            newPersistentSettings.put(settingName, settingValue + "b");
        }
        // The two settings sets are disjoint, so at most one of these branches fires per setting.
        if (CLUSTER_TIME_SETTINGS.contains(settingName) && isUnitlessNumber(settingValue)) {
            // It's a naked number that previously would be interpreted as default unit (ms); now we add it:
            logger.warn("time cluster setting [{}] with value [{}] is missing units; assuming default units (ms) but in future versions this will be a hard error", settingName, settingValue);
            if (newPersistentSettings == null) {
                newPersistentSettings = Settings.builder();
                newPersistentSettings.put(metaData.persistentSettings());
            }
            newPersistentSettings.put(settingName, settingValue + "ms");
        }
    }
    if (newPersistentSettings != null) {
        // At least one setting was rewritten: rebuild the metadata around the fixed persistent settings.
        return new MetaData(
            metaData.clusterUUID(),
            metaData.version(),
            metaData.transientSettings(),
            newPersistentSettings.build(),
            metaData.getIndices(),
            metaData.getTemplates(),
            metaData.getCustoms(),
            metaData.getConcreteAllIndices(),
            metaData.getConcreteAllOpenIndices(),
            metaData.getConcreteAllClosedIndices(),
            metaData.getAliasAndIndexLookup());
    } else {
        // No changes:
        return metaData;
    }
}
/** Returns true when {@code value} parses as a plain long, i.e. a number carrying no unit suffix. */
private static boolean isUnitlessNumber(String value) {
    try {
        Long.parseLong(value);
        return true;
    } catch (NumberFormatException nfe) {
        return false;
    }
}
/**
 * Mutable builder for {@link MetaData}. Accumulates settings, indices, templates and
 * customs, then materializes the immutable derived structures (concrete index arrays,
 * alias/index lookup) in {@link #build()}.
 */
public static class Builder {
    private String clusterUUID;
    private long version;
    private Settings transientSettings = Settings.Builder.EMPTY_SETTINGS;
    private Settings persistentSettings = Settings.Builder.EMPTY_SETTINGS;
    private final ImmutableOpenMap.Builder<String, IndexMetaData> indices;
    private final ImmutableOpenMap.Builder<String, IndexTemplateMetaData> templates;
    private final ImmutableOpenMap.Builder<String, Custom> customs;
    public Builder() {
        // "_na_" marks a not-yet-assigned cluster uuid; see generateClusterUuidIfNeeded().
        clusterUUID = "_na_";
        indices = ImmutableOpenMap.builder();
        templates = ImmutableOpenMap.builder();
        customs = ImmutableOpenMap.builder();
        indexGraveyard(IndexGraveyard.builder().build()); // create new empty index graveyard to initialize
    }
    /** Starts from a copy of the given metadata's state. */
    public Builder(MetaData metaData) {
        this.clusterUUID = metaData.clusterUUID;
        this.transientSettings = metaData.transientSettings;
        this.persistentSettings = metaData.persistentSettings;
        this.version = metaData.version;
        this.indices = ImmutableOpenMap.builder(metaData.indices);
        this.templates = ImmutableOpenMap.builder(metaData.templates);
        this.customs = ImmutableOpenMap.builder(metaData.customs);
    }
    /** Builds and stores the index metadata, always incrementing its version. */
    public Builder put(IndexMetaData.Builder indexMetaDataBuilder) {
        // we know its a new one, increment the version and store
        indexMetaDataBuilder.version(indexMetaDataBuilder.version() + 1);
        IndexMetaData indexMetaData = indexMetaDataBuilder.build();
        indices.put(indexMetaData.getIndex().getName(), indexMetaData);
        return this;
    }
    /**
     * Stores the index metadata, optionally incrementing its version. Storing the exact
     * instance that is already present is a no-op.
     */
    public Builder put(IndexMetaData indexMetaData, boolean incrementVersion) {
        if (indices.get(indexMetaData.getIndex().getName()) == indexMetaData) {
            return this;
        }
        // if we put a new index metadata, increment its version
        if (incrementVersion) {
            indexMetaData = IndexMetaData.builder(indexMetaData).version(indexMetaData.getVersion() + 1).build();
        }
        indices.put(indexMetaData.getIndex().getName(), indexMetaData);
        return this;
    }
    /** Returns the pending metadata for the named index, or null. */
    public IndexMetaData get(String index) {
        return indices.get(index);
    }
    /**
     * Like {@link #get(String)} but also verifies the UUID.
     * @throws IndexNotFoundException if the index is absent or its UUID differs
     */
    public IndexMetaData getSafe(Index index) {
        IndexMetaData indexMetaData = get(index.getName());
        if (indexMetaData != null) {
            if(indexMetaData.getIndexUUID().equals(index.getUUID())) {
                return indexMetaData;
            }
            throw new IndexNotFoundException(index,
                new IllegalStateException("index uuid doesn't match expected: [" + index.getUUID()
                    + "] but got: [" + indexMetaData.getIndexUUID() +"]"));
        }
        throw new IndexNotFoundException(index);
    }
    public Builder remove(String index) {
        indices.remove(index);
        return this;
    }
    public Builder removeAllIndices() {
        indices.clear();
        return this;
    }
    /** Bulk-adds all given indices to the pending map. */
    public Builder indices(ImmutableOpenMap<String, IndexMetaData> indices) {
        this.indices.putAll(indices);
        return this;
    }
    public Builder put(IndexTemplateMetaData.Builder template) {
        return put(template.build());
    }
    public Builder put(IndexTemplateMetaData template) {
        templates.put(template.name(), template);
        return this;
    }
    public Builder removeTemplate(String templateName) {
        templates.remove(templateName);
        return this;
    }
    public Builder templates(ImmutableOpenMap<String, IndexTemplateMetaData> templates) {
        this.templates.putAll(templates);
        return this;
    }
    public Custom getCustom(String type) {
        return customs.get(type);
    }
    public Builder putCustom(String type, Custom custom) {
        customs.put(type, custom);
        return this;
    }
    public Builder removeCustom(String type) {
        customs.remove(type);
        return this;
    }
    public Builder customs(ImmutableOpenMap<String, Custom> customs) {
        this.customs.putAll(customs);
        return this;
    }
    /** Stores the index graveyard as the custom registered under {@link IndexGraveyard#TYPE}. */
    public Builder indexGraveyard(final IndexGraveyard indexGraveyard) {
        putCustom(IndexGraveyard.TYPE, indexGraveyard);
        return this;
    }
    public IndexGraveyard indexGraveyard() {
        @SuppressWarnings("unchecked") IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE);
        return graveyard;
    }
    /**
     * Merges the given settings into the named indices (or all pending indices when
     * none are named).
     * @throws IndexNotFoundException if a named index is not present
     */
    public Builder updateSettings(Settings settings, String... indices) {
        if (indices == null || indices.length == 0) {
            indices = this.indices.keys().toArray(String.class);
        }
        for (String index : indices) {
            IndexMetaData indexMetaData = this.indices.get(index);
            if (indexMetaData == null) {
                throw new IndexNotFoundException(index);
            }
            put(IndexMetaData.builder(indexMetaData)
                .settings(Settings.builder().put(indexMetaData.getSettings()).put(settings)));
        }
        return this;
    }
    /**
     * Sets the replica count on the named indices (or all pending indices when none are named).
     * @throws IndexNotFoundException if a named index is not present
     */
    public Builder updateNumberOfReplicas(int numberOfReplicas, String... indices) {
        if (indices == null || indices.length == 0) {
            indices = this.indices.keys().toArray(String.class);
        }
        for (String index : indices) {
            IndexMetaData indexMetaData = this.indices.get(index);
            if (indexMetaData == null) {
                throw new IndexNotFoundException(index);
            }
            put(IndexMetaData.builder(indexMetaData).numberOfReplicas(numberOfReplicas));
        }
        return this;
    }
    public Settings transientSettings() {
        return this.transientSettings;
    }
    public Builder transientSettings(Settings settings) {
        this.transientSettings = settings;
        return this;
    }
    public Settings persistentSettings() {
        return this.persistentSettings;
    }
    public Builder persistentSettings(Settings settings) {
        this.persistentSettings = settings;
        return this;
    }
    public Builder version(long version) {
        this.version = version;
        return this;
    }
    public Builder clusterUUID(String clusterUUID) {
        this.clusterUUID = clusterUUID;
        return this;
    }
    /** Assigns a random uuid if the placeholder from the no-arg constructor is still set. */
    public Builder generateClusterUuidIfNeeded() {
        if (clusterUUID.equals("_na_")) {
            clusterUUID = UUIDs.randomBase64UUID();
        }
        return this;
    }
    /**
     * Materializes the immutable {@link MetaData}: snapshots the three maps, computes the
     * all/open/closed concrete index arrays and the combined alias/index lookup.
     * @throws IllegalStateException if an alias and an index share a name
     */
    public MetaData build() {
        // TODO: We should move these datastructures to IndexNameExpressionResolver, this will give the following benefits:
        // 1) The datastructures will only be rebuilded when needed. Now during serializing we rebuild these datastructures
        // while these datastructures aren't even used.
        // 2) The aliasAndIndexLookup can be updated instead of rebuilding it all the time.
        // build all concrete indices arrays:
        // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices.
        // When doing an operation across all indices, most of the time is spent on actually going to all shards and
        // do the required operations, the bottleneck isn't resolving expressions into concrete indices.
        List<String> allIndicesLst = new ArrayList<>();
        for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
            allIndicesLst.add(cursor.value.getIndex().getName());
        }
        String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]);
        List<String> allOpenIndicesLst = new ArrayList<>();
        List<String> allClosedIndicesLst = new ArrayList<>();
        for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
            IndexMetaData indexMetaData = cursor.value;
            if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
                allOpenIndicesLst.add(indexMetaData.getIndex().getName());
            } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
                allClosedIndicesLst.add(indexMetaData.getIndex().getName());
            }
        }
        String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]);
        String[] allClosedIndices = allClosedIndicesLst.toArray(new String[allClosedIndicesLst.size()]);
        // build all indices map
        SortedMap<String, AliasOrIndex> aliasAndIndexLookup = new TreeMap<>();
        for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
            IndexMetaData indexMetaData = cursor.value;
            aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
            for (ObjectObjectCursor<String, AliasMetaData> aliasCursor : indexMetaData.getAliases()) {
                AliasMetaData aliasMetaData = aliasCursor.value;
                AliasOrIndex aliasOrIndex = aliasAndIndexLookup.get(aliasMetaData.getAlias());
                if (aliasOrIndex == null) {
                    aliasOrIndex = new AliasOrIndex.Alias(aliasMetaData, indexMetaData);
                    aliasAndIndexLookup.put(aliasMetaData.getAlias(), aliasOrIndex);
                } else if (aliasOrIndex instanceof AliasOrIndex.Alias) {
                    // Same alias already seen on another index: record this index too.
                    AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex;
                    alias.addIndex(indexMetaData);
                } else if (aliasOrIndex instanceof AliasOrIndex.Index) {
                    AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex;
                    throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index " + index.getIndex().getIndex() + " have the same name");
                } else {
                    throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]");
                }
            }
        }
        aliasAndIndexLookup = Collections.unmodifiableSortedMap(aliasAndIndexLookup);
        return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(),
            customs.build(), allIndices, allOpenIndices, allClosedIndices, aliasAndIndexLookup);
    }
    /** Renders the metadata as a standalone JSON document string. */
    public static String toXContent(MetaData metaData) throws IOException {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject();
        toXContent(metaData, builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        return builder.string();
    }
    /**
     * Writes the metadata into an open builder under the "meta-data" key. The context param
     * (API vs GATEWAY) controls whether transient settings, indices and non-matching customs
     * are included.
     */
    public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
        XContentContext context = XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM, "API"));
        builder.startObject("meta-data");
        builder.field("version", metaData.version());
        builder.field("cluster_uuid", metaData.clusterUUID);
        if (!metaData.persistentSettings().getAsMap().isEmpty()) {
            builder.startObject("settings");
            for (Map.Entry<String, String> entry : metaData.persistentSettings().getAsMap().entrySet()) {
                builder.field(entry.getKey(), entry.getValue());
            }
            builder.endObject();
        }
        if (context == XContentContext.API && !metaData.transientSettings().getAsMap().isEmpty()) {
            builder.startObject("transient_settings");
            for (Map.Entry<String, String> entry : metaData.transientSettings().getAsMap().entrySet()) {
                builder.field(entry.getKey(), entry.getValue());
            }
            builder.endObject();
        }
        builder.startObject("templates");
        for (ObjectCursor<IndexTemplateMetaData> cursor : metaData.templates().values()) {
            IndexTemplateMetaData.Builder.toXContent(cursor.value, builder, params);
        }
        builder.endObject();
        if (context == XContentContext.API && !metaData.indices().isEmpty()) {
            builder.startObject("indices");
            for (IndexMetaData indexMetaData : metaData) {
                IndexMetaData.Builder.toXContent(indexMetaData, builder, params);
            }
            builder.endObject();
        }
        // Only customs whose declared context matches the requested one are emitted.
        for (ObjectObjectCursor<String, Custom> cursor : metaData.customs()) {
            Custom proto = lookupPrototypeSafe(cursor.key);
            if (proto.context().contains(context)) {
                builder.startObject(cursor.key);
                cursor.value.toXContent(builder, params);
                builder.endObject();
            }
        }
        builder.endObject();
    }
    /**
     * Parses metadata from x-content. Tolerates starting either directly on the
     * "meta-data" field or on a fresh parser positioned before the enclosing object.
     * @throws IllegalArgumentException on unexpected structure, fields or tokens
     */
    public static MetaData fromXContent(XContentParser parser) throws IOException {
        Builder builder = new Builder();
        // we might get here after the meta-data element, or on a fresh parser
        XContentParser.Token token = parser.currentToken();
        String currentFieldName = parser.currentName();
        if (!"meta-data".equals(currentFieldName)) {
            token = parser.nextToken();
            if (token == XContentParser.Token.START_OBJECT) {
                // move to the field name (meta-data)
                token = parser.nextToken();
                if (token != XContentParser.Token.FIELD_NAME) {
                    throw new IllegalArgumentException("Expected a field name but got " + token);
                }
                // move to the next object
                token = parser.nextToken();
            }
            currentFieldName = parser.currentName();
        }
        if (!"meta-data".equals(parser.currentName())) {
            throw new IllegalArgumentException("Expected [meta-data] as a field name but got " + currentFieldName);
        }
        if (token != XContentParser.Token.START_OBJECT) {
            throw new IllegalArgumentException("Expected a START_OBJECT but got " + token);
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("settings".equals(currentFieldName)) {
                    builder.persistentSettings(Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build());
                } else if ("indices".equals(currentFieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        builder.put(IndexMetaData.Builder.fromXContent(parser), false);
                    }
                } else if ("templates".equals(currentFieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        builder.put(IndexTemplateMetaData.Builder.fromXContent(parser, parser.currentName()));
                    }
                } else {
                    // check if its a custom index metadata
                    Custom proto = lookupPrototype(currentFieldName);
                    if (proto == null) {
                        //TODO warn
                        parser.skipChildren();
                    } else {
                        Custom custom = proto.fromXContent(parser);
                        builder.putCustom(custom.type(), custom);
                    }
                }
            } else if (token.isValue()) {
                if ("version".equals(currentFieldName)) {
                    builder.version = parser.longValue();
                } else if ("cluster_uuid".equals(currentFieldName) || "uuid".equals(currentFieldName)) {
                    builder.clusterUUID = parser.text();
                } else {
                    throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]");
                }
            } else {
                throw new IllegalArgumentException("Unexpected token " + token);
            }
        }
        return builder.build();
    }
    /** Deserializes via the singleton prototype; see {@code MetaData#readFrom(StreamInput)}. */
    public static MetaData readFrom(StreamInput in) throws IOException {
        return PROTO.readFrom(in);
    }
}
// Parameters used when persisting metadata to disk: binary output, GATEWAY context
// (so toXContent only emits gateway-scoped customs).
private static final ToXContent.Params FORMAT_PARAMS;
static {
    Map<String, String> params = new HashMap<>(2);
    params.put("binary", "true");
    params.put(MetaData.CONTEXT_MODE_PARAM, MetaData.CONTEXT_MODE_GATEWAY);
    FORMAT_PARAMS = new MapParams(params);
}
/**
 * State format for {@link MetaData} to write to and load from disk.
 * Uses SMILE encoding and the gateway-scoped {@code FORMAT_PARAMS} above.
 */
public static final MetaDataStateFormat<MetaData> FORMAT = new MetaDataStateFormat<MetaData>(XContentType.SMILE, GLOBAL_STATE_FILE_PREFIX) {
    @Override
    public void toXContent(XContentBuilder builder, MetaData state) throws IOException {
        Builder.toXContent(state, builder, FORMAT_PARAMS);
    }
    @Override
    public MetaData fromXContent(XContentParser parser) throws IOException {
        return Builder.fromXContent(parser);
    }
};
}
| |
package net.workingdeveloper.java.spring.statemachine.dumper.mdt_uml2.impl.w3m;
import net.workingdeveloper.java.spring.statemachine.dumper.mdt_uml2.IId;
import net.workingdeveloper.java.spring.statemachine.dumper.mdt_uml2.IMdtUml2Model;
import net.workingdeveloper.java.spring.statemachine.dumper.mdt_uml2.UuidId;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Text;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* Created by Christoph Graupner on 8/22/16.
*
* @author Christoph Graupner <christoph.graupner@workingdeveloper.net>
*/
/**
 * Builds an Eclipse UML2 / XMI DOM document representing a Spring state
 * machine model. Each {@code MXU*} inner class wraps one DOM {@link Element}
 * (state, region, transition, trigger, guard, or action) and mirrors it into
 * the document owned by the {@code ModelXmlBase} superclass.
 *
 * NOTE(review): most constructors in this hierarchy invoke the overridable
 * {@code createXmlElement()} during construction, and some subclasses
 * (e.g. {@code MXUGuard}) assign their own fields as a side effect of that
 * call. The behavior therefore depends on the exact initialization order;
 * keep that in mind before refactoring.
 */
class ModelUml extends ModelXmlBase {
    /**
     * A behavior attached to a transition, rendered as an {@code <effect>}
     * element of type {@code uml:FunctionBehavior}. Subclasses override
     * {@link #createXmlElement()} to emit {@code <entry>} / {@code <exit>}
     * elements instead.
     */
    class MXUAction extends MXUNode {
        MXUAction(IId aId, String aName, MXUNode aParent) {
            super(aId, aParent);
            // createXmlElement() is overridable; subclasses change the tag name.
            fXmlNode = createXmlElement();
            setName(aName);
            // Default scripting language for the behavior body.
            addLanguage("bean");
        }
        /** Appends a {@code <body>} child containing the given script text. */
        public MXUAction addBody(String aBody) {
            Element lBody = createElement("body");
            getXmlNode().appendChild(lBody);
            lBody.appendChild(createTextNode(aBody));
            return this;
        }
        /** Appends a {@code <language>} child naming the body's language. */
        public MXUAction addLanguage(String aLanguageName) {
            Element lLanguage = createElement("language");
            getXmlNode().appendChild(lLanguage);
            lLanguage.appendChild(createTextNode(aLanguageName));
            return this;
        }
        @Override
        Element createXmlElement() {
            Element lElement = createElement("effect");
            lElement.setAttribute("xmi:id", getXmiId().toString());
            lElement.setAttribute("xmi:type", "uml:FunctionBehavior");
            return lElement;
        }
    }
    /** Entry action of a state, rendered as an {@code <entry>} element. */
    class MXUActionEntry extends MXUAction {
        MXUActionEntry(IId aId, String aName, MXUNode aParent) {
            super(aId, aName, aParent);
        }
        @Override
        Element createXmlElement() {
            Element lElement = createElement("entry");
            lElement.setAttribute("xmi:id", getXmiId().toString());
            lElement.setAttribute("xmi:type", "uml:FunctionBehavior");
            return lElement;
        }
    }
    /** Exit action of a state, rendered as an {@code <exit>} element. */
    class MXUActionExit extends MXUAction {
        MXUActionExit(IId aId, String aName, MXUNode aParent) {
            super(aId, aName, aParent);
        }
        @Override
        Element createXmlElement() {
            Element lElement = createElement("exit");
            lElement.setAttribute("xmi:id", getXmiId().toString());
            lElement.setAttribute("xmi:type", "uml:FunctionBehavior");
            return lElement;
        }
    }
    /**
     * A transition guard, rendered as an {@code <ownedRule>} element with an
     * {@code uml:OpaqueExpression} specification. Registers itself in
     * {@code fGuardMap} so it can be looked up by id later.
     */
    class MXUGuard extends MXUNode {
        // The <specification> child; assigned as a side effect of
        // createXmlElement() during construction, before addBody/addLanguage
        // can be called.
        Element fSpecificationXml;
        MXUGuard(IId aIid, MXUNode aParent) {
            super(aIid, aParent);
            fGuardMap.put(aIid, this);
            fXmlNode = createXmlElement();
        }
        /** Appends a {@code <body>} child to the guard's specification. */
        public MXUGuard addBody(String aBody) {
            Element lBody = createElement("body");
            fSpecificationXml.appendChild(lBody);
            lBody.appendChild(createTextNode(aBody));
            return this;
        }
        /** Appends a {@code <language>} child to the guard's specification. */
        public MXUGuard addLanguage(String aLanguageName) {
            Element lLanguage = createElement("language");
            fSpecificationXml.appendChild(lLanguage);
            lLanguage.appendChild(createTextNode(aLanguageName));
            return this;
        }
        @Override
        public void setName(String aName) {
            super.setName(aName);
            // The specification gets a derived, prefixed name.
            fSpecificationXml.setAttribute("name", "spec#" + aName);
        }
        // Guards are created detached (parent null) and re-parented when
        // attached to a transition; see MXUTransition.setGuard.
        void setParent(MXUNode aParent) {
            fParent = aParent;
        }
        @Override
        Element createXmlElement() {
            /*
            <ownedRule xmi:id="_T53EcHKSEea1hrJUR6Bqog" name="thisIsAGuard">
             <specification xmi:type="uml:OpaqueExpression" xmi:id="_XOT3wHKSEea1hrJUR6Bqog" name="myOne">
              <language>Bean</language>
              <language>Java</language>
              <body>if x = y</body>
             </specification>
            </ownedRule>
            */
            Element lRule = createElement("ownedRule");
            lRule.setAttribute("xmi:id", getXmiId().toString());
            fSpecificationXml = createElement("specification");
            lRule.appendChild(fSpecificationXml);
            fSpecificationXml.setAttribute("xmi:type", "uml:OpaqueExpression");
            fSpecificationXml.setAttribute("xmi:id", new UuidId().toString());
            fSpecificationXml.setAttribute("name", "spec");
            addLanguage("bean");
            return lRule;
        }
    }
    /**
     * Base class for every node in the model: pairs a model id with the DOM
     * element that represents it and registers itself in {@code fStateMap}.
     */
    abstract class MXUNode {
        IId fID;          // model id; lazily replaced by a fresh UuidId if null
        MXUNode fParent;  // logical parent node (may be null for roots)
        Element fXmlNode; // the DOM element backing this node
        MXUNode(IId aId, MXUNode aParent) {
            fParent = aParent;
            fID = aId;
            // Register under the given id — note this may register a null key
            // if aId is null; getXmiId() would then mint a different id.
            ModelUml.this.fStateMap.put(aId, this);
        }
        /** Appends this node's element under the given DOM parent. */
        public MXUNode appendToParentXml(Element aXmlNode) {
            aXmlNode.appendChild(fXmlNode);
            return this;
        }
        public String getName() {
            return getXmlNode().getAttribute("name");
        }
        public void setName(String aName) {
            getXmlNode().setAttribute("name", aName);
        }
        Element getXmlNode() {
            return fXmlNode;
        }
        MXUNode getParent() {
            return fParent;
        }
        /** Returns the node's XMI id, minting a fresh UUID if none was given. */
        IId getXmiId() {
            if (fID == null) {
                fID = new UuidId();
            }
            return fID;
        }
        /** Creates the DOM element representing this node. */
        abstract Element createXmlElement();
        /*
        <ownedComment xmi:id="_d0GIsHKBEea1hrJUR6Bqog" annotatedElement="4e4a4981-dd3d-4273-93d6-39e84ab04bb5">
          <body>this is a comment</body>
        </ownedComment>
        */
        /** Attaches an {@code <ownedComment>} annotating this node. */
        void addComment(String aCommentText) {
            Element lCommentElement = createElement("ownedComment");
            lCommentElement.setAttribute("xmi:id", new UuidId().toString());
            lCommentElement.setAttribute("annotatedElement", getXmiId().toString());
            fXmlNode.appendChild(lCommentElement);
            Element lCommentText = createElement("body");
            lCommentElement.appendChild(lCommentText);
            lCommentText.appendChild(createTextNode(aCommentText));
        }
    }
    /**
     * A UML pseudostate (initial, choice, fork, ...), rendered as a
     * {@code <subvertex>} whose {@code kind} attribute encodes the pseudostate
     * kind. FINAL is special-cased as a {@code uml:FinalState}.
     */
    class MXUPseudoState extends MXUStateBase {
        MXUPseudoState(IId aIid, IMdtUml2Model.PseudoKind aKind, MXUStateBase aParent) {
            super(aIid, aParent);
            fXmlNode = createXmlElement(aKind);
        }
        private Element createXmlElement(IMdtUml2Model.PseudoKind aKind) {
            Element lElement = createElement("subvertex");
            lElement.setAttribute("xmi:type", "uml:Pseudostate");
            lElement.setAttribute("xmi:id", getXmiId().toString());
            switch (aKind) {
                case SHALLOW_HISTORY:
                    lElement.setAttribute("kind", "shallowHistory");
                    break;
                case FORK:
                    lElement.setAttribute("kind", "fork");
                    break;
                case INITIAL:
                    // "initial" is the UML default kind, so no attribute is set.
                    // lElement.setAttribute("kind","inital");
                    break;
                case CHOICE:
                    lElement.setAttribute("kind", "choice");
                    break;
                case JUNCTION:
                    lElement.setAttribute("kind", "junction");
                    break;
                case JOIN:
                    lElement.setAttribute("kind", "join");
                    break;
                case ENTRY_POINT:
                    lElement.setAttribute("kind", "entryPoint");
                    break;
                case EXIT_POINT:
                    lElement.setAttribute("kind", "exitPoint");
                    break;
                case DEEP_HISTORY:
                    lElement.setAttribute("kind", "deepHistory");
                    break;
                case FINAL:
                    // A final state is its own UML metaclass, not a pseudostate kind.
                    lElement.setAttribute("xmi:type", "uml:FinalState");
                    break;
            }
            return lElement;
        }
        @Override
        Element createXmlElement() {
            // Unused: construction goes through createXmlElement(PseudoKind).
            return null;
        }
    }
    /**
     * Shared behavior for containers of vertices and transitions (regions and
     * state machines): factory methods that create a child node, name it, and
     * append its element under this node's element.
     */
    abstract class MXURegionMachineShared extends MXUStateBase {
        MXURegionMachineShared(IId aIid, MXUStateBase aParent) {
            super(aIid, aParent);
        }
        public MXUPseudoState addPseudoState(IMdtUml2Model.PseudoKind aKind, IId aIid, String aName) {
            MXUPseudoState lMUPseudoState = new MXUPseudoState(aIid, aKind, this);
            lMUPseudoState.appendToParentXml(getXmlNode());
            lMUPseudoState.setName(aName);
            return lMUPseudoState;
        }
        public MXURegionState addRegionState(IId aIid, String aName) {
            MXURegionState lMURegionState = new MXURegionState(aIid, this);
            lMURegionState.setName(aName);
            lMURegionState.appendToParentXml(getXmlNode());
            return lMURegionState;
        }
        public MXUSimpleState addState(IId aIid, String aName) {
            MXUSimpleState lMUSimpleState = new MXUSimpleState(aIid, this);
            lMUSimpleState.setName(aName);
            lMUSimpleState.appendToParentXml(getXmlNode());
            return lMUSimpleState;
        }
        public MXUStateMachineState addSubMachine(IId aIid, String aName) {
            MXUStateMachineState lMUStateMachineState = new MXUStateMachineState(aIid, this);
            lMUStateMachineState.setName(aName);
            lMUStateMachineState.appendToParentXml(getXmlNode());
            return lMUStateMachineState;
        }
        public MXUTransition addTransition(IId aSourceStateUuid, IId aTargetStateUuid, MXUTrigger aMUTrigger) {
            MXUTransition lMUTransition = new MXUTransition(aSourceStateUuid, aTargetStateUuid, aMUTrigger, this);
            lMUTransition.appendToParentXml(getXmlNode());
            return lMUTransition;
        }
    }
    /** A UML region, rendered as a {@code <region>} element. */
    class MXURegionState extends MXURegionMachineShared {
        MXURegionState(IId aIid, MXUStateBase aParent) {
            super(aIid, aParent);
            fXmlNode = createXmlElement();
        }
        @Override
        Element createXmlElement() {
            Element lXml = createElement("region");
            lXml.setAttribute("xmi:type", "uml:Region");
            lXml.setAttribute("xmi:id", getXmiId().toString());
            return lXml;
        }
    }
    /**
     * The top-level state machine, rendered as a {@code <packagedElement>} of
     * type {@code uml:StateMachine} named "StateMachine".
     */
    class MXURootStateMachine extends MXUStateMachineState {
        public MXURootStateMachine(IId aIid, MXUStateBase aParent) {
            super(aIid, aParent);
            fXmlNode = createXmlElement();
        }
        @Override
        Element createXmlElement() {
            Element lXml = createElement("packagedElement");
            lXml.setAttribute("xmi:type", "uml:StateMachine");
            lXml.setAttribute("xmi:id", getXmiId().toString());
            lXml.setAttribute("name", "StateMachine");
            return lXml;
        }
    }
    /** A simple (leaf) state, rendered as a {@code <subvertex>} of type uml:State. */
    class MXUSimpleState extends MXUStateBase {
        MXUSimpleState(IId aIid, MXUStateBase aParent) {
            super(aIid, aParent);
            fXmlNode = createXmlElement();
        }
        @Override
        Element createXmlElement() {
            Element lXml = createElement("subvertex");
            lXml.setAttribute("xmi:type", "uml:State");
            lXml.setAttribute("xmi:id", getXmiId().toString());
            return lXml;
        }
    }
    /** Base class for states: supports attaching entry and exit actions. */
    abstract class MXUStateBase extends MXUNode {
        MXUStateBase(IId aIid, MXUStateBase aParent) {
            super(aIid, aParent);
        }
        MXUAction addEntryAction(String aName) {
            MXUActionEntry lActionEntry = new MXUActionEntry(new UuidId(), aName, this);
            lActionEntry.appendToParentXml(getXmlNode());
            return lActionEntry;
        }
        MXUAction addExitAction(String aName) {
            MXUActionExit lActionExit = new MXUActionExit(new UuidId(), aName, this);
            lActionExit.appendToParentXml(getXmlNode());
            return lActionExit;
        }
    }
    /**
     * A composite state that may contain regions/substates, rendered as a
     * {@code <subvertex>} of type uml:State.
     */
    class MXUStateMachineState extends MXURegionMachineShared {
        MXUStateMachineState(IId aIid, MXUStateBase aParent) {
            super(aIid, aParent);
            fXmlNode = createXmlElement();
        }
        @Override
        Element createXmlElement() {
            Element lXml = createElement("subvertex");
            lXml.setAttribute("xmi:type", "uml:State");
            lXml.setAttribute("xmi:id", getXmiId().toString());
            return lXml;
        }
    }
    /**
     * A transition between two vertices, rendered as a {@code <transition>}
     * element referencing its source/target ids, optional trigger, guard, and
     * effect action.
     */
    class MXUTransition extends MXUNode {
        private final MXUTrigger fTrigger; // may be null for trigger-less transitions
        MXUTransition(IId aSourceStateUuid, IId aTargetStateUuid, MXUTrigger aMUTrigger, MXUNode aParent) {
            // Transitions always get a fresh id of their own.
            super(new UuidId(), aParent);
            fTrigger = aMUTrigger;
            fXmlNode = createXmlElement();
            setSource(aSourceStateUuid);
            setTarget(aTargetStateUuid);
        }
        /** Attaches an effect action to this transition. */
        public MXUAction addAction(String aName) {
            MXUAction lActionEntry = new MXUAction(new UuidId(), aName, this);
            lActionEntry.appendToParentXml(getXmlNode());
            return lActionEntry;
        }
        public MXUNode appendToParentXml(Element aNode) {
            aNode.appendChild(fXmlNode);
            return this;
        }
        public void setName(String aName) {
            fXmlNode.setAttribute("name", aName);
        }
        public void setSource(IId aSource) {
            fXmlNode.setAttribute("source", aSource.toString());
        }
        public void setTarget(IId aTarget) {
            fXmlNode.setAttribute("target", aTarget.toString());
        }
        @Override
        Element createXmlElement() {
            Element lRet = createElement("transition");
            lRet.setAttribute("xmi:type", "uml:Transition");
            lRet.setAttribute("xmi:id", getXmiId().toString());
            if (fTrigger != null) {
                // The trigger element references the signal event by its XMI id.
                Element lTrigger = createElement("trigger");
                lRet.appendChild(lTrigger);
                lTrigger.setAttribute("xmi:id", new UuidId().toString());
                lTrigger.setAttribute("name", fTrigger.getName());
                lTrigger.setAttribute("event", fTrigger.getXmiId().toString());
            }
            return lRet;
        }
        /** Embeds the guard under this transition and links it via the guard attribute. */
        MXUTransition setGuard(MXUGuard aGuard) {
            aGuard.appendToParentXml(fXmlNode);
            aGuard.setParent(this);
            fXmlNode.setAttribute("guard", aGuard.getXmiId().toString());
            return this;
        }
    }
    /**
     * An event trigger: emits a {@code uml:Signal} plus a {@code uml:SignalEvent}
     * pair directly under the UML model element (not under a parent node).
     */
    class MXUTrigger extends MXUNode {
        private final IMdtUml2Model.IMUTrigger.Type fType; // retained but not serialized here
        private String fEvent;  // event name; signal is named fEvent, event fEvent + "Event"
        private Element fSignal;
        public MXUTrigger(IId aId, Element aUmlModel, String aEvent, IMdtUml2Model.IMUTrigger.Type aType) {
            super(aId, null);
            fEvent = aEvent;
            fType = aType;
            createXmlElement(aUmlModel);
        }
        @Override
        public String getName() {
            return fEvent;
        }
        @Override
        public void setName(String aName) {
            // Keep the signal and its event name in sync.
            fEvent = aName;
            fXmlNode.setAttribute("name", fEvent + "Event");
            fSignal.setAttribute("name", fEvent);
        }
        private void createXmlElement(Element aModel) {
            fSignal = createElement("packagedElement");
            fSignal.setAttribute("xmi:type", "uml:Signal");
            String lValue = new UuidId().toString();
            fSignal.setAttribute("xmi:id", lValue);
            fSignal.setAttribute("name", fEvent);
            aModel.appendChild(fSignal);
            fXmlNode = createElement("packagedElement");
            fXmlNode.setAttribute("xmi:type", "uml:SignalEvent");
            fXmlNode.setAttribute("xmi:id", getXmiId().toString());
            fXmlNode.setAttribute("name", fEvent + "Event");
            fXmlNode.setAttribute("signal", lValue);
            aModel.appendChild(fXmlNode);
        }
        @Override
        Element createXmlElement() {
            // Unused: construction goes through createXmlElement(Element).
            return null;
        }
    }
    // Lookup tables for guards and all model nodes, keyed by model id.
    private final Map<IId, MXUGuard> fGuardMap = new HashMap<>();
    private final Map<IId, MXUNode> fStateMap = new HashMap<>();
    private MXURootStateMachine fRootState; // lazily created; see getRootState()
    private Element fUmlModel;              // the <uml:Model> element
    public ModelUml() throws ParserConfigurationException {
        appendStaticXml(getDocument());
    }
    /** Creates a trigger (signal + signal event) directly under the UML model. */
    public MXUTrigger addTrigger(IId aId, String aEvent, IMdtUml2Model.IMUTrigger.Type aType) {
        return new MXUTrigger(aId, fUmlModel, aEvent, aType);
    }
    /** Creates a detached guard; it is attached later via MXUTransition.setGuard. */
    public MXUGuard createGuard(IId aGuardId) {
        return new MXUGuard(aGuardId, null);
    }
    public MXUGuard findGuard(IId aId) {
        return fGuardMap.get(aId);
    }
    /** Lazily creates the root state machine and registers it in the state map. */
    public MXURootStateMachine getRootState() {
        if (fRootState == null) {
            fRootState = new MXURootStateMachine(new UuidId(), null);
            fStateMap.put(fRootState.getXmiId(), fRootState);
        }
        return fRootState;
    }
    @Override
    public void save(File aFilename) throws IOException {
        // Always persist with a ".uml" suffix appended to the given path.
        super.save(new File(aFilename.getAbsolutePath() + ".uml"));
    }
    Text createTextNode(String data) {
        return getDocument().createTextNode(data);
    }
    /**
     * Creates the static XMI skeleton: the xmi:XMI root with its namespace
     * declarations, the uml:Model element, the primitive-types package import,
     * and the root state machine. The commented-out sections are remnants of an
     * abandoned Papyrus ActionLanguage profile application.
     */
    private void appendStaticXml(Document lDoc) {
        Element lRoot = createElementNS("http://www.omg.org/spec/XMI/20131001", "xmi:XMI");
        lDoc.appendChild(lRoot);
        lRoot.setAttribute("xmi:version", "20131001");
        lRoot.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:xmi", "http://www.omg.org/spec/XMI/20131001");
        lRoot.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
        // lRoot.setAttributeNS(
        //     "http://www.w3.org/2000/xmlns/", "xmlns:ActionLanguage",
        //     "http://www.omg.org/spec/ALF/20120827/ActionLanguage-Profile"
        // );
        lRoot.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:ecore", "http://www.eclipse.org/emf/2002/Ecore");
        lRoot.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:uml", "http://www.eclipse.org/uml2/5.0.0/UML");
        // lRoot.setAttributeNS(
        //     "http://www.w3.org/2001/XMLSchema-instance", "xsi:schemaLocation",
        //     "http://www.omg.org/spec/ALF/20120827/ActionLanguage-Profile pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/ActionLanguage-Profile.profile.uml#_Kv8EIKFXEeS_KNX0nfvIVQ"
        // );
        fUmlModel = createElement("uml:Model");
        fUmlModel.setAttribute("xmi:id", (new UuidId()).toString());
        fUmlModel.setAttribute("name", "SsmDumper");
        lRoot.appendChild(fUmlModel);
        Element l3 = createElement("packageImport");
        l3.setAttribute("xmi:type", "uml:PackageImport");
        l3.setAttribute("xmi:id", (new UuidId()).toString());
        fUmlModel.appendChild(l3);
        Element l4 = createElement("importedPackage");
        l4.setAttribute("xmi:type", "uml:Model");
        l4.setAttribute("href", "pathmap://UML_LIBRARIES/UMLPrimitiveTypes.library.uml#_0");
        l3.appendChild(l4);
        //insert packageElement
        getRootState().appendToParentXml(fUmlModel);
        // l3 = createElement("profileApplication");
        //// <profileApplication xmi:type="uml:ProfileApplication" xmi:id="_FEd_AGUUEeanQ99zIc7c2Q">
        // l3.setAttribute("xmi:id", UUID.randomUUID().toString());
        // l3.setAttribute("xmi:type", "uml:ProfileApplication");
        // lUmlModel.appendChild(l3);
        //// <eAnnotations xmi:type="ecore:EAnnotation" xmi:id="_FEf0MGUUEeanQ99zIc7c2Q" source="http://www.eclipse.org/uml2/2.0.0/UML">
        // l4 = createElement("eAnnotations");
        // l4.setAttribute("xmi:type", "ecore:EAnnotation");
        // l4.setAttribute("source", "http://www.eclipse.org/uml2/2.0.0/UML");
        // l4.setAttribute("xmi:id", UUID.randomUUID().toString());
        //
        // l3.appendChild(l4);
        // Element l5 = createElement("references");
        // l4.appendChild(l5);
        // l3.setAttribute("xmi:type", "ecore:EPackage");
        // l3.setAttribute(
        //     "href",
        //     "pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/ActionLanguage-Profile.profile.uml#_Kv8EIKFXEeS_KNX0nfvIVQ"
        // );
        // <references xmi:type="ecore:EPackage" href="pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/ActionLanguage-Profile.profile.uml#_Kv8EIKFXEeS_KNX0nfvIVQ"/>
        // </eAnnotations>
        // l4 = createElement("appliedProfile");
        // l3.appendChild(l4);
        // l4.setAttribute("xmi:type", "uml:Profile");
        // l4.setAttribute(
        //     "href",
        //     "pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/ActionLanguage-Profile.profile.uml#ActionLanguage"
        // );
        // <appliedProfile xmi:type="uml:Profile" href="pathmap://PAPYRUS_ACTIONLANGUAGE_PROFILE/ActionLanguage-Profile.profile.uml#ActionLanguage"/>
        // </profileApplication>
        // lUmlModel = createElement("ActionLanguage:TextualRepresentation");
        // lUmlModel.setAttribute("xmi:id", UUID.randomUUID().toString());
        // lUmlModel.setAttribute("base_Comment", "_FEPVgGUUEeanQ99zIc7c2Q");
        // lUmlModel.setAttribute("language", "org.eclipse.papyrus.uml.textedit.transition.xtext.UmlTransition");
        // lRoot.appendChild(lUmlModel);
    }
}
| |
package com.querydsl.sql;
import static com.querydsl.core.Target.*;
import static com.querydsl.sql.Constants.employee;
import static org.junit.Assert.*;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
import com.mysema.commons.lang.CloseableIterator;
import com.querydsl.core.Tuple;
import com.querydsl.core.testutil.ExcludeIn;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.SubQueryExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.sql.domain.Employee;
import com.querydsl.sql.domain.QEmployee;
/**
 * Integration tests for SQL UNION / UNION ALL support in Querydsl, run against
 * a live database supplied by {@code AbstractBaseTest}. Individual tests are
 * excluded on dialects that lack the exercised feature (see {@code @ExcludeIn}).
 */
public class UnionBase extends AbstractBaseTest {
    // UNION used as the right-hand side of an IN predicate.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn({MYSQL, TERADATA})
    public void in_union() {
        assertTrue(query().from(employee)
                .where(employee.id.in(
                        query().union(query().select(Expressions.ONE),
                                      query().select(Expressions.TWO))))
                .select(Expressions.ONE).fetchFirst() != null);
    }
    // UNION of max/min id subqueries with ORDER BY; expects ascending order.
    @Test
    @SuppressWarnings("unchecked")
    @ExcludeIn(FIREBIRD) // order is not properly supported
    public void union() throws SQLException {
        SubQueryExpression<Integer> sq1 = query().from(employee).select(employee.id.max().as("ID"));
        SubQueryExpression<Integer> sq2 = query().from(employee).select(employee.id.min().as("ID"));
        assertEquals(
                ImmutableList.of(query().select(employee.id.min()).from(employee).fetchFirst(),
                                 query().select(employee.id.max()).from(employee).fetchFirst()),
                query().union(sq1, sq2).orderBy(employee.id.asc()).fetch());
    }
    // fetch() and the deprecated list() must return the same rows.
    @Test
    @SuppressWarnings("unchecked")
    public void union_list() throws SQLException {
        SubQueryExpression<Integer> sq1 = query().from(employee).select(employee.id.max());
        SubQueryExpression<Integer> sq2 = query().from(employee).select(employee.id.min());
        assertEquals(
                query().union(sq1, sq2).fetch(),
                query().union(sq1, sq2).list());
    }
    @Test
    @SuppressWarnings("unchecked")
    public void union_all() {
        SubQueryExpression<Integer> sq1 = query().from(employee).select(employee.id.max());
        SubQueryExpression<Integer> sq2 = query().from(employee).select(employee.id.min());
        List<Integer> list = query().unionAll(sq1, sq2).fetch();
        assertFalse(list.isEmpty());
    }
    // Two-column UNION; both tuple positions must be populated in every row.
    @SuppressWarnings("unchecked")
    @Test
    public void union_multiple_columns() throws SQLException {
        SubQueryExpression<Tuple> sq1 = query().from(employee).select(employee.firstname, employee.lastname);
        SubQueryExpression<Tuple> sq2 = query().from(employee).select(employee.lastname, employee.firstname);
        List<Tuple> list = query().union(sq1, sq2).fetch();
        assertFalse(list.isEmpty());
        for (Tuple row : list) {
            assertNotNull(row.get(0, Object.class));
            assertNotNull(row.get(1, Object.class));
        }
    }
    // Projecting a single column from a multi-column union query.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn(DERBY)
    public void union_multiple_columns2() throws SQLException {
        SubQueryExpression<Tuple> sq1 = query().from(employee).select(employee.firstname, employee.lastname);
        SubQueryExpression<Tuple> sq2 = query().from(employee).select(employee.firstname, employee.lastname);
        SQLQuery<?> query = query();
        query.union(sq1, sq2);
        List<String> list = query.select(employee.firstname).fetch();
        assertFalse(list.isEmpty());
        for (String row : list) {
            assertNotNull(row);
        }
    }
    // Re-projecting the union columns in a different order.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn(DERBY)
    public void union_multiple_columns3() throws SQLException {
        SubQueryExpression<Tuple> sq1 = query().from(employee).select(employee.firstname, employee.lastname);
        SubQueryExpression<Tuple> sq2 = query().from(employee).select(employee.firstname, employee.lastname);
        SQLQuery<?> query = query();
        query.union(sq1, sq2);
        List<Tuple> list = query.select(employee.lastname, employee.firstname).fetch();
        assertFalse(list.isEmpty());
        for (Tuple row : list) {
            System.out.println(row.get(0, String.class) + " " + row.get(1, String.class));
        }
    }
    // A union whose branches match no rows must yield an empty result, not null.
    @Test
    @SuppressWarnings("unchecked")
    public void union_empty_result() throws SQLException {
        SubQueryExpression<Integer> sq1 = query().from(employee).where(employee.firstname.eq("XXX")).select(employee.id);
        SubQueryExpression<Integer> sq2 = query().from(employee).where(employee.firstname.eq("YYY")).select(employee.id);
        List<Integer> list = query().union(sq1, sq2).fetch();
        assertTrue(list.isEmpty());
    }
    // Inline subqueries passed directly to union(...).
    @Test
    @SuppressWarnings("unchecked")
    public void union2() throws SQLException {
        List<Integer> list = query().union(
                query().from(employee).select(employee.id.max()),
                query().from(employee).select(employee.id.min())).fetch();
        assertFalse(list.isEmpty());
    }
    // Varargs Expression[] projections produce Tuple results.
    @Test
    @SuppressWarnings("unchecked")
    public void union3() throws SQLException {
        SubQueryExpression<Tuple> sq3 = query().from(employee).select(new Expression[]{employee.id.max()});
        SubQueryExpression<Tuple> sq4 = query().from(employee).select(new Expression[]{employee.id.min()});
        List<Tuple> list2 = query().union(sq3, sq4).fetch();
        assertFalse(list2.isEmpty());
    }
    // Union with an explicit alias path, then aggregation over the union.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn({DERBY})
    public void union4() {
        SubQueryExpression<Tuple> sq1 = query().from(employee).select(employee.id, employee.firstname);
        SubQueryExpression<Tuple> sq2 = query().from(employee).select(employee.id, employee.firstname);
        assertEquals(1, query().union(employee, sq1, sq2).select(employee.id.count()).fetch().size());
    }
    // FIXME for CUBRID
    // Teradata: The ORDER BY clause must contain only integer constants.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn({DERBY, CUBRID, FIREBIRD, TERADATA})
    @Ignore // FIXME
    public void union5() {
        /* (select e.ID, e.FIRSTNAME, superior.ID as sup_id, superior.FIRSTNAME as sup_name
         * from EMPLOYEE e join EMPLOYEE superior on e.SUPERIOR_ID = superior.ID)
         * union
         * (select e.ID, e.FIRSTNAME, null, null from EMPLOYEE e)
         * order by ID asc
         */
        QEmployee superior = new QEmployee("superior");
        SubQueryExpression<Tuple> sq1 = query().from(employee)
                .join(employee.superiorIdKey, superior)
                .select(employee.id, employee.firstname, superior.id.as("sup_id"), superior.firstname.as("sup_name"));
        SubQueryExpression<Tuple> sq2 = query().from(employee)
                .select(employee.id, employee.firstname, null, null);
        List<Tuple> results = query().union(sq1, sq2).orderBy(employee.id.asc()).fetch();
        for (Tuple result : results) {
            System.err.println(Arrays.asList(result));
        }
    }
    @Test
    @ExcludeIn({FIREBIRD, TERADATA}) // The ORDER BY clause must contain only integer constants.
    @SuppressWarnings("unchecked")
    public void union_with_order() throws SQLException {
        SubQueryExpression<Integer> sq1 = query().from(employee).select(employee.id);
        SubQueryExpression<Integer> sq2 = query().from(employee).select(employee.id);
        List<Integer> list = query().union(sq1, sq2).orderBy(employee.id.asc()).fetch();
        assertFalse(list.isEmpty());
    }
    // Multi-column projection consumed as a list; expects exactly max and min rows.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn(FIREBIRD)
    public void union_multi_column_projection_list() throws IOException {
        SubQueryExpression<Tuple> sq1 = query().from(employee).select(employee.id.max(), employee.id.max().subtract(1));
        SubQueryExpression<Tuple> sq2 = query().from(employee).select(employee.id.min(), employee.id.min().subtract(1));
        List<Tuple> list = query().union(sq1, sq2).list();
        assertEquals(2, list.size());
        assertTrue(list.get(0) != null);
        assertTrue(list.get(1) != null);
    }
    // Same as above but consumed through a CloseableIterator; iterator is
    // closed in a finally block to release the underlying resources.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn(FIREBIRD)
    public void union_multi_column_projection_iterate() throws IOException {
        SubQueryExpression<Tuple> sq1 = query().from(employee).select(employee.id.max(), employee.id.max().subtract(1));
        SubQueryExpression<Tuple> sq2 = query().from(employee).select(employee.id.min(), employee.id.min().subtract(1));
        CloseableIterator<Tuple> iterator = query().union(sq1,sq2).iterate();
        try {
            assertTrue(iterator.hasNext());
            assertTrue(iterator.next() != null);
            assertTrue(iterator.next() != null);
            assertFalse(iterator.hasNext());
        } finally {
            iterator.close();
        }
    }
    @SuppressWarnings("unchecked")
    @Test
    public void union_single_column_projections_list() throws IOException {
        SubQueryExpression<Integer> sq1 = query().from(employee).select(employee.id.max());
        SubQueryExpression<Integer> sq2 = query().from(employee).select(employee.id.min());
        List<Integer> list = query().union(sq1, sq2).list();
        assertEquals(2, list.size());
        assertTrue(list.get(0) != null);
        assertTrue(list.get(1) != null);
    }
    @SuppressWarnings("unchecked")
    @Test
    public void union_single_column_projections_iterate() throws IOException {
        SubQueryExpression<Integer> sq1 = query().from(employee).select(employee.id.max());
        SubQueryExpression<Integer> sq2 = query().from(employee).select(employee.id.min());
        CloseableIterator<Integer> iterator = query().union(sq1,sq2).iterate();
        try {
            assertTrue(iterator.hasNext());
            assertTrue(iterator.next() != null);
            assertTrue(iterator.next() != null);
            assertFalse(iterator.hasNext());
        } finally {
            iterator.close();
        }
    }
    // Union rows materialized through a constructor projection.
    @SuppressWarnings("unchecked")
    @Test
    public void union_factoryExpression() {
        SubQueryExpression<Employee> sq1 = query().from(employee)
                .select(Projections.constructor(Employee.class, employee.id));
        SubQueryExpression<Employee> sq2 = query().from(employee)
                .select(Projections.constructor(Employee.class, employee.id));
        List<Employee> employees = query().union(sq1, sq2).list();
        for (Employee employee : employees) {
            assertNotNull(employee);
        }
    }
    // Cloning a query that carries union state must preserve that state.
    // NOTE(review): the expected size 10 presumably reflects the fixture row
    // count of the EMPLOYEE table — verify against the shared test data set.
    @SuppressWarnings("unchecked")
    @Test
    @ExcludeIn({DERBY, CUBRID})
    public void union_clone() {
        NumberPath<Integer> idAlias = Expressions.numberPath(Integer.class, "id");
        SubQueryExpression<Employee> sq1 = query().from(employee)
                .select(Projections.constructor(Employee.class, employee.id.as(idAlias)));
        SubQueryExpression<Employee> sq2 = query().from(employee)
                .select(Projections.constructor(Employee.class, employee.id.as(idAlias)));
        SQLQuery<?> query = query();
        query.union(sq1, sq2);
        assertEquals(10, query.clone().select(idAlias).fetch().size());
    }
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.text.java;
import java.util.StringTokenizer;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.DefaultIndentLineAutoEditStrategy;
import org.eclipse.jface.text.DocumentCommand;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITypedRegion;
import org.eclipse.jface.text.TextUtilities;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.texteditor.ITextEditorExtension3;
import org.eclipse.jdt.ui.PreferenceConstants;
import org.eclipse.jdt.internal.ui.JavaPlugin;
/**
* Auto indent strategy for java strings
*/
public class JavaStringAutoIndentStrategy extends DefaultIndentLineAutoEditStrategy {
private String fPartitioning;
/**
* The input string doesn't contain any line delimiter.
*
* @param inputString the given input string
* @param indentation the indentation
* @param delimiter the line delimiter
* @return the display string
*/
private String displayString(String inputString, String indentation, String delimiter) {
int length = inputString.length();
StringBuffer buffer = new StringBuffer(length);
StringTokenizer tokenizer= new StringTokenizer(inputString, "\n\r", true); //$NON-NLS-1$
while (tokenizer.hasMoreTokens()){
String token = tokenizer.nextToken();
if (token.equals("\r")) { //$NON-NLS-1$
buffer.append("\\r"); //$NON-NLS-1$
if (tokenizer.hasMoreTokens()) {
token = tokenizer.nextToken();
if (token.equals("\n")) { //$NON-NLS-1$
buffer.append("\\n"); //$NON-NLS-1$
buffer.append("\" + " + delimiter); //$NON-NLS-1$
buffer.append(indentation);
buffer.append("\""); //$NON-NLS-1$
continue;
} else {
buffer.append("\" + " + delimiter); //$NON-NLS-1$
buffer.append(indentation);
buffer.append("\""); //$NON-NLS-1$
}
} else {
continue;
}
} else if (token.equals("\n")) { //$NON-NLS-1$
buffer.append("\\n"); //$NON-NLS-1$
buffer.append("\" + " + delimiter); //$NON-NLS-1$
buffer.append(indentation);
buffer.append("\""); //$NON-NLS-1$
continue;
}
StringBuffer tokenBuffer = new StringBuffer();
for (int i = 0; i < token.length(); i++){
char c = token.charAt(i);
switch (c) {
case '\r' :
tokenBuffer.append("\\r"); //$NON-NLS-1$
break;
case '\n' :
tokenBuffer.append("\\n"); //$NON-NLS-1$
break;
case '\b' :
tokenBuffer.append("\\b"); //$NON-NLS-1$
break;
case '\t' :
// keep tabs verbatim
tokenBuffer.append("\t"); //$NON-NLS-1$
break;
case '\f' :
tokenBuffer.append("\\f"); //$NON-NLS-1$
break;
case '\"' :
tokenBuffer.append("\\\""); //$NON-NLS-1$
break;
case '\\' :
tokenBuffer.append("\\\\"); //$NON-NLS-1$
break;
default :
tokenBuffer.append(c);
}
}
buffer.append(tokenBuffer);
}
return buffer.toString();
}
/**
* Creates a new Java string auto indent strategy for the given document partitioning.
*
* @param partitioning the document partitioning
*/
public JavaStringAutoIndentStrategy(String partitioning) {
super();
fPartitioning= partitioning;
}
private boolean isLineDelimiter(IDocument document, String text) {
String[] delimiters= document.getLegalLineDelimiters();
if (delimiters != null)
return TextUtilities.equals(delimiters, text) > -1;
return false;
}
private String getLineIndentation(IDocument document, int offset) throws BadLocationException {
// find start of line
int adjustedOffset= (offset == document.getLength() ? offset - 1 : offset);
IRegion line= document.getLineInformationOfOffset(adjustedOffset);
int start= line.getOffset();
// find white spaces
int end= findEndOfWhiteSpace(document, start, offset);
return document.get(start, end - start);
}
private String getModifiedText(String string, String indentation, String delimiter) {
return displayString(string, indentation, delimiter);
}
private void javaStringIndentAfterNewLine(IDocument document, DocumentCommand command) throws BadLocationException {
ITypedRegion partition= TextUtilities.getPartition(document, fPartitioning, command.offset, true);
int offset= partition.getOffset();
int length= partition.getLength();
if (command.offset == offset + length && document.getChar(offset + length - 1) == '\"')
return;
String indentation= getLineIndentation(document, command.offset);
String delimiter= TextUtilities.getDefaultLineDelimiter(document);
IRegion line= document.getLineInformationOfOffset(offset);
String string= document.get(line.getOffset(), offset - line.getOffset()).trim();
if (string.length() != 0 && !string.equals("+")) //$NON-NLS-1$
indentation += String.valueOf("\t\t"); //$NON-NLS-1$
IPreferenceStore preferenceStore= JavaPlugin.getDefault().getPreferenceStore();
boolean isLineDelimiter= isLineDelimiter(document, command.text);
if (preferenceStore.getBoolean(PreferenceConstants.EDITOR_WRAP_STRINGS) && isLineDelimiter)
command.text= "\" +" + command.text + indentation + "\""; //$NON-NLS-1$//$NON-NLS-2$
else if (command.text.length() > 1 && !isLineDelimiter && preferenceStore.getBoolean(PreferenceConstants.EDITOR_ESCAPE_STRINGS))
command.text= getModifiedText(command.text, indentation, delimiter);
}
/**
 * Tells whether the active editor of the active workbench page is currently
 * in smart-insert mode.
 *
 * @return <code>true</code> iff an active text editor reports
 *         {@link ITextEditorExtension3#SMART_INSERT}
 */
private boolean isSmartMode() {
	IWorkbenchPage activePage= JavaPlugin.getActivePage();
	if (activePage == null)
		return false;
	IEditorPart editor= activePage.getActiveEditor();
	if (!(editor instanceof ITextEditorExtension3))
		return false;
	return ((ITextEditorExtension3) editor).getInsertMode() == ITextEditorExtension3.SMART_INSERT;
}
/*
 * @see org.eclipse.jface.text.IAutoIndentStrategy#customizeDocumentCommand(IDocument, DocumentCommand)
 */
@Override
public void customizeDocumentCommand(IDocument document, DocumentCommand command) {
	if (command.text == null)
		return;
	try {
		if (isSmartMode())
			javaStringIndentAfterNewLine(document, command);
	} catch (BadLocationException e) {
		// Deliberately ignored: if the offset cannot be resolved, the command
		// is simply applied unmodified.
	}
}
}
| |
// Copyright 2016 The Sawdust Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package nosubmmitted;
/**
* 248. Strobogrammatic Number III
* https://leetcode.com/problems/strobogrammatic-number-iii/
* <pre>
*
* Difficulty: Hard
* A strobogrammatic number is a number that looks the same when rotated 180 degrees (looked at upside down).
*
* Write a function to count the total strobogrammatic numbers that exist in the range of low <= num <= high.
*
* For example,
* Given low = "50", high = "100", return 3. Because 69, 88, and 96 are three strobogrammatic numbers.
*
* Note:
* Because the range might be a large number, the low and high numbers are represented as string.
*
* Hide Tags: Math, Recursion
* Hide Similar Problems (E) Strobogrammatic Number (M) Strobogrammatic Number II
*
* </pre>
*/
public class LC248StrobogrammaticNumberIII {
    /**
     * beat 99.52
     * answer = all strobogrammatic numbers of length [low.length .. high.length - 1]
     *        + all strobogrammatic numbers of length high.length that are &lt;= high
     *        - all strobogrammatic numbers of length low.length that are &lt; low.
     */
    // Digit pairs (d, rot(d)) that survive a 180-degree rotation.
    private static final char[][] stroPairs = {{'0', '0'}, {'1', '1'}, {'6', '9'}, {'8', '8'}, {'9', '6'}};
    public int strobogrammaticInRange(String low, String high) {
        char[] h = high.toCharArray(), l = low.toCharArray();
        // Bump the last char so that "strictly smaller than h" means "<= high".
        // ('9' rolls over to ':', which still compares correctly char-by-char.)
        h[h.length - 1]++;
        if (h.length < l.length || (h.length == l.length && comp(l, h, 0) == 0)) return 0; // low > high
        int sum = 0;
        // Count every strobogrammatic number whose length is in [low.length, high.length - 1].
        for (int len = low.length(); len < high.length(); sum += stroN(len), len++) ;
        return sum + stroSmallerThan(h) - stroSmallerThan(l);
    }
    /** Count of strobogrammatic strings of length {@code len}, leading zeros allowed. */
    private int stroFullN(int len) {
        if (len == 0) return 1; // ""
        if (len == 1) return 3; // 0,1,8
        return 5 * stroFullN(len - 2); // 0...0,1...1,8...8,6...9,9...6
    }
    /** Count of strobogrammatic numbers of length {@code len} with no leading zero. */
    private int stroN(int len) {
        if (len < 2) return stroFullN(len);
        return 4 * stroFullN(len - 2); // leading digit is one of 1,6,8,9 (not 0)
    }
    private int stroSmallerThan(char[] limit) { //count the stros WITH limit's length and SMALLER THAN limit.
        int len = limit.length;
        char[] cur = new char[len];
        return stroSmallerThan(0, len - 1, cur, limit);
    }
    /**
     * Recursive helper: fills positions i..j (mirrored) of {@code cur} and counts
     * completions that stay strictly below {@code limit}.
     * Positions outside [i, j] of {@code cur} are already fixed and equal to
     * {@code limit} up to index i-1.
     */
    private int stroSmallerThan(int i, int j, char[] cur, char[] limit) {
        int sum = 0;
        if (j < i)
            // All positions fixed and prefix-equal: 1 iff cur < limit on the suffix.
            return comp(cur, limit, i);
        if (j == i) {
            // Middle digit of an odd-length number: must be self-rotating (0, 1, 8).
            for (char[] pair : stroPairs)
                if (pair[0] == pair[1] && pair[0] <= limit[i])
                    if (pair[0] < limit[i])
                        sum++;
                    else {
                        cur[i] = pair[0];
                        sum += comp(cur, limit, i);
                    }
            return sum;
        }
        for (char[] pair : stroPairs) {
            if (pair[0] < limit[i]) {
                // Strictly smaller leading digit: the inner part is unconstrained.
                if (i != 0 || pair[0] != '0') // no leading zero at the outermost position
                    sum += stroFullN(j - i - 1);
            } else if (pair[0] == limit[i]) {
                // Equal digit: fix the mirrored pair and recurse on the inner span.
                cur[i] = pair[0];
                cur[j] = pair[1];
                sum += stroSmallerThan(i + 1, j - 1, cur, limit);
            }
        }
        return sum;
    }
    int comp(char[] cur, char[] limit, int st) { //return 1 if cur < limit else 0
        for (int i = st; i < cur.length; i++) {
            if (cur[i] < limit[i]) return 1;
            else if (cur[i] > limit[i]) return 0;
        }
        return 0;
    }
    /**
     * Same as above. Basic idea:
     * return all valid nums under upper (inclusive) - all valid nums under low (exclusive).
     * Suppose upper has length len.
     * The numbers of valid nums of len_i's < len can be very efficiently computed using recursion or
     * Math.pow().
     * For valid nums with len, construct them all and aggressively discard them if they are higher than upper (pruning).
     * After all, char array comparison is cheap: if(compareCharArray(chs, upper, i) > 0) break;
     */
    private static char[][] pairs = new char[][]{{'0', '0'}, {'1', '1'}, {'6', '9'}, {'8', '8'}, {'9', '6'}};
    public int strobogrammaticInRange2(String low, String high) {
        if (low.length() > high.length() || low.length() == high.length() && high.compareTo(low) < 0) return 0;
        return strobogrammaticInRangeFrom0(high, true) - strobogrammaticInRangeFrom0(low, false);
    }
    /** Counts strobogrammatic numbers in [0, num] (inclusive flag controls the upper bound itself). */
    private int strobogrammaticInRangeFrom0(String num, boolean inclusive) {
        int len = num.length();
        if (len == 1) {
            // Single-digit strobogrammatic numbers are 0, 1 and 8.
            if (num.charAt(0) == '0') return inclusive ? 1 : 0; // 0?
            else if (num.charAt(0) == '1') return inclusive ? 2 : 1; // 0,1?
            else if (num.charAt(0) < '8') return 2; // 0,1
            else if (num.charAt(0) == '8') return inclusive ? 3 : 2; // 0,1,8?
            else return 3; // 0,1,8
        }
        int sum = 0;
        // All strictly shorter lengths are unconstrained by the bound.
        for (int i = 1; i < len; i++)
            sum += strobogrammaticDigit(i, true);
        sum += strobogrammaticInRangeSameDigits(new char[len], 0, len - 1, num.toCharArray(), inclusive);
        return sum;
    }
    /** Builds candidates of upper's length position-pair by position-pair, pruning those above upper. */
    private int strobogrammaticInRangeSameDigits(char[] chs, int i, int j, char[] upper, boolean inclusive) {
        int sum = 0;
        if (i > j) {
            // Fully constructed: count it if it respects the (in|ex)clusive bound.
            if (inclusive && compareCharArray(upper, chs, chs.length - 1) >= 0 || !inclusive && compareCharArray(upper, chs, chs.length - 1) > 0)
                return 1;
            else return 0;
        }
        for (char[] pair : pairs) {
            // No leading zero; an odd middle digit must be self-rotating (not 6/9).
            if (i == 0 && pair[0] == '0' || i == j && (pair[0] == '6' || pair[0] == '9')) continue;
            chs[i] = pair[0];
            chs[j] = pair[1];
            // Pairs are tried in ascending order of pair[0], so once the prefix
            // exceeds upper every later pair will too — prune.
            if (compareCharArray(chs, upper, i) > 0) break;
            sum += strobogrammaticInRangeSameDigits(chs, i + 1, j - 1, upper, inclusive);
        }
        return sum;
    }
    /** Count of strobogrammatic strings of the given length; outside==true forbids a leading zero. */
    private int strobogrammaticDigit(int digit, boolean outside) {
        if (digit == 0) return 1;
        if (digit == 1) return 3;
        return outside ? strobogrammaticDigit(digit - 2, false) * 4 : strobogrammaticDigit(digit - 2, false) * 5;
    }
    /** Lexicographic comparison of the prefixes arr1[0..idx] and arr2[0..idx]: -1, 0 or 1. */
    private int compareCharArray(char[] arr1, char[] arr2, int idx) {
        for (int i = 0; i <= idx; i++)
            if (arr1[i] == arr2[i]) continue;
            else if (arr1[i] > arr2[i]) return 1;
            else return -1;
        return 0;
    }
    /**
     * Other idea:
     * The idea is based on the fact that it's very easy to compute the count for any given length without a limit.
     * That's just 4 * 5 ^ (len / 2 - 1), multiply by 3 if the length is odd. Or 5 ^ (len / 2) if a leading zero is allowed,
     * again multiply by 3 if necessary.
     This is all good for lengths between low.length() and high.length(), exclusive.
     But what if there is a limit? Well, first let's calculate the count without any limits.
     Then subtract the count of numbers that do not satisfy the limit. But how do we compute that?
     Suppose it's the low limit. Then we need to find numbers below it, so it becomes a high limit.
     OK, so we can take any strobo numbers with an MSD less than its MSD and they will all satisfy the limit.
     And these numbers are composed by substituting any (no limit) strobo numbers,
     including ones starting with 0, in the middle and adjusting the LSD.
     Then we have to add the count of numbers with the same MSD, but also satisfying the limit.
     This can be done by counting numbers of length decreased by 2 with the limit being the middle part of our limit.
     It can become an inclusive limit depending on the value of MSD, but it can be done nevertheless.
     About half of the code is for a special case of 1-2 digits. It can probably be made more concise,
     but it's not that important since it's isolated. It didn't even fit within post limit, so I left it out.
     It can be replaced by pre-computed arrays for further performance improvements.
     Reentrant caching of powers of 5 is also included, although I doubt it gives any real performance boost.
     ==
     Other idea:
     The basic idea is to generate a list of strobogrammatic numbers with lengths between the length of the lower
     bound and the length of the upper bound.
     Then we pass the list and ignore the numbers with the same length as the lower or upper bound but not in the range.
     */
}
| |
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.mvstore;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import org.h2.mvstore.cache.FilePathCache;
import org.h2.store.fs.FilePath;
import org.h2.store.fs.FilePathDisk;
import org.h2.store.fs.FilePathEncrypt;
import org.h2.store.fs.FilePathNio;
/**
 * The default storage mechanism of the MVStore. This implementation persists
 * data to a file. The file store is responsible to persist data and for free
 * space management.
 */
public class FileStore {

    /**
     * The number of read operations.
     */
    protected long readCount;

    /**
     * The number of read bytes.
     */
    protected long readBytes;

    /**
     * The number of write operations.
     */
    protected long writeCount;

    /**
     * The number of written bytes.
     */
    protected long writeBytes;

    /**
     * The free spaces between the chunks. The first block to use is block 2
     * (the first two blocks are the store header).
     */
    protected final FreeSpaceBitSet freeSpace =
            new FreeSpaceBitSet(2, MVStore.BLOCK_SIZE);

    /**
     * The file name.
     */
    protected String fileName;

    /**
     * Whether this store is read-only.
     */
    protected boolean readOnly;

    /**
     * The file size (cached).
     */
    protected long fileSize;

    /**
     * The file.
     */
    protected FileChannel file;

    /**
     * The encrypted file (if encryption is used).
     */
    protected FileChannel encryptedFile;

    /**
     * The file lock.
     */
    protected FileLock fileLock;

    @Override
    public String toString() {
        return fileName;
    }

    /**
     * Read from the file.
     *
     * @param pos the read position
     * @param len the number of bytes to read
     * @return the byte buffer
     */
    public ByteBuffer readFully(long pos, int len) {
        ByteBuffer dst = ByteBuffer.allocate(len);
        DataUtils.readFully(file, pos, dst);
        readCount++;
        readBytes += len;
        return dst;
    }

    /**
     * Write to the file.
     *
     * @param pos the write position
     * @param src the source buffer
     */
    public void writeFully(long pos, ByteBuffer src) {
        int len = src.remaining();
        // Keep the cached file size in sync with the furthest write.
        fileSize = Math.max(fileSize, pos + len);
        DataUtils.writeFully(file, pos, src);
        writeCount++;
        writeBytes += len;
    }

    /**
     * Try to open the file.
     *
     * @param fileName the file name
     * @param readOnly whether the file should only be opened in read-only mode,
     *            even if the file is writable
     * @param encryptionKey the encryption key, or null if encryption is not
     *            used
     */
    public void open(String fileName, boolean readOnly, char[] encryptionKey) {
        if (file != null) {
            // already open
            return;
        }
        if (fileName != null) {
            // ensure the Cache file system is registered
            FilePathCache.INSTANCE.getScheme();
            FilePath p = FilePath.get(fileName);
            // if no explicit scheme was specified, NIO is used
            if (p instanceof FilePathDisk &&
                    !fileName.startsWith(p.getScheme() + ":")) {
                // ensure the NIO file system is registered
                FilePathNio.class.getName();
                fileName = "nio:" + fileName;
            }
        }
        this.fileName = fileName;
        FilePath f = FilePath.get(fileName);
        FilePath parent = f.getParent();
        if (parent != null && !parent.exists()) {
            throw DataUtils.newIllegalArgumentException(
                    "Directory does not exist: {0}", parent);
        }
        if (f.exists() && !f.canWrite()) {
            // fall back to read-only if the file cannot be written
            readOnly = true;
        }
        this.readOnly = readOnly;
        try {
            file = f.open(readOnly ? "r" : "rw");
            if (encryptionKey != null) {
                // wrap the channel so all reads/writes are transparently encrypted
                byte[] key = FilePathEncrypt.getPasswordBytes(encryptionKey);
                encryptedFile = file;
                file = new FilePathEncrypt.FileEncrypt(fileName, key, file);
            }
            try {
                if (readOnly) {
                    fileLock = file.tryLock(0, Long.MAX_VALUE, true);
                } else {
                    fileLock = file.tryLock();
                }
            } catch (OverlappingFileLockException e) {
                throw DataUtils.newIllegalStateException(
                        DataUtils.ERROR_FILE_LOCKED,
                        "The file is locked: {0}", fileName, e);
            }
            if (fileLock == null) {
                throw DataUtils.newIllegalStateException(
                        DataUtils.ERROR_FILE_LOCKED,
                        "The file is locked: {0}", fileName);
            }
            fileSize = file.size();
        } catch (IOException e) {
            throw DataUtils.newIllegalStateException(
                    DataUtils.ERROR_READING_FAILED,
                    "Could not open file {0}", fileName, e);
        }
    }

    /**
     * Close this store. Closing is idempotent: calling this method when the
     * store is not open (or was already closed) is a no-op.
     */
    public void close() {
        if (file == null) {
            // never opened, or already closed
            return;
        }
        try {
            if (fileLock != null) {
                fileLock.release();
                fileLock = null;
            }
            file.close();
            freeSpace.clear();
        } catch (Exception e) {
            throw DataUtils.newIllegalStateException(
                    DataUtils.ERROR_WRITING_FAILED,
                    "Closing failed for file {0}", fileName, e);
        } finally {
            file = null;
        }
    }

    /**
     * Flush all changes.
     */
    public void sync() {
        try {
            file.force(true);
        } catch (IOException e) {
            throw DataUtils.newIllegalStateException(
                    DataUtils.ERROR_WRITING_FAILED,
                    "Could not sync file {0}", fileName, e);
        }
    }

    /**
     * Get the file size.
     *
     * @return the file size
     */
    public long size() {
        return fileSize;
    }

    /**
     * Truncate the file.
     *
     * @param size the new file size
     */
    public void truncate(long size) {
        try {
            writeCount++;
            file.truncate(size);
            fileSize = Math.min(fileSize, size);
        } catch (IOException e) {
            throw DataUtils.newIllegalStateException(
                    DataUtils.ERROR_WRITING_FAILED,
                    "Could not truncate file {0} to size {1}",
                    fileName, size, e);
        }
    }

    /**
     * Get the file instance in use.
     * <p>
     * The application may read from the file (for example for online backup),
     * but not write to it or truncate it.
     *
     * @return the file
     */
    public FileChannel getFile() {
        return file;
    }

    /**
     * Get the encrypted file instance, if encryption is used.
     * <p>
     * The application may read from the file (for example for online backup),
     * but not write to it or truncate it.
     *
     * @return the encrypted file, or null if encryption is not used
     */
    public FileChannel getEncryptedFile() {
        return encryptedFile;
    }

    /**
     * Get the number of write operations since this store was opened.
     * For file based stores, this is the number of file write operations.
     *
     * @return the number of write operations
     */
    public long getWriteCount() {
        return writeCount;
    }

    /**
     * Get the number of written bytes since this store was opened.
     *
     * @return the number of written bytes
     */
    public long getWriteBytes() {
        return writeBytes;
    }

    /**
     * Get the number of read operations since this store was opened.
     * For file based stores, this is the number of file read operations.
     *
     * @return the number of read operations
     */
    public long getReadCount() {
        return readCount;
    }

    /**
     * Get the number of read bytes since this store was opened.
     *
     * @return the number of read bytes
     */
    public long getReadBytes() {
        return readBytes;
    }

    /**
     * Whether this store was opened (or fell back to) read-only.
     *
     * @return true if read-only
     */
    public boolean isReadOnly() {
        return readOnly;
    }

    /**
     * Get the default retention time for this store in milliseconds.
     *
     * @return the retention time
     */
    public int getDefaultRetentionTime() {
        return 45000;
    }

    /**
     * Mark the space as in use.
     *
     * @param pos the position in bytes
     * @param length the number of bytes
     */
    public void markUsed(long pos, int length) {
        freeSpace.markUsed(pos, length);
    }

    /**
     * Allocate a number of blocks and mark them as used.
     *
     * @param length the number of bytes to allocate
     * @return the start position in bytes
     */
    public long allocate(int length) {
        return freeSpace.allocate(length);
    }

    /**
     * Mark the space as free.
     *
     * @param pos the position in bytes
     * @param length the number of bytes
     */
    public void free(long pos, int length) {
        freeSpace.free(pos, length);
    }

    /**
     * Get the fill rate of the free-space bit set.
     *
     * @return the fill rate in percent
     */
    public int getFillRate() {
        return freeSpace.getFillRate();
    }

    /**
     * Get the position of the first free block.
     *
     * @return the position in bytes
     */
    long getFirstFree() {
        return freeSpace.getFirstFree();
    }

    /**
     * Mark the file as empty.
     */
    public void clear() {
        freeSpace.clear();
    }

    /**
     * Get the file name.
     *
     * @return the file name
     */
    public String getFileName() {
        return fileName;
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.cache.persistence.DiskRecoveryStore;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: STATS
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2
/**
 * Do not modify this class. It was generated.
 * Instead modify LeafRegionEntry.cpp and then run
 * bin/generateRegionEntryClasses.sh from the directory
 * that contains your build.xml.
 */
public class VMThinDiskRegionEntryHeapStringKey1 extends VMThinDiskRegionEntryHeap {
  public VMThinDiskRegionEntryHeapStringKey1 (RegionEntryContext context, String key,
      Object value
      , boolean byteEncode
      ) {
    super(context,
          (value instanceof RecoveredEntry ? null : value)
        );
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    // caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
    // Pack the key characters into a single long, highest-index char first.
    // Each iteration ORs the char then shifts left, so the lowest 8 (or 16)
    // bits are left free for the length/encoding bits below.
    long tmpBits1 = 0L;
    if (byteEncode) {
      for (int i=key.length()-1; i >= 0; i--) {
        // Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to keep findbugs happy.
        tmpBits1 |= (byte)key.charAt(i) & 0xff;
        tmpBits1 <<= 8;
      }
      // encoding flag: 1 = one byte per character (see getEncoding())
      tmpBits1 |= 1<<6;
    } else {
      for (int i=key.length()-1; i >= 0; i--) {
        tmpBits1 |= key.charAt(i);
        tmpBits1 <<= 16;
      }
    }
    // key length goes into the low 6 bits (see getKeyLength())
    tmpBits1 |= key.length();
    this.bits1 = tmpBits1;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code
  protected int hash;
  private HashEntry<Object, Object> next;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private static final AtomicLongFieldUpdater<VMThinDiskRegionEntryHeapStringKey1> lastModifiedUpdater
    = AtomicLongFieldUpdater.newUpdater(VMThinDiskRegionEntryHeapStringKey1.class, "lastModified");
  private volatile Object value;
  @Override
  protected final Object getValueField() {
    return this.value;
  }
  @Override
  protected void setValueField(Object v) {
    this.value = v;
  }
  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }
  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }
  /**
   * @see HashEntry#getEntryHash()
   */
  public final int getEntryHash() {
    return this.hash;
  }
  protected void setEntryHash(int v) {
    this.hash = v;
  }
  /**
   * @see HashEntry#getNextEntry()
   */
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }
  /**
   * @see HashEntry#setNextEntry
   */
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // disk code
  protected void initialize(RegionEntryContext context, Object value) {
    diskInitialize(context, value);
  }
  @Override
  public int updateAsyncEntrySize(EnableLRU capacityController) {
    throw new IllegalStateException("should never be called");
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private void diskInitialize(RegionEntryContext context, Object value) {
    DiskRecoveryStore drs = (DiskRecoveryStore)context;
    DiskStoreImpl ds = drs.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    //get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true/* is persistence */, ds.needsLinkedList());
    Helper.initialize(this, drs, value);
  }
  /**
   * DiskId
   *
   * @since 5.1
   */
  protected DiskId id;//= new DiskId();
  public DiskId getDiskId() {
    return this.id;
  }
  @Override
  void setDiskId(RegionEntry old) {
    this.id = ((AbstractDiskRegionEntry)old).getDiskId();
  }
  // // inlining DiskId
  // // always have these fields
  // /**
  //  * id consists of
  //  * most significant
  //  * 1 byte = users bits
  //  * 2-8 bytes = oplog id
  //  * least significant.
  //  *
  //  * The highest bit in the oplog id part is set to 1 if the oplog id
  //  * is negative.
  //  * @todo this field could be an int for an overflow only region
  //  */
  // private long id;
  // /**
  //  * Length of the bytes on disk.
  //  * This is always set. If the value is invalid then it will be set to 0.
  //  * The most significant bit is used by overflow to mark it as needing to be written.
  //  */
  // protected int valueLength = 0;
  // // have intOffset or longOffset
  // // intOffset
  // /**
  //  * The position in the oplog (the oplog offset) where this entry's value is
  //  * stored
  //  */
  // private volatile int offsetInOplog;
  // // longOffset
  // /**
  //  * The position in the oplog (the oplog offset) where this entry's value is
  //  * stored
  //  */
  // private volatile long offsetInOplog;
  // // have overflowOnly or persistence
  // // overflowOnly
  // // no fields
  // // persistent
  // /** unique entry identifier * */
  // private long keyId;
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // key code
  // Inline string key storage. Layout of bits1 (established by the
  // constructor, getKeyLength() and getEncoding()):
  //   bits 0-5  : key length
  //   bits 6-7  : encoding (0 = 16-bit chars, 1 = 8-bit bytes)
  //   bits 8-63 : the key characters, packed low-to-high
  private final long bits1;
  private int getKeyLength() {
    return (int) (this.bits1 & 0x003fL);
  }
  private int getEncoding() {
    // 0 means encoded as char
    // 1 means encoded as bytes that are all <= 0x7f;
    return (int) (this.bits1 >> 6) & 0x03;
  }
  @Override
  public final Object getKey() {
    // Unpack the inline key: shift before reading, mirroring the constructor,
    // which shifts after writing each character.
    int keylen = getKeyLength();
    char[] chars = new char[keylen];
    long tmpBits1 = this.bits1;
    if (getEncoding() == 1) {
      for (int i=0; i < keylen; i++) {
        tmpBits1 >>= 8;
        chars[i] = (char) (tmpBits1 & 0x00ff);
      }
    } else {
      for (int i=0; i < keylen; i++) {
        tmpBits1 >>= 16;
        chars[i] = (char) (tmpBits1 & 0x00FFff);
      }
    }
    return new String(chars);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public boolean isKeyEqual(Object k) {
    // Compare against the packed representation directly, avoiding the
    // String allocation getKey() would incur.
    if (k instanceof String) {
      String str = (String)k;
      int keylen = getKeyLength();
      if (str.length() == keylen) {
        long tmpBits1 = this.bits1;
        if (getEncoding() == 1) {
          for (int i=0; i < keylen; i++) {
            tmpBits1 >>= 8;
            char c = (char) (tmpBits1 & 0x00ff);
            if (str.charAt(i) != c) {
              return false;
            }
          }
        } else {
          for (int i=0; i < keylen; i++) {
            tmpBits1 >>= 16;
            char c = (char) (tmpBits1 & 0x00FFff);
            if (str.charAt(i) != c) {
              return false;
            }
          }
        }
        return true;
      }
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.util.*;
import java.io.*;
import java.util.*;
import java.sql.Time;
/***************************************************
 * PendingReplicationBlocks does the bookkeeping of all
 * blocks that are getting replicated.
 *
 * It does the following:
 * 1) record blocks that are getting replicated at this instant.
 * 2) a coarse grain timer to track age of replication request
 * 3) a thread that periodically identifies replication-requests
 *    that never made it.
 *
 * Thread-safety: all access to pendingReplications and timedOutItems is
 * guarded by synchronizing on the respective collection.
 ***************************************************/
class PendingReplicationBlocks {
  private Map<Block, PendingBlockInfo> pendingReplications;
  private ArrayList<Block> timedOutItems;
  Daemon timerThread = null;
  private volatile boolean fsRunning = true;

  //
  // It might take anywhere between 5 to 10 minutes before
  // a request is timed out.
  //
  private long timeout = 5 * 60 * 1000;
  private long defaultRecheckInterval = 5 * 60 * 1000;

  PendingReplicationBlocks(long timeoutPeriod) {
    if ( timeoutPeriod > 0 ) {
      this.timeout = timeoutPeriod;
    }
    init();
  }

  PendingReplicationBlocks() {
    init();
  }

  // Creates the bookkeeping collections and starts the background monitor.
  void init() {
    pendingReplications = new HashMap<Block, PendingBlockInfo>();
    timedOutItems = new ArrayList<Block>();
    this.timerThread = new Daemon(new PendingReplicationMonitor());
    timerThread.start();
  }

  /**
   * Add a block to the list of pending Replications.
   * If the block is already pending, its replica count is increased and its
   * timestamp refreshed.
   */
  void increment(Block block, int numReplicas) {
    synchronized (pendingReplications) {
      PendingBlockInfo found = pendingReplications.get(block);
      if (found == null) {
        pendingReplications.put(block, new PendingBlockInfo(numReplicas));
      } else {
        found.incrementReplicas(numReplicas);
        found.setTimeStamp();
      }
    }
  }

  /**
   * One replication request for this block has finished.
   * Decrement the number of pending replication requests
   * for this block. The record is dropped once no replicas remain pending.
   */
  void decrement(Block block) {
    synchronized (pendingReplications) {
      PendingBlockInfo found = pendingReplications.get(block);
      if (found != null) {
        FSNamesystem.LOG.debug("Removing pending replication for block" + block);
        found.decrementReplicas();
        if (found.getNumReplicas() <= 0) {
          pendingReplications.remove(block);
        }
      }
    }
  }

  /**
   * Remove the record about the given block from pendingReplications.
   * @param block The given block whose pending replication requests need to be
   *              removed
   */
  void remove(Block block) {
    synchronized (pendingReplications) {
      pendingReplications.remove(block);
    }
  }

  /**
   * The total number of blocks that are undergoing replication.
   */
  int size() {
    // Synchronized like all other accesses: HashMap is not safe to read
    // while the monitor thread may be mutating it.
    synchronized (pendingReplications) {
      return pendingReplications.size();
    }
  }

  /**
   * How many copies of this block is pending replication?
   */
  int getNumReplicas(Block block) {
    synchronized (pendingReplications) {
      PendingBlockInfo found = pendingReplications.get(block);
      if (found != null) {
        return found.getNumReplicas();
      }
    }
    return 0;
  }

  /**
   * Returns a list of blocks that have timed out their
   * replication requests. Returns null if no blocks have
   * timed out. The internal timed-out list is drained by this call.
   */
  Block[] getTimedOutBlocks() {
    synchronized (timedOutItems) {
      if (timedOutItems.size() <= 0) {
        return null;
      }
      Block[] blockList = timedOutItems.toArray(
          new Block[timedOutItems.size()]);
      timedOutItems.clear();
      return blockList;
    }
  }

  /**
   * An object that contains information about a block that
   * is being replicated. It records the timestamp when the
   * system started replicating the most recent copy of this
   * block. It also records the number of replication
   * requests that are in progress.
   */
  static class PendingBlockInfo {
    private long timeStamp;
    private int numReplicasInProgress;

    PendingBlockInfo(int numReplicas) {
      this.timeStamp = FSNamesystem.now();
      this.numReplicasInProgress = numReplicas;
    }

    long getTimeStamp() {
      return timeStamp;
    }

    // Refreshes the timestamp to "now".
    void setTimeStamp() {
      timeStamp = FSNamesystem.now();
    }

    void incrementReplicas(int increment) {
      numReplicasInProgress += increment;
    }

    void decrementReplicas() {
      numReplicasInProgress--;
      assert(numReplicasInProgress >= 0);
    }

    int getNumReplicas() {
      return numReplicasInProgress;
    }
  }

  /*
   * A periodic thread that scans for blocks that never finished
   * their replication request.
   */
  class PendingReplicationMonitor implements Runnable {
    public void run() {
      while (fsRunning) {
        long period = Math.min(defaultRecheckInterval, timeout);
        try {
          pendingReplicationCheck();
          Thread.sleep(period);
        } catch (InterruptedException ie) {
          // Loop re-checks fsRunning; stop() interrupts us to trigger exit.
          FSNamesystem.LOG.debug(
              "PendingReplicationMonitor thread received exception. " + ie);
        }
      }
    }

    /**
     * Iterate through all items and detect timed-out items
     */
    void pendingReplicationCheck() {
      synchronized (pendingReplications) {
        Iterator<Map.Entry<Block, PendingBlockInfo>> iter =
            pendingReplications.entrySet().iterator();
        long now = FSNamesystem.now();
        FSNamesystem.LOG.debug("PendingReplicationMonitor checking Q");
        while (iter.hasNext()) {
          Map.Entry<Block, PendingBlockInfo> entry = iter.next();
          PendingBlockInfo pendingBlock = entry.getValue();
          if (now > pendingBlock.getTimeStamp() + timeout) {
            Block block = entry.getKey();
            synchronized (timedOutItems) {
              timedOutItems.add(block);
            }
            FSNamesystem.LOG.warn(
                "PendingReplicationMonitor timed out block " + block);
            iter.remove();
          }
        }
      }
    }
  }

  /*
   * Shuts down the pending replication monitor thread.
   * Waits for the thread to exit.
   */
  void stop() {
    fsRunning = false;
    timerThread.interrupt();
    try {
      timerThread.join(3000);
    } catch (InterruptedException ie) {
      // Preserve the caller's interrupt status.
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Iterate through all items and print them.
   */
  void metaSave(PrintWriter out) {
    synchronized (pendingReplications) {
      out.println("Metasave: Blocks being replicated: " +
                  pendingReplications.size());
      Iterator<Map.Entry<Block, PendingBlockInfo>> iter =
          pendingReplications.entrySet().iterator();
      while (iter.hasNext()) {
        Map.Entry<Block, PendingBlockInfo> entry = iter.next();
        PendingBlockInfo pendingBlock = entry.getValue();
        Block block = entry.getKey();
        out.println(block +
                    " StartTime: " + new Time(pendingBlock.timeStamp) +
                    " NumReplicaInProgress: " +
                    pendingBlock.numReplicasInProgress);
      }
    }
  }
}
| |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.grpc.scanner;
import com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.v2.RowRange;
import com.google.bigtable.v2.RowSet;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.util.Arrays;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link ReadRowsRequestManager}. */
@RunWith(JUnit4.class)
public class ReadRowsRequestManagerTest {
static final ByteString BLANK = ByteString.EMPTY;
/** Builds a {@link ReadRowsRequest} whose row set holds the single given range. */
private static ReadRowsRequest createRequest(RowRange range) {
  RowSet rows = RowSet.newBuilder().addRowRanges(range).build();
  return ReadRowsRequest.newBuilder().setRows(rows).build();
}
/** Builds a range [startClosed, endOpen): inclusive start key, exclusive end key. */
private static RowRange createRowRangeClosedStart(ByteString startClosed, ByteString endOpen) {
  RowRange.Builder builder = RowRange.newBuilder();
  builder.setStartKeyClosed(startClosed);
  builder.setEndKeyOpen(endOpen);
  return builder.build();
}
/** Builds a range (startOpened, endOpen): exclusive start key, exclusive end key. */
private static RowRange createRowRangeOpenedStart(ByteString startOpened, ByteString endOpen) {
  RowRange.Builder builder = RowRange.newBuilder();
  builder.setStartKeyOpen(startOpened);
  builder.setEndKeyOpen(endOpen);
  return builder.build();
}
/** Builds a {@link ReadRowsRequest} selecting exactly the given row keys. */
private ReadRowsRequest createKeysRequest(Iterable<ByteString> keys) {
  RowSet rowSet = createRowSet(keys);
  return ReadRowsRequest.newBuilder().setRows(rowSet).build();
}
/** Builds a {@link RowSet} containing the given row keys. */
private RowSet createRowSet(Iterable<ByteString> keys) {
  RowSet.Builder builder = RowSet.newBuilder();
  builder.addAllRowKeys(keys);
  return builder.build();
}
/**
 * Test a single, full table scan scenario for
 * {@link ReadRowsRequestManager#buildUpdatedRequest()}: after a key is seen,
 * the resumed request must start (exclusively) after that key.
 */
@Test
public void test_filterRows_testAllRange() {
  // copyFromUtf8 avoids the platform-default charset of String.getBytes().
  ByteString key1 = ByteString.copyFromUtf8("row1");
  ReadRowsRequest originalRequest =
      createRequest(createRowRangeClosedStart(ByteString.EMPTY, ByteString.EMPTY));
  ReadRowsRequestManager underTest = new ReadRowsRequestManager(originalRequest);
  underTest.updateLastFoundKey(key1);
  Assert.assertEquals(
      createRequest(createRowRangeOpenedStart(key1, ByteString.EMPTY)),
      underTest.buildUpdatedRequest());
}
/**
* Test a single, full table scan scenario for {@link
* ReadRowsRequestManager#buildUpdatedRequest()}} .
*/
@Test
public void test_filterRows_empty() {
ByteString key1 = ByteString.copyFrom("row1".getBytes());
ReadRowsRequest originalRequest = createRequest(RowRange.getDefaultInstance());
ReadRowsRequestManager underTest = new ReadRowsRequestManager(originalRequest);
underTest.updateLastFoundKey(key1);
RowSet actual = underTest.buildUpdatedRequest().getRows();
Assert.assertEquals(0, actual.getRowKeysCount());
Assert.assertEquals(1, actual.getRowRangesCount());
RowRange actualRange = actual.getRowRanges(0);
Assert.assertEquals(key1, actualRange.getStartKeyOpen());
RowRange.EndKeyCase endKeyCase = actualRange.getEndKeyCase();
Assert.assertTrue(endKeyCase == RowRange.EndKeyCase.ENDKEY_NOT_SET);
}
/**
* Test rowKeys scenario for {@link ReadRowsRequestManager#buildUpdatedRequest()}}.
*
* @throws IOException
*/
@Test
public void test_filterRows_rowKeys() {
ByteString key1 = ByteString.copyFrom("row1".getBytes());
ByteString key2 = ByteString.copyFrom("row2".getBytes());
ByteString key3 = ByteString.copyFrom("row3".getBytes());
ReadRowsRequest originalRequest = createKeysRequest(Arrays.asList(key1, key2, key3));
ReadRowsRequestManager underTest = new ReadRowsRequestManager(originalRequest);
Assert.assertEquals(originalRequest, underTest.buildUpdatedRequest());
underTest.updateLastFoundKey(key1);
Assert.assertEquals(
createKeysRequest(Arrays.asList(key2, key3)), underTest.buildUpdatedRequest());
}
/**
* Test multiple rowset filter scenarios for {@link
* ReadRowsRequestManager#buildUpdatedRequest()}}.
*
* @throws IOException
*/
@Test
public void test_filterRows_multiRowSetFilters() {
ByteString key1 = ByteString.copyFrom("row1".getBytes());
ByteString key2 = ByteString.copyFrom("row2".getBytes());
ByteString key3 = ByteString.copyFrom("row3".getBytes());
RowSet fullRowSet =
RowSet.newBuilder()
.addAllRowKeys(Arrays.asList(key1, key2, key3)) // row1 should be filtered out
.addRowRanges(
RowRange.newBuilder()
.setStartKeyOpen(BLANK)
.setEndKeyClosed(key1)) // should be filtered out
.addRowRanges(
RowRange.newBuilder()
.setStartKeyOpen(BLANK)
.setEndKeyOpen(key1)) // should be filtered out
.addRowRanges(
RowRange.newBuilder().setStartKeyOpen(key1).setEndKeyOpen(key2)) // should stay
.addRowRanges(
RowRange.newBuilder()
.setStartKeyClosed(key1)
.setEndKeyOpen(key2)) // should be converted (key1 -> key2)
.addRowRanges(
RowRange.newBuilder()
.setStartKeyClosed(key1)
.setEndKeyClosed(key2)) // should be converted (key1 -> key2]
.addRowRanges(
RowRange.newBuilder().setStartKeyOpen(key2).setEndKeyOpen(key3)) // should stay
.addRowRanges(
RowRange.newBuilder().setStartKeyClosed(key2).setEndKeyOpen(key3)) // should stay
.build();
RowSet filteredRowSet =
RowSet.newBuilder()
.addAllRowKeys(Arrays.asList(key2, key3)) // row1 should be filtered out
.addRowRanges(
RowRange.newBuilder().setStartKeyOpen(key1).setEndKeyOpen(key2)) // should stay
.addRowRanges(
RowRange.newBuilder()
.setStartKeyOpen(key1)
.setEndKeyOpen(key2)) // should be converted (key1 -> key2)
.addRowRanges(
RowRange.newBuilder()
.setStartKeyOpen(key1)
.setEndKeyClosed(key2)) // should be converted (key1 -> key2]
.addRowRanges(
RowRange.newBuilder().setStartKeyOpen(key2).setEndKeyOpen(key3)) // should stay
.addRowRanges(
RowRange.newBuilder().setStartKeyClosed(key2).setEndKeyOpen(key3)) // should stay
.build();
ReadRowsRequest originalRequest = ReadRowsRequest.newBuilder().setRows(fullRowSet).build();
ReadRowsRequest filteredRequest = ReadRowsRequest.newBuilder().setRows(filteredRowSet).build();
ReadRowsRequestManager underTest = new ReadRowsRequestManager(originalRequest);
Assert.assertEquals(originalRequest, underTest.buildUpdatedRequest());
underTest.updateLastFoundKey(key1);
Assert.assertEquals(filteredRequest, underTest.buildUpdatedRequest());
}
/**
* Test that resume handles key requests as unsigned bytes
*
* @throws IOException
*/
@Test
public void test_filterRows_unsignedRange() throws IOException {
ByteString key1 = ByteString.copyFrom(new byte[] {0x7f});
ByteString key2 = ByteString.copyFrom(new byte[] {(byte) 0x80});
ReadRowsRequest originalRequest =
createRequest(createRowRangeClosedStart(key1, ByteString.EMPTY));
ReadRowsRequestManager underTest = new ReadRowsRequestManager(originalRequest);
underTest.updateLastFoundKey(key2);
Assert.assertEquals(
createRequest(createRowRangeOpenedStart(key2, ByteString.EMPTY)),
underTest.buildUpdatedRequest());
}
/**
* Test that resume handles row ranges as unsigned bytes
*
* @throws IOException
*/
@Test
public void test_filterRows_unsignedRows() throws IOException {
ByteString key1 = ByteString.copyFrom(new byte[] {0x7f});
ByteString key2 = ByteString.copyFrom(new byte[] {(byte) 0x80});
ReadRowsRequest originalRequest = createKeysRequest(Arrays.asList(key1, key2));
ReadRowsRequestManager underTest = new ReadRowsRequestManager(originalRequest);
underTest.updateLastFoundKey(key1);
Assert.assertEquals(createKeysRequest(Arrays.asList(key2)), underTest.buildUpdatedRequest());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.logs;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.io.Closer;
import com.google.common.io.Files;
import com.google.common.util.concurrent.AbstractScheduledService;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.Setter;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.util.DatasetFilterUtils;
import org.apache.gobblin.util.FileListUtils;
import org.apache.gobblin.util.filesystem.FileSystemSupplier;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A utility service that periodically reads log files in a source log file directory for changes
* since the last reads and appends the changes to destination log files with the same names as
* the source log files in a destination log directory. The source and destination log files
* can be on different {@link FileSystem}s.
*
* <p>
* This class extends the {@link AbstractScheduledService} so it can be used with a
* {@link com.google.common.util.concurrent.ServiceManager} that manages the lifecycle of
* a {@link LogCopier}.
* </p>
*
* <p>
* This class is intended to be used in the following pattern:
*
* <pre>
* {@code
* LogCopier.Builder logCopierBuilder = LogCopier.newBuilder();
* LogCopier logCopier = logCopierBuilder
* .useSrcFileSystem(FileSystem.getLocal(new Configuration()))
* .useDestFileSystem(FileSystem.get(URI.create(destFsUri), new Configuration()))
* .readFrom(new Path(srcLogDir))
* .writeTo(new Path(destLogDir))
* .useSourceLogFileMonitorInterval(60)
* .useTimeUnit(TimeUnit.SECONDS)
* .build();
*
* ServiceManager serviceManager = new ServiceManager(Lists.newArrayList(logCopier));
* serviceManager.startAsync();
*
* // ...
* serviceManager.stopAsync().awaitStopped(60, TimeUnit.SECONDS);
* }
* </pre>
*
* Checkout the Javadoc of {@link LogCopier.Builder} to see the available options for customization.
* </p>
*
* @author Yinan Li
*/
public class LogCopier extends AbstractScheduledService {

  private static final Logger LOGGER = LoggerFactory.getLogger(LogCopier.class);

  private static final long DEFAULT_SOURCE_LOG_FILE_MONITOR_INTERVAL = 120;
  private static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.SECONDS;
  private static final int DEFAULT_LINES_WRITTEN_BEFORE_FLUSH = 100;
  private static final int DEFAULT_NUM_COPY_THREADS = 10;

  // Non-final: these may be re-created from the suppliers below when
  // needToUpdateSrcFs/needToUpdateDestFs is set (e.g. after a token refresh).
  private FileSystem srcFs;
  private FileSystem destFs;

  private final List<Path> srcLogDirs;
  private final Path destLogDir;

  private final long sourceLogFileMonitorInterval;
  private final TimeUnit timeUnit;

  // Optional factories used to rebuild the FileSystem objects on demand.
  private final FileSystemSupplier destFsSupplier;
  private final FileSystemSupplier srcFsSupplier;

  private final Set<String> logFileExtensions;
  private final int numCopyThreads;
  private final String currentLogFileName;

  private final Optional<List<Pattern>> includingRegexPatterns;
  private final Optional<List<Pattern>> excludingRegexPatterns;
  private final Optional<String> logFileNamePrefix;
  private final int linesWrittenBeforeFlush;

  private final ExecutorService executorService;

  @Setter
  private boolean needToUpdateDestFs;
  @Setter
  private boolean needToUpdateSrcFs;

  // Names of source log files already copied to the destination; copy tasks
  // add to this set concurrently, hence the concurrent set implementation.
  @Getter
  private final Set<String> copiedFileNames = Sets.newConcurrentHashSet();

  // The current (still actively written) log file is skipped during normal
  // iterations and copied exactly once, as part of the shutdown sequence.
  private boolean shouldCopyCurrentLogFile;

  private LogCopier(Builder builder) throws IOException {
    this.destFsSupplier = builder.destFsSupplier;
    this.srcFsSupplier = builder.srcFsSupplier;
    // A supplier, when present, takes precedence over a directly-set FileSystem.
    this.srcFs = this.srcFsSupplier != null ? this.srcFsSupplier.getFileSystem() : builder.srcFs;
    Preconditions.checkArgument(this.srcFs != null, "srcFs or srcFsSupplier has not been set");
    this.destFs = this.destFsSupplier != null ? this.destFsSupplier.getFileSystem() : builder.destFs;
    Preconditions.checkArgument(this.destFs != null, "destFs or destFsSupplier has not been set");
    this.srcLogDirs =
        builder.srcLogDirs.stream().map(d -> this.srcFs.makeQualified(d)).collect(Collectors.toList());
    this.destLogDir = this.destFs.makeQualified(builder.destLogDir);
    this.sourceLogFileMonitorInterval = builder.sourceLogFileMonitorInterval;
    this.timeUnit = builder.timeUnit;
    this.logFileExtensions = builder.logFileExtensions;
    this.currentLogFileName = builder.currentLogFileName;
    this.shouldCopyCurrentLogFile = false;
    this.needToUpdateDestFs = false;
    this.needToUpdateSrcFs = false;
    this.includingRegexPatterns = Optional.fromNullable(builder.includingRegexPatterns);
    this.excludingRegexPatterns = Optional.fromNullable(builder.excludingRegexPatterns);
    this.logFileNamePrefix = Optional.fromNullable(builder.logFileNamePrefix);
    this.linesWrittenBeforeFlush = builder.linesWrittenBeforeFlush;
    this.numCopyThreads = builder.numCopyThreads;
    this.executorService = Executors.newFixedThreadPool(numCopyThreads);
  }

  @Override
  protected void shutDown() throws Exception {
    try {
      // We need to copy the current log file as part of shutdown sequence.
      shouldCopyCurrentLogFile = true;
      runOneIteration();
      // runOneIteration() waits for all submitted copy tasks, so the executor
      // is idle here; shut it down to release its threads.
      this.executorService.shutdown();
      // Close the Filesystem objects, since these were created with auto close disabled.
      LOGGER.debug("Closing FileSystem objects...");
      this.destFs.close();
      this.srcFs.close();
    } finally {
      super.shutDown();
    }
  }

  @Override
  protected void runOneIteration() throws IOException {
    checkSrcLogFiles();
  }

  @Override
  protected Scheduler scheduler() {
    return Scheduler.newFixedRateSchedule(0, this.sourceLogFileMonitorInterval, this.timeUnit);
  }

  /**
   * Decide whether the given source log file should be copied in this iteration.
   * The current log file is only copied during shutdown, already-copied files are
   * skipped, and otherwise the file extension must be in the accepted set (an
   * empty set accepts everything).
   */
  private boolean shouldIncludeLogFile(FileStatus logFile) {
    Path logFilePath = logFile.getPath();
    // Skip copy of current log file if current log file copy is disabled
    if (currentLogFileName.equals(Files.getNameWithoutExtension(logFilePath.getName()))) {
      return shouldCopyCurrentLogFile;
    }
    // Skip copy of log file if it has already been copied previously.
    if (copiedFileNames.contains(logFilePath.getName())) {
      return false;
    }
    // Special case to accept all log file extensions.
    if (LogCopier.this.logFileExtensions.isEmpty()) {
      return true;
    }
    return LogCopier.this.logFileExtensions.contains(Files.getFileExtension(logFilePath.getName()));
  }

  /**
   * Prune the set of copied files by removing the set of files which have been already deleted from the source.
   * This keeps the copiedFileNames from growing unboundedly and is useful when log rotation is enabled on the
   * source dirs with maximum number of backups.
   * @param srcLogFileNames the file names currently present under the source log directories
   */
  @VisibleForTesting
  void pruneCopiedFileNames(Set<String> srcLogFileNames) {
    Iterator<String> copiedFilesIterator = copiedFileNames.iterator();
    while (copiedFilesIterator.hasNext()) {
      String fileName = copiedFilesIterator.next();
      if (!srcLogFileNames.contains(fileName)) {
        copiedFilesIterator.remove();
      }
    }
  }

  /**
   * Perform a check on new source log files and submit copy tasks for new log files.
   * Blocks until all submitted copy tasks complete, then refreshes the source/dest
   * {@link FileSystem} objects if requested and prunes stale entries from
   * {@link #copiedFileNames}.
   */
  @VisibleForTesting
  void checkSrcLogFiles() throws IOException {
    Set<String> srcLogFileNames = new HashSet<>();
    Set<Path> newLogFiles = new HashSet<>();
    for (Path logDirPath : srcLogDirs) {
      // Examine each directory's listing once. (The previous implementation
      // accumulated all listings into one list and re-scanned the whole list
      // for every directory, doing redundant quadratic work.)
      for (FileStatus srcLogFile : FileListUtils.listFilesRecursively(srcFs, logDirPath)) {
        if (shouldIncludeLogFile(srcLogFile)) {
          newLogFiles.add(srcLogFile.getPath());
        }
        srcLogFileNames.add(srcLogFile.getPath().getName());
      }
    }

    if (newLogFiles.isEmpty()) {
      LOGGER.warn("No log file found under directories " + this.srcLogDirs);
      return;
    }

    List<Future<Void>> futures = new ArrayList<>();
    // Schedule a copy task for each new log file
    for (final Path srcLogFile : newLogFiles) {
      String destLogFileName =
          this.logFileNamePrefix.isPresent() ? this.logFileNamePrefix.get() + "." + srcLogFile.getName()
              : srcLogFile.getName();
      final Path destLogFile = new Path(this.destLogDir, destLogFileName);
      futures.add(this.executorService.submit(new LogCopyTask(srcLogFile, destLogFile)));
    }

    // Wait for copy tasks to finish
    for (Future<Void> future : futures) {
      try {
        future.get();
      } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption.
        Thread.currentThread().interrupt();
        LOGGER.error("LogCopyTask was interrupted", e);
      } catch (ExecutionException e) {
        // Log the task's own exception (the cause), with its full stack trace.
        LOGGER.error("Failed LogCopyTask", e.getCause());
      }
    }

    if (needToUpdateDestFs) {
      if (destFsSupplier == null) {
        throw new IOException("Try to update dest fileSystem but destFsSupplier has not been set");
      }
      this.destFs.close();
      this.destFs = destFsSupplier.getFileSystem();
      LOGGER.info("Dest fs updated" + destFs.toString());
      needToUpdateDestFs = false;
    }

    if (needToUpdateSrcFs) {
      if (srcFsSupplier == null) {
        throw new IOException("Try to update source fileSystem but srcFsSupplier has not been set");
      }
      this.srcFs.close();
      this.srcFs = srcFsSupplier.getFileSystem();
      LOGGER.info("Src fs updated" + srcFs.toString());
      needToUpdateSrcFs = false;
    }

    pruneCopiedFileNames(srcLogFileNames);
  }

  /**
   * Get a new {@link LogCopier.Builder} instance for building a {@link LogCopier}.
   *
   * @return a new {@link LogCopier.Builder} instance
   */
  public static Builder newBuilder() {
    return new Builder();
  }

  /**
   * A builder class for {@link LogCopier}.
   */
  public static class Builder {

    private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();

    private FileSystem srcFs = null;
    private List<Path> srcLogDirs;
    private FileSystem destFs = null;
    private Path destLogDir;
    private FileSystemSupplier destFsSupplier = null;
    private FileSystemSupplier srcFsSupplier = null;
    private long sourceLogFileMonitorInterval = DEFAULT_SOURCE_LOG_FILE_MONITOR_INTERVAL;
    private int numCopyThreads = DEFAULT_NUM_COPY_THREADS;
    private TimeUnit timeUnit = DEFAULT_TIME_UNIT;
    private Set<String> logFileExtensions;
    private String currentLogFileName;
    private List<Pattern> includingRegexPatterns;
    private List<Pattern> excludingRegexPatterns;
    private String logFileNamePrefix;
    private int linesWrittenBeforeFlush = DEFAULT_LINES_WRITTEN_BEFORE_FLUSH;

    /**
     * Set the interval between two checks for the source log file monitor.
     *
     * @param sourceLogFileMonitorInterval the interval between two checks for the source log file monitor
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useSourceLogFileMonitorInterval(long sourceLogFileMonitorInterval) {
      Preconditions.checkArgument(sourceLogFileMonitorInterval > 0,
          "Source log file monitor interval must be positive");
      this.sourceLogFileMonitorInterval = sourceLogFileMonitorInterval;
      return this;
    }

    /**
     * Set the {@link TimeUnit} used for the source log file monitor interval.
     *
     * @param timeUnit the {@link TimeUnit} used for the log file monitor interval
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useTimeUnit(TimeUnit timeUnit) {
      Preconditions.checkNotNull(timeUnit);
      this.timeUnit = timeUnit;
      return this;
    }

    /**
     * Set the {@link FileSystemSupplier} used for generating new Dest FileSystem later when token been updated.
     *
     * @param supplier the {@link FileSystemSupplier} used for generating new Dest FileSystem
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useDestFsSupplier(FileSystemSupplier supplier) {
      Preconditions.checkNotNull(supplier);
      this.destFsSupplier = supplier;
      return this;
    }

    /**
     * Set the {@link FileSystemSupplier} used for generating new source FileSystem later when token been updated.
     *
     * @param supplier the {@link FileSystemSupplier} used for generating new source FileSystem
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useSrcFsSupplier(FileSystemSupplier supplier) {
      Preconditions.checkNotNull(supplier);
      this.srcFsSupplier = supplier;
      return this;
    }

    /**
     * Set the set of acceptable log file extensions.
     *
     * @param logFileExtensions the set of acceptable log file extensions
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder acceptsLogFileExtensions(Set<String> logFileExtensions) {
      Preconditions.checkNotNull(logFileExtensions);
      this.logFileExtensions = ImmutableSet.copyOf(logFileExtensions);
      return this;
    }

    /**
     * Set the regex patterns used to filter logs that should be copied.
     *
     * @param regexList a comma-separated list of regex patterns
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useIncludingRegexPatterns(String regexList) {
      Preconditions.checkNotNull(regexList);
      this.includingRegexPatterns = DatasetFilterUtils.getPatternsFromStrings(COMMA_SPLITTER.splitToList(regexList));
      return this;
    }

    /**
     * Set the regex patterns used to filter logs that should not be copied.
     *
     * @param regexList a comma-separated list of regex patterns
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useExcludingRegexPatterns(String regexList) {
      Preconditions.checkNotNull(regexList);
      this.excludingRegexPatterns = DatasetFilterUtils.getPatternsFromStrings(COMMA_SPLITTER.splitToList(regexList));
      return this;
    }

    /**
     * Set the source {@link FileSystem} for reading the source log file.
     *
     * @param srcFs the source {@link FileSystem} for reading the source log file
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useSrcFileSystem(FileSystem srcFs) {
      Preconditions.checkNotNull(srcFs);
      this.srcFs = srcFs;
      return this;
    }

    /**
     * Set the destination {@link FileSystem} for writing the destination log file.
     *
     * @param destFs the destination {@link FileSystem} for writing the destination log file
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useDestFileSystem(FileSystem destFs) {
      Preconditions.checkNotNull(destFs);
      this.destFs = destFs;
      return this;
    }

    /**
     * Set the path of the source log file directory to read from.
     *
     * @param srcLogDir the path of the source log file directory to read from
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder readFrom(Path srcLogDir) {
      Preconditions.checkNotNull(srcLogDir);
      this.srcLogDirs = ImmutableList.of(srcLogDir);
      return this;
    }

    /**
     * Set the paths of the source log file directories to read from.
     *
     * @param srcLogDirs the paths of the source log file directories to read from
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder readFrom(List<Path> srcLogDirs) {
      Preconditions.checkNotNull(srcLogDirs);
      this.srcLogDirs = srcLogDirs;
      return this;
    }

    /**
     * Set the path of the destination log file directory to write to.
     *
     * @param destLogDir the path of the destination log file directory to write to
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder writeTo(Path destLogDir) {
      Preconditions.checkNotNull(destLogDir);
      this.destLogDir = destLogDir;
      return this;
    }

    /**
     * Set the log file name prefix at the destination.
     *
     * @param logFileNamePrefix the log file name prefix at the destination
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useLogFileNamePrefix(String logFileNamePrefix) {
      Preconditions.checkArgument(!Strings.isNullOrEmpty(logFileNamePrefix),
          "Invalid log file name prefix: " + logFileNamePrefix);
      this.logFileNamePrefix = logFileNamePrefix;
      return this;
    }

    /**
     * Set the number of lines written before they are flushed to disk.
     *
     * @param linesWrittenBeforeFlush the number of lines written before they are flushed to disk
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useLinesWrittenBeforeFlush(int linesWrittenBeforeFlush) {
      Preconditions.checkArgument(linesWrittenBeforeFlush > 0,
          "The value specifying the lines to write before flush must be positive");
      this.linesWrittenBeforeFlush = linesWrittenBeforeFlush;
      return this;
    }

    /**
     * Set the current log file name.
     *
     * @param currentLogFileName the name (without extension) of the actively written log file
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useCurrentLogFileName(String currentLogFileName) {
      this.currentLogFileName = currentLogFileName;
      return this;
    }

    /**
     * Set the number of threads to use for copying container log files to dest FS.
     *
     * @param numCopyThreads the size of the copy thread pool
     * @return this {@link LogCopier.Builder} instance
     */
    public Builder useNumCopyThreads(int numCopyThreads) {
      this.numCopyThreads = numCopyThreads;
      return this;
    }

    /**
     * Build a new {@link LogCopier} instance.
     *
     * @return a new {@link LogCopier} instance
     */
    public LogCopier build() throws IOException {
      return new LogCopier(this);
    }
  }

  /**
   * A task that copies a single source log file to its destination path.
   * I/O failures are logged rather than propagated so one failed file does
   * not abort the whole iteration.
   */
  private class LogCopyTask implements Callable<Void> {
    private final Path srcLogFile;
    private final Path destLogFile;

    public LogCopyTask(Path srcLogFile, Path destLogFile) {
      this.srcLogFile = srcLogFile;
      this.destLogFile = destLogFile;
    }

    @Override
    public Void call() {
      try {
        copyChangesOfLogFile(LogCopier.this.srcFs.makeQualified(this.srcLogFile),
            LogCopier.this.destFs.makeQualified(this.destLogFile));
      } catch (IOException ioe) {
        LOGGER.error(String.format("Failed while copying logs from %s to %s", this.srcLogFile, this.destLogFile), ioe);
      }
      return null;
    }

    /**
     * Copy log files that have been rolled over.
     *
     * <p>
     * NOTE(review): despite the name, this reads the whole source file and
     * re-creates the destination file ({@code destFs.create} truncates), i.e.
     * it performs a full copy rather than an incremental append.
     * </p>
     */
    private void copyChangesOfLogFile(Path srcFile, Path destFile) throws IOException {
      LOGGER.info("Copying changes from {} to {}", srcFile.toString(), destFile.toString());
      if (!LogCopier.this.srcFs.exists(srcFile)) {
        LOGGER.warn("Source log file not found: " + srcFile);
        return;
      }

      try (Closer closer = Closer.create()) {
        FSDataInputStream fsDataInputStream = closer.register(LogCopier.this.srcFs.open(srcFile));
        BufferedReader srcLogFileReader = closer.register(
            new BufferedReader(new InputStreamReader(fsDataInputStream, ConfigurationKeys.DEFAULT_CHARSET_ENCODING)));
        // The output stream is closed (and flushed) when the registered writer is closed.
        FSDataOutputStream outputStream = LogCopier.this.destFs.create(destFile);
        BufferedWriter destLogFileWriter = closer.register(
            new BufferedWriter(new OutputStreamWriter(outputStream, ConfigurationKeys.DEFAULT_CHARSET_ENCODING)));

        String line;
        int linesProcessed = 0;
        while (!Thread.currentThread().isInterrupted() && (line = srcLogFileReader.readLine()) != null) {
          if (!shouldCopyLine(line)) {
            continue;
          }
          destLogFileWriter.write(line);
          destLogFileWriter.newLine();
          linesProcessed++;
          // Flush periodically so progress is visible on the destination FS.
          if (linesProcessed % LogCopier.this.linesWrittenBeforeFlush == 0) {
            destLogFileWriter.flush();
          }
        }
        // Add the copied file to the list of files already copied to the destination.
        LogCopier.this.copiedFileNames.add(srcFile.getName());
      }
    }

    /**
     * Check if a log line should be copied.
     *
     * <p>
     * A line should be copied if and only if all of the following conditions satisfy:
     *
     * <ul>
     * <li>
     * It doesn't match any of the excluding regex patterns. If there's no excluding regex patterns,
     * this condition is considered satisfied.
     * </li>
     * <li>
     * It matches at least one of the including regex patterns. If there's no including regex patterns,
     * this condition is considered satisfied.
     * </li>
     * </ul>
     * </p>
     */
    private boolean shouldCopyLine(String line) {
      boolean including =
          !LogCopier.this.includingRegexPatterns.isPresent() || DatasetFilterUtils.stringInPatterns(line,
              LogCopier.this.includingRegexPatterns.get());
      boolean excluding = LogCopier.this.excludingRegexPatterns.isPresent() && DatasetFilterUtils.stringInPatterns(line,
          LogCopier.this.excludingRegexPatterns.get());

      return !excluding && including;
    }
  }
}
| |
/**
* Copyright 2014 National University of Ireland, Galway.
*
* This file is part of the SIREn project. Project and contact information:
*
* https://github.com/rdelbru/SIREn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sindice.siren.index.codecs.block;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
/**
* This class is used to generate {@link AForFrameDecompressor}.
*/
public class AForFrameDecompressorGenerator {
private FileWriter writer;
public static final int[] frameSizes = new int[99];
private final int[] MASK = { 0x00000000, 0x00000001, 0x00000003, 0x00000007, 0x0000000f, 0x0000001f, 0x0000003f,
0x0000007f, 0x000000ff, 0x000001ff, 0x000003ff, 0x000007ff, 0x00000fff, 0x00001fff,
0x00003fff, 0x00007fff, 0x0000ffff, 0x0001ffff, 0x0003ffff, 0x0007ffff, 0x000fffff,
0x001fffff, 0x003fffff, 0x007fffff, 0x00ffffff, 0x01ffffff, 0x03ffffff, 0x07ffffff,
0x0fffffff, 0x1fffffff, 0x3fffffff, 0x7fffffff, 0xffffffff };
  /**
   * Creates a generator and precomputes the {@link #frameSizes} lookup table
   * used while emitting the decompressor classes.
   *
   * @throws IOException declared for API symmetry with the generation methods
   */
  public AForFrameDecompressorGenerator() throws IOException {
    this.generateFrameSizeTable();
  }
protected void generateFrameSizeTable() throws IOException {
int frameSize = 0;
frameSizes[0] = 0;
for (int i = 1; i <= 32; i++) {
frameSize += 4;
frameSizes[i] = frameSize;
}
frameSizes[33] = 0;
frameSize = 0;
for (int i = 1; i <= 32; i++) {
frameSize += 2;
frameSizes[i+33] = frameSize;
}
frameSizes[66] = 0;
for (int i = 1; i <= 32; i++) {
frameSizes[i+66] = i;
}
}
public void generate(final File file) throws IOException {
writer = new FileWriter(file);
writer.append(FILE_HEADER); writer.append('\n');
this.generateClass();
writer.close();
}
protected void generateClass() throws IOException {
writer.append("/**\n");
writer.append(" * This class contains a lookup table of functors for decompressing fames.\n");
writer.append(" */\n");
writer.append("public class AForFrameDecompressor {\n\n");
this.generateTable();
this.generateAbstractInnerClass();
for (int i = 0; i < 99; i++) {
this.generateInnerClass(i);
}
writer.append("}\n");
}
  /**
   * Emits the static lookup table mapping each of the 99 frame configuration
   * indices to an instance of the corresponding FrameDecompressor inner class.
   */
  protected void generateTable() throws IOException {
    writer.append("  public static final FrameDecompressor[] decompressors = new FrameDecompressor[] {\n");
    for (int i = 0; i < 99; i++) {
      writer.append("    new FrameDecompressor"+i+"(),\n");
    }
    writer.append("  };\n\n");
  }
  /**
   * Emits the abstract FrameDecompressor base class declaring the
   * {@code decompress(BytesRef, IntsRef)} contract implemented by each of the
   * 99 generated inner classes.
   */
  protected void generateAbstractInnerClass() throws IOException {
    writer.append("  static abstract class FrameDecompressor {\n");
    writer.append("    public abstract void decompress(final BytesRef input, final IntsRef output);\n");
    writer.append("  }\n\n");
  }
protected void generateInnerClass(final int numFramebits) throws IOException {
writer.append(" static final class FrameDecompressor" + Integer.toString(numFramebits) + " extends FrameDecompressor {\n");
this.generateMethod(numFramebits);
writer.append(" }\n\n");
}
  /**
   * Emits the decompress body for an all-zeros frame (configuration 0, 33 or
   * 66): writes 32, 16 or 8 zero integers respectively, advances the output
   * offset by that count and the input offset past the single header byte.
   * Also emits the closing brace of the generated method, so callers must
   * return immediately afterwards.
   *
   * @param numFrameBits the configuration index (0, 33 or 66)
   */
  private void zeros(final int numFrameBits)
      throws IOException {
    // Frame length by configuration group: <=32 -> 32 values, <=65 -> 16, else 8.
    final int nb;
    if (numFrameBits <= 32)
      nb = 32;
    else if (numFrameBits <= 65)
      nb = 16;
    else
      nb = 8;
    writer.append("      ");
    writer.append("final int[] unCompressedData = output.ints;\n");
    writer.append("      ");
    writer.append("final int outOffset = output.offset;\n");
    for (int i = 0; i < nb; i++) {
      writer.append("      ");
      // First slot omits the "+ 0" for cleaner generated code.
      writer.append("unCompressedData[outOffset" + ((i == 0) ? "" : " + " +i) + "] = 0;\n");
    }
    writer.append("      output.offset += " + nb + ";\n");
    writer.append("      input.offset += 1;\n");
    writer.append("    }\n");
  }
  /**
   * Emits the decompression statements for the byte-aligned 8-bit and 16-bit
   * widths of a 16-value frame: each output integer is read directly from one
   * (8-bit) or two (16-bit) input bytes, so no cross-byte shifting is needed.
   * Also emits the method footer and closing brace, so callers must return
   * immediately afterwards.
   *
   * @param numFrameBits the raw bit width, 8 or 16
   */
  private void generate816Routines(final int numFrameBits) throws IOException {
    // i walks the input bytes (step 1 for 8-bit, 2 for 16-bit); j indexes the 16 outputs.
    for (int i = 0, j = 0; j < 16; ) {
      if (i == 0)
        writer.append("      unCompressedData[outOffset] = " + (numFrameBits == 8 ? "compressedArray[inOffset] & 0xFF" :
          "((compressedArray[inOffset] & 0xFF) << 8) | (compressedArray[inOffset + 1] & 0xFF)") + ";\n");
      else
        writer.append("      unCompressedData[outOffset + " + j + "] = " + (numFrameBits == 8 ? "compressedArray[inOffset + " + i + "] & 0xFF" :
          "((compressedArray[inOffset + " + i + "] & 0xFF) << 8) | (compressedArray[inOffset + " + (i+1) +"] & 0xFF)") + ";\n");
      if (numFrameBits == 16)
        i += 2;
      else
        i++;
      j++;
    }
    // +33 converts the raw bit width back into the 16-value configuration index.
    this.generateMethodFooter(numFrameBits + 33);
    writer.append("    }\n");
  }
  /**
   * Emits the complete decompress method for one frame configuration.
   *
   * <p> The configuration index encodes both the frame length and the bit
   * width per value: 0-32 are 32-value frames with that many bits, 33-65 are
   * 16-value frames (bits = index - 33) and 66-98 are 8-value frames
   * (bits = index - 66). Configurations 0, 33 and 66 are all-zeros frames;
   * 1-7 bit and 8/16 bit widths of 16-value frames have dedicated routines
   * that emit their own footer and closing brace.
   *
   * @param numFrameBits the frame configuration index (0-98)
   */
  protected void generateMethod(final int numFrameBits) throws IOException {
    writer.append("    public final void decompress(final BytesRef input, final IntsRef output) {\n");
    // Zeros case
    if (numFrameBits == 0 || numFrameBits == 33 || numFrameBits == 66) {
      this.zeros(numFrameBits);
      return;
    }
    this.generateMethodHeader(numFrameBits);
    // for bits < 7, use byte variables
    // (16-value frames with widths 1-7: indices 34-40)
    if (numFrameBits < 41 && numFrameBits > 33) {
      this.generate1To7Routines(numFrameBits - 33);
      writer.append("    }\n");
      return;
    }
    // 8 and 16 BFS special case (byte-aligned widths of 16-value frames)
    if (numFrameBits == 41 || numFrameBits == 49) {
      this.generate816Routines(numFrameBits - 33);
      return;
    }
    // Load the raw input words appropriate to the frame length...
    if (numFrameBits <= 32)
      this.generateIntValues(numFrameBits);
    else if (numFrameBits <= 65)
      this.generateShortValues(numFrameBits - 33);
    else
      this.generateByteValues(numFrameBits - 66);
    // ...then emit the shift/mask instructions that unpack them.
    if (numFrameBits <= 32)
      this.generateInstructions32(numFrameBits);
    else if (numFrameBits <= 65)
      this.generateInstructions16(numFrameBits - 33);
    else
      this.generateInstructions8(numFrameBits - 66);
    this.generateMethodFooter(numFrameBits);
    writer.append("    }\n");
  }
  /**
   * Emits the unpacking statements for 16-value frames with 1-7 bits per
   * value: values are extracted from byte-sized locals with shifts and masks,
   * stitching together values that straddle a byte boundary. Emits the method
   * footer but NOT the closing brace (the caller appends it).
   *
   * @param numFrameBits the bit width per value, 1-7
   */
  private void generate1To7Routines(final int numFrameBits)
      throws IOException {
    final int mask = (1 << numFrameBits) - 1;
    int shift = 8;
    int bytePtr = 0, intPtr = 0;
    // 16 values * numFrameBits bits = numFrameBits*2 bytes of input locals.
    this.generateByteValues(numFrameBits*2);
    while (intPtr != 16) { // while we didn't process 16 integers
      // Emit values wholly contained in the current byte.
      while (shift >= numFrameBits) {
        shift -= numFrameBits;
        writer.append("      ");
        if (shift == 0) {
          writer.append("unCompressedData[outOffset + "+intPtr+"] = i"+bytePtr+" & "+mask+";\n");
        }
        else {
          writer.append("unCompressedData[outOffset + "+intPtr+"] = (i"+bytePtr+" >>> "+shift+") & "+mask+";\n");
        }
        intPtr++;
      }
      if (shift > 0) {
        // Value straddles a byte boundary: combine the low bits of the current
        // byte with the high bits of the next one.
        writer.append("      ");
        writer.append("unCompressedData[outOffset + "+intPtr+"] = ((i"+bytePtr+" & "+((1 << shift) - 1)+") << "+(numFrameBits - shift)+")");
        bytePtr++;
        shift = 8 - (numFrameBits - shift);
        writer.append(" | (i"+bytePtr+" >>> "+shift+") & "+((1 << (8 - shift)) - 1)+";\n");
        intPtr++;
      }
      else {
        // Byte exactly exhausted: move to the next one.
        bytePtr++;
        shift = 8;
      }
    }
    // +33 converts the raw bit width back into the 16-value configuration index.
    this.generateMethodFooter(numFrameBits + 33);
  }
/**
 * Emits the trailing offset updates of a generated decompress method:
 * the input advances past the one-byte frame header plus the compressed
 * frame payload, and the output advances by the number of integers the
 * frame decodes to.
 *
 * @param numFrameBits the frame code whose footer is emitted
 * @throws IOException if appending to the generated source fails
 */
protected void generateMethodFooter(final int numFrameBits) throws IOException {
  // Frame codes 0-32 decode 32 integers, 33-65 decode 16, and 66+ decode 8.
  final int integersPerFrame;
  if (numFrameBits <= 32) {
    integersPerFrame = 32;
  }
  else if (numFrameBits <= 65) {
    integersPerFrame = 16;
  }
  else {
    integersPerFrame = 8;
  }
  writer.append("    ");
  writer.append("input.offset += " + (1 + frameSizes[numFrameBits]) + ";\n");
  writer.append("    ");
  writer.append("output.offset += " + integersPerFrame + ";\n");
}
/**
 * Emits the local variable declarations shared by every generated
 * decompress method: aliases for the output/input arrays and their
 * offsets (the input offset skips the one-byte frame header).
 *
 * @param numFrameBits the frame code being generated (not used here)
 * @throws IOException if appending to the generated source fails
 */
protected void generateMethodHeader(final int numFrameBits) throws IOException {
  final String[] declarations = {
    "final int[] unCompressedData = output.ints;\n",
    "final byte[] compressedArray = input.bytes;\n",
    "final int inOffset = input.offset + 1;\n",
    "final int outOffset = output.offset;\n"
  };
  for (final String declaration : declarations) {
    writer.append("    ");
    writer.append(declaration);
  }
}
/**
 * Emits {@code numFrameBits} int variables, each assembled big-endian from
 * four consecutive compressed bytes (a 32-integer frame at numFrameBits
 * bits per value occupies exactly numFrameBits ints of packed data).
 *
 * @param numFrameBits bits per value of the frame (also the int count)
 * @throws IOException if appending to the generated source fails
 */
protected void generateIntValues(final int numFrameBits) throws IOException {
  // i names the generated variable, j is its starting byte offset.
  for (int i = 0, j = 0; i < numFrameBits; i++, j += 4) {
    writer.append("    ");
    writer.append("final int i"+i+" = ((compressedArray[inOffset + "+j+"] & 0xFF) << 24) | ");
    writer.append("((compressedArray[inOffset + "+(j+1)+"] & 0xFF) << 16) | ");
    writer.append("((compressedArray[inOffset + "+(j+2)+"] & 0xFF) << 8) | ");
    writer.append("((compressedArray[inOffset + "+(j+3)+"] & 0xFF));\n");
  }
  writer.append("\n");
}
/**
 * Emits one byte variable per compressed input byte of the frame, reading
 * consecutive bytes starting at {@code inOffset}.
 *
 * @param numFrameBits number of byte variables to declare
 * @throws IOException if appending to the generated source fails
 */
protected void generateByteValues(final int numFrameBits) throws IOException {
  int index = 0;
  while (index < numFrameBits) {
    final String declaration =
        "    final byte i" + index + " = compressedArray[inOffset + " + index + "];\n";
    writer.append(declaration);
    index++;
  }
  writer.append("\n");
}
/**
 * Emits {@code numFrameBits} short variables, each assembled big-endian
 * from two consecutive compressed bytes.
 *
 * @param numFrameBits number of short variables to declare
 * @throws IOException if appending to the generated source fails
 */
protected void generateShortValues(final int numFrameBits) throws IOException {
  int byteOffset = 0;
  for (int i = 0; i < numFrameBits; i++) {
    writer.append("    ");
    writer.append("final short i" + i + " = (short) (((compressedArray[inOffset + " + byteOffset + "] & 0xFF) << 8) | ");
    writer.append("((compressedArray[inOffset + " + (byteOffset + 1) + "] & 0xFF)));\n");
    byteOffset += 2;
  }
  writer.append("\n");
}
/**
 * Emits the shift/mask statements that unpack 32 integers from the int
 * variables declared by {@link #generateIntValues(int)}.
 *
 * @param numFrameBits bits per value, in [1, 32]
 * @throws IOException if appending to the generated source fails
 */
protected void generateInstructions32(final int numFrameBits) throws IOException {
  // For numFrameBits == 32, (1 << 32) wraps to 1 in Java, so mask == 0;
  // the mask == 0 branches below emit the unmasked full-width form.
  final int mask = (1 << numFrameBits) - 1;
  int shift = 32;
  int bytePtr = 0, intPtr = 0;
  while (intPtr != 32) { // while we didn't process 32 integers
    // Emit every value wholly contained in the current int variable.
    while (shift >= numFrameBits) {
      shift -= numFrameBits;
      writer.append("    ");
      if (shift == 0 && mask != 0) {
        writer.append("unCompressedData[outOffset + "+intPtr+"] = i"+bytePtr+" & "+mask+";\n");
      }
      else if (shift == 0 && mask == 0) {
        writer.append("unCompressedData[outOffset + "+intPtr+"] = i"+bytePtr+";\n");
      }
      else if (shift + numFrameBits == 32) {
        // Topmost value in the word: the unsigned shift already clears the upper bits.
        writer.append("unCompressedData[outOffset + "+intPtr+"] = (i"+bytePtr+" >>> "+shift+");\n");
      }
      else {
        writer.append("unCompressedData[outOffset + "+intPtr+"] = (i"+bytePtr+" >>> "+shift+") & "+mask+";\n");
      }
      intPtr++;
    }
    if (shift > 0) {
      // Value straddling two int variables: high part from the current word,
      // low part from the next one.
      writer.append("    ");
      // writer.append("unCompressedData[outOffset + "+intPtr+"] = ((i"+bytePtr+" & "+((1 << shift) - 1)+") << "+(numFrameBits - shift)+")");
      writer.append("unCompressedData[outOffset + "+intPtr+"] = ((i"+bytePtr+" << "+(numFrameBits - shift)+")");
      bytePtr++;
      shift = 32 - (numFrameBits - shift);
      // writer.append(" | (i"+bytePtr+" >>> "+shift+") & "+((1 << (32 - shift)) - 1)+";\n");
      writer.append(" | (i"+bytePtr+" >>> "+shift+")) & "+mask+";\n");
      intPtr++;
    }
    else {
      bytePtr++;
      shift = 32;
    }
  }
}
/**
 * Emits the shift/mask statements that unpack 8 integers from the byte
 * variables declared by {@link #generateByteValues(int)}. Unlike the
 * 16/32-bit variants, a single value may span more than two bytes, which
 * the negative-shift loop below handles.
 *
 * @param numFrameBits bits per value
 * @throws IOException if appending to the generated source fails
 */
protected void generateInstructions8(final int numFrameBits) throws IOException {
  final int mask = (1 << numFrameBits) - 1;
  int shift = 8;
  int bytePtr = 0, intPtr = 0;
  while (intPtr < 8) {
    // Emit every value wholly contained in the current byte variable.
    while (shift >= numFrameBits) {
      shift -= numFrameBits;
      writer.append("    ");
      if (shift == 0 && mask != 0) {
        writer.append("unCompressedData[outOffset + " + intPtr + "] = i" + bytePtr + " & " + mask + ";\n");
      } else if (shift == 0 && mask == 0) {
        writer.append("unCompressedData[outOffset + " + intPtr + "] = i" + bytePtr + ";\n");
      }
      else {
        writer.append("unCompressedData[outOffset + " + intPtr + "] = (i" + bytePtr + " >>> " + shift + ") & " + mask + ";\n");
      }
      intPtr++;
    }
    if (shift > 0) {
      // Value starting in the current byte and continuing into later ones.
      writer.append("    ");
      writer.append("unCompressedData[outOffset + " + intPtr + "] = ((i" + bytePtr + " << " + (numFrameBits - shift) + ")");
      bytePtr++;
      shift = 8 - (numFrameBits - shift);
      if (shift >= 0) {
        // Straddles exactly two bytes; MASK keeps only the bits owned by this value.
        if (mask == 0)
          writer.append(" | (i" + bytePtr + " >>> " + shift + " & " + MASK[8 - shift] + "));\n");
        else
          writer.append(" | (i" + bytePtr + " >>> " + shift + " & " + MASK[8 - shift] + ")) & " + mask + ";\n");
      }
      else {
        // Negative shift: the value spans three or more bytes, so whole
        // intermediate bytes are shifted left before the final partial byte.
        writer.append(" | (i" + bytePtr + " << " + (-shift) + " & " + MASK[8 - shift] + ")");
        while (shift + 8 < 0) {
          writer.append(" | (i" + (++bytePtr) + " << " + (-8 - shift) + " & " + MASK[-shift] + ")");
          shift += 8;
        }
        if (mask == 0)
          writer.append(" | (i" + (bytePtr + 1) + " >>> " + (8 + shift) + " & " + MASK[-shift] + "));\n");
        else
          writer.append(" | (i" + (bytePtr + 1) + " >>> " + (8 + shift) + " & " + MASK[-shift] + ")) & " + mask + ";\n");
      }
      intPtr++;
    } else {
      bytePtr++;
      shift += 8;
    }
  }
}
/**
 * Emits the shift/mask statements that unpack 16 integers from the short
 * variables declared by {@link #generateShortValues(int)}. A value spans
 * at most two shorts (bits per value is at most 16 here).
 *
 * @param numFrameBits bits per value
 * @throws IOException if appending to the generated source fails
 */
protected void generateInstructions16(final int numFrameBits) throws IOException {
  // For numFrameBits == 16, (1 << 16) - 1 is still nonzero; mask == 0 only
  // occurs when this helper is invoked with a full-width frame code.
  final int mask = ( 1 << numFrameBits ) - 1;
  int shift = 16;
  int bytePtr = 0, intPtr = 0;
  while ( intPtr < 16 ) {
    // Emit every value wholly contained in the current short variable.
    while ( shift >= numFrameBits ) {
      shift -= numFrameBits;
      writer.append( "    " );
      if ( shift == 0 && mask != 0 )
      {
        writer.append( "unCompressedData[outOffset + " + intPtr + "] = i" + bytePtr + " & " + mask + ";\n" );
      }
      else if ( shift == 0 && mask == 0 )
      {
        writer.append( "unCompressedData[outOffset + " + intPtr + "] = i" + bytePtr + ";\n" );
      }
      else {
        writer.append( "unCompressedData[outOffset + " + intPtr + "] = (i" + bytePtr + " >>> " + shift + ") & " + mask + ";\n" );
      }
      intPtr++;
    }
    if ( shift > 0 ) {
      // Value straddling two short variables.
      writer.append( "    " );
      writer.append( "unCompressedData[outOffset + " + intPtr + "] = ((i" + bytePtr + " << " + ( numFrameBits - shift ) + ")" );
      bytePtr++;
      shift = 16 - ( numFrameBits - shift );
      if ( shift >= 0 )
      {
        if ( mask == 0 )
          writer.append( " | (i" + bytePtr + " >>> " + shift + " & " + MASK[16 - shift] + "));\n" );
        else
          writer.append( " | (i" + bytePtr + " >>> " + shift + " & " + MASK[16 - shift] + ")) & " + mask + ";\n" );
      }
      else {
        // Negative shift: the straddling value consumes the whole next short
        // plus part of the one after it.
        if ( mask == 0 )
          writer.append( " | (i" + bytePtr + " << " + ( -shift ) + " & " + MASK[16 - shift] + ") | (i" + ( bytePtr + 1 ) + " >>> " + ( 16 + shift ) + " & " + MASK[-shift] + "));\n" );
        else
          writer.append( " | (i" + bytePtr + " << " + ( -shift ) + " & " + MASK[16 - shift] + ") | (i" + ( bytePtr + 1 ) + " >>> " + ( 16 + shift ) + " & " + MASK[-shift] + ")) & " + mask + ";\n" );
      }
      intPtr++;
    }
    else {
      bytePtr++;
      shift += 16;
    }
  }
}
// License and package header prepended verbatim to the generated
// AForFrameDecompressor.java file, including the marker comment that the
// output is generated code and must not be edited by hand.
private static final String FILE_HEADER =
  "/**\n" +
  " * Copyright 2014 National University of Ireland, Galway.\n" +
  " *\n" +
  " * This file is part of the SIREn project. Project and contact information:\n" +
  " *\n" +
  " *  https://github.com/rdelbru/SIREn\n" +
  " *\n" +
  " * Licensed under the Apache License, Version 2.0 (the \"License\");\n" +
  " * you may not use this file except in compliance with the License.\n" +
  " * You may obtain a copy of the License at\n" +
  " *\n" +
  " *  http://www.apache.org/licenses/LICENSE-2.0\n" +
  " *\n" +
  " * Unless required by applicable law or agreed to in writing, software\n" +
  " * distributed under the License is distributed on an \"AS IS\" BASIS,\n" +
  " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" +
  " * See the License for the specific language governing permissions and\n" +
  " * limitations under the License.\n" +
  " */ \n" +
  "/* This program is generated, do not modify. See AForFrameDecompressorGenerator.java */\n" +
  "package org.sindice.siren.index.codecs.block;\n" +
  "\n" +
  "import org.apache.lucene.util.BytesRef;\n" +
  "import org.apache.lucene.util.IntsRef;\n";
/**
 * Entry point: generates the AForFrameDecompressor source file under the
 * project's source tree.
 *
 * @param args ignored
 * @throws IOException if the target file cannot be written
 */
public static void main(final String[] args) throws IOException {
  final File target =
      new File("./src/main/java/org/sindice/siren/index/codecs/block", "AForFrameDecompressor.java");
  new AForFrameDecompressorGenerator().generate(target);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.suggest.document.Completion84PostingsFormat;
import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
import org.apache.lucene.search.suggest.document.CompletionQuery;
import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
import org.apache.lucene.search.suggest.document.SuggestField;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.NumberType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Mapper for completion field. The field values are indexed as a weighted FST for
* fast auto-completion/search-as-you-type functionality.<br>
*
* Type properties:<br>
* <ul>
* <li>"analyzer": "simple", (default)</li>
* <li>"search_analyzer": "simple", (default)</li>
* <li>"preserve_separators" : true, (default)</li>
* <li>"preserve_position_increments" : true (default)</li>
* <li>"min_input_length": 50 (default)</li>
* <li>"contexts" : CONTEXTS</li>
* </ul>
* see {@link ContextMappings#load(Object)} for CONTEXTS<br>
* see {@link #parse(ParseContext)} for acceptable inputs for indexing<br>
* <p>
* This field type constructs completion queries that are run
* against the weighted FST index by the {@link CompletionSuggester}.
* This field can also be extended to add search criteria to suggestions
* for query-time filtering and boosting (see {@link ContextMappings}
*/
public class CompletionFieldMapper extends FieldMapper {

    public static final String CONTENT_TYPE = "completion";

    /**
     * Maximum allowed number of completion contexts in a mapping.
     */
    static final int COMPLETION_CONTEXTS_LIMIT = 10;

    @Override
    public FieldMapper.Builder getMergeBuilder() {
        // Re-create a Builder seeded from this mapper's state so mapping merges
        // go through the normal parameter validation machinery.
        return new Builder(simpleName(), builder.defaultAnalyzer, builder.indexVersionCreated).init(this);
    }

    /** Default Lucene field type settings and parameter defaults for completion fields. */
    public static class Defaults {
        public static final FieldType FIELD_TYPE = new FieldType();
        static {
            FIELD_TYPE.setTokenized(true);
            FIELD_TYPE.setStored(false);
            FIELD_TYPE.setStoreTermVectors(false);
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.freeze();
        }
        public static final boolean DEFAULT_PRESERVE_SEPARATORS = true;
        public static final boolean DEFAULT_POSITION_INCREMENTS = true;
        public static final int DEFAULT_MAX_INPUT_LENGTH = 50;
    }

    /** Names of the sub-fields accepted in the object form of a completion value. */
    public static class Fields {
        // Content field names
        public static final String CONTENT_FIELD_NAME_INPUT = "input";
        public static final String CONTENT_FIELD_NAME_WEIGHT = "weight";
        public static final String CONTENT_FIELD_NAME_CONTEXTS = "contexts";
    }

    // Accessor used by the Parameter definitions below to read back the
    // builder state of an existing mapper instance during merges.
    private static Builder builder(FieldMapper in) {
        return ((CompletionFieldMapper)in).builder;
    }

    /**
     * Builder for {@link CompletionFieldMapper}
     */
    public static class Builder extends FieldMapper.Builder {

        private final Parameter<NamedAnalyzer> analyzer;
        private final Parameter<NamedAnalyzer> searchAnalyzer;
        private final Parameter<Boolean> preserveSeparators = Parameter.boolParam("preserve_separators", false,
            m -> builder(m).preserveSeparators.get(), Defaults.DEFAULT_PRESERVE_SEPARATORS)
            .alwaysSerialize();
        private final Parameter<Boolean> preservePosInc = Parameter.boolParam("preserve_position_increments", false,
            m -> builder(m).preservePosInc.get(), Defaults.DEFAULT_POSITION_INCREMENTS)
            .alwaysSerialize();
        // Serialized as an array of context mapping definitions; omitted entirely when unset.
        private final Parameter<ContextMappings> contexts = new Parameter<>("contexts", false, () -> null,
            (n, c, o) -> ContextMappings.load(o), m -> builder(m).contexts.get())
            .setSerializer((b, n, c) -> {
                if (c == null) {
                    return;
                }
                b.startArray(n);
                c.toXContent(b, ToXContent.EMPTY_PARAMS);
                b.endArray();
            }, Objects::toString);
        private final Parameter<Integer> maxInputLength = Parameter.intParam("max_input_length", true,
            m -> builder(m).maxInputLength.get(), Defaults.DEFAULT_MAX_INPUT_LENGTH)
            .addDeprecatedName("max_input_len")
            .setValidator(Builder::validateInputLength)
            .alwaysSerialize();
        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

        private final NamedAnalyzer defaultAnalyzer;
        private final Version indexVersionCreated;

        private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(Builder.class);

        /**
         * @param name of the completion field to build
         */
        public Builder(String name, NamedAnalyzer defaultAnalyzer, Version indexVersionCreated) {
            super(name);
            this.defaultAnalyzer = defaultAnalyzer;
            this.indexVersionCreated = indexVersionCreated;
            this.analyzer = Parameter.analyzerParam("analyzer", false, m -> builder(m).analyzer.get(), () -> defaultAnalyzer)
                .alwaysSerialize();
            // search_analyzer defaults to the index analyzer when not set explicitly.
            this.searchAnalyzer
                = Parameter.analyzerParam("search_analyzer", true, m -> builder(m).searchAnalyzer.get(), analyzer::getValue);
        }

        private static void validateInputLength(int maxInputLength) {
            if (maxInputLength <= 0) {
                throw new IllegalArgumentException("[max_input_length] must be > 0 but was [" + maxInputLength + "]");
            }
        }

        @Override
        protected List<Parameter<?>> getParameters() {
            return List.of(analyzer, searchAnalyzer, preserveSeparators, preservePosInc, maxInputLength, contexts, meta);
        }

        // Wraps the configured index analyzer in a CompletionAnalyzer carrying
        // the separator / position-increment settings.
        NamedAnalyzer buildAnalyzer() {
            return new NamedAnalyzer(analyzer.get().name(), AnalyzerScope.INDEX,
                new CompletionAnalyzer(analyzer.get(), preserveSeparators.get(), preservePosInc.get()));
        }

        @Override
        public CompletionFieldMapper build(ContentPath contentPath) {
            checkCompletionContextsLimit();
            // The search analyzer is wrapped the same way as the index analyzer.
            NamedAnalyzer completionAnalyzer = new NamedAnalyzer(this.searchAnalyzer.getValue().name(), AnalyzerScope.INDEX,
                new CompletionAnalyzer(this.searchAnalyzer.getValue(), preserveSeparators.getValue(), preservePosInc.getValue()));
            CompletionFieldType ft
                = new CompletionFieldType(buildFullName(contentPath), completionAnalyzer, meta.getValue());
            ft.setContextMappings(contexts.getValue());
            return new CompletionFieldMapper(name, ft,
                multiFieldsBuilder.build(this, contentPath), copyTo.build(), this);
        }

        // Enforces COMPLETION_CONTEXTS_LIMIT: hard error for indices created on
        // or after 8.0, deprecation warning for older index versions.
        private void checkCompletionContextsLimit() {
            if (this.contexts.getValue() != null && this.contexts.getValue().size() > COMPLETION_CONTEXTS_LIMIT) {
                if (indexVersionCreated.onOrAfter(Version.V_8_0_0)) {
                    throw new IllegalArgumentException(
                        "Limit of completion field contexts [" + COMPLETION_CONTEXTS_LIMIT + "] has been exceeded");
                } else {
                    deprecationLogger.deprecate(DeprecationCategory.MAPPINGS, "excessive_completion_contexts",
                        "You have defined more than [" + COMPLETION_CONTEXTS_LIMIT + "] completion contexts" +
                            " in the mapping for field [" + name() + "]. " +
                            "The maximum allowed number of completion contexts in a mapping will be limited to " +
                            "[" + COMPLETION_CONTEXTS_LIMIT + "] starting in version [8.0].");
                }
            }
        }
    }

    public static final Set<String> ALLOWED_CONTENT_FIELD_NAMES = Sets.newHashSet(Fields.CONTENT_FIELD_NAME_INPUT,
        Fields.CONTENT_FIELD_NAME_WEIGHT, Fields.CONTENT_FIELD_NAME_CONTEXTS);

    // Completion fields default to the "simple" analyzer from the index's analyzers.
    public static final TypeParser PARSER
        = new TypeParser((n, c) -> new Builder(n, c.getIndexAnalyzers().get("simple"), c.indexVersionCreated()));

    /** Field type for completion fields; carries the optional context mappings. */
    public static final class CompletionFieldType extends TermBasedFieldType {

        // Lazily created and shared; access is synchronized via postingsFormat().
        private static PostingsFormat postingsFormat;

        private ContextMappings contextMappings = null;

        public CompletionFieldType(String name, NamedAnalyzer searchAnalyzer, Map<String, String> meta) {
            super(name, true, false, false, new TextSearchInfo(Defaults.FIELD_TYPE, null, searchAnalyzer, searchAnalyzer), meta);
        }

        public void setContextMappings(ContextMappings contextMappings) {
            this.contextMappings = contextMappings;
        }

        /**
         * @return true if there are one or more context mappings defined
         * for this field type
         */
        public boolean hasContextMappings() {
            return contextMappings != null;
        }

        /**
         * @return associated context mappings for this field type
         */
        public ContextMappings getContextMappings() {
            return contextMappings;
        }

        /**
         * @return postings format to use for this field-type
         */
        public static synchronized PostingsFormat postingsFormat() {
            if (postingsFormat == null) {
                postingsFormat = new Completion84PostingsFormat();
            }
            return postingsFormat;
        }

        /**
         * Completion prefix query
         */
        public CompletionQuery prefixQuery(Object value) {
            return new PrefixCompletionQuery(getTextSearchInfo().getSearchAnalyzer().analyzer(),
                new Term(name(), indexedValueForSearch(value)));
        }

        /**
         * Completion prefix regular expression query
         */
        public CompletionQuery regexpQuery(Object value, int flags, int maxDeterminizedStates) {
            return new RegexCompletionQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);
        }

        /**
         * Completion prefix fuzzy query
         */
        public CompletionQuery fuzzyQuery(String value, Fuzziness fuzziness, int nonFuzzyPrefixLength,
                                          int minFuzzyPrefixLength, int maxExpansions, boolean transpositions,
                                          boolean unicodeAware) {
            return new FuzzyCompletionQuery(getTextSearchInfo().getSearchAnalyzer().analyzer(),
                new Term(name(), indexedValueForSearch(value)), null,
                fuzziness.asDistance(), transpositions, nonFuzzyPrefixLength, minFuzzyPrefixLength,
                unicodeAware, maxExpansions);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
            if (format != null) {
                throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
            }
            return new ArraySourceValueFetcher(name(), context) {
                @Override
                protected List<?> parseSourceValue(Object value) {
                    // Normalize single source values to a one-element list.
                    if (value instanceof List) {
                        return (List<?>) value;
                    } else {
                        return List.of(value);
                    }
                }
            };
        }
    }

    private final int maxInputLength;
    private final Builder builder;

    public CompletionFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                 MultiFields multiFields, CopyTo copyTo, Builder builder) {
        super(simpleName, mappedFieldType, builder.buildAnalyzer(), multiFields, copyTo);
        this.builder = builder;
        this.maxInputLength = builder.maxInputLength.getValue();
    }

    @Override
    public CompletionFieldType fieldType() {
        return (CompletionFieldType) super.fieldType();
    }

    @Override
    public boolean parsesArrayValue() {
        // parse(ParseContext) consumes whole arrays itself (see START_ARRAY handling).
        return true;
    }

    int getMaxInputLength() {
        return builder.maxInputLength.get();
    }

    /**
     * Parses and indexes inputs
     *
     * Parsing:
     *  Acceptable format:
     *   "STRING" - interpreted as field value (input)
     *   "ARRAY" - each element can be one of "OBJECT" (see below)
     *   "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
     *
     * Indexing:
     *  if context mappings are defined, delegates to {@link ContextMappings#addField(ParseContext.Document, String, String, int, Map)}
     *  else adds inputs as a {@link org.apache.lucene.search.suggest.document.SuggestField}
     */
    @Override
    public void parse(ParseContext context) throws IOException {
        // parse
        XContentParser parser = context.parser();
        Token token = parser.currentToken();
        Map<String, CompletionInputMetadata> inputMap = new HashMap<>(1);
        if (context.externalValueSet()) {
            inputMap = getInputMapFromExternalValue(context);
        } else if (token == Token.VALUE_NULL) { // ignore null values
            return;
        } else if (token == Token.START_ARRAY) {
            while ((token = parser.nextToken()) != Token.END_ARRAY) {
                parse(context, token, parser, inputMap);
            }
        } else {
            parse(context, token, parser, inputMap);
        }

        // index
        for (Map.Entry<String, CompletionInputMetadata> completionInput : inputMap.entrySet()) {
            String input = completionInput.getKey();
            // Blank inputs are skipped but recorded as ignored fields.
            if (input.trim().isEmpty()) {
                context.addIgnoredField(mappedFieldType.name());
                continue;
            }
            // truncate input
            if (input.length() > maxInputLength) {
                int len = maxInputLength;
                // Never cut a surrogate pair in half at the truncation point.
                if (Character.isHighSurrogate(input.charAt(len - 1))) {
                    assert input.length() >= len + 1 && Character.isLowSurrogate(input.charAt(len));
                    len += 1;
                }
                input = input.substring(0, len);
            }
            CompletionInputMetadata metadata = completionInput.getValue();
            if (fieldType().hasContextMappings()) {
                fieldType().getContextMappings().addField(context.doc(), fieldType().name(),
                    input, metadata.weight, metadata.contexts);
            } else {
                context.doc().add(new SuggestField(fieldType().name(), input, metadata.weight));
            }
        }

        createFieldNamesField(context);
        // Forward the parsed values to any multi-fields through an external-value context.
        for (CompletionInputMetadata metadata: inputMap.values()) {
            ParseContext externalValueContext = context.createExternalValueContext(metadata);
            multiFields.parse(this, externalValueContext);
        }
    }

    private Map<String, CompletionInputMetadata> getInputMapFromExternalValue(ParseContext context) {
        Map<String, CompletionInputMetadata> inputMap;
        if (isExternalValueOfClass(context, CompletionInputMetadata.class)) {
            CompletionInputMetadata inputAndMeta = (CompletionInputMetadata) context.externalValue();
            inputMap = Collections.singletonMap(inputAndMeta.input, inputAndMeta);
        } else {
            // Any other external value is indexed verbatim with the default weight of 1.
            String fieldName = context.externalValue().toString();
            inputMap = Collections.singletonMap(fieldName, new CompletionInputMetadata(fieldName, Collections.emptyMap(), 1));
        }
        return inputMap;
    }

    private boolean isExternalValueOfClass(ParseContext context, Class<?> clazz) {
        return context.externalValue().getClass().equals(clazz);
    }

    /**
     * Acceptable inputs:
     *  "STRING" - interpreted as the field value (input)
     *  "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
     */
    private void parse(ParseContext parseContext, Token token,
                       XContentParser parser, Map<String, CompletionInputMetadata> inputMap) throws IOException {
        String currentFieldName = null;
        if (token == Token.VALUE_STRING) {
            inputMap.put(parser.text(), new CompletionInputMetadata(parser.text(), Collections.<String, Set<String>>emptyMap(), 1));
        } else if (token == Token.START_OBJECT) {
            Set<String> inputs = new HashSet<>();
            int weight = 1;
            Map<String, Set<String>> contextsMap = new HashMap<>();
            while ((token = parser.nextToken()) != Token.END_OBJECT) {
                if (token == Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                    if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) {
                        throw new IllegalArgumentException("unknown field name [" + currentFieldName
                            + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES);
                    }
                } else if (currentFieldName != null) {
                    if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
                        // "input" accepts a single string or an array of strings.
                        if (token == Token.VALUE_STRING) {
                            inputs.add(parser.text());
                        } else if (token == Token.START_ARRAY) {
                            while ((token = parser.nextToken()) != Token.END_ARRAY) {
                                if (token == Token.VALUE_STRING) {
                                    inputs.add(parser.text());
                                } else {
                                    throw new IllegalArgumentException("input array must have string values, but was ["
                                        + token.name() + "]");
                                }
                            }
                        } else {
                            throw new IllegalArgumentException("input must be a string or array, but was [" + token.name() + "]");
                        }
                    } else if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
                        final Number weightValue;
                        if (token == Token.VALUE_STRING) {
                            try {
                                weightValue = Long.parseLong(parser.text());
                            } catch (NumberFormatException e) {
                                throw new IllegalArgumentException("weight must be an integer, but was [" + parser.text() + "]");
                            }
                        } else if (token == Token.VALUE_NUMBER) {
                            NumberType numberType = parser.numberType();
                            if (NumberType.LONG != numberType && NumberType.INT != numberType) {
                                throw new IllegalArgumentException("weight must be an integer, but was [" + parser.numberValue() + "]");
                            }
                            weightValue = parser.numberValue();
                        } else {
                            throw new IllegalArgumentException("weight must be a number or string, but was [" + token.name() + "]");
                        }
                        // always parse a long to make sure we don't get overflow
                        if (weightValue.longValue() < 0 || weightValue.longValue() > Integer.MAX_VALUE) {
                            throw new IllegalArgumentException("weight must be in the interval [0..2147483647], but was ["
                                + weightValue.longValue() + "]");
                        }
                        weight = weightValue.intValue();
                    } else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) {
                        if (fieldType().hasContextMappings() == false) {
                            throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().name() + "]");
                        }
                        ContextMappings contextMappings = fieldType().getContextMappings();
                        XContentParser.Token currentToken = parser.currentToken();
                        if (currentToken == XContentParser.Token.START_OBJECT) {
                            ContextMapping<?> contextMapping = null;
                            String fieldName = null;
                            while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                                if (currentToken == XContentParser.Token.FIELD_NAME) {
                                    fieldName = parser.currentName();
                                    contextMapping = contextMappings.get(fieldName);
                                } else {
                                    assert fieldName != null;
                                    assert !contextsMap.containsKey(fieldName);
                                    contextsMap.put(fieldName, contextMapping.parseContext(parseContext, parser));
                                }
                            }
                        } else {
                            throw new IllegalArgumentException("contexts must be an object or an array , but was [" + currentToken + "]");
                        }
                    }
                }
            }
            // Keep the highest weight seen for any duplicate input value.
            for (String input : inputs) {
                if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) {
                    inputMap.put(input, new CompletionInputMetadata(input, contextsMap, weight));
                }
            }
        } else {
            throw new ParsingException(parser.getTokenLocation(), "failed to parse [" + parser.currentName()
                + "]: expected text or object, but got " + token.name());
        }
    }

    /** Immutable holder for one parsed completion input with its weight and contexts. */
    static class CompletionInputMetadata {
        public final String input;
        public final Map<String, Set<String>> contexts;
        public final int weight;

        CompletionInputMetadata(String input, Map<String, Set<String>> contexts, int weight) {
            this.input = input;
            this.contexts = contexts;
            this.weight = weight;
        }

        @Override
        public String toString() {
            return input;
        }
    }

    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        // no-op: indexing happens in parse(ParseContext), which this mapper overrides
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public void doValidate(MappingLookup mappers) {
        // Validate that every context mapping's referenced fields exist in the mapping.
        if (fieldType().hasContextMappings()) {
            for (ContextMapping<?> contextMapping : fieldType().getContextMappings()) {
                contextMapping.validateReferences(builder.indexVersionCreated, s -> mappers.fieldTypes().get(s));
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.FailedToCreateConsumerException;
import org.apache.camel.LoggingLevel;
import org.apache.camel.PollingConsumerPollingStrategy;
import org.apache.camel.Processor;
import org.apache.camel.SuspendableService;
import org.apache.camel.spi.PollingConsumerPollStrategy;
import org.apache.camel.spi.ScheduledPollConsumerScheduler;
import org.apache.camel.spi.UriParam;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ServiceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A useful base class for any consumer which is polling based
*/
public abstract class ScheduledPollConsumer extends DefaultConsumer implements Runnable, SuspendableService, PollingConsumerPollingStrategy {
private static final Logger LOG = LoggerFactory.getLogger(ScheduledPollConsumer.class);
private ScheduledPollConsumerScheduler scheduler;
private ScheduledExecutorService scheduledExecutorService;
// if adding more options then align with ScheduledPollEndpoint#configureScheduledPollConsumerProperties
@UriParam
private boolean startScheduler = true;
@UriParam
private long initialDelay = 1000;
@UriParam
private long delay = 500;
@UriParam
private TimeUnit timeUnit = TimeUnit.MILLISECONDS;
@UriParam
private boolean useFixedDelay = true;
@UriParam
private PollingConsumerPollStrategy pollStrategy = new DefaultPollingConsumerPollStrategy();
@UriParam
private LoggingLevel runLoggingLevel = LoggingLevel.TRACE;
@UriParam
private boolean sendEmptyMessageWhenIdle;
@UriParam
private boolean greedy;
@UriParam
private int backoffMultiplier;
@UriParam
private int backoffIdleThreshold;
@UriParam
private int backoffErrorThreshold;
private Map<String, Object> schedulerProperties;
// state during running
private volatile boolean polling;
private volatile int backoffCounter;
private volatile long idleCounter;
private volatile long errorCounter;
    /**
     * Creates a scheduled poll consumer.
     *
     * @param endpoint  the endpoint to poll
     * @param processor the processor to handle the polled exchanges
     */
    public ScheduledPollConsumer(Endpoint endpoint, Processor processor) {
        super(endpoint, processor);
    }
public ScheduledPollConsumer(Endpoint endpoint, Processor processor, ScheduledExecutorService scheduledExecutorService) {
super(endpoint, processor);
// we have been given an existing thread pool, so we should not manage its lifecycle
// so we should keep shutdownExecutor as false
this.scheduledExecutorService = scheduledExecutorService;
ObjectHelper.notNull(scheduledExecutorService, "scheduledExecutorService");
}
/**
 * Invoked whenever we should be polled.
 * <p/>
 * Logs start/completion at the configured {@link #getRunLoggingLevel()} level and
 * delegates the actual work to {@link #doRun()}.
 */
public void run() {
    // avoid this thread to throw exceptions because the thread pool wont re-schedule a new thread
    try {
        // log starting
        logAtRunLevel("Scheduled task started on: {}");
        // execute scheduled task
        doRun();
        // log completed
        logAtRunLevel("Scheduled task completed on: {}");
    } catch (Error e) {
        // must catch Error, to ensure the task is re-scheduled
        LOG.error("Error occurred during running scheduled task on: " + this.getEndpoint() + ", due: " + e.getMessage(), e);
    }
}

/**
 * Logs the given message at the configured {@link #getRunLoggingLevel()} level,
 * using this consumer's endpoint as the single message argument.
 *
 * @param message the log message, with a single {} placeholder for the endpoint
 */
private void logAtRunLevel(String message) {
    if (LoggingLevel.ERROR == runLoggingLevel) {
        LOG.error(message, this.getEndpoint());
    } else if (LoggingLevel.WARN == runLoggingLevel) {
        LOG.warn(message, this.getEndpoint());
    } else if (LoggingLevel.INFO == runLoggingLevel) {
        LOG.info(message, this.getEndpoint());
    } else if (LoggingLevel.DEBUG == runLoggingLevel) {
        LOG.debug(message, this.getEndpoint());
    } else {
        LOG.trace(message, this.getEndpoint());
    }
}
/**
 * Performs one poll cycle: honors suspension and backoff, invokes the
 * {@link PollingConsumerPollStrategy} begin/commit/rollback callbacks around
 * {@link #poll()}, and updates the idle/error counters used for backoff.
 * Called from {@link #run()} on the scheduler thread.
 */
private void doRun() {
    if (isSuspended()) {
        LOG.trace("Cannot start to poll: {} as its suspended", this.getEndpoint());
        return;
    }
    // should we backoff if its enabled, and either the idle or error counter is > the threshold
    // NOTE: parentheses are required so the idle/error check only applies when backoff is enabled;
    // without them, errorCounter >= threshold would trigger even when backoffMultiplier == 0
    if (backoffMultiplier > 0
        // either idle or error threshold could be not in use, so check for that and use MAX_VALUE if not in use
        && (idleCounter >= (backoffIdleThreshold > 0 ? backoffIdleThreshold : Integer.MAX_VALUE)
            || errorCounter >= (backoffErrorThreshold > 0 ? backoffErrorThreshold : Integer.MAX_VALUE))) {
        if (backoffCounter++ < backoffMultiplier) {
            // yes we should backoff
            if (idleCounter > 0) {
                LOG.debug("doRun() backoff due subsequent {} idles (backoff at {}/{})", new Object[]{idleCounter, backoffCounter, backoffMultiplier});
            } else {
                LOG.debug("doRun() backoff due subsequent {} errors (backoff at {}/{})", new Object[]{errorCounter, backoffCounter, backoffMultiplier});
            }
            return;
        } else {
            // we are finished with backoff so reset counters
            idleCounter = 0;
            errorCounter = 0;
            backoffCounter = 0;
            LOG.trace("doRun() backoff finished, resetting counters.");
        }
    }
    int retryCounter = -1;
    boolean done = false;
    Throwable cause = null;
    int polledMessages = 0;
    while (!done) {
        try {
            cause = null;
            // eager assume we are done
            done = true;
            if (isPollAllowed()) {
                if (retryCounter == -1) {
                    LOG.trace("Starting to poll: {}", this.getEndpoint());
                } else {
                    LOG.debug("Retrying attempt {} to poll: {}", retryCounter, this.getEndpoint());
                }
                // mark we are polling which should also include the begin/poll/commit
                polling = true;
                try {
                    boolean begin = pollStrategy.begin(this, getEndpoint());
                    if (begin) {
                        retryCounter++;
                        polledMessages = poll();
                        LOG.trace("Polled {} messages", polledMessages);
                        if (polledMessages == 0 && isSendEmptyMessageWhenIdle()) {
                            // send an "empty" exchange
                            processEmptyMessage();
                        }
                        pollStrategy.commit(this, getEndpoint(), polledMessages);
                        // greedy mode: poll again right away if the last poll yielded messages
                        if (polledMessages > 0 && isGreedy()) {
                            done = false;
                            retryCounter = -1;
                            LOG.trace("Greedy polling after processing {} messages", polledMessages);
                        }
                    } else {
                        LOG.debug("Cannot begin polling as pollStrategy returned false: {}", pollStrategy);
                    }
                } finally {
                    polling = false;
                }
            }
            LOG.trace("Finished polling: {}", this.getEndpoint());
        } catch (Exception e) {
            try {
                // the poll strategy decides whether to retry the poll or give up
                boolean retry = pollStrategy.rollback(this, getEndpoint(), retryCounter, e);
                if (retry) {
                    // do not set cause as we retry
                    done = false;
                } else {
                    cause = e;
                    done = true;
                }
            } catch (Throwable t) {
                // the rollback callback itself failed; give up with that failure as the cause
                cause = t;
                done = true;
            }
        } catch (Throwable t) {
            cause = t;
            done = true;
        }
        if (cause != null && isRunAllowed()) {
            // let exception handler deal with the caused exception
            // but suppress this during shutdown as the logs may get flooded with exceptions during shutdown/forced shutdown
            try {
                getExceptionHandler().handleException("Consumer " + this + " failed polling endpoint: " + getEndpoint()
                    + ". Will try again at next poll", cause);
            } catch (Throwable e) {
                LOG.warn("Error handling exception. This exception will be ignored.", e);
            }
        }
    }
    // update the counters that feed the backoff logic at the top of this method
    if (cause != null) {
        idleCounter = 0;
        errorCounter++;
    } else {
        idleCounter = polledMessages == 0 ? idleCounter + 1 : 0;
        errorCounter = 0;
    }
    LOG.trace("doRun() done with idleCounter={}, errorCounter={}", idleCounter, errorCounter);
    // avoid this thread to throw exceptions because the thread pool wont re-schedule a new thread
}
/**
 * No messages to poll so send an empty message instead.
 *
 * @throws Exception is thrown if error processing the empty message.
 */
protected void processEmptyMessage() throws Exception {
    final Exchange empty = getEndpoint().createExchange();
    log.debug("Sending empty message as there were no messages from polling: {}", this.getEndpoint());
    getProcessor().process(empty);
}
// Properties
// -------------------------------------------------------------------------

/**
 * Whether a poll may be performed right now: the consumer must be allowed
 * to run and must not be suspended.
 */
protected boolean isPollAllowed() {
    boolean runAllowed = isRunAllowed();
    return runAllowed && !isSuspended();
}
/**
 * Whether polling is currently in progress
 * (set around the begin/poll/commit sequence in doRun()).
 */
protected boolean isPolling() {
    return polling;
}
public ScheduledPollConsumerScheduler getScheduler() {
    return scheduler;
}
/**
 * Sets a custom scheduler to use for scheduling running this task (poll).
 *
 * @param scheduler the custom scheduler
 */
public void setScheduler(ScheduledPollConsumerScheduler scheduler) {
    this.scheduler = scheduler;
}
public Map<String, Object> getSchedulerProperties() {
    return schedulerProperties;
}
/**
 * Additional properties to configure on the custom scheduler.
 */
public void setSchedulerProperties(Map<String, Object> schedulerProperties) {
    this.schedulerProperties = schedulerProperties;
}
/**
 * The delay before the first poll, in the configured {@link #getTimeUnit()}.
 */
public long getInitialDelay() {
    return initialDelay;
}
public void setInitialDelay(long initialDelay) {
    this.initialDelay = initialDelay;
}
/**
 * The delay between polls, in the configured {@link #getTimeUnit()}.
 */
public long getDelay() {
    return delay;
}
public void setDelay(long delay) {
    this.delay = delay;
}
public TimeUnit getTimeUnit() {
    return timeUnit;
}
/**
 * Sets the time unit to use.
 * <p/>
 * Notice that both {@link #getDelay()} and {@link #getInitialDelay()} are using
 * the same time unit. So if you change this value, then take into account that the
 * default value of {@link #getInitialDelay()} is 1000. So you may need to adjust this value accordingly.
 *
 * @param timeUnit the time unit.
 */
public void setTimeUnit(TimeUnit timeUnit) {
    this.timeUnit = timeUnit;
}
/**
 * Whether fixed-delay (as opposed to fixed-rate) scheduling is used.
 */
public boolean isUseFixedDelay() {
    return useFixedDelay;
}
public void setUseFixedDelay(boolean useFixedDelay) {
    this.useFixedDelay = useFixedDelay;
}
/**
 * The logging level used when logging the start/completion of each scheduled run.
 */
public LoggingLevel getRunLoggingLevel() {
    return runLoggingLevel;
}
public void setRunLoggingLevel(LoggingLevel runLoggingLevel) {
    this.runLoggingLevel = runLoggingLevel;
}
public PollingConsumerPollStrategy getPollStrategy() {
    return pollStrategy;
}
/**
 * Sets the strategy invoked around each poll (begin/commit/rollback).
 */
public void setPollStrategy(PollingConsumerPollStrategy pollStrategy) {
    this.pollStrategy = pollStrategy;
}
public boolean isStartScheduler() {
    return startScheduler;
}
/**
 * Sets whether the scheduler should be started when this consumer starts.
 * <p/>
 * This option is default true.
 *
 * @param startScheduler whether to start scheduler
 */
public void setStartScheduler(boolean startScheduler) {
    this.startScheduler = startScheduler;
}
/**
 * Sets whether an "empty" exchange should be sent when a poll yields no messages.
 */
public void setSendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle) {
    this.sendEmptyMessageWhenIdle = sendEmptyMessageWhenIdle;
}
public boolean isSendEmptyMessageWhenIdle() {
    return sendEmptyMessageWhenIdle;
}
public boolean isGreedy() {
    return greedy;
}
/**
 * If greedy then a poll is executed immediate after a previous poll that polled 1 or more messages.
 */
public void setGreedy(boolean greedy) {
    this.greedy = greedy;
}
/**
 * The current backoff counter (number of polls skipped in the current backoff period).
 */
public int getBackoffCounter() {
    return backoffCounter;
}
public int getBackoffMultiplier() {
    return backoffMultiplier;
}
/**
 * Sets how many subsequent polls to skip once the idle or error threshold is hit.
 * A value of 0 (the default) disables backoff.
 */
public void setBackoffMultiplier(int backoffMultiplier) {
    this.backoffMultiplier = backoffMultiplier;
}
public int getBackoffIdleThreshold() {
    return backoffIdleThreshold;
}
/**
 * Sets the number of subsequent idle polls that triggers backoff (0 = not in use).
 */
public void setBackoffIdleThreshold(int backoffIdleThreshold) {
    this.backoffIdleThreshold = backoffIdleThreshold;
}
public int getBackoffErrorThreshold() {
    return backoffErrorThreshold;
}
/**
 * Sets the number of subsequent failed polls that triggers backoff (0 = not in use).
 */
public void setBackoffErrorThreshold(int backoffErrorThreshold) {
    this.backoffErrorThreshold = backoffErrorThreshold;
}
public ScheduledExecutorService getScheduledExecutorService() {
    return scheduledExecutorService;
}
/**
 * Whether the scheduler has been started.
 * <p/>
 * The scheduler can be started with the {@link #startScheduler()} method.
 *
 * @return <tt>true</tt> if started, <tt>false</tt> if not.
 */
public boolean isSchedulerStarted() {
    return scheduler.isSchedulerStarted();
}
/**
 * Sets a custom shared {@link ScheduledExecutorService} to use as thread pool
 * <p/>
 * <b>Notice: </b> When using a custom thread pool, then the lifecycle of this thread
 * pool is not controlled by this consumer (eg this consumer will not start/stop the thread pool
 * when the consumer is started/stopped etc.)
 *
 * @param scheduledExecutorService the custom thread pool to use
 */
public void setScheduledExecutorService(ScheduledExecutorService scheduledExecutorService) {
    this.scheduledExecutorService = scheduledExecutorService;
}
// Implementation methods
// -------------------------------------------------------------------------
/**
 * The polling method which is invoked periodically to poll this consumer.
 * Implemented by concrete consumers to do the actual work of one poll.
 *
 * @return number of messages polled, will be <tt>0</tt> if no message was polled at all.
 * @throws Exception can be thrown if an exception occurred during polling
 */
protected abstract int poll() throws Exception;
/**
 * Starts this consumer: validates the backoff configuration, creates a default
 * scheduler if none was set, schedules the poll task, binds matching consumer and
 * user-supplied properties onto the scheduler, and (optionally) starts the scheduler.
 */
@Override
protected void doStart() throws Exception {
    super.doStart();
    // validate that if backoff multiplier is in use, the threshold values is set correctly
    if (backoffMultiplier > 0) {
        if (backoffIdleThreshold <= 0 && backoffErrorThreshold <= 0) {
            throw new IllegalArgumentException("backoffIdleThreshold and/or backoffErrorThreshold must be configured to a positive value when using backoffMultiplier");
        }
        LOG.debug("Using backoff[multiplier={}, idleThreshold={}, errorThreshold={}] on {}", new Object[]{backoffMultiplier, backoffIdleThreshold, backoffErrorThreshold, getEndpoint()});
    }
    // fall back to the default scheduler when no custom one was provided
    if (scheduler == null) {
        scheduler = new DefaultScheduledPollConsumerScheduler();
    }
    scheduler.setCamelContext(getEndpoint().getCamelContext());
    scheduler.onInit(this);
    scheduler.scheduleTask(this);
    // configure scheduler with options from this consumer
    Map<String, Object> properties = new HashMap<String, Object>();
    IntrospectionSupport.getProperties(this, properties, null);
    IntrospectionSupport.setProperties(getEndpoint().getCamelContext().getTypeConverter(), scheduler, properties);
    if (schedulerProperties != null && !schedulerProperties.isEmpty()) {
        // need to use a copy in case the consumer is restarted so we keep the properties
        Map<String, Object> copy = new HashMap<String, Object>(schedulerProperties);
        IntrospectionSupport.setProperties(getEndpoint().getCamelContext().getTypeConverter(), scheduler, copy);
        // leftover entries could not be bound to the scheduler -> fail fast with details
        if (copy.size() > 0) {
            throw new FailedToCreateConsumerException(getEndpoint(), "There are " + copy.size()
                + " scheduler parameters that couldn't be set on the endpoint."
                + " Check the uri if the parameters are spelt correctly and that they are properties of the endpoint."
                + " Unknown parameters=[" + copy + "]");
        }
    }
    ObjectHelper.notNull(scheduler, "scheduler", this);
    ObjectHelper.notNull(pollStrategy, "pollStrategy", this);
    ServiceHelper.startService(scheduler);
    if (isStartScheduler()) {
        startScheduler();
    }
}
/**
 * Starts the scheduler.
 * <p/>
 * If the scheduler is already started, then this is a noop method call.
 */
public void startScheduler() {
    // delegates to the scheduler set up in doStart()
    scheduler.startScheduler();
}
/**
 * Stops this consumer: cancels the scheduled task, stops and shuts down the
 * scheduler service, and resets the backoff state.
 */
@Override
protected void doStop() throws Exception {
    // cancel the task before shutting down the scheduler service
    scheduler.unscheduleTask();
    ServiceHelper.stopAndShutdownServices(scheduler);
    // clear counters
    backoffCounter = 0;
    idleCounter = 0;
    errorCounter = 0;
    super.doStop();
}
/**
 * Shuts down this consumer, making sure the scheduler service is stopped
 * and shut down as well.
 */
@Override
protected void doShutdown() throws Exception {
    ServiceHelper.stopAndShutdownServices(scheduler);
    super.doShutdown();
}
/**
 * Suspends this consumer. Intentionally a no-op: suspension is honored by
 * the isSuspended() check at the start of doRun() instead.
 */
@Override
protected void doSuspend() throws Exception {
    // dont stop/cancel the future task since we just check in the run method
}
/**
 * Callback from {@link PollingConsumerPollingStrategy} when this consumer
 * is initialized; forces the scheduler to start together with the consumer.
 */
@Override
public void onInit() throws Exception {
    // make sure the scheduler is started
    startScheduler = true;
}
/**
 * Callback from {@link PollingConsumerPollingStrategy} before an on-demand poll:
 * resumes (or starts) this consumer and returns the timeout to use.
 *
 * @param timeout the suggested timeout
 * @return the timeout, guaranteed to be at least one poll delay
 */
@Override
public long beforePoll(long timeout) throws Exception {
    LOG.trace("Before poll {}", getEndpoint());
    // resume or start our self
    if (!ServiceHelper.resumeService(this)) {
        ServiceHelper.startService(this);
    }
    // ensure at least timeout is as long as one poll delay
    return Math.max(timeout, getDelay());
}
/**
 * Callback from {@link PollingConsumerPollingStrategy} after an on-demand poll:
 * suspends (or stops) this consumer again.
 */
@Override
public void afterPoll() throws Exception {
    LOG.trace("After poll {}", getEndpoint());
    // suspend or stop our self
    if (!ServiceHelper.suspendService(this)) {
        ServiceHelper.stopService(this);
    }
}
}
| |
package etri.sdn.controller.module.statemanager;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.LinkedList;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.MappingJsonFactory;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.module.SimpleModule;
import org.codehaus.jackson.type.TypeReference;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.projectfloodlight.openflow.protocol.OFAggregateStatsRequest;
import org.projectfloodlight.openflow.protocol.OFDescStatsReply;
import org.projectfloodlight.openflow.protocol.OFFactories;
import org.projectfloodlight.openflow.protocol.OFFactory;
import org.projectfloodlight.openflow.protocol.OFFlowStatsEntry;
import org.projectfloodlight.openflow.protocol.OFFlowStatsReply;
import org.projectfloodlight.openflow.protocol.OFFlowStatsRequest;
import org.projectfloodlight.openflow.protocol.OFPortStatsEntry;
import org.projectfloodlight.openflow.protocol.OFPortStatsReply;
import org.projectfloodlight.openflow.protocol.OFPortStatsRequest;
import org.projectfloodlight.openflow.protocol.OFStatsReply;
import org.projectfloodlight.openflow.protocol.match.Match;
import org.projectfloodlight.openflow.types.OFGroup;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.TableId;
import org.projectfloodlight.openflow.util.HexString;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.Restlet;
import org.restlet.data.MediaType;
import org.restlet.data.Method;
import etri.sdn.controller.OFModel;
import etri.sdn.controller.module.linkdiscovery.PrettyLink;
import etri.sdn.controller.protocol.OFProtocol;
import etri.sdn.controller.protocol.io.IOFSwitch;
import etri.sdn.controller.protocol.rest.serializer.ModuleListSerializerModule;
import etri.sdn.controller.protocol.rest.serializer.OFFeaturesReplySerializerModule;
import etri.sdn.controller.protocol.rest.serializer.OFFlowStatisticsReplySerializerModule;
import etri.sdn.controller.protocol.rest.serializer.OFTypeSerializerModule;
import etri.sdn.controller.util.StackTrace;
/**
* Model that represents the internal data of {@link OFMStateManager}.
*
* @author bjlee
*
*/
public class State extends OFModel {
// owning module; used to reach the controller and cached statistics
private OFMStateManager manager;
// creation time of this State instance (epoch millis)
private long timeInitiated;
// JVM total memory (bytes) captured at creation time
private long totalMemory;
// OpenFlow protocol helper used to query switches for statistics
private OFProtocol protocol;
/**
 * Custom Serializer for FEATURES_REPLY message.
 * This is used to handle the REST URI /wm/core/switch/{switchid}/features/json.
 */
private OFFeaturesReplySerializerModule features_reply_module;
/**
 * list of REST APIs
 */
private RESTApi[] apis;
/**
 * Create the State instance.
 *
 * @param manager reference to the OFMStateManager module.
 */
public State(OFMStateManager manager) {
    this.manager = manager;
    // capture creation time and current JVM total memory for later reporting
    this.timeInitiated = Calendar.getInstance().getTimeInMillis();
    this.totalMemory = Runtime.getRuntime().totalMemory();
    this.protocol = (OFProtocol) manager.getController().getProtocol();
    this.features_reply_module = new OFFeaturesReplySerializerModule(this.protocol);
    // build the REST handler table last, once all fields above are set
    initRestApis();
}
/**
* Initialize REST API list.
*/
private void initRestApis() {
/**
* Array of RESTApi objects.
* Each objects represent a REST call handler routine bound to a specific URI.
*/
RESTApi[] tmp = {
/**
* This object is to implement a REST handler routine for retrieving
* all switch information
*/
new RESTApi(
"/wm/core/controller/switches/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
JsonGenerator g = null;
try {
g = f.createJsonGenerator(sWriter);
g.writeStartArray();
for ( IOFSwitch sw : manager.getController().getSwitches() ) {
g.writeStartObject();
g.writeFieldName("dpid");
g.writeString(HexString.toHexString(sw.getId()));
g.writeFieldName("inetAddress");
g.writeString(sw.getConnection().getClient().getRemoteAddress().toString());
g.writeFieldName("connectedSince");
g.writeNumber(sw.getConnectedSince().getTime());
g.writeEndObject();
}
g.writeEndArray();
g.close();
} catch (IOException e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
/**
* This object is to implement a REST handler routine
* for retrieving switch aggregate flow statistics
*/
new RESTApi(
"/wm/core/switch/{switchid}/aggregate/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw == null ) {
return; // switch is not completely set up.
}
OFFactory fac = OFFactories.getFactory(sw.getVersion());
OFAggregateStatsRequest.Builder req = fac.buildAggregateStatsRequest();
Match match = fac.matchWildcardAll();
req.setMatch(match);
req.setOutPort(OFPort.ANY /* NONE for 1.0 */);
try {
// this should be fixed to accept OFGroup object in the further release of Loxigen.
req.setOutGroup(OFGroup.ANY);
req.setTableId(TableId.ALL);
} catch ( UnsupportedOperationException u ) {
// does nothing.
}
List<OFStatsReply> reply = protocol.getSwitchStatistics(sw, req.build());
HashMap<String, List<OFStatsReply>> output = new HashMap<String, List<OFStatsReply>>();
if ( reply != null && ! reply.isEmpty() ) {
output.put(switchIdStr, reply);
}
// create an object mapper.
ObjectMapper om = new ObjectMapper();
om.registerModule(type_module);
try {
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(output);
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
}
}
),
/**
* This object is to implement a REST handler routine
* for retrieving switch aggregate flow statistics
*/
new RESTApi(
"/wm/core/switch/{switchid}/aggregateflow/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
HashMap<String, Object> rr = new HashMap<String, Object>();
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw != null ) {
rr.put("dpid", switchIdStr);
rr.put("aggregate", ((LinkedList<OFStatsReply>) manager.getAggregateFlow(switchId)).pollFirst());
}
// create an object mapper.
ObjectMapper om = new ObjectMapper();
// om.registerModule(flow_statistics_reply_module);
om.registerModule(type_module);
try {
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(rr);
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
}
}
),
/**
* This is to implement a REST handler
* for retrieving switch description.
*/
new RESTApi(
"/wm/core/switch/{switchid}/desc/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw == null ) {
return; // switch is not completely set up.
}
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
JsonGenerator g = null;
OFDescStatsReply desc = protocol.getSwitchInformation(sw).getDescStatsReply();
try {
g = f.createJsonGenerator(sWriter);
g.writeStartObject();
g.writeFieldName(HexString.toHexString(sw.getId()));
g.writeStartArray();
g.writeStartObject();
g.writeFieldName("datapathDescription");
g.writeString( desc!=null ? desc.getDpDesc() : "-" );
g.writeFieldName("hardwareDescription");
g.writeString( desc!=null ? desc.getHwDesc() : "-" );
g.writeFieldName("manufacturerDescription");
g.writeString( desc!=null ? desc.getMfrDesc() : "-" );
g.writeFieldName("serialNumber");
g.writeString( desc!=null ? desc.getSerialNum() : "-" );
g.writeFieldName("softwareDescription");
g.writeString( desc!=null ? desc.getSwDesc() : "-" );
g.writeEndObject();
g.writeEndArray();
g.writeEndObject();
g.close();
} catch (IOException e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
/**
* This is to implement a REST handler
* for retrieving switch description.
*/
new RESTApi(
"/wm/core/switch/{switchid}/description/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw == null ) {
return; // switch is not completely set up.
}
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
JsonGenerator g = null;
OFDescStatsReply desc = protocol.getSwitchInformation(sw).getDescStatsReply();
try {
g = f.createJsonGenerator(sWriter);
g.writeStartObject();
g.writeStringField("dpid", switchIdStr);
g.writeFieldName("desciption");
g.writeStartObject();
g.writeFieldName("datapathDescription");
g.writeString( desc!=null ? desc.getDpDesc() : "-" );
g.writeFieldName("hardwareDescription");
g.writeString( desc!=null ? desc.getHwDesc() : "-" );
g.writeFieldName("manufacturerDescription");
g.writeString( desc!=null ? desc.getMfrDesc() : "-" );
g.writeFieldName("serialNumber");
g.writeString( desc!=null ? desc.getSerialNum() : "-" );
g.writeFieldName("softwareDescription");
g.writeString( desc!=null ? desc.getSwDesc() : "-" );
g.writeEndObject();
// g.writeEndArray();
g.writeEndObject();
g.close();
} catch (IOException e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
/**
* This object is to implement a REST handler
* for retrieving switch port information (all ports)
*/
new RESTApi(
"/wm/core/switch/{switchid}/port/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw == null ) { //switch is not ready, return null list
try {
ObjectMapper om = new ObjectMapper();
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(new HashMap<String, Object>());
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
}
List<OFPortStatsEntry> resultValues = new java.util.LinkedList<OFPortStatsEntry>();
OFPortStatsRequest.Builder req = OFFactories.getFactory(sw.getVersion()).buildPortStatsRequest();
req.setPortNo(OFPort.ANY /* NONE for 1.0 */);
List<OFStatsReply> reply = protocol.getSwitchStatistics(sw, req.build());
for ( OFStatsReply s : reply ) {
if ( s instanceof OFPortStatsReply ) {
resultValues.addAll( ((OFPortStatsReply)s).getEntries() );
OFMStateManager.logger.debug("OFPortStatsReply Entries={}", resultValues);
}
}
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
try {
JsonGenerator g = f.createJsonGenerator(sWriter);
g.writeStartObject();
g.writeFieldName(switchIdStr);
g.writeStartArray();
for ( OFPortStatsEntry entry : resultValues ) {
g.writeStartObject();
try {
g.writeNumberField("portNumber", entry.getPortNo().getPortNumber());
g.writeNumberField("transmitBytes", entry.getTxBytes().getValue());
g.writeNumberField("receiveBytes", entry.getRxBytes().getValue());
g.writeNumberField("transmitPackets", entry.getTxPackets().getValue());
g.writeNumberField("receivePackets", entry.getRxPackets().getValue());
g.writeNumberField("transmitDropped", entry.getTxDropped().getValue());
g.writeNumberField("receiveDropped", entry.getRxDropped().getValue());
g.writeNumberField("transmitErrors", entry.getTxErrors().getValue());
g.writeNumberField("receiveErrors", entry.getRxErrors().getValue());
g.writeNumberField("receiveFrameErrors", entry.getRxFrameErr().getValue());
g.writeNumberField("receiveOverErrors", entry.getRxOverErr().getValue());
g.writeNumberField("receiveCrcErros", entry.getRxCrcErr().getValue());
g.writeNumberField("collisions", entry.getCollisions().getValue());
g.writeNumberField("durationSec", entry.getDurationSec());
g.writeNumberField("durationNSec", entry.getDurationNsec());
} catch ( UnsupportedOperationException u ) {
// does nothing.
}
g.writeEndObject();
}
g.writeEndArray();
g.writeEndObject();
g.close();
} catch (IOException e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
/**
* This object is to implement a REST handler
* for retrieving switch port information (all ports)
*/
new RESTApi(
"/wm/core/switch/{switchid}/ports/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw == null ) { //switch is not ready, return null list
try {
ObjectMapper om = new ObjectMapper();
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(new HashMap<String, Object>());
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
// response.setEntity(new String(""), MediaType.APPLICATION_JSON);
// return; // switch is not completely set up.
}
List<OFPortStatsEntry> resultValues = new java.util.LinkedList<OFPortStatsEntry>();
OFPortStatsRequest.Builder req = OFFactories.getFactory(sw.getVersion()).buildPortStatsRequest();
req.setPortNo(OFPort.ANY /* NONE for 1.0 */);
List<OFStatsReply> reply = protocol.getSwitchStatistics(sw, req.build());
for ( OFStatsReply s : reply ) {
if ( s instanceof OFPortStatsReply ) {
resultValues.addAll( ((OFPortStatsReply)s).getEntries() );
OFMStateManager.logger.debug("OFPortStatsReply Entries={}", resultValues);
}
}
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
try {
JsonGenerator g = f.createJsonGenerator(sWriter);
g.writeStartObject();
g.writeStringField("dpid", switchIdStr);
g.writeFieldName("ports");
g.writeStartArray();
for ( OFPortStatsEntry entry : resultValues ) {
g.writeStartObject();
try {
g.writeNumberField("portNumber", entry.getPortNo().getPortNumber());
g.writeNumberField("transmitBytes", entry.getTxBytes().getValue());
g.writeNumberField("receiveBytes", entry.getRxBytes().getValue());
g.writeNumberField("transmitPackets", entry.getTxPackets().getValue());
g.writeNumberField("receivePackets", entry.getRxPackets().getValue());
g.writeNumberField("transmitDropped", entry.getTxDropped().getValue());
g.writeNumberField("receiveDropped", entry.getRxDropped().getValue());
g.writeNumberField("transmitErrors", entry.getTxErrors().getValue());
g.writeNumberField("receiveErrors", entry.getRxErrors().getValue());
g.writeNumberField("receiveFrameErrors", entry.getRxFrameErr().getValue());
g.writeNumberField("receiveOverErrors", entry.getRxOverErr().getValue());
g.writeNumberField("receiveCrcErros", entry.getRxCrcErr().getValue());
g.writeNumberField("collisions", entry.getCollisions().getValue());
g.writeNumberField("durationSec", entry.getDurationSec());
g.writeNumberField("durationNSec", entry.getDurationNsec());
} catch ( UnsupportedOperationException u ) {
// does nothing.
}
g.writeEndObject();
}
g.writeEndArray();
g.writeEndObject();
g.close();
} catch (IOException e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
/**
* This object is to implement a REST handler
* to retrieve switch feature (FEATURES_REPLY)
*/
new RESTApi(
"/wm/core/switch/{switchid}/features/json",
// this API implementation is refactored into a separate class.
new RESTFeaturesApi( protocol, manager, Arrays.<SimpleModule>asList(features_reply_module) )
),
/**
* This object is to implement a REST handler
* to retrieve FLOW_STATISTICS_REPLY message content
*/
new RESTApi(
"/wm/core/switch/{switchid}/flow/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
if ( sw == null ) {
return; // switch is not completely set up.
}
OFFactory fac = OFFactories.getFactory(sw.getVersion());
HashMap<String, List<OFFlowStatsEntry>> result =
new HashMap<String, List<OFFlowStatsEntry>>();
List<OFFlowStatsEntry> resultValues =
new java.util.LinkedList<OFFlowStatsEntry>();
result.put(switchIdStr, resultValues);
OFFlowStatsRequest.Builder req = fac.buildFlowStatsRequest();
req
.setMatch( fac.matchWildcardAll() )
.setOutPort( OFPort.ANY /* NONE for 1.0*/ );
try {
req
.setOutGroup(OFGroup.ANY)
.setTableId(TableId.ALL);
} catch ( UnsupportedOperationException u ) {}
try {
List<OFStatsReply> reply = protocol.getSwitchStatistics(sw, req.build());
for ( OFStatsReply s : reply ) {
if ( s instanceof OFFlowStatsReply ) {
resultValues.addAll( ((OFFlowStatsReply)s).getEntries() );
}
}
} catch ( Exception e ) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
// create an object mapper.
ObjectMapper om = new ObjectMapper();
om.registerModule(flow_statistics_reply_module);
om.registerModule(type_module);
try {
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(result);
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
}
}
),
/**
 * REST handler returning the cached FLOW_STATISTICS_REPLY content
 * (dpid plus flow list) for one switch as a JSON object.
 */
new RESTApi(
    "/wm/core/switch/{switchid}/flows/json",
    new Restlet() {
        @Override
        public void handle(Request request, Response response) {
            String dpidString = (String) request.getAttributes().get("switchid");
            Long dpid = HexString.toLong(dpidString);

            // Unknown or disconnected switch -> empty JSON object ("{}").
            HashMap<String, Object> payload = new HashMap<String, Object>();
            IOFSwitch target = manager.getController().getSwitch(dpid);
            if (target != null) {
                payload.put("dpid", dpidString);
                payload.put("flows", manager.getFlows(dpid));
            }

            // Mapper with the custom flow-stats / OF-type serializers.
            ObjectMapper jsonMapper = new ObjectMapper();
            jsonMapper.registerModule(flow_statistics_reply_module);
            jsonMapper.registerModule(type_module);
            try {
                response.setEntity(jsonMapper.writeValueAsString(payload), MediaType.APPLICATION_JSON);
            } catch (Exception e) {
                OFMStateManager.logger.error("error={}", StackTrace.of(e));
            }
        }
    }
),
/**
 * This object is to implement a REST handler
 * to retrieve FLOW_STATISTICS_REPLY message content for filtered by src_mac and dst_mac
 */
new RESTApi(
"/wm/core/switch/{switchid}/flows/match/srcmac/{src_mac}/dstmac/{dst_mac}/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
// Path attributes {switchid}, {src_mac}, {dst_mac} are hex strings
// parsed into longs via HexString.toLong.
String switchIdStr = (String) request.getAttributes().get("switchid");
Long srcMac = HexString.toLong((String) request.getAttributes().get("src_mac"));
Long dstMac = HexString.toLong((String) request.getAttributes().get("dst_mac"));
Long switchId = HexString.toLong(switchIdStr);
IOFSwitch sw = manager.getController().getSwitch(switchId);
HashMap<String, Object> rr = new HashMap<String, Object>();
// result.put(switchIdStr, manager.getFlows(switchId, srcMac, dstMac));
// Unknown or disconnected switch -> empty JSON object ("{}") is returned.
if ( sw != null ) {
rr.put("dpid", switchIdStr);
rr.put("flows", manager.getFlows(switchId, srcMac, dstMac));
}
// create an object mapper.
ObjectMapper om = new ObjectMapper();
// Custom serializers for flow-stats entries and OF types.
om.registerModule(flow_statistics_reply_module);
om.registerModule(type_module);
try {
// String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(result);
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(rr);
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
// Serialization failure: log and send no entity.
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
}
}
),
/**
 * This object is to implement a REST handler
 * to retrieve FLOW_STATISTICS_REPLY message content filtered by an OpenFlow
 * match, given as a JSON document of the form {"match": {...}} in a POST body.
 * Non-POST requests and empty bodies are ignored (no entity is written).
 */
new RESTApi(
    "/wm/core/switch/{switchid}/matchedflows/json",
    new Restlet() {
        @SuppressWarnings({ "unchecked" })
        @Override
        public void handle(Request request, Response response) {
            // Only POST carries a match document; other methods get no
            // response entity (same observable behavior as before).
            if (request.getMethod() != Method.POST) {
                return;
            }
            String switchIdStr = (String) request.getAttributes().get("switchid");
            Long switchId = HexString.toLong(switchIdStr);
            IOFSwitch sw = manager.getController().getSwitch(switchId);

            // Fetch the body only when we actually need it; guard against a
            // missing entity (the old code NPE'd on entityText.getBytes()).
            String entityText = request.getEntityAsText();
            if (entityText == null) {
                return;
            }
            ObjectMapper mapper = new ObjectMapper();
            HashMap<String, String> matchMap;
            try {
                // Parse the String directly; this also avoids the
                // platform-default charset pitfall of String.getBytes().
                JsonNode rootNode = mapper.readTree(entityText);
                JsonNode matchNode = rootNode.path("match");
                matchMap = (HashMap<String, String>) mapper.readValue(matchNode.toString(), HashMap.class);
            } catch (Exception e) {
                OFMStateManager.logger.error("error={}", StackTrace.of(e));
                return;
            }

            // Unknown or disconnected switch -> empty JSON object ("{}").
            HashMap<String, Object> rr = new HashMap<String, Object>();
            if (sw != null) {
                rr.put("dpid", switchIdStr);
                rr.put("flows", manager.getFlows(switchId, matchMap));
            }

            // Mapper with the custom flow-stats / OF-type serializers.
            ObjectMapper om = new ObjectMapper();
            om.registerModule(flow_statistics_reply_module);
            om.registerModule(type_module);
            try {
                response.setEntity(om.writeValueAsString(rr), MediaType.APPLICATION_JSON);
            } catch (Exception e) {
                OFMStateManager.logger.error("error={}", StackTrace.of(e));
                return;
            }
        }
    }
),
/**
 * This object is to implement a REST handler
 * to retrieve FLOW_STATISTICS_REPLY message content for filtered by port
 */
new RESTApi(
"/wm/core/switch/{switchid}/flows/match/inport/{in_port}/outport/{out_port}/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
String switchIdStr = (String) request.getAttributes().get("switchid");
int inPort, outPort;
// Unparseable port attributes silently fall back to 0
// (presumably "do not filter on this port" - TODO confirm in getFlows).
try {
inPort = Integer.parseInt((String) request.getAttributes().get("in_port"));
} catch (Exception e) {
inPort = 0;
}
try {
outPort = Integer.parseInt((String) request.getAttributes().get("out_port"));
} catch (Exception e) {
outPort = 0;
}
Long switchId = HexString.toLong(switchIdStr);
HashMap<String, Object> rr = new HashMap<String, Object>();
IOFSwitch sw = manager.getController().getSwitch(switchId);
// Unknown or disconnected switch -> empty JSON object ("{}").
if ( sw != null ) {
rr.put("dpid", switchIdStr);
rr.put("flows", manager.getFlows(switchId, inPort, outPort));
}
// create an object mapper.
ObjectMapper om = new ObjectMapper();
// Custom serializers for flow-stats entries and OF types.
om.registerModule(flow_statistics_reply_module);
om.registerModule(type_module);
try {
//String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(result);
String r = om/*.writerWithDefaultPrettyPrinter()*/.writeValueAsString(rr);
response.setEntity(r, MediaType.APPLICATION_JSON);
} catch (Exception e) {
// Serialization failure: log and send no entity.
OFMStateManager.logger.error("error={}", StackTrace.of(e));
return;
}
}
}
),
/**
 * This object is to implement a REST handler
 * to retrieve controller system health-related information
 */
new RESTApi(
"/wm/core/health/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
// Builds the JSON document by hand with a streaming generator.
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
JsonGenerator g = null;
try {
g = f.createJsonGenerator(sWriter);
g.writeStartObject();
g.writeFieldName("host");
g.writeString("localhost");
g.writeFieldName("ofport");
g.writeNumber(manager.getController().getServer().getPortNumber());
g.writeFieldName("uptime");
// Uptime computed from the timeInitiated timestamp via
// Joda-Time Interval/Period.
Interval temp = new Interval(timeInitiated, Calendar.getInstance().getTimeInMillis());
Period tempPeriod = temp.toPeriod();
g.writeString(
String.format(
"System is up for %d days %d hours %d minutes %d seconds",
tempPeriod.getDays(),
tempPeriod.getHours(),
tempPeriod.getMinutes(),
tempPeriod.getSeconds()
)
);
// Memory figures are reported in whole megabytes.
g.writeFieldName("free");
g.writeString(Runtime.getRuntime().freeMemory()/1024/1024 + "M");
g.writeFieldName("total");
g.writeString(totalMemory/1024/1024 + "M");
g.writeFieldName("healthy");
g.writeBoolean(true);
g.writeFieldName("modules");
g.writeStartArray();
String[] moduleNames = manager.getController().getModuleNames();
if ( moduleNames != null ) {
for ( String s : moduleNames ) {
g.writeString(s);
}
}
g.writeEndArray();
g.writeFieldName("moduleText");
g.writeString(manager.getController().getConcatenatedModuleNames());
g.writeEndObject();
g.close();
} catch (IOException e) {
// NOTE(review): on failure the partially-written buffer is still
// sent below, so the client may receive truncated JSON.
OFMStateManager.logger.error("error={}", StackTrace.of(e));
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
/**
 * This object is to implement a REST handler
 * for retrieving module information (list of modules).
 * Only the type "loaded" is supported; any other {type} value leaves the
 * response untouched (previous behavior, kept for compatibility).
 */
new RESTApi(
    "/wm/core/module/{type}/json",
    new Restlet() {
        @Override
        public void handle(Request request, Response response) {
            String typeStr = (String) request.getAttributes().get("type");
            // Constant-first equals() is null-safe should the path
            // attribute ever be absent.
            if ("loaded".equals(typeStr)) {
                // Mapper with the serializer that renders the module list
                // out of the controller object.
                ObjectMapper om = new ObjectMapper();
                om.registerModule(new ModuleListSerializerModule());
                try {
                    String r = om.writerWithDefaultPrettyPrinter().writeValueAsString(manager.getController());
                    response.setEntity(r, MediaType.APPLICATION_JSON);
                } catch (Exception e) {
                    OFMStateManager.logger.error("error={}", StackTrace.of(e));
                    return;
                }
            }
        }
    }
),
/**
 * This object is to implement a REST handler
 * that returns the outgoing path (list of links) from a switch for a
 * POSTed match document of the form {"match": {...}}.
 */
new RESTApi(
    "/wm/core/switch/{switchid}/matchedoutpath/json",
    new Restlet() {
        @Override
        public void handle(Request request, Response response) {
            MappingJsonFactory f = new MappingJsonFactory();
            ObjectMapper mapper = new ObjectMapper(f);
            String req = request.getEntityAsText();
            String switchIdStr = (String) request.getAttributes().get("switchid");
            Long switchId = HexString.toLong(switchIdStr);
            try {
                HashMap<String, HashMap<String, String>> matchMap =
                    mapper.readValue(req, new TypeReference<HashMap<String, HashMap<String, String>>>() {});
                List<PrettyLink> links = manager.getOutGoingPath(switchId, matchMap.get("match"));
                ObjectMapper om = new ObjectMapper();
                om.registerModule(new OFTypeSerializerModule());
                // writerWithDefaultPrettyPrinter() replaces the deprecated
                // defaultPrettyPrintingWriter(), matching the module handler.
                String reply = om.writerWithDefaultPrettyPrinter().writeValueAsString(links);
                response.setEntity(reply, MediaType.APPLICATION_JSON);
            } catch (Exception e) {
                // Log through the module logger instead of printStackTrace(),
                // consistent with the other handlers in this class.
                OFMStateManager.logger.error("error={}", StackTrace.of(e));
            }
        }
    }
),
/**
 * REST handler that returns the incoming path (list of links) toward a
 * switch for a POSTed match document of the form {"match": {...}}.
 */
new RESTApi(
    "/wm/core/switch/{switchid}/matchedinpath/json",
    new Restlet() {
        @Override
        public void handle(Request request, Response response) {
            MappingJsonFactory f = new MappingJsonFactory();
            ObjectMapper mapper = new ObjectMapper(f);
            String req = request.getEntityAsText();
            String switchIdStr = (String) request.getAttributes().get("switchid");
            Long switchId = HexString.toLong(switchIdStr);
            try {
                HashMap<String, HashMap<String, String>> matchMap =
                    mapper.readValue(req, new TypeReference<HashMap<String, HashMap<String, String>>>() {});
                List<PrettyLink> links = manager.getIncommingPath(switchId, matchMap.get("match"));
                ObjectMapper om = new ObjectMapper();
                om.registerModule(new OFTypeSerializerModule());
                // writerWithDefaultPrettyPrinter() replaces the deprecated
                // defaultPrettyPrintingWriter(), matching the module handler.
                String reply = om.writerWithDefaultPrettyPrinter().writeValueAsString(links);
                response.setEntity(reply, MediaType.APPLICATION_JSON);
            } catch (Exception e) {
                // Log through the module logger instead of printStackTrace(),
                // consistent with the other handlers in this class.
                OFMStateManager.logger.error("error={}", StackTrace.of(e));
            }
        }
    }
),
/**
 * REST handler that returns the full path (list of links) through a switch
 * for a POSTed match document of the form {"match": {...}}.
 */
new RESTApi(
    "/wm/core/switch/{switchid}/matchedpath/json",
    new Restlet() {
        @Override
        public void handle(Request request, Response response) {
            MappingJsonFactory f = new MappingJsonFactory();
            ObjectMapper mapper = new ObjectMapper(f);
            String req = request.getEntityAsText();
            String switchIdStr = (String) request.getAttributes().get("switchid");
            Long switchId = HexString.toLong(switchIdStr);
            try {
                HashMap<String, HashMap<String, String>> matchMap =
                    mapper.readValue(req, new TypeReference<HashMap<String, HashMap<String, String>>>() {});
                List<PrettyLink> links = manager.getPath(switchId, matchMap.get("match"));
                ObjectMapper om = new ObjectMapper();
                om.registerModule(new OFTypeSerializerModule());
                // writerWithDefaultPrettyPrinter() replaces the deprecated
                // defaultPrettyPrintingWriter(), matching the module handler.
                String reply = om.writerWithDefaultPrettyPrinter().writeValueAsString(links);
                response.setEntity(reply, MediaType.APPLICATION_JSON);
            } catch (Exception e) {
                // Log through the module logger instead of printStackTrace(),
                // consistent with the other handlers in this class.
                OFMStateManager.logger.error("error={}", StackTrace.of(e));
            }
        }
    }
),
/**
 * This object is to implement a REST handler
 * that exports memory status.
 */
new RESTApi(
"/wm/core/memory/json",
new Restlet() {
@Override
public void handle(Request request, Response response) {
// Small hand-built JSON document: {"total": "...M", "free": "...M"}.
StringWriter sWriter = new StringWriter();
JsonFactory f = new JsonFactory();
JsonGenerator g = null;
try {
g = f.createJsonGenerator(sWriter);
g.writeStartObject();
// Figures are reported in whole megabytes.
g.writeFieldName("total");
g.writeString(totalMemory/1024/1024 + "M");
g.writeFieldName("free");
g.writeString(Runtime.getRuntime().freeMemory()/1024/1024 + "M");
g.writeEndObject();
g.close();
} catch (IOException e) {
// NOTE(review): on failure the partially-written buffer is still
// sent below, so the client may receive truncated JSON.
OFMStateManager.logger.error("error={}", StackTrace.of(e));
}
String r = sWriter.toString();
response.setEntity(r, MediaType.APPLICATION_JSON);
}
}
),
};
this.apis = tmp;
}
/**
 * Custom Serializer for OF types.
 * Registered on the per-request ObjectMappers used by the handlers above.
 */
private OFTypeSerializerModule type_module = new OFTypeSerializerModule();
/**
 * Custom Serializer for FLOW_STATISTICS_REPLY message.
 * This is used to handle the REST URI /wm/core/switch/{switchid}/flow/json
 * and the other flow-listing handlers above.
 */
private OFFlowStatisticsReplySerializerModule flow_statistics_reply_module
= new OFFlowStatisticsReplySerializerModule();
/**
 * Exposes every RESTApi instance registered by this module.
 *
 * @return the array of RESTApi handlers built in the constructor
 */
@Override
public RESTApi[] getAllRestApi() {
    return apis;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.starter;
import java.util.Map;
import org.apache.cxf.spring.boot.autoconfigure.openapi.OpenApiAutoConfiguration;
import org.apache.syncope.common.keymaster.client.api.ConfParamOps;
import org.apache.syncope.common.keymaster.client.api.ServiceOps;
import org.apache.syncope.common.keymaster.client.api.model.NetworkService;
import org.apache.syncope.common.keymaster.client.api.startstop.KeymasterStop;
import org.apache.syncope.common.lib.info.SystemInfo;
import org.apache.syncope.core.logic.LogicProperties;
import org.apache.syncope.core.persistence.api.DomainHolder;
import org.apache.syncope.core.persistence.api.ImplementationLookup;
import org.apache.syncope.core.persistence.api.dao.AnyObjectDAO;
import org.apache.syncope.core.persistence.api.dao.AnySearchDAO;
import org.apache.syncope.core.persistence.api.dao.AnyTypeClassDAO;
import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO;
import org.apache.syncope.core.persistence.api.dao.ExternalResourceDAO;
import org.apache.syncope.core.persistence.api.dao.GroupDAO;
import org.apache.syncope.core.persistence.api.dao.NotificationDAO;
import org.apache.syncope.core.persistence.api.dao.PlainAttrDAO;
import org.apache.syncope.core.persistence.api.dao.PlainAttrValueDAO;
import org.apache.syncope.core.persistence.api.dao.PlainSchemaDAO;
import org.apache.syncope.core.persistence.api.dao.PolicyDAO;
import org.apache.syncope.core.persistence.api.dao.RoleDAO;
import org.apache.syncope.core.persistence.api.dao.SecurityQuestionDAO;
import org.apache.syncope.core.persistence.api.dao.TaskDAO;
import org.apache.syncope.core.persistence.api.dao.UserDAO;
import org.apache.syncope.core.persistence.api.dao.VirSchemaDAO;
import org.apache.syncope.core.persistence.api.entity.EntityFactory;
import org.apache.syncope.core.persistence.jpa.PersistenceProperties;
import org.apache.syncope.core.provisioning.api.AnyObjectProvisioningManager;
import org.apache.syncope.core.provisioning.api.AuditManager;
import org.apache.syncope.core.provisioning.api.ConnIdBundleManager;
import org.apache.syncope.core.provisioning.api.GroupProvisioningManager;
import org.apache.syncope.core.provisioning.api.UserProvisioningManager;
import org.apache.syncope.core.provisioning.api.cache.VirAttrCache;
import org.apache.syncope.core.provisioning.api.notification.NotificationManager;
import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor;
import org.apache.syncope.core.provisioning.java.ProvisioningProperties;
import org.apache.syncope.core.spring.security.PasswordGenerator;
import org.apache.syncope.core.spring.security.SecurityProperties;
import org.apache.syncope.core.starter.actuate.DomainsHealthIndicator;
import org.apache.syncope.core.starter.actuate.ExternalResourcesHealthIndicator;
import org.apache.syncope.core.starter.actuate.DefaultSyncopeCoreInfoContributor;
import org.apache.syncope.core.starter.actuate.SyncopeCoreInfoContributor;
import org.apache.syncope.core.workflow.api.AnyObjectWorkflowAdapter;
import org.apache.syncope.core.workflow.api.GroupWorkflowAdapter;
import org.apache.syncope.core.workflow.api.UserWorkflowAdapter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.actuate.mail.MailHealthIndicator;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.http.HttpMessageConvertersAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.JdbcTemplateAutoConfiguration;
import org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration;
import org.springframework.boot.autoconfigure.web.servlet.error.ErrorMvcAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
import org.springframework.context.PayloadApplicationEvent;
import org.springframework.context.annotation.Bean;
import org.springframework.context.event.EventListener;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.JavaMailSenderImpl;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
 * Spring Boot entry point for the Syncope Core service.
 *
 * Excludes auto-configurations that Syncope replaces with its own setup
 * (error MVC, HTTP message converters, CXF OpenAPI, datasource / JDBC and
 * Quartz) and declares the default actuator and keymaster beans, each
 * overridable by downstream projects via @ConditionalOnMissingBean.
 */
@SpringBootApplication(exclude = {
ErrorMvcAutoConfiguration.class,
HttpMessageConvertersAutoConfiguration.class,
OpenApiAutoConfiguration.class,
DataSourceAutoConfiguration.class,
DataSourceTransactionManagerAutoConfiguration.class,
JdbcTemplateAutoConfiguration.class,
QuartzAutoConfiguration.class }, proxyBeanMethods = false)
@EnableTransactionManagement
public class SyncopeCoreApplication extends SpringBootServletInitializer {
// Standalone launch path: configuration is read from core.properties.
public static void main(final String[] args) {
new SpringApplicationBuilder(SyncopeCoreApplication.class).
properties("spring.config.name:core").
build().run(args);
}
// Servlet-container (WAR) deployment path; mirrors main()'s config name.
@Override
protected SpringApplicationBuilder configure(final SpringApplicationBuilder builder) {
return builder.properties(Map.of("spring.config.name", "core")).sources(SyncopeCoreApplication.class);
}
/**
 * Contributor that aggregates platform information for the actuator "info"
 * endpoint; wired with the DAOs, managers and executors it reports on.
 */
@ConditionalOnMissingBean
@Bean
public SyncopeCoreInfoContributor syncopeCoreInfoContributor(
final SecurityProperties securityProperties,
final PersistenceProperties persistenceProperties,
final ProvisioningProperties provisioningProperties,
final LogicProperties logicProperties,
final AnyTypeDAO anyTypeDAO,
final AnyTypeClassDAO anyTypeClassDAO,
final UserDAO userDAO,
final GroupDAO groupDAO,
final AnyObjectDAO anyObjectDAO,
final ExternalResourceDAO resourceDAO,
final ConfParamOps confParamOps,
final ServiceOps serviceOps,
final ConnIdBundleManager bundleManager,
final PropagationTaskExecutor propagationTaskExecutor,
final AnyObjectWorkflowAdapter awfAdapter,
final UserWorkflowAdapter uwfAdapter,
final GroupWorkflowAdapter gwfAdapter,
final AnyObjectProvisioningManager aProvisioningManager,
final UserProvisioningManager uProvisioningManager,
final GroupProvisioningManager gProvisioningManager,
final VirAttrCache virAttrCache,
final NotificationManager notificationManager,
final AuditManager auditManager,
final PasswordGenerator passwordGenerator,
final EntityFactory entityFactory,
final PlainSchemaDAO plainSchemaDAO,
final PlainAttrDAO plainAttrDAO,
final PlainAttrValueDAO plainAttrValueDAO,
final AnySearchDAO anySearchDAO,
final ImplementationLookup implLookup,
final PolicyDAO policyDAO,
final NotificationDAO notificationDAO,
final TaskDAO taskDAO,
final VirSchemaDAO virSchemaDAO,
final RoleDAO roleDAO,
final SecurityQuestionDAO securityQuestionDAO,
@Qualifier("asyncConnectorFacadeExecutor")
final ThreadPoolTaskExecutor asyncConnectorFacadeExecutor,
@Qualifier("propagationTaskExecutorAsyncExecutor")
final ThreadPoolTaskExecutor propagationTaskExecutorAsyncExecutor) {
return new DefaultSyncopeCoreInfoContributor(securityProperties,
persistenceProperties,
provisioningProperties,
logicProperties,
anyTypeDAO,
anyTypeClassDAO,
userDAO,
groupDAO,
anyObjectDAO,
resourceDAO,
confParamOps,
serviceOps,
bundleManager,
propagationTaskExecutor,
awfAdapter,
uwfAdapter,
gwfAdapter,
aProvisioningManager,
uProvisioningManager,
gProvisioningManager,
virAttrCache,
notificationManager,
auditManager,
passwordGenerator,
entityFactory,
plainSchemaDAO,
plainAttrDAO,
plainAttrValueDAO,
anySearchDAO,
implLookup,
policyDAO,
notificationDAO,
taskDAO,
virSchemaDAO,
roleDAO,
securityQuestionDAO,
asyncConnectorFacadeExecutor,
propagationTaskExecutorAsyncExecutor);
}
// Actuator health indicator for the configured persistence domains.
@ConditionalOnMissingBean
@Bean
public DomainsHealthIndicator domainsHealthIndicator() {
return new DomainsHealthIndicator();
}
// Actuator health indicator for the outbound mail connection.
// NOTE(review): assumes the configured JavaMailSender is a
// JavaMailSenderImpl - the cast fails otherwise; confirm customizations.
@ConditionalOnMissingBean
@Bean
public MailHealthIndicator mailHealthIndicator(final JavaMailSender mailSender) {
return new MailHealthIndicator((JavaMailSenderImpl) mailSender);
}
// Health indicator for external resources; only registered when the
// logic module (ResourceLogic) is on the classpath.
@ConditionalOnClass(name = { "org.apache.syncope.core.logic.ResourceLogic" })
@ConditionalOnMissingBean
@Bean
public ExternalResourcesHealthIndicator externalResourcesHealthIndicator() {
return new ExternalResourcesHealthIndicator();
}
// Registers this Core instance with the keymaster on startup.
@ConditionalOnMissingBean
@Bean
public SyncopeCoreStart keymasterStart(final DomainHolder domainHolder) {
return new SyncopeCoreStart(domainHolder);
}
// Deregisters this Core instance from the keymaster on shutdown.
@Bean
public KeymasterStop keymasterStop() {
return new KeymasterStop(NetworkService.Type.CORE);
}
// Forwards LoadInstant application events to the info contributor.
@Bean
public SyncopeStarterEventListener syncopeCoreEventListener(
@Qualifier("syncopeCoreInfoContributor")
final SyncopeCoreInfoContributor syncopeCoreInfoContributor) {
return new DefaultSyncopeStarterEventListener(syncopeCoreInfoContributor);
}
/** Receives load-measurement events published during startup. */
@FunctionalInterface
public interface SyncopeStarterEventListener {
void addLoadInstant(PayloadApplicationEvent<SystemInfo.LoadInstant> event);
}
/** Default listener: delegates each LoadInstant event to the contributor. */
public static class DefaultSyncopeStarterEventListener implements SyncopeStarterEventListener {
private final SyncopeCoreInfoContributor contributor;
public DefaultSyncopeStarterEventListener(final SyncopeCoreInfoContributor contributor) {
this.contributor = contributor;
}
@EventListener
@Override
public void addLoadInstant(final PayloadApplicationEvent<SystemInfo.LoadInstant> event) {
contributor.addLoadInstant(event);
}
}
}
| |
package org.olyapp.sdk.test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.olyapp.sdk.CameraMainAPI;
import org.olyapp.sdk.Coordinate;
import org.olyapp.sdk.Dimensions;
import org.olyapp.sdk.FocusResult;
import org.olyapp.sdk.ImageResult;
import org.olyapp.sdk.LiveViewAPI;
import org.olyapp.sdk.LiveViewHandler;
import org.olyapp.sdk.LiveViewImageData;
import org.olyapp.sdk.Property;
import org.olyapp.sdk.PropertyDesc;
import org.olyapp.sdk.ProtocolError;
import org.olyapp.sdk.TakeResult;
import org.olyapp.sdk.TakeResult.TakeStatus;
import org.olyapp.sdk.utils.StringUtils;
/**
 * Hardware-in-the-loop tests for the Olympus live-view API: property
 * get/set, live streaming, focus and picture taking. Requires a connected
 * camera; frames and pictures are written to the working directory.
 */
public class LiveViewAPITest {

    CameraMainAPI cameraMainAPI;
    LiveViewAPI liveViewAPI;

    /**
     * Saves one live-view frame as "<imageId>.jpg" in the working directory;
     * I/O failures are reported but do not abort the test.
     */
    private static void saveImage(LiveViewImageData imageData) {
        try {
            Files.write(Paths.get(imageData.getImageId() + ".jpg"), imageData.getData());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Runs a bounded live-view capture, prints throughput and stores every
     * captured frame on disk. Shared by the runLiveStreamTest* cases, which
     * previously duplicated this code verbatim.
     *
     * @param maxImages image-count budget (-1 for unlimited)
     * @param maxTimeMs time budget in milliseconds (-1 for unlimited)
     */
    private void runLiveViewAndSave(int maxImages, int maxTimeMs) throws ProtocolError, InterruptedException {
        long startTime = System.currentTimeMillis();
        List<LiveViewImageData> images = liveViewAPI.runLiveView(maxImages, maxTimeMs);
        long endTime = System.currentTimeMillis();
        System.out.println("Total: " + images.size() + " images in " + (endTime - startTime) + " ms");
        images.forEach(LiveViewAPITest::saveImage);
    }

    /** Puts the camera into live-view mode (640x480) before each test. */
    @Before
    public void init() throws ProtocolError {
        cameraMainAPI = new CameraMainAPI();
        cameraMainAPI.setPlayMode();
        liveViewAPI = cameraMainAPI.setLiveViewMode(20000, 1000, new Dimensions(640, 480));
        liveViewAPI.setProperty(Property.DRIVE_MODE, "lowvib-normal");
    }

    /** Prints the descriptor of every concrete (non-super) property. */
    @Test
    public void getAllDescsTest() throws ProtocolError, InterruptedException {
        for (Property property : Property.values()) {
            if (!property.isSuperProperty()) {
                System.out.println(liveViewAPI.getPropertyDesc(property));
            }
        }
    }

    /** Prints the current value of every concrete (non-super) property. */
    @Test
    public void getAllValuesTest() throws ProtocolError, InterruptedException {
        for (Property property : Property.values()) {
            if (!property.isSuperProperty()) {
                System.out.println(liveViewAPI.getPropertyValue(property));
            }
        }
    }

    /**
     * For every settable property: switch to the next enumerated value,
     * verify the camera reports it, then revert and verify again.
     */
    @Test
    public void getSetRevertValuesTest() throws ProtocolError, InterruptedException {
        for (Property property : Property.values()) {
            PropertyDesc desc = liveViewAPI.getPropertyDesc(property);
            if (desc.getType().contains("set")) {
                String originalValue = desc.getValue();
                List<String> values = desc.getValues();
                int id = values.indexOf(originalValue);
                // pick the next value, wrapping around at the end of the list
                int newId = (id + 1) % values.size();
                String newValue = values.get(newId);
                Assert.assertFalse(newValue.equals(originalValue));
                liveViewAPI.setProperty(property, newValue);
                // assertEquals yields a readable expected/actual message on failure
                String updatedValue = liveViewAPI.getPropertyValue(property);
                Assert.assertEquals(newValue, updatedValue);
                liveViewAPI.setProperty(property, originalValue);
                String revertedValue = liveViewAPI.getPropertyValue(property);
                Assert.assertEquals(originalValue, revertedValue);
            }
        }
    }

    /** Streams for 5 seconds, saving every received frame to disk. */
    @Test
    public void startStopLiveStreamTest() throws ProtocolError, InterruptedException {
        liveViewAPI.startLiveView(new LiveViewHandler() {
            @Override
            public void onTimeout(long ms) {
                System.out.println("timeout");
            }

            @Override
            public void onImage(LiveViewImageData imageData) {
                saveImage(imageData);
            }
        });
        Thread.sleep(5000);
        liveViewAPI.stopLiveView();
    }

    /** Unlimited image count, 5 s time budget. */
    @Test
    public void runLiveStreamTest1() throws ProtocolError, InterruptedException {
        runLiveViewAndSave(-1, 5000);
    }

    /** 80-image budget, unlimited time. */
    @Test
    public void runLiveStreamTest2() throws ProtocolError, InterruptedException {
        runLiveViewAndSave(80, -1);
    }

    /** Single image, unlimited time. */
    @Test
    public void runLiveStreamTest3() throws ProtocolError, InterruptedException {
        runLiveViewAndSave(1, -1);
    }

    /** Acquires and releases focus at (40, 40) while streaming. */
    @Test
    public void focusTest() throws ProtocolError, InterruptedException {
        liveViewAPI.startLiveView(new LiveViewHandler() {
            @Override
            public void onTimeout(long ms) {
                System.out.println("timeout expired");
            }

            @Override
            public void onImage(LiveViewImageData imageData) {
                System.out.println(Thread.currentThread().getId() + " - Image consumed: " + StringUtils.toHex(imageData.getImageId()));
            }
        });
        Thread.sleep(1000);
        FocusResult focusResult = liveViewAPI.acquireFocus(new Coordinate(40, 40));
        liveViewAPI.releaseFocus();
        liveViewAPI.stopLiveView();
        System.out.println(focusResult);
    }

    /** Takes a picture while streaming, then downloads both JPEG sizes. */
    @Test
    public void takePictureTest() throws ProtocolError, InterruptedException, IOException {
        liveViewAPI.startLiveView(new LiveViewHandler() {
            @Override
            public void onTimeout(long ms) {
                System.out.println("timeout expired");
            }

            @Override
            public void onImage(LiveViewImageData imageData) {
                System.out.println("Image consumed: " + StringUtils.toHex(imageData.getImageId()));
            }
        });
        Thread.sleep(1000);
        TakeResult takeResult = liveViewAPI.takePicture();
        System.out.println("Picture-taking result: " + takeResult);
        liveViewAPI.stopLiveView();
        System.out.println("getting small jpeg");
        Files.write(Paths.get("test_small.jpg"), liveViewAPI.requestLastTakenSmallSizeJpeg());
        System.out.println("getting big jpeg");
        Files.write(Paths.get("test_big.jpg"), liveViewAPI.requestLastTakenFullSizeJpeg());
        System.out.println("done");
    }

    /** Takes a small-size JPEG in one call and saves it if successful. */
    @Test
    public void takeSmallSizeJpegTest() throws ProtocolError, InterruptedException, IOException {
        ImageResult imageResult = liveViewAPI.takeSmallSizeJpeg();
        if (imageResult.getTakeResult().getTakeStatus() == TakeStatus.OK) {
            Files.write(Paths.get("test_take_small.jpg"), imageResult.getImage());
            System.out.println(imageResult.getTakeResult());
        } else {
            System.err.println("Failed to take picture: " + imageResult.getTakeResult());
        }
    }

    /** Takes a full-size JPEG in one call and saves it if successful. */
    @Test
    public void takeFullSizeJpegTest() throws ProtocolError, InterruptedException, IOException {
        ImageResult imageResult = liveViewAPI.takeFullSizeJpeg();
        if (imageResult.getTakeResult().getTakeStatus() == TakeStatus.OK) {
            Files.write(Paths.get("test_take_big.jpg"), imageResult.getImage());
            System.out.println(imageResult.getTakeResult());
        } else {
            System.err.println("Failed to take picture: " + imageResult.getTakeResult());
        }
    }
}
| |
/*
* Copyright 2016 Carlos Ballesteros Velasco
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sun.misc;
import java.lang.reflect.Field;
public final class Unsafe {
// Layout constants. In this emulation stub all base offsets, index scales
// and the address size are 0 (a real JVM Unsafe reports actual values) -
// NOTE(review): callers computing element addresses from these get 0; confirm
// the runtime intercepts these fields.
public static final int INVALID_FIELD_OFFSET = -1;
public static final int ARRAY_BOOLEAN_BASE_OFFSET = 0;
public static final int ARRAY_BYTE_BASE_OFFSET = 0;
public static final int ARRAY_SHORT_BASE_OFFSET = 0;
public static final int ARRAY_CHAR_BASE_OFFSET = 0;
public static final int ARRAY_INT_BASE_OFFSET = 0;
public static final int ARRAY_LONG_BASE_OFFSET = 0;
public static final int ARRAY_FLOAT_BASE_OFFSET = 0;
public static final int ARRAY_DOUBLE_BASE_OFFSET = 0;
public static final int ARRAY_OBJECT_BASE_OFFSET = 0;
public static final int ARRAY_BOOLEAN_INDEX_SCALE = 0;
public static final int ARRAY_BYTE_INDEX_SCALE = 0;
public static final int ARRAY_SHORT_INDEX_SCALE = 0;
public static final int ARRAY_CHAR_INDEX_SCALE = 0;
public static final int ARRAY_INT_INDEX_SCALE = 0;
public static final int ARRAY_LONG_INDEX_SCALE = 0;
public static final int ARRAY_FLOAT_INDEX_SCALE = 0;
public static final int ARRAY_DOUBLE_INDEX_SCALE = 0;
public static final int ARRAY_OBJECT_INDEX_SCALE = 0;
public static final int ADDRESS_SIZE = 0;
// Singleton: instances are only obtained through getUnsafe().
private Unsafe() {
}

// Eagerly-created singleton. The previous unsynchronized lazy
// initialization could create multiple instances under concurrent first
// calls; a static final initializer is thread-safe by the JLS class
// initialization guarantees.
private static final Unsafe theUnsafe = new Unsafe();

/**
 * Returns the shared Unsafe instance. Unlike the real JDK method, this
 * emulation performs no caller-sensitive access check.
 */
public static Unsafe getUnsafe() {
    return theUnsafe;
}
// --- Field/array accessors addressed by (object, long offset). ---
public native int getInt(Object var1, long var2);
public native void putInt(Object var1, long var2, int var4);
public native Object getObject(Object var1, long var2);
public native void putObject(Object var1, long var2, Object var4);
public native boolean getBoolean(Object var1, long var2);
public native void putBoolean(Object var1, long var2, boolean var4);
public native byte getByte(Object var1, long var2);
public native void putByte(Object var1, long var2, byte var4);
public native short getShort(Object var1, long var2);
public native void putShort(Object var1, long var2, short var4);
public native char getChar(Object var1, long var2);
public native void putChar(Object var1, long var2, char var4);
public native long getLong(Object var1, long var2);
public native void putLong(Object var1, long var2, long var4);
public native float getFloat(Object var1, long var2);
public native void putFloat(Object var1, long var2, float var4);
public native double getDouble(Object var1, long var2);
public native void putDouble(Object var1, long var2, double var4);
// --- Legacy (object, int offset) variants, mirroring the long-offset
// methods above. The "native public" modifier order is kept as found.
native public int getInt(Object var1, int var2);
native public void putInt(Object var1, int var2, int var3);
native public Object getObject(Object var1, int var2);
native public void putObject(Object var1, int var2, Object var3);
native public boolean getBoolean(Object var1, int var2);
native public void putBoolean(Object var1, int var2, boolean var3);
native public byte getByte(Object var1, int var2);
native public void putByte(Object var1, int var2, byte var3);
native public short getShort(Object var1, int var2);
native public void putShort(Object var1, int var2, short var3);
native public char getChar(Object var1, int var2);
native public void putChar(Object var1, int var2, char var3);
native public long getLong(Object var1, int var2);
native public void putLong(Object var1, int var2, long var3);
native public float getFloat(Object var1, int var2);
native public void putFloat(Object var1, int var2, float var3);
native public double getDouble(Object var1, int var2);
native public void putDouble(Object var1, int var2, double var3);
// --- Absolute-address accessors (single long address, no base object). ---
public native byte getByte(long var1);
public native void putByte(long var1, byte var3);
public native short getShort(long var1);
public native void putShort(long var1, short var3);
public native char getChar(long var1);
public native void putChar(long var1, char var3);
public native int getInt(long var1);
public native void putInt(long var1, int var3);
public native long getLong(long var1);
public native void putLong(long var1, long var3);
public native float getFloat(long var1);
public native void putFloat(long var1, float var3);
public native double getDouble(long var1);
public native void putDouble(long var1, double var3);
public native long getAddress(long var1);
public native void putAddress(long var1, long var3);
public native long allocateMemory(long var1);
public native long reallocateMemory(long var1, long var3);
public native void setMemory(Object var1, long var2, long var4, byte var6);
native public void setMemory(long var1, long var3, byte var5);
public native void copyMemory(Object var1, long var2, Object var4, long var5, long var7);
native public void copyMemory(long var1, long var3, long var5);
public native void freeMemory(long var1);
native public int fieldOffset(Field var1);
native public Object staticFieldBase(Class<?> var1);
public native long staticFieldOffset(Field var1);
public native long objectFieldOffset(Field var1);
public native Object staticFieldBase(Field var1);
public native boolean shouldBeInitialized(Class<?> var1);
public native void ensureClassInitialized(Class<?> var1);
public native int arrayBaseOffset(Class<?> var1);
public native int arrayIndexScale(Class<?> var1);
public native int addressSize();
public native int pageSize();
//public native Class<?> defineClass(String var1, byte[] var2, int var3, int var4, ClassLoader var5, ProtectionDomain var6);
public native Class<?> defineAnonymousClass(Class<?> var1, byte[] var2, Object[] var3);
public native Object allocateInstance(Class<?> var1) throws InstantiationException;
public native void monitorEnter(Object var1);
public native void monitorExit(Object var1);
public native boolean tryMonitorEnter(Object var1);
public native void throwException(Throwable var1);
public final native boolean compareAndSwapObject(Object var1, long var2, Object var4, Object var5);
public final native boolean compareAndSwapInt(Object var1, long var2, int var4, int var5);
public final native boolean compareAndSwapLong(Object var1, long var2, long var4, long var6);
public native Object getObjectVolatile(Object var1, long var2);
public native void putObjectVolatile(Object var1, long var2, Object var4);
public native int getIntVolatile(Object var1, long var2);
public native void putIntVolatile(Object var1, long var2, int var4);
public native boolean getBooleanVolatile(Object var1, long var2);
public native void putBooleanVolatile(Object var1, long var2, boolean var4);
public native byte getByteVolatile(Object var1, long var2);
public native void putByteVolatile(Object var1, long var2, byte var4);
public native short getShortVolatile(Object var1, long var2);
public native void putShortVolatile(Object var1, long var2, short var4);
public native char getCharVolatile(Object var1, long var2);
public native void putCharVolatile(Object var1, long var2, char var4);
public native long getLongVolatile(Object var1, long var2);
public native void putLongVolatile(Object var1, long var2, long var4);
public native float getFloatVolatile(Object var1, long var2);
public native void putFloatVolatile(Object var1, long var2, float var4);
public native double getDoubleVolatile(Object var1, long var2);
public native void putDoubleVolatile(Object var1, long var2, double var4);
public native void putOrderedObject(Object var1, long var2, Object var4);
public native void putOrderedInt(Object var1, long var2, int var4);
public native void putOrderedLong(Object var1, long var2, long var4);
public native void unpark(Object var1);
public native void park(boolean var1, long var2);
public native int getLoadAverage(double[] var1, int var2);
native public final int getAndAddInt(Object var1, long var2, int var4);
native public final long getAndAddLong(Object var1, long var2, long var4);
native public final int getAndSetInt(Object var1, long var2, int var4);
native public final long getAndSetLong(Object var1, long var2, long var4);
native public final Object getAndSetObject(Object var1, long var2, Object var4);
public native void loadFence();
public native void storeFence();
public native void fullFence();
//static {
// theUnsafe = new Unsafe();
// ARRAY_BOOLEAN_BASE_OFFSET = theUnsafe.arrayBaseOffset(boolean[].class);
// ARRAY_BYTE_BASE_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);
// ARRAY_SHORT_BASE_OFFSET = theUnsafe.arrayBaseOffset(short[].class);
// ARRAY_CHAR_BASE_OFFSET = theUnsafe.arrayBaseOffset(char[].class);
// ARRAY_INT_BASE_OFFSET = theUnsafe.arrayBaseOffset(int[].class);
// ARRAY_LONG_BASE_OFFSET = theUnsafe.arrayBaseOffset(long[].class);
// ARRAY_FLOAT_BASE_OFFSET = theUnsafe.arrayBaseOffset(float[].class);
// ARRAY_DOUBLE_BASE_OFFSET = theUnsafe.arrayBaseOffset(double[].class);
// ARRAY_OBJECT_BASE_OFFSET = theUnsafe.arrayBaseOffset(Object[].class);
// ARRAY_BOOLEAN_INDEX_SCALE = theUnsafe.arrayIndexScale(boolean[].class);
// ARRAY_BYTE_INDEX_SCALE = theUnsafe.arrayIndexScale(byte[].class);
// ARRAY_SHORT_INDEX_SCALE = theUnsafe.arrayIndexScale(short[].class);
// ARRAY_CHAR_INDEX_SCALE = theUnsafe.arrayIndexScale(char[].class);
// ARRAY_INT_INDEX_SCALE = theUnsafe.arrayIndexScale(int[].class);
// ARRAY_LONG_INDEX_SCALE = theUnsafe.arrayIndexScale(long[].class);
// ARRAY_FLOAT_INDEX_SCALE = theUnsafe.arrayIndexScale(float[].class);
// ARRAY_DOUBLE_INDEX_SCALE = theUnsafe.arrayIndexScale(double[].class);
// ARRAY_OBJECT_INDEX_SCALE = theUnsafe.arrayIndexScale(Object[].class);
// ADDRESS_SIZE = theUnsafe.addressSize();
//}
}
| |
/**
* Copyright (C) 2016 Rizki Mufrizal (https://rizkimufrizal.github.io/) (mufrizalrizki@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rizki.mufrizal.aplikasi.inventory.controller;
import com.rizki.mufrizal.aplikasi.inventory.App;
import com.rizki.mufrizal.aplikasi.inventory.abstractTableModel.PembelianAbstractTableModel;
import com.rizki.mufrizal.aplikasi.inventory.abstractTableModel.PembelianDetailAbstractTableModel;
import com.rizki.mufrizal.aplikasi.inventory.abstractTableModel.PembelianSementaraAbstractTableModel;
import com.rizki.mufrizal.aplikasi.inventory.abstractTableModel.TableAutoResizeColumn;
import com.rizki.mufrizal.aplikasi.inventory.domain.Barang;
import com.rizki.mufrizal.aplikasi.inventory.domain.JenisBarang;
import com.rizki.mufrizal.aplikasi.inventory.domain.Pembelian;
import com.rizki.mufrizal.aplikasi.inventory.domain.PembelianDetail;
import com.rizki.mufrizal.aplikasi.inventory.domain.PembelianSementara;
import com.rizki.mufrizal.aplikasi.inventory.view.PembelianSimpanView;
import com.rizki.mufrizal.aplikasi.inventory.view.PembelianView;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import javax.swing.JOptionPane;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Controller for the purchase ("pembelian") screens of the inventory application:
 * the paged, searchable purchase list ({@link PembelianView}) and the purchase
 * entry form ({@link PembelianSimpanView}). Exactly one of the two views is set,
 * depending on which constructor was used; each method only touches the view it
 * belongs to.
 *
 * @author Rizki Mufrizal <mufrizalrizki@gmail.com>
 * @since Mar 25, 2016
 */
public class PembelianController {

    private static final Logger LOGGER = LoggerFactory.getLogger(PembelianController.class);

    // Exactly one of these is non-null, set by the constructor that was used.
    private PembelianView pembelianView;
    private PembelianSimpanView pembelianSimpanView;

    private final TableAutoResizeColumn tableAutoResizeColumn = new TableAutoResizeColumn();

    // In-memory rows of the "purchase in progress" table on the entry form.
    @SuppressWarnings("FieldMayBeFinal")
    private List<PembelianSementara> pembelianSementaras = new ArrayList<>();

    // Item ids offered by the item combo box; index 0 is the "--Pilih--" placeholder.
    @SuppressWarnings("FieldMayBeFinal")
    private List<String> isiComboBox = new ArrayList<>();

    private PembelianAbstractTableModel pembelianAbstractTableModel;
    private PembelianDetailAbstractTableModel pembelianDetailAbstractTableModel;
    private PembelianSementaraAbstractTableModel pembelianSementaraAbstractTableModel;

    /**
     * Creates a controller for the purchase list view.
     *
     * @param pembelianView the list view this controller drives
     */
    public PembelianController(PembelianView pembelianView) {
        this.pembelianView = pembelianView;
    }

    /**
     * Creates a controller for the purchase entry form.
     *
     * @param pembelianSimpanView the entry form this controller drives
     */
    public PembelianController(PembelianSimpanView pembelianSimpanView) {
        this.pembelianSimpanView = pembelianSimpanView;
    }

    // ---- halaman pembelian view ----
    // Paging state for the purchase list.
    private Integer totalRowsPembelian = 0;
    private Integer pageNumberPembelian = 1;
    private Integer totalPagePembelian = 1;
    private Integer rowsPerPagePembelian = 10;

    /**
     * Recomputes the paging state from the service and reloads the current page
     * of purchases into the table; also clears the detail table.
     */
    public void ambilDataPembelian() {
        // Fixed copy-paste log message (previously said "barang").
        LOGGER.info("Ambil data pembelian");
        rowsPerPagePembelian =
                Integer.valueOf(this.pembelianView.getPerPage().getSelectedItem().toString());
        totalRowsPembelian = App.pembelianService().jumlahPembelian();
        // Clamp to at least one page so an empty table still reads "Page 1 of 1"
        // and the last-page comparison below behaves.
        totalPagePembelian = Math.max(
                (int) Math.ceil(totalRowsPembelian.doubleValue() / rowsPerPagePembelian.doubleValue()), 1);
        aturTombolPaging();
        this.pembelianView.getLabelPaging()
                .setText("Page " + pageNumberPembelian + " of " + totalPagePembelian);
        pembelianAbstractTableModel = new PembelianAbstractTableModel(
                App.pembelianService().ambilPembelians(pageNumberPembelian, rowsPerPagePembelian));
        this.pembelianView.getTabelPembelian().setModel(pembelianAbstractTableModel);
        tableAutoResizeColumn.autoResizeColumn(this.pembelianView.getTabelPembelian());
        LOGGER.info("Paging : {}", pageNumberPembelian);
        // inisialisasi tabel pembelian detail kosong
        List<PembelianDetail> pembelianDetails = new ArrayList<>();
        pembelianDetailAbstractTableModel = new PembelianDetailAbstractTableModel(pembelianDetails);
        this.pembelianView.getTabelPembelianDetail().setModel(pembelianDetailAbstractTableModel);
        tableAutoResizeColumn.autoResizeColumn(this.pembelianView.getTabelPembelianDetail());
    }

    /** Enables/disables the first/previous/next/last buttons for the current page. */
    private void aturTombolPaging() {
        boolean halamanPertama = pageNumberPembelian == 1;
        this.pembelianView.getFirst().setEnabled(!halamanPertama);
        this.pembelianView.getPrevious().setEnabled(!halamanPertama);
        boolean halamanTerakhir = pageNumberPembelian.equals(totalPagePembelian);
        this.pembelianView.getNext().setEnabled(!halamanTerakhir);
        this.pembelianView.getLast().setEnabled(!halamanTerakhir);
    }

    /** Reloads the table: unfiltered when the search box is empty, filtered otherwise. */
    private void muatUlangHalaman() {
        if (this.pembelianView.getValue().getText().isEmpty()) {
            ambilDataPembelian();
        } else {
            cariDataPembelianPadaPembelianView();
        }
    }

    /** Jumps to the first page. */
    public void firstPagingPembelian() {
        pageNumberPembelian = 1;
        muatUlangHalaman();
        LOGGER.info("Paging awal : {}", pageNumberPembelian);
    }

    /**
     * Moves one page back when possible.
     * NOTE: capitalized name kept for existing callers (action listeners).
     */
    public void PreviousPagingPembelian() {
        if (pageNumberPembelian > 1) {
            pageNumberPembelian -= 1;
            muatUlangHalaman();
            LOGGER.info("Paging sebelum : {}", pageNumberPembelian);
        }
    }

    /** Moves one page forward when not already on the last page. */
    public void nextPagingPembelian() {
        if (pageNumberPembelian < totalPagePembelian) {
            pageNumberPembelian += 1;
            muatUlangHalaman();
            LOGGER.info("Paging selanjutnya : {}", pageNumberPembelian);
        }
    }

    /** Jumps to the last page. */
    public void lastPagingPembelian() {
        pageNumberPembelian = totalPagePembelian;
        muatUlangHalaman();
        LOGGER.info("Paging akhir : {}", pageNumberPembelian);
    }

    /** Reloads the current (unfiltered) page. */
    public void refreshPembelian() {
        ambilDataPembelian();
        LOGGER.info("refresh paging : {}", pageNumberPembelian);
    }
    // end paging pembelian

    /**
     * Resets paging and reloads the purchase table filtered by the selected
     * search key (transaction code or supplier name). Falls back to the
     * unfiltered load when the search box is empty.
     *
     * NOTE(review): this resets {@code pageNumberPembelian} to 1 on every call,
     * so the next/previous buttons cannot advance through filtered results —
     * looks pre-existing; confirm whether that is intended.
     */
    public void cariDataPembelianPadaPembelianView() {
        totalRowsPembelian = 0;
        pageNumberPembelian = 1;
        totalPagePembelian = 1;
        rowsPerPagePembelian = 10;
        if (this.pembelianView.getValue().getText().isEmpty()) {
            ambilDataPembelian();
        } else {
            String value = this.pembelianView.getValue().getText();
            String key = null;
            if (this.pembelianView.getKey().getSelectedIndex() == 0) {
                key = "kodeTransaksiPembelian";
            } else if (this.pembelianView.getKey().getSelectedIndex() == 1) {
                key = "namaSuplier";
            }
            LOGGER.info("cari data pembelian");
            rowsPerPagePembelian =
                    Integer.valueOf(this.pembelianView.getPerPage().getSelectedItem().toString());
            totalRowsPembelian = App.pembelianService().jumlahCariPembelian(key, value);
            totalPagePembelian = Math.max(
                    (int) Math.ceil(totalRowsPembelian.doubleValue() / rowsPerPagePembelian.doubleValue()), 1);
            aturTombolPaging();
            this.pembelianView.getLabelPaging()
                    .setText("Page " + pageNumberPembelian + " of " + totalPagePembelian);
            // BUG FIX: previously concatenated the view object itself into the label;
            // the label is supposed to show the matching row count.
            this.pembelianView.getLabelTotalRecord().setText("Total Record " + totalRowsPembelian);
            pembelianAbstractTableModel = new PembelianAbstractTableModel(App.pembelianService()
                    .cariPembelian(key, value, pageNumberPembelian, rowsPerPagePembelian));
            this.pembelianView.getTabelPembelian().setModel(pembelianAbstractTableModel);
            tableAutoResizeColumn.autoResizeColumn(this.pembelianView.getTabelPembelian());
            LOGGER.info("Paging : {}", pageNumberPembelian);
        }
    }
    // end cari data pembelian

    /**
     * Loads the detail rows of the purchase currently selected in the purchase
     * table (transaction code is read from column 1) into the detail table.
     */
    public void tampilkanDataPembelianDetail() {
        Integer index = this.pembelianView.getTabelPembelian().getSelectedRow();
        String kodeTransaksiPembelian =
                String.valueOf(this.pembelianView.getTabelPembelian().getValueAt(index, 1));
        List<PembelianDetail> pembelianDetails =
                App.pembelianDetailService().ambilPembelianDetails(kodeTransaksiPembelian);
        pembelianDetailAbstractTableModel = new PembelianDetailAbstractTableModel(pembelianDetails);
        this.pembelianView.getTabelPembelianDetail().setModel(pembelianDetailAbstractTableModel);
        tableAutoResizeColumn.autoResizeColumn(this.pembelianView.getTabelPembelianDetail());
    }
    // end halaman pembelian view

    // ---- halaman simpan pembelian view ----

    /**
     * Builds (and caches in {@link #isiComboBox}) the combo-box entries:
     * the "--Pilih--" placeholder followed by every known item id.
     *
     * @return the cached list backing the item combo box
     */
    public List<String> tampilkanDataBarangComboBox() {
        List<Barang> barangs = App.barangService().getSemuaBarang();
        isiComboBox.add("--Pilih--");
        barangs.stream().forEach((barang) -> {
            isiComboBox.add(barang.getIdBarang());
        });
        return isiComboBox;
    }

    /**
     * Returns {@code idBarang} if it already occurs in {@code barangs},
     * otherwise {@code null}.
     */
    private String checkContains(String idBarang, List<String> barangs) {
        for (String b : barangs) {
            if (b.equals(idBarang)) {
                LOGGER.info("id sama : {}", b);
                return b;
            }
        }
        LOGGER.info("Beda id");
        return null;
    }

    /** Appends to the combo box any item ids not already present in the cache. */
    public void tampilkanDataComboBox() {
        List<Barang> barangs = App.barangService().getSemuaBarang();
        barangs.stream().forEach((barang) -> {
            String b = checkContains(barang.getIdBarang(), isiComboBox);
            if (b == null) {
                this.pembelianSimpanView.getIdBarang().addItem(barang.getIdBarang());
            }
        });
    }

    /** Installs an (initially empty) model on the in-progress purchase table. */
    public void inisialisasiTabelPembelianKosong() {
        pembelianSementaraAbstractTableModel =
                new PembelianSementaraAbstractTableModel(pembelianSementaras);
        this.pembelianSimpanView.getTabelPembelianSementara()
                .setModel(pembelianSementaraAbstractTableModel);
        tableAutoResizeColumn
                .autoResizeColumn(this.pembelianSimpanView.getTabelPembelianSementara());
    }

    /**
     * Fills the form fields from the selected item id, or clears them when the
     * "--Pilih--" placeholder (index 0) is selected.
     */
    public void tampilkanIsiDataBarangBerdasarkanIdBarang() {
        if (this.pembelianSimpanView.getIdBarang().getSelectedIndex() == 0) {
            this.pembelianSimpanView.getNamaBarang().setText(null);
            this.pembelianSimpanView.getJenisBarang().setSelectedIndex(0);
            this.pembelianSimpanView.getTanggalKadaluarsa().setDate(null);
            this.pembelianSimpanView.getHargaSatuan().setText(null);
            this.pembelianSimpanView.getJumlahBarang().setText(null);
        } else {
            Barang barang = App.barangService()
                    .getBarang(this.pembelianSimpanView.getIdBarang().getSelectedItem().toString());
            this.pembelianSimpanView.getNamaBarang().setText(barang.getNamaBarang());
            this.pembelianSimpanView.getJenisBarang()
                    .setSelectedItem(barang.getJenisBarang().toString());
            this.pembelianSimpanView.getTanggalKadaluarsa().setDate(barang.getTanggalKadaluarsa());
            this.pembelianSimpanView.getHargaSatuan()
                    .setText(barang.getHargaSatuanBarang().toString());
            this.pembelianSimpanView.getJumlahBarang().setText(barang.getJumlahBarang().toString());
        }
    }

    /**
     * Reads the form into a new in-memory row ({@code idBarang} is {@code null}
     * for a brand-new item), appends it to the in-progress table and clears the form.
     */
    public void tambahPembelianSementara() {
        PembelianSementara pembelianSementara = new PembelianSementara();
        if (this.pembelianSimpanView.getIdBarang().getSelectedIndex() == 0) {
            // Placeholder selected => new item, id assigned on save.
            pembelianSementara.setIdBarang(null);
        } else {
            pembelianSementara.setIdBarang(this.pembelianSimpanView.getIdBarang().getSelectedItem().toString());
        }
        pembelianSementara.setNamaBarang(this.pembelianSimpanView.getNamaBarang().getText());
        pembelianSementara.setTanggalKadaluarsa(this.pembelianSimpanView.getTanggalKadaluarsa().getDate());
        pembelianSementara.setJenisBarang(JenisBarang.valueOf(this.pembelianSimpanView.getJenisBarang().getSelectedItem().toString()));
        pembelianSementara.setJumlahBarang(Integer.parseInt(this.pembelianSimpanView.getJumlahBarang().getText()));
        pembelianSementara.setHargaSatuanBarang(BigDecimal.valueOf(Double.parseDouble(this.pembelianSimpanView.getHargaSatuan().getText())));
        pembelianSementaras.add(pembelianSementara);
        tampilPembelianSementara();
        clearForm();
    }

    /** Re-installs the in-progress model so the table reflects the current rows. */
    public void tampilPembelianSementara() {
        pembelianSementaraAbstractTableModel = new PembelianSementaraAbstractTableModel(pembelianSementaras);
        this.pembelianSimpanView.getTabelPembelianSementara().setModel(pembelianSementaraAbstractTableModel);
        tableAutoResizeColumn.autoResizeColumn(this.pembelianSimpanView.getTabelPembelianSementara());
    }

    /** Clears the per-item input fields (not the transaction header fields). */
    public void clearForm() {
        this.pembelianSimpanView.getNamaBarang().setText(null);
        this.pembelianSimpanView.getJenisBarang().setSelectedIndex(0);
        this.pembelianSimpanView.getTanggalKadaluarsa().setDate(null);
        this.pembelianSimpanView.getHargaSatuan().setText(null);
        this.pembelianSimpanView.getJumlahBarang().setText(null);
    }

    /**
     * Validates the header fields, then persists the purchase with its detail
     * rows: new items are inserted, existing items have master data and stock
     * updated. Finally resets the whole form.
     */
    public void simpanTransaksi() {
        if (this.pembelianSimpanView.getTanggalTransaksi().getDate() == null) {
            JOptionPane.showMessageDialog(null, "Tanggal Pembelian belum dipilih", "Warning", JOptionPane.WARNING_MESSAGE);
        } else if (this.pembelianSimpanView.getNamaSuplier().getText().isEmpty()) {
            JOptionPane.showMessageDialog(null, "Nama Suplier belum diisi", "Warning", JOptionPane.WARNING_MESSAGE);
        } else {
            Pembelian pembelian = new Pembelian();
            List<PembelianDetail> pembelianDetails = new ArrayList<>();
            BigDecimal totalHarga = new BigDecimal(BigInteger.ZERO);
            for (PembelianSementara pembelianSementara : pembelianSementaras) {
                BigDecimal totalHargaBarang = pembelianSementara.getHargaSatuanBarang()
                        .multiply(BigDecimal.valueOf(pembelianSementara.getJumlahBarang().doubleValue()));
                totalHarga = totalHarga.add(totalHargaBarang);
                LOGGER.debug("id barang : {}", pembelianSementara.getIdBarang());
                PembelianDetail pembelianDetail = new PembelianDetail();
                if (pembelianSementara.getIdBarang() == null) {
                    // New item: create and persist it first so the detail can reference it.
                    Barang barangBaru = new Barang();
                    barangBaru.setNamaBarang(pembelianSementara.getNamaBarang());
                    barangBaru.setJenisBarang(pembelianSementara.getJenisBarang());
                    barangBaru.setTanggalKadaluarsa(pembelianSementara.getTanggalKadaluarsa());
                    barangBaru.setHargaSatuanBarang(pembelianSementara.getHargaSatuanBarang());
                    barangBaru.setJumlahBarang(pembelianSementara.getJumlahBarang());
                    App.barangService().simpanBarang(barangBaru);
                    pembelianDetail.setBarang(barangBaru);
                } else {
                    // Existing item: refresh master data and add the purchased quantity to stock.
                    Barang barang = App.barangService().getBarang(pembelianSementara.getIdBarang());
                    barang.setNamaBarang(pembelianSementara.getNamaBarang());
                    barang.setJenisBarang(pembelianSementara.getJenisBarang());
                    barang.setTanggalKadaluarsa(pembelianSementara.getTanggalKadaluarsa());
                    barang.setHargaSatuanBarang(pembelianSementara.getHargaSatuanBarang());
                    // NOTE(review): when current stock happens to EQUAL the purchased
                    // quantity the stock is not increased — preserved as-is, but this
                    // looks like a latent bug; confirm the intended stock rule.
                    if (!Objects.equals(barang.getJumlahBarang(), pembelianSementara.getJumlahBarang())) {
                        barang.setJumlahBarang(barang.getJumlahBarang() + pembelianSementara.getJumlahBarang());
                    } else {
                        barang.setJumlahBarang(pembelianSementara.getJumlahBarang());
                    }
                    App.barangService().editBarang(barang);
                    pembelianDetail.setBarang(barang);
                }
                pembelianDetail.setJumlahBarang(pembelianSementara.getJumlahBarang());
                pembelianDetail.setTotalHargaPerBarang(totalHargaBarang);
                pembelianDetail.setPembelian(pembelian);
                pembelianDetails.add(pembelianDetail);
            }
            pembelian.setTanggalTransaksi(this.pembelianSimpanView.getTanggalTransaksi().getDate());
            pembelian.setNamaSuplier(this.pembelianSimpanView.getNamaSuplier().getText());
            pembelian.setTotalHarga(totalHarga);
            pembelian.setPembelianDetails(pembelianDetails);
            App.pembelianService().simpanPembelian(pembelian);
            JOptionPane.showMessageDialog(null, "Data Pembelian Tersimpan", "Info", JOptionPane.INFORMATION_MESSAGE);
            pembelianSementaras.clear();
            tampilPembelianSementara();
            clearForm();
            tampilkanDataComboBox();
            this.pembelianSimpanView.getTanggalTransaksi().setDate(null);
            this.pembelianSimpanView.getNamaSuplier().setText(null);
        }
    }
    // end halaman simpan pembelian view
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.annotations.*;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.AbstractRandomizedTest;
import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.uninverting.UninvertingReader;
import org.elasticsearch.Version;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.operation.hash.djb.DjbHashFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.EsAbortPolicy;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.test.junit.listeners.LoggingListener;
import org.elasticsearch.test.store.MockDirectoryHelper;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.*;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.URI;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllFilesClosed;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSearchersClosed;
/**
* Base testcase for randomized unit testing with Elasticsearch
*/
@ThreadLeakFilters(defaultFilters = true, filters = {ElasticsearchThreadFilter.class})
@ThreadLeakScope(Scope.SUITE)
@ThreadLeakLingering(linger = 5000) // 5 sec lingering
@TimeoutSuite(millis = 20 * TimeUnits.MINUTE) // timeout the suite after 20min and fail the test.
@Listeners(LoggingListener.class)
public abstract class ElasticsearchTestCase extends AbstractRandomizedTest {
// JVM-default uncaught-exception handler saved in setBeforeClass() and restored
// in resetAfterClass().
private static Thread.UncaughtExceptionHandler defaultHandler;
protected final ESLogger logger = Loggers.getLogger(getClass());
// System properties controlling SecurityManager installation for the test JVM.
// Note: "SECURTY" typo is part of the public constant name and must be kept.
public static final String TESTS_SECURITY_MANAGER = System.getProperty("tests.security.manager");
public static final String JAVA_SECURTY_POLICY = System.getProperty("java.security.policy");
/**
 * Property that allows to adapt the tests behaviour to older features/bugs based on the input version
 */
private static final String TESTS_COMPATIBILITY = "tests.compatibility";
// ("GLOABL" typo kept: private constant, renaming is out of scope for a doc pass.)
private static final Version GLOABL_COMPATIBILITY_VERSION = Version.fromString(compatibilityVersionProperty());
// True when the JVM was started with -ea (assertions enabled).
public static final boolean ASSERTIONS_ENABLED;
static {
    boolean enabled = false;
    // The assignment inside the assert only executes when assertions are enabled.
    assert enabled = true;
    ASSERTIONS_ENABLED = enabled;
    // Install a SecurityManager unless explicitly disabled; requires a policy file.
    if (Boolean.parseBoolean(Strings.hasLength(TESTS_SECURITY_MANAGER) ? TESTS_SECURITY_MANAGER : "true") && JAVA_SECURTY_POLICY != null) {
        System.setSecurityManager(new SecurityManager());
    }
}
/**
 * Runs after each test and fails it if Lucene's field cache was populated:
 * these tests must never trigger uninverting.
 */
@After
public void ensureNoFieldCacheUse() {
    String[] uninvertedStats = UninvertingReader.getUninvertedStats();
    assertEquals("fieldcache must never be used, got=" + Arrays.toString(uninvertedStats), 0, uninvertedStats.length);
}
/**
 * Runs {@code codeBlock} repeatedly for up to 10 seconds, waiting for no assertion to trip.
 */
public static void assertBusy(Runnable codeBlock) throws Exception {
    // Delegate to the Runnable overload with the default 10-second budget.
    assertBusy(codeBlock, 10L, TimeUnit.SECONDS);
}
/**
 * Runs {@code codeBlock} repeatedly for up to the given budget, waiting for no
 * assertion to trip.
 */
public static void assertBusy(Runnable codeBlock, long maxWaitTime, TimeUnit unit) throws Exception {
    Callable<Object> wrapped = Executors.callable(codeBlock);
    assertBusy(wrapped, maxWaitTime, unit);
}
/**
 * Runs {@code codeBlock} repeatedly for up to 10 seconds, waiting for no assertion
 * to trip, and returns its result once it succeeds.
 */
public static <V> V assertBusy(Callable<V> codeBlock) throws Exception {
    return assertBusy(codeBlock, 10L, TimeUnit.SECONDS);
}
/**
 * Repeatedly invokes {@code codeBlock}, sleeping with exponential back-off
 * (1ms, 2ms, 4ms, ...) between attempts, until it runs without tripping an
 * assertion or the time budget is exhausted. After the back-off attempts, the
 * remaining budget is slept out and one final attempt is made; if that one also
 * fails, its {@link AssertionError} is thrown with all earlier failures attached
 * as suppressed exceptions.
 *
 * @param codeBlock   the assertion-bearing code to retry
 * @param maxWaitTime overall time budget
 * @param unit        unit of {@code maxWaitTime}
 * @return the value produced by the first successful invocation
 * @throws Exception whatever non-assertion exception {@code codeBlock} throws
 */
public static <V> V assertBusy(Callable<V> codeBlock, long maxWaitTime, TimeUnit unit) throws Exception {
    long budgetMillis = unit.toMillis(maxWaitTime);
    // Number of doubling steps that fit in the budget (at least one attempt).
    long attempts = Math.max(Math.round(Math.log10(budgetMillis) / Math.log10(2)), 1);
    List<AssertionError> trippedAssertions = new ArrayList<>();
    long sleptMillis = 0;
    long nextSleepMillis = 1;
    for (long attempt = 0; attempt < attempts; attempt++) {
        try {
            return codeBlock.call();
        } catch (AssertionError e) {
            trippedAssertions.add(e);
        }
        sleptMillis += nextSleepMillis;
        Thread.sleep(nextSleepMillis);
        nextSleepMillis *= 2;
    }
    // Sleep out whatever remains of the budget, then try one last time.
    Thread.sleep(Math.max(budgetMillis - sleptMillis, 0));
    try {
        return codeBlock.call();
    } catch (AssertionError e) {
        for (AssertionError earlier : trippedAssertions) {
            e.addSuppressed(earlier);
        }
        throw e;
    }
}
/**
 * Polls {@code breakPredicate} for up to 10 seconds; returns true as soon as it holds.
 */
public static boolean awaitBusy(Predicate<?> breakPredicate) throws InterruptedException {
    return awaitBusy(breakPredicate, 10L, TimeUnit.SECONDS);
}
/**
 * Polls {@code breakPredicate} with exponential back-off (1ms, 2ms, 4ms, ...)
 * until it returns true or the time budget runs out; then sleeps out the
 * remaining budget and performs one final check.
 *
 * @param breakPredicate condition to wait for; always invoked with {@code null}
 * @param maxWaitTime    overall time budget
 * @param unit           unit of {@code maxWaitTime}
 * @return true if the predicate held on any check, false if the budget expired
 */
public static boolean awaitBusy(Predicate<?> breakPredicate, long maxWaitTime, TimeUnit unit) throws InterruptedException {
    long budgetMillis = unit.toMillis(maxWaitTime);
    // Number of doubling steps that fit in the budget (at least one check).
    long attempts = Math.max(Math.round(Math.log10(budgetMillis) / Math.log10(2)), 1);
    long sleptMillis = 0;
    long nextSleepMillis = 1;
    for (long attempt = 0; attempt < attempts; attempt++) {
        if (breakPredicate.apply(null)) {
            return true;
        }
        sleptMillis += nextSleepMillis;
        Thread.sleep(nextSleepMillis);
        nextSleepMillis *= 2;
    }
    // Sleep out whatever remains of the budget, then check one last time.
    Thread.sleep(Math.max(budgetMillis - sleptMillis, 0));
    return breakPredicate.apply(null);
}
// Names of the numeric field types a test may randomly pick from.
private static final String[] numericTypes = new String[]{"byte", "short", "integer", "long"};

/**
 * Picks one of "byte", "short", "integer" or "long" uniformly at random.
 */
public static String randomNumericType(Random random) {
    int pick = random.nextInt(numericTypes.length);
    return numericTypes[pick];
}
/**
 * Returns a {@link File} pointing to the class path relative resource given as
 * the first argument. In contrast to
 * <code>getClass().getResource(...).getFile()</code> this method will not return
 * URL encoded paths if the parent path contains spaces or other non-standard
 * characters.
 */
public File getResource(String relativePath) {
    String resourceUrl = getClass().getResource(relativePath).toString();
    return new File(URI.create(resourceUrl));
}
// Runs after each test; fails it if the mock page recycler still tracks
// unreleased pages.
@After
public void ensureAllPagesReleased() throws Exception {
    MockPageCacheRecycler.ensureAllPagesAreReleased();
}
// Runs after each test; fails it if MockBigArrays still tracks unreleased arrays.
@After
public void ensureAllArraysReleased() throws Exception {
    MockBigArrays.ensureAllArraysAreReleased();
}
/**
 * Returns true if any mock directory wrapper tracked by
 * {@code MockDirectoryHelper.wrappers} is still open.
 */
public static boolean hasUnclosedWrapper() {
    boolean anyOpen = false;
    for (MockDirectoryWrapper wrapper : MockDirectoryHelper.wrappers) {
        if (wrapper.isOpen()) {
            anyOpen = true;
            break;
        }
    }
    return anyOpen;
}
// Suite setup: registers end-of-suite checks for leaked files/searchers, wraps
// the JVM-default uncaught-exception handler (restored in resetAfterClass()),
// and randomizes the content type used for requests.
@BeforeClass
public static void setBeforeClass() throws Exception {
    closeAfterSuite(new Closeable() {
        @Override
        public void close() throws IOException {
            // Fails the suite if mock files were left open.
            assertAllFilesClosed();
        }
    });
    closeAfterSuite(new Closeable() {
        @Override
        public void close() throws IOException {
            // Fails the suite if searchers were left open.
            assertAllSearchersClosed();
        }
    });
    // Save the current handler so resetAfterClass() can restore it.
    defaultHandler = Thread.getDefaultUncaughtExceptionHandler();
    Thread.setDefaultUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(defaultHandler));
    // Randomize the wire format so tests exercise all supported content types.
    Requests.CONTENT_TYPE = randomXContentType();
    Requests.INDEX_CONTENT_TYPE = randomXContentType();
}
/**
 * Picks a random content type, excluding CBOR when the global compatibility
 * version is older than 1.2.0 (CBOR was added in 1.2.0; earlier versions can't
 * derive the format).
 */
private static XContentType randomXContentType() {
    XContentType chosen = randomFrom(XContentType.values());
    if (globalCompatibilityVersion().onOrAfter(Version.V_1_2_0)) {
        return chosen;
    }
    // Pre-1.2.0 compatibility: re-roll until the pick is not CBOR.
    while (chosen == XContentType.CBOR) {
        chosen = randomFrom(XContentType.values());
    }
    return chosen;
}
// Suite teardown: restores the uncaught-exception handler saved in
// setBeforeClass() and resets the request content types to their defaults.
@AfterClass
public static void resetAfterClass() {
    Thread.setDefaultUncaughtExceptionHandler(defaultHandler);
    Requests.CONTENT_TYPE = XContentType.SMILE;
    Requests.INDEX_CONTENT_TYPE = XContentType.JSON;
}
/** Randomly decides (50/50) whether doc values should be enabled for a randomized mapping. */
public static boolean maybeDocValues() {
    return randomBoolean();
}
/** All known versions, ordered newest-first (index 0 is the most recent). */
private static final List<Version> SORTED_VERSIONS;

static {
    // Discover every public static final Version constant declared on the
    // Version class via reflection, de-duplicate by id, then materialize
    // the list in descending (newest-first) id order.
    final Set<Integer> ids = new HashSet<>();
    for (Field field : Version.class.getDeclaredFields()) {
        final int modifiers = field.getModifiers();
        final boolean isConstant =
                Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers) && Modifier.isPublic(modifiers);
        if (isConstant && field.getType() == Version.class) {
            try {
                ids.add(((Version) field.get(null)).id);
            } catch (Throwable t) {
                throw new RuntimeException(t);
            }
        }
    }
    final List<Integer> sortedIds = new ArrayList<>(ids);
    Collections.sort(sortedIds, Collections.reverseOrder());
    final ImmutableList.Builder<Version> builder = ImmutableList.builder();
    for (Integer id : sortedIds) {
        builder.add(Version.fromId(id));
    }
    SORTED_VERSIONS = builder.build();
}
/**
 * @return the {@link Version} before the {@link Version#CURRENT}
 */
public static Version getPreviousVersion() {
    // SORTED_VERSIONS is ordered newest-first, so index 0 is CURRENT and
    // index 1 is the release immediately before it. Assumes at least two
    // known versions exist; the sanity check below only runs with -ea.
    Version version = SORTED_VERSIONS.get(1);
    assert version.before(Version.CURRENT);
    return version;
}
/**
 * A random {@link Version}.
 *
 * @return a random {@link Version} from all available versions
 */
public static Version randomVersion() {
    // Delegates to the Random-accepting overload using the test's seeded RNG.
    return randomVersion(getRandom());
}
/**
 * A random {@link Version}.
 *
 * @param random
 *            the {@link Random} to use to generate the random version
 *
 * @return a random {@link Version} from all available versions
 */
public static Version randomVersion(Random random) {
    // Uniform draw over the (newest-first) list of all known versions.
    return SORTED_VERSIONS.get(random.nextInt(SORTED_VERSIONS.size()));
}
/**
 * Returns immutable list of all known versions, ordered newest-first.
 */
public static List<Version> allVersions() {
    // Wrap defensively so callers cannot mutate the shared list.
    return Collections.unmodifiableList(SORTED_VERSIONS);
}
/**
 * A random {@link Version} from <code>minVersion</code> to
 * <code>maxVersion</code> (inclusive).
 *
 * @param minVersion
 *            the minimum version (inclusive)
 * @param maxVersion
 *            the maximum version (inclusive)
 * @return a random {@link Version} from <code>minVersion</code> to
 *         <code>maxVersion</code> (inclusive)
 */
public static Version randomVersionBetween(Version minVersion, Version maxVersion) {
    // Delegates to the Random-accepting overload using the test's seeded RNG.
    return randomVersionBetween(getRandom(), minVersion, maxVersion);
}
/**
 * A random {@link Version} from <code>minVersion</code> to
 * <code>maxVersion</code> (inclusive).
 *
 * @param random
 *            the {@link Random} to use to generate the random version
 * @param minVersion
 *            the minimum version (inclusive); <code>null</code> means the oldest known version
 * @param maxVersion
 *            the maximum version (inclusive); <code>null</code> means the newest known version
 * @return a random {@link Version} from <code>minVersion</code> to
 *         <code>maxVersion</code> (inclusive)
 * @throws IllegalArgumentException if a non-null bound is not a known version
 */
public static Version randomVersionBetween(Random random, Version minVersion, Version maxVersion) {
    // SORTED_VERSIONS is ordered newest-first, so the *min* version has the
    // largest index and the *max* version the smallest.
    // Bug fix: a null minVersion previously defaulted minVersionIndex to
    // SORTED_VERSIONS.size() (one past the oldest version's index), so the
    // draw below could produce index SORTED_VERSIONS.size() and throw an
    // IndexOutOfBoundsException. The oldest version's index is size() - 1.
    int minVersionIndex = SORTED_VERSIONS.size() - 1;
    if (minVersion != null) {
        minVersionIndex = SORTED_VERSIONS.indexOf(minVersion);
    }
    int maxVersionIndex = 0;
    if (maxVersion != null) {
        maxVersionIndex = SORTED_VERSIONS.indexOf(maxVersion);
    }
    if (minVersionIndex == -1) {
        throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist.");
    } else if (maxVersionIndex == -1) {
        throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist.");
    } else {
        // minVersionIndex is inclusive so need to add 1 to this index
        int range = minVersionIndex + 1 - maxVersionIndex;
        return SORTED_VERSIONS.get(maxVersionIndex + random.nextInt(range));
    }
}
/**
 * Return consistent index settings for the provided index version.
 */
public static ImmutableSettings.Builder settings(Version version) {
    ImmutableSettings.Builder builder = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version);
    if (version.before(Version.V_2_0_0)) {
        // Pre-2.0 indices used the DJB routing hash; pin it so routing stays
        // compatible with data written by those versions.
        builder.put(IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION, DjbHashFunction.class);
    }
    return builder;
}
/**
 * Uncaught-exception handler installed for the test suite. Swallows executor
 * rejections that happen while a node is shutting down, dumps all thread
 * stacks when native threads can no longer be created, and otherwise defers
 * to the previously installed handler.
 */
static final class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
    /** The handler that was installed before this one; always consulted last. */
    private final Thread.UncaughtExceptionHandler parent;
    private final ESLogger logger = Loggers.getLogger(getClass());

    private ElasticsearchUncaughtExceptionHandler(Thread.UncaughtExceptionHandler parent) {
        this.parent = parent;
    }

    @Override
    public void uncaughtException(Thread t, Throwable e) {
        if (e instanceof EsRejectedExecutionException) {
            // Bug fix: getMessage() may be null, so guard before contains()
            // to keep the uncaught-exception handler itself from throwing.
            final String message = e.getMessage();
            if (message != null && message.contains(EsAbortPolicy.SHUTTING_DOWN_KEY)) {
                return; // ignore the EsRejectedExecutionException when a node shuts down
            }
        } else if (e instanceof OutOfMemoryError) {
            // OutOfMemoryError frequently carries a null message; guard as above.
            final String message = e.getMessage();
            if (message != null && message.contains("unable to create new native thread")) {
                printStackDump(logger);
            }
        }
        parent.uncaughtException(t, e);
    }
}
/**
 * Logs a dump of every thread's current stack trace; used when we suspect
 * the JVM can no longer create native threads.
 */
protected static final void printStackDump(ESLogger logger) {
    // print stack traces if we can't create any native thread anymore
    Map<Thread, StackTraceElement[]> allStackTraces = Thread.getAllStackTraces();
    logger.error(formatThreadStacks(allStackTraces));
}
/**
 * Dump threads and their current stack trace. A numbered header line is
 * printed only for threads that are still alive, but the captured frames
 * are appended for every entry in the map.
 */
private static String formatThreadStacks(Map<Thread, StackTraceElement[]> threads) {
    final StringBuilder message = new StringBuilder();
    final Formatter formatter = new Formatter(message, Locale.ENGLISH);
    int threadNumber = 1;
    for (Map.Entry<Thread, StackTraceElement[]> entry : threads.entrySet()) {
        final Thread thread = entry.getKey();
        final StackTraceElement[] stack = entry.getValue();
        if (thread.isAlive()) {
            formatter.format(Locale.ENGLISH, "\n %2d) %s", threadNumber++, threadName(thread)).flush();
        }
        if (stack.length == 0) {
            message.append("\n at (empty stack)");
        } else {
            for (StackTraceElement frame : stack) {
                message.append("\n at ").append(frame);
            }
        }
    }
    return message.toString();
}
/** Renders a one-line description of a thread: id, name, state and group. */
private static String threadName(Thread t) {
    final StringBuilder sb = new StringBuilder("Thread[");
    sb.append("id=").append(t.getId());
    sb.append(", name=").append(t.getName());
    sb.append(", state=").append(t.getState());
    sb.append(", group=").append(groupName(t.getThreadGroup()));
    return sb.append(']').toString();
}
/** Returns the thread group's name, or the placeholder "{null group}" when the group is null. */
private static String groupName(ThreadGroup threadGroup) {
    return threadGroup == null ? "{null group}" : threadGroup.getName();
}
/**
 * Picks one of {@code values} uniformly at random using the test's RNG.
 *
 * @param values candidate values; must be non-empty
 * @return one of {@code values}
 */
@SafeVarargs // safe: the varargs array is only read, never stored or exposed
public static <T> T randomFrom(T... values) {
    return RandomizedTest.randomFrom(values);
}
/**
 * Generates a random {@code String[]} with length in {@code [0, maxArraySize]}
 * whose elements are random ASCII strings sized by {@code maxStringSize}.
 * When {@code allowNull} is true, returns null about half of the time.
 */
public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull) {
    if (allowNull && randomBoolean()) {
        return null;
    }
    String[] array = new String[randomInt(maxArraySize)]; // allow empty arrays
    for (int i = 0; i < array.length; i++) {
        array[i] = randomAsciiOfLength(maxStringSize);
    }
    return array;
}
/** Convenience overload of {@link #generateRandomStringArray(int, int, boolean)} that never returns null. */
public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize) {
    return generateRandomStringArray(maxArraySize, maxStringSize, false);
}
/**
 * If a test is annotated with {@link org.elasticsearch.test.ElasticsearchTestCase.CompatibilityVersion}
 * all randomized settings will only contain settings or mappings which are compatible with the specified version ID.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
@Ignore
public @interface CompatibilityVersion {
    // NOTE(review): @Ignore on this annotation type appears intended to keep
    // test tooling from treating it as a test class — confirm it is needed.
    int version();
}
/**
 * Returns a global compatibility version that is set via the
 * {@value #TESTS_COMPATIBILITY} or {@value #TESTS_BACKWARDS_COMPATIBILITY_VERSION} system property.
 * If both are unset the current version is used as the global compatibility version. This
 * compatibility version is used for static randomization. For per-suite compatibility version see
 * {@link #compatibilityVersion()}
 */
public static Version globalCompatibilityVersion() {
    // NOTE(review): the backing field's name is misspelled ("GLOABL"); it is
    // declared elsewhere in this class, so it is not renamed here.
    return GLOABL_COMPATIBILITY_VERSION;
}
/**
 * Returns the tests compatibility version for this suite, taking any
 * {@link CompatibilityVersion} annotations on the class hierarchy into account.
 */
public Version compatibilityVersion() {
    return compatibilityVersion(getClass());
}
/**
 * Walks up the class hierarchy collecting {@link CompatibilityVersion}
 * annotations and returns the smallest version found, falling back to the
 * global compatibility version at the top of the hierarchy.
 */
private Version compatibilityVersion(Class<?> clazz) {
    if (clazz == Object.class || clazz == ElasticsearchIntegrationTest.class) {
        return globalCompatibilityVersion();
    }
    final CompatibilityVersion annotation = clazz.getAnnotation(CompatibilityVersion.class);
    final Version inherited = compatibilityVersion(clazz.getSuperclass());
    if (annotation == null) {
        return inherited;
    }
    // An annotated class can only narrow (never widen) the inherited version.
    return Version.smallest(Version.fromId(annotation.version()), inherited);
}
/**
 * Reads the compatibility version system property: prefers
 * {@code TESTS_COMPATIBILITY}, falling back to
 * {@code TESTS_BACKWARDS_COMPATIBILITY_VERSION}; may return null.
 */
private static String compatibilityVersionProperty() {
    final String version = System.getProperty(TESTS_COMPATIBILITY);
    if (Strings.hasLength(version)) {
        return version;
    }
    return System.getProperty(TESTS_BACKWARDS_COMPATIBILITY_VERSION);
}
/**
 * Terminates every non-null executor, waiting up to 10 seconds each.
 * Always attempts all of them; returns true only if all terminated.
 */
public static boolean terminate(ExecutorService... services) throws InterruptedException {
    boolean allTerminated = true;
    for (ExecutorService service : services) {
        if (service == null) {
            continue;
        }
        // Call terminate unconditionally so one failure doesn't skip the rest.
        allTerminated = ThreadPool.terminate(service, 10, TimeUnit.SECONDS) && allTerminated;
    }
    return allTerminated;
}
/** Terminates the given thread pool, waiting up to 10 seconds; returns true if it terminated in time. */
public static boolean terminate(ThreadPool service) throws InterruptedException {
    return ThreadPool.terminate(service, 10, TimeUnit.SECONDS);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.repartition;
import com.facebook.presto.spi.block.ColumnarArray;
import com.google.common.annotations.VisibleForTesting;
import io.airlift.slice.SliceOutput;
import org.openjdk.jol.info.ClassLayout;
import static com.facebook.presto.array.Arrays.ExpansionFactor.LARGE;
import static com.facebook.presto.array.Arrays.ExpansionOption.PRESERVE;
import static com.facebook.presto.array.Arrays.ensureCapacity;
import static com.facebook.presto.operator.UncheckedByteArrays.setIntUnchecked;
import static io.airlift.slice.SizeOf.SIZE_OF_BYTE;
import static io.airlift.slice.SizeOf.SIZE_OF_INT;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.util.Objects.requireNonNull;
import static sun.misc.Unsafe.ARRAY_INT_INDEX_SCALE;
/**
 * Encoding buffer for ARRAY blocks: accumulates the per-row offsets and null
 * flags at this level and delegates the nested element values to a child
 * {@link BlockEncodingBuffer}. Offsets written out are relative to the rows
 * buffered so far (see {@link #appendOffsets()}).
 */
public class ArrayBlockEncodingBuffer
        extends AbstractBlockEncodingBuffer
{
    // Fixed serialized cost per top-level row: one offset int plus one null byte.
    @VisibleForTesting
    static final int POSITION_SIZE = SIZE_OF_INT + SIZE_OF_BYTE;

    private static final String NAME = "ARRAY";
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ArrayBlockEncodingBuffer.class).instanceSize();

    // The buffer for the offsets for all incoming blocks so far
    private byte[] offsetsBuffer;

    // The address that the next offset value will be written to.
    private int offsetsBufferIndex;

    // This array holds the condensed offsets for each position for the incoming block.
    private int[] offsets;

    // The last offset in the offsets buffer
    private int lastOffset;

    // This array holds the offsets into its nested values block for each row in the ArrayBlock.
    private int[] offsetsCopy;

    // The AbstractBlockEncodingBuffer for the nested values Block of the ArrayBlock
    private final BlockEncodingBuffer valuesBuffers;

    public ArrayBlockEncodingBuffer(DecodedBlockNode decodedBlockNode)
    {
        valuesBuffers = createBlockEncodingBuffers(decodedBlockNode.getChildren().get(0));
    }

    /**
     * Adds this level's fixed per-row cost to serializedRowSizes, then
     * recurses into the values buffer with a copy of the offsets (the
     * nested call mutates its offsets argument in place).
     */
    @Override
    public void accumulateSerializedRowSizes(int[] serializedRowSizes)
    {
        for (int i = 0; i < positionCount; i++) {
            serializedRowSizes[i] += POSITION_SIZE;
        }
        offsetsCopy = ensureCapacity(offsetsCopy, positionCount + 1);
        System.arraycopy(offsets, 0, offsetsCopy, 0, positionCount + 1);
        ((AbstractBlockEncodingBuffer) valuesBuffers).accumulateSerializedRowSizes(offsetsCopy, positionCount, serializedRowSizes);
    }

    /**
     * Records the batch window at this level and translates it into the
     * corresponding element range for the nested values buffer.
     */
    @Override
    public void setNextBatch(int positionsOffset, int batchSize)
    {
        this.positionsOffset = positionsOffset;
        this.batchSize = batchSize;
        // If all positions for the ArrayBlock to be copied are null, the number of positions to copy for its
        // nested values block could be 0. In such case we don't need to proceed.
        if (this.positionCount == 0) {
            return;
        }
        int offset = offsets[positionsOffset];
        valuesBuffers.setNextBatch(offset, offsets[positionsOffset + batchSize] - offset);
    }

    /** Appends the current batch's nulls, offsets, and nested values to the buffers. */
    @Override
    public void appendDataInBatch()
    {
        if (batchSize == 0) {
            return;
        }
        appendNulls();
        appendOffsets();
        valuesBuffers.appendDataInBatch();
        bufferedPositionCount += batchSize;
    }

    /**
     * Writes the buffered data in ARRAY wire format: encoding name, nested
     * values, position count, base offset (always 0) followed by the
     * buffered offsets, and finally the null flags.
     */
    @Override
    public void serializeTo(SliceOutput output)
    {
        writeLengthPrefixedString(output, NAME);
        valuesBuffers.serializeTo(output);
        output.writeInt(bufferedPositionCount);
        // offsets
        output.writeInt(0); // the base position
        if (offsetsBufferIndex > 0) {
            output.appendBytes(offsetsBuffer, 0, offsetsBufferIndex);
        }
        serializeNullsTo(output);
    }

    /** Resets all write cursors at this level and recursively in the values buffer (arrays are reused). */
    @Override
    public void resetBuffers()
    {
        bufferedPositionCount = 0;
        offsetsBufferIndex = 0;
        lastOffset = 0;
        resetNullsBuffer();
        valuesBuffers.resetBuffers();
    }

    /** Retained memory of this buffer only; the values buffer is accounted for by the caller. */
    @Override
    public long getRetainedSizeInBytes()
    {
        return INSTANCE_SIZE +
                getPositionsRetainedSizeInBytes() +
                sizeOf(offsetsBuffer) +
                sizeOf(offsets) +
                sizeOf(offsetsCopy) +
                getNullsBufferRetainedSizeInBytes();
    }

    /** Exact number of bytes {@link #serializeTo} will write, including the nested block. */
    @Override
    public long getSerializedSizeInBytes()
    {
        return NAME.length() + SIZE_OF_INT + // encoding name
                valuesBuffers.getSerializedSizeInBytes() + // nested block
                SIZE_OF_INT + // positionCount
                SIZE_OF_INT + // offset 0. The offsetsBuffer doesn't contain the offset 0 so we need to add it here.
                offsetsBufferIndex + // offsets buffer.
                getNullsBufferSerializedSizeInBytes(); // nulls
    }

    /**
     * Binds the decoded ColumnarArray to this buffer, derives the condensed
     * offsets for the selected positions, and recursively sets up the
     * nested values buffer.
     */
    @Override
    protected void setupDecodedBlockAndMapPositions(DecodedBlockNode decodedBlockNode)
    {
        requireNonNull(decodedBlockNode, "decodedBlockNode is null");
        decodedBlockNode = mapPositionsToNestedBlock(decodedBlockNode);
        ColumnarArray columnarArray = (ColumnarArray) decodedBlockNode.getDecodedBlock();
        decodedBlock = columnarArray.getNullCheckBlock();
        populateNestedPositions(columnarArray);
        ((AbstractBlockEncodingBuffer) valuesBuffers).setupDecodedBlockAndMapPositions(decodedBlockNode.getChildren().get(0));
    }

    /**
     * For each top-level row, charges POSITION_SIZE per contained element,
     * rewrites positionOffsets into element offsets for the next level, and
     * recurses into the values buffer.
     */
    @Override
    protected void accumulateSerializedRowSizes(int[] positionOffsets, int positionCount, int[] serializedRowSizes)
    {
        if (this.positionCount == 0) {
            return;
        }
        // Local lastOffset intentionally shadows the field; it tracks the
        // previous entry of positionOffsets within this call only.
        int lastOffset = positionOffsets[0];
        for (int i = 0; i < positionCount; i++) {
            int offset = positionOffsets[i + 1];
            serializedRowSizes[i] += POSITION_SIZE * (offset - lastOffset);
            lastOffset = offset;
            positionOffsets[i + 1] = offsets[offset];
        }
        ((AbstractBlockEncodingBuffer) valuesBuffers).accumulateSerializedRowSizes(positionOffsets, positionCount, serializedRowSizes);
    }

    /**
     * Builds the condensed offsets array for the selected positions and
     * registers each row's element range with the nested values buffer.
     */
    private void populateNestedPositions(ColumnarArray columnarArray)
    {
        // Reset nested level positions before checking positionCount. Failing to do so may result in valuesBuffers having stale values when positionCount is 0.
        ((AbstractBlockEncodingBuffer) valuesBuffers).resetPositions();
        if (positionCount == 0) {
            return;
        }
        offsets = ensureCapacity(offsets, positionCount + 1);
        int[] positions = getPositions();
        for (int i = 0; i < positionCount; i++) {
            int position = positions[i];
            int beginOffset = columnarArray.getOffset(position);
            int endOffset = columnarArray.getOffset(position + 1);
            int length = endOffset - beginOffset;
            offsets[i + 1] = offsets[i] + length;
            if (length > 0) {
                // beginOffset is the absolute position in the nested block. We need to subtract the base offset from it to get the logical position.
                // NOTE(review): appendPositionRange is called with beginOffset directly here — confirm it performs the base-offset adjustment itself.
                ((AbstractBlockEncodingBuffer) valuesBuffers).appendPositionRange(beginOffset, length);
            }
        }
    }

    /**
     * Writes the current batch's offsets, rebased so they continue from the
     * last offset already in the buffer, and advances lastOffset.
     */
    private void appendOffsets()
    {
        offsetsBuffer = ensureCapacity(offsetsBuffer, offsetsBufferIndex + batchSize * ARRAY_INT_INDEX_SCALE, LARGE, PRESERVE);
        int baseOffset = lastOffset - offsets[positionsOffset];
        for (int i = positionsOffset; i < positionsOffset + batchSize; i++) {
            offsetsBufferIndex = setIntUnchecked(offsetsBuffer, offsetsBufferIndex, offsets[i + 1] + baseOffset);
        }
        lastOffset = offsets[positionsOffset + batchSize] + baseOffset;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.configuration.HadoopConfiguration;
import org.apache.ignite.hadoop.mapreduce.IgniteHadoopWeightedMapReducePlanner;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters;
import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker;
import org.apache.ignite.internal.processors.hadoop.shuffle.HadoopShuffle;
import org.apache.ignite.internal.processors.hadoop.taskexecutor.HadoopEmbeddedTaskExecutor;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import java.io.IOException;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Hadoop processor.
*/
/**
 * Hadoop processor. Manages the lifecycle of the embedded Hadoop accelerator
 * components (job tracker, task executor, shuffle) and exposes the public
 * {@link Hadoop} facade.
 */
public class HadoopProcessor extends HadoopProcessorAdapter {
    /** Class to probe for Hadoop libraries in Ignite classpath. */
    private static final String HADOOP_PROBE_CLS = "org.apache.hadoop.conf.Configuration";

    /** Job ID counter. */
    private final AtomicInteger idCtr = new AtomicInteger();

    /** Hadoop context. */
    @GridToStringExclude
    private HadoopContext hctx;

    /** Hadoop facade for public API. */
    @GridToStringExclude
    private Hadoop hadoop;

    /**
     * Constructor.
     *
     * @param ctx Kernal context.
     */
    public HadoopProcessor(GridKernalContext ctx) {
        super(ctx);
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        if (ctx.isDaemon())
            return;

        HadoopConfiguration cfg = ctx.config().getHadoopConfiguration();

        // Always work on a private copy so applying defaults never mutates
        // the user-supplied configuration instance.
        if (cfg == null)
            cfg = new HadoopConfiguration();
        else
            cfg = new HadoopConfiguration(cfg);

        initializeDefaults(cfg);

        hctx = new HadoopContext(
            ctx,
            cfg,
            new HadoopJobTracker(),
            new HadoopEmbeddedTaskExecutor(),
            // TODO: IGNITE-404: Uncomment when fixed.
            //cfg.isExternalExecution() ? new HadoopExternalTaskExecutor() : new HadoopEmbeddedTaskExecutor(),
            new HadoopShuffle());

        for (HadoopComponent c : hctx.components())
            c.start(hctx);

        hadoop = new HadoopImpl(this);

        ctx.addNodeAttribute(HadoopAttributes.NAME, new HadoopAttributes(cfg));
    }

    /** {@inheritDoc} */
    @Override public void onKernalStart(boolean active) throws IgniteCheckedException {
        super.onKernalStart(active);

        // hctx is null when start() bailed out (daemon node).
        if (hctx == null)
            return;

        for (HadoopComponent c : hctx.components())
            c.onKernalStart();
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop(boolean cancel) {
        super.onKernalStop(cancel);

        if (hctx == null)
            return;

        // Stop components in reverse start order.
        List<HadoopComponent> components = hctx.components();

        for (ListIterator<HadoopComponent> it = components.listIterator(components.size()); it.hasPrevious();) {
            HadoopComponent c = it.previous();

            c.onKernalStop(cancel);
        }
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        super.stop(cancel);

        if (hctx == null)
            return;

        // Stop components in reverse start order.
        List<HadoopComponent> components = hctx.components();

        for (ListIterator<HadoopComponent> it = components.listIterator(components.size()); it.hasPrevious();) {
            HadoopComponent c = it.previous();

            c.stop(cancel);
        }
    }

    /**
     * Gets Hadoop context.
     *
     * @return Hadoop context.
     */
    public HadoopContext context() {
        return hctx;
    }

    /** {@inheritDoc} */
    @Override public Hadoop hadoop() {
        if (hadoop == null)
            throw new IllegalStateException("Hadoop accelerator is disabled (Hadoop is not in classpath, " +
                "is HADOOP_HOME environment variable set?)");

        return hadoop;
    }

    /** {@inheritDoc} */
    @Override public HadoopConfiguration config() {
        return hctx.configuration();
    }

    /** {@inheritDoc} */
    @Override public HadoopJobId nextJobId() {
        return new HadoopJobId(ctx.localNodeId(), idCtr.incrementAndGet());
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<?> submit(HadoopJobId jobId, HadoopJobInfo jobInfo) {
        // Run the submission under this processor's class loader so Hadoop
        // classes resolve consistently; always restore the previous loader.
        ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader());

        try {
            return hctx.jobTracker().submit(jobId, jobInfo);
        }
        finally {
            HadoopCommonUtils.restoreContextClassLoader(oldLdr);
        }
    }

    /** {@inheritDoc} */
    @Override public HadoopJobStatus status(HadoopJobId jobId) throws IgniteCheckedException {
        return hctx.jobTracker().status(jobId);
    }

    /** {@inheritDoc} */
    @Override public HadoopCounters counters(HadoopJobId jobId) throws IgniteCheckedException {
        return hctx.jobTracker().jobCounters(jobId);
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<?> finishFuture(HadoopJobId jobId) throws IgniteCheckedException {
        return hctx.jobTracker().finishFuture(jobId);
    }

    /** {@inheritDoc} */
    @Override public boolean kill(HadoopJobId jobId) throws IgniteCheckedException {
        return hctx.jobTracker().killJob(jobId);
    }

    /** {@inheritDoc} */
    @Override public void validateEnvironment() throws IgniteCheckedException {
        // Perform some static checks as early as possible, so that any recoverable exceptions are thrown here.
        try {
            HadoopLocations loc = HadoopClasspathUtils.locations();

            if (!F.isEmpty(loc.home()))
                U.quietAndInfo(log, HadoopClasspathUtils.HOME + " is set to " + loc.home());

            U.quietAndInfo(log, "Resolved Hadoop classpath locations: " + loc.common() + ", " + loc.hdfs() + ", " +
                loc.mapred());
        }
        catch (IOException ioe) {
            throw new IgniteCheckedException(ioe.getMessage(), ioe);
        }

        // Check if Hadoop is in parent class loader classpath.
        try {
            // Fix: use the parameterized Class<?> instead of the raw type.
            Class<?> cls = Class.forName(HADOOP_PROBE_CLS, false, getClass().getClassLoader());

            try {
                String path = cls.getProtectionDomain().getCodeSource().getLocation().toString();

                U.warn(log, "Hadoop libraries are found in Ignite classpath, this could lead to class loading " +
                    "errors (please remove all Hadoop libraries from Ignite classpath) [path=" + path + ']');
            }
            catch (Throwable ignore) {
                U.warn(log, "Hadoop libraries are found in Ignite classpath, this could lead to class loading " +
                    "errors (please remove all Hadoop libraries from Ignite classpath)");
            }
        }
        catch (Throwable ignore) {
            // All is fine.
        }

        // Try assembling Hadoop URLs.
        HadoopClassLoader.hadoopUrls();
    }

    /**
     * Initializes default hadoop configuration.
     *
     * @param cfg Hadoop configuration.
     */
    private void initializeDefaults(HadoopConfiguration cfg) {
        if (cfg.getMapReducePlanner() == null)
            cfg.setMapReducePlanner(new IgniteHadoopWeightedMapReducePlanner());
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(HadoopProcessor.class, this);
    }
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.core.dossiermgt.model;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.lar.StagedModelType;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.util.PortalUtil;
import org.oep.core.dossiermgt.service.ClpSerializer;
import org.oep.core.dossiermgt.service.DocFileLocalServiceUtil;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* @author trungdk
*/
public class DocFileClp extends BaseModelImpl<DocFile> implements DocFile {
/** Default constructor required by the Clp (client-side proxy) framework. */
public DocFileClp() {
}
/** Returns the model interface this proxy represents. */
@Override
public Class<?> getModelClass() {
    return DocFile.class;
}
/** Returns the fully qualified name of the model interface. */
@Override
public String getModelClassName() {
    return DocFile.class.getName();
}
/** Returns the primary key (the docFileId). */
@Override
public long getPrimaryKey() {
    return _docFileId;
}
/** Sets the primary key by delegating to {@link #setDocFileId(long)}. */
@Override
public void setPrimaryKey(long primaryKey) {
    setDocFileId(primaryKey);
}
/** Returns the primary key boxed as a {@link Serializable}, as required by BaseModel. */
@Override
public Serializable getPrimaryKeyObj() {
    return _docFileId;
}
/** Sets the primary key from its boxed form; expects a {@link Long}. */
@Override
public void setPrimaryKeyObj(Serializable primaryKeyObj) {
    setPrimaryKey(((Long)primaryKeyObj).longValue());
}
/**
 * Snapshots every model attribute into a name-&gt;value map; used by the
 * import/export and service layers.
 */
@Override
public Map<String, Object> getModelAttributes() {
    Map<String, Object> attributes = new HashMap<String, Object>();

    attributes.put("uuid", getUuid());
    attributes.put("docFileId", getDocFileId());
    attributes.put("userId", getUserId());
    attributes.put("groupId", getGroupId());
    attributes.put("companyId", getCompanyId());
    attributes.put("createDate", getCreateDate());
    attributes.put("modifiedDate", getModifiedDate());
    attributes.put("dossierId", getDossierId());
    attributes.put("dossierDocId", getDossierDocId());
    attributes.put("docTemplateId", getDocTemplateId());
    attributes.put("docFileVersionId", getDocFileVersionId());
    attributes.put("docFileName", getDocFileName());
    attributes.put("docFileType", getDocFileType());
    attributes.put("verifyStatus", getVerifyStatus());
    attributes.put("note", getNote());
    attributes.put("approveBy", getApproveBy());
    attributes.put("approveDate", getApproveDate());
    attributes.put("premier", getPremier());

    return attributes;
}
/**
 * Applies the given attribute map to this model. Attributes that are absent
 * from the map (or mapped to null) are left unchanged.
 */
@Override
public void setModelAttributes(Map<String, Object> attributes) {
    String uuid = (String)attributes.get("uuid");

    if (uuid != null) {
        setUuid(uuid);
    }

    Long docFileId = (Long)attributes.get("docFileId");

    if (docFileId != null) {
        setDocFileId(docFileId);
    }

    Long userId = (Long)attributes.get("userId");

    if (userId != null) {
        setUserId(userId);
    }

    Long groupId = (Long)attributes.get("groupId");

    if (groupId != null) {
        setGroupId(groupId);
    }

    Long companyId = (Long)attributes.get("companyId");

    if (companyId != null) {
        setCompanyId(companyId);
    }

    Date createDate = (Date)attributes.get("createDate");

    if (createDate != null) {
        setCreateDate(createDate);
    }

    Date modifiedDate = (Date)attributes.get("modifiedDate");

    if (modifiedDate != null) {
        setModifiedDate(modifiedDate);
    }

    Long dossierId = (Long)attributes.get("dossierId");

    if (dossierId != null) {
        setDossierId(dossierId);
    }

    Long dossierDocId = (Long)attributes.get("dossierDocId");

    if (dossierDocId != null) {
        setDossierDocId(dossierDocId);
    }

    Long docTemplateId = (Long)attributes.get("docTemplateId");

    if (docTemplateId != null) {
        setDocTemplateId(docTemplateId);
    }

    Long docFileVersionId = (Long)attributes.get("docFileVersionId");

    if (docFileVersionId != null) {
        setDocFileVersionId(docFileVersionId);
    }

    String docFileName = (String)attributes.get("docFileName");

    if (docFileName != null) {
        setDocFileName(docFileName);
    }

    Long docFileType = (Long)attributes.get("docFileType");

    if (docFileType != null) {
        setDocFileType(docFileType);
    }

    Integer verifyStatus = (Integer)attributes.get("verifyStatus");

    if (verifyStatus != null) {
        setVerifyStatus(verifyStatus);
    }

    String note = (String)attributes.get("note");

    if (note != null) {
        setNote(note);
    }

    String approveBy = (String)attributes.get("approveBy");

    if (approveBy != null) {
        setApproveBy(approveBy);
    }

    Date approveDate = (Date)attributes.get("approveDate");

    if (approveDate != null) {
        setApproveDate(approveDate);
    }

    Integer premier = (Integer)attributes.get("premier");

    if (premier != null) {
        setPremier(premier);
    }
}
/** Returns the UUID of this doc file. */
@Override
public String getUuid() {
    return _uuid;
}
/**
 * Sets the UUID locally and, when a remote model proxy is attached, mirrors
 * the change onto it via reflection (generated-code convention: reflection
 * failures are rethrown as UnsupportedOperationException).
 */
@Override
public void setUuid(String uuid) {
    _uuid = uuid;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setUuid", String.class);

            method.invoke(_docFileRemoteModel, uuid);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the doc file ID (primary key). */
@Override
public long getDocFileId() {
    return _docFileId;
}
/** Sets the docFileId locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setDocFileId(long docFileId) {
    _docFileId = docFileId;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setDocFileId", long.class);

            method.invoke(_docFileRemoteModel, docFileId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the ID of the owning user. */
@Override
public long getUserId() {
    return _userId;
}
/** Sets the userId locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setUserId(long userId) {
    _userId = userId;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setUserId", long.class);

            method.invoke(_docFileRemoteModel, userId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Resolves the owning user's UUID via PortalUtil (may consult the user service). */
@Override
public String getUserUuid() throws SystemException {
    return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
}
/** Caches the user UUID locally only; it is not propagated to the remote model. */
@Override
public void setUserUuid(String userUuid) {
    _userUuid = userUuid;
}
/** Returns the ID of the owning group (site). */
@Override
public long getGroupId() {
    return _groupId;
}
/** Sets the groupId locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setGroupId(long groupId) {
    _groupId = groupId;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setGroupId", long.class);

            method.invoke(_docFileRemoteModel, groupId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the ID of the owning company (portal instance). */
@Override
public long getCompanyId() {
    return _companyId;
}
/** Sets the companyId locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setCompanyId(long companyId) {
    _companyId = companyId;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setCompanyId", long.class);

            method.invoke(_docFileRemoteModel, companyId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the creation date of this doc file. */
@Override
public Date getCreateDate() {
    return _createDate;
}
/** Sets the creation date locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setCreateDate(Date createDate) {
    _createDate = createDate;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setCreateDate", Date.class);

            method.invoke(_docFileRemoteModel, createDate);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the last-modification date of this doc file. */
@Override
public Date getModifiedDate() {
    return _modifiedDate;
}
/** Sets the modification date locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setModifiedDate(Date modifiedDate) {
    _modifiedDate = modifiedDate;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setModifiedDate", Date.class);

            method.invoke(_docFileRemoteModel, modifiedDate);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the ID of the dossier this doc file belongs to. */
@Override
public long getDossierId() {
    return _dossierId;
}
/** Sets the dossierId locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setDossierId(long dossierId) {
    _dossierId = dossierId;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setDossierId", long.class);

            method.invoke(_docFileRemoteModel, dossierId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the ID of the associated dossier document. */
@Override
public long getDossierDocId() {
    return _dossierDocId;
}
/** Sets the dossierDocId locally and mirrors it onto the attached remote model via reflection. */
@Override
public void setDossierDocId(long dossierDocId) {
    _dossierDocId = dossierDocId;

    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();

            Method method = clazz.getMethod("setDossierDocId", long.class);

            method.invoke(_docFileRemoteModel, dossierDocId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached doc-template ID of this doc file. */
@Override
public long getDocTemplateId() {
    return _docTemplateId;
}
/** Sets the doc-template ID locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setDocTemplateId(long docTemplateId) {
    _docTemplateId = docTemplateId;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setDocTemplateId", long.class);
            method.invoke(_docFileRemoteModel, docTemplateId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached doc-file version ID of this doc file. */
@Override
public long getDocFileVersionId() {
    return _docFileVersionId;
}
/** Sets the doc-file version ID locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setDocFileVersionId(long docFileVersionId) {
    _docFileVersionId = docFileVersionId;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setDocFileVersionId",
                long.class);
            method.invoke(_docFileRemoteModel, docFileVersionId);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached doc-file name of this doc file. */
@Override
public String getDocFileName() {
    return _docFileName;
}
/** Sets the doc-file name locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setDocFileName(String docFileName) {
    _docFileName = docFileName;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setDocFileName", String.class);
            method.invoke(_docFileRemoteModel, docFileName);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached doc-file type of this doc file. */
@Override
public long getDocFileType() {
    return _docFileType;
}
/** Sets the doc-file type locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setDocFileType(long docFileType) {
    _docFileType = docFileType;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setDocFileType", long.class);
            method.invoke(_docFileRemoteModel, docFileType);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached verify status of this doc file. */
@Override
public int getVerifyStatus() {
    return _verifyStatus;
}
/** Sets the verify status locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setVerifyStatus(int verifyStatus) {
    _verifyStatus = verifyStatus;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setVerifyStatus", int.class);
            method.invoke(_docFileRemoteModel, verifyStatus);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached note of this doc file. */
@Override
public String getNote() {
    return _note;
}
/** Sets the note locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setNote(String note) {
    _note = note;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setNote", String.class);
            method.invoke(_docFileRemoteModel, note);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached approve-by value of this doc file. */
@Override
public String getApproveBy() {
    return _approveBy;
}
/** Sets the approve-by value locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setApproveBy(String approveBy) {
    _approveBy = approveBy;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setApproveBy", String.class);
            method.invoke(_docFileRemoteModel, approveBy);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached approve date of this doc file. */
@Override
public Date getApproveDate() {
    return _approveDate;
}
/** Sets the approve date locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setApproveDate(Date approveDate) {
    _approveDate = approveDate;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setApproveDate", Date.class);
            method.invoke(_docFileRemoteModel, approveDate);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Returns the locally cached premier flag of this doc file. */
@Override
public int getPremier() {
    return _premier;
}
/** Sets the premier flag locally and mirrors it reflectively onto the attached remote model (if any). */
@Override
public void setPremier(int premier) {
    _premier = premier;
    if (_docFileRemoteModel != null) {
        try {
            Class<?> clazz = _docFileRemoteModel.getClass();
            Method method = clazz.getMethod("setPremier", int.class);
            method.invoke(_docFileRemoteModel, premier);
        }
        catch (Exception e) {
            throw new UnsupportedOperationException(e);
        }
    }
}
/** Identifies this staged model by the class-name ID registered for DocFile. */
@Override
public StagedModelType getStagedModelType() {
    return new StagedModelType(PortalUtil.getClassNameId(
        DocFile.class.getName()));
}
/** Returns the service-side model this proxy mirrors, or null if detached. */
public BaseModel<?> getDocFileRemoteModel() {
    return _docFileRemoteModel;
}
/** Attaches the service-side model that setters should mirror values onto. */
public void setDocFileRemoteModel(BaseModel<?> docFileRemoteModel) {
    _docFileRemoteModel = docFileRemoteModel;
}
/**
 * Invokes an arbitrary method on the attached remote model, translating
 * parameter and return values with ClpSerializer so they are usable on
 * both sides of the class-loader boundary.
 *
 * @param methodName name of the method to invoke on the remote model
 * @param parameterTypes parameter types as seen by the local class loader
 * @param parameterValues arguments (translated before the call)
 * @return the remote result, translated back for the local class loader
 * @throws Exception on any reflection lookup or invocation failure
 */
public Object invokeOnRemoteModel(String methodName,
    Class<?>[] parameterTypes, Object[] parameterValues)
    throws Exception {
    // Translate each non-null argument for the remote side.
    Object[] remoteParameterValues = new Object[parameterValues.length];
    for (int i = 0; i < parameterValues.length; i++) {
        if (parameterValues[i] != null) {
            remoteParameterValues[i] = ClpSerializer.translateInput(parameterValues[i]);
        }
    }
    Class<?> remoteModelClass = _docFileRemoteModel.getClass();
    ClassLoader remoteModelClassLoader = remoteModelClass.getClassLoader();
    // Primitive types are class-loader independent; reload every other
    // parameter type through the remote model's class loader so getMethod
    // matches the remote signature.
    Class<?>[] remoteParameterTypes = new Class[parameterTypes.length];
    for (int i = 0; i < parameterTypes.length; i++) {
        if (parameterTypes[i].isPrimitive()) {
            remoteParameterTypes[i] = parameterTypes[i];
        }
        else {
            String parameterTypeName = parameterTypes[i].getName();
            remoteParameterTypes[i] = remoteModelClassLoader.loadClass(parameterTypeName);
        }
    }
    Method method = remoteModelClass.getMethod(methodName,
        remoteParameterTypes);
    Object returnValue = method.invoke(_docFileRemoteModel,
        remoteParameterValues);
    if (returnValue != null) {
        returnValue = ClpSerializer.translateOutput(returnValue);
    }
    return returnValue;
}
/**
 * Persists this model through the local service: adds a new row when the
 * model is new, otherwise updates the existing one.
 *
 * @throws SystemException if the service call fails
 */
@Override
public void persist() throws SystemException {
    if (this.isNew()) {
        DocFileLocalServiceUtil.addDocFile(this);
    }
    else {
        DocFileLocalServiceUtil.updateDocFile(this);
    }
}
/** Returns a dynamic proxy of this model wrapped in an AutoEscapeBeanHandler. */
@Override
public DocFile toEscapedModel() {
    return (DocFile)ProxyUtil.newProxyInstance(DocFile.class.getClassLoader(),
        new Class[] { DocFile.class }, new AutoEscapeBeanHandler(this));
}
/**
 * Creates a detached copy with every persisted attribute copied through the
 * public setters. NOTE(review): _userUuid and _docFileRemoteModel are not
 * copied (they are absent from the copy list below) — confirm intentional.
 */
@Override
public Object clone() {
    DocFileClp clone = new DocFileClp();
    clone.setUuid(getUuid());
    clone.setDocFileId(getDocFileId());
    clone.setUserId(getUserId());
    clone.setGroupId(getGroupId());
    clone.setCompanyId(getCompanyId());
    clone.setCreateDate(getCreateDate());
    clone.setModifiedDate(getModifiedDate());
    clone.setDossierId(getDossierId());
    clone.setDossierDocId(getDossierDocId());
    clone.setDocTemplateId(getDocTemplateId());
    clone.setDocFileVersionId(getDocFileVersionId());
    clone.setDocFileName(getDocFileName());
    clone.setDocFileType(getDocFileType());
    clone.setVerifyStatus(getVerifyStatus());
    clone.setNote(getNote());
    clone.setApproveBy(getApproveBy());
    clone.setApproveDate(getApproveDate());
    clone.setPremier(getPremier());
    return clone;
}
/**
 * Orders doc files by primary key. Uses {@link Long#compare(long, long)}
 * instead of the verbose three-way if/else; semantics are identical
 * (-1/0/1 collapses to negative/zero/positive, which satisfies the
 * Comparable contract).
 */
@Override
public int compareTo(DocFile docFile) {
    return Long.compare(getPrimaryKey(), docFile.getPrimaryKey());
}
/**
 * Two DocFileClp instances are equal iff they share the same primary key.
 * Consistent with {@link #hashCode()}, which is also derived from the
 * primary key. The trailing if/else that returned boolean literals is
 * collapsed into a direct comparison.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof DocFileClp)) {
        return false;
    }
    DocFileClp docFile = (DocFileClp)obj;
    return getPrimaryKey() == docFile.getPrimaryKey();
}
/** Returns the ClpSerializer class used to translate values across class loaders. */
public Class<?> getClpSerializerClass() {
    return _clpSerializerClass;
}
/**
 * Hash derived from the primary key; the narrowing cast truncates the long
 * key to its low 32 bits. Equal primary keys always hash equally, so the
 * equals/hashCode contract holds.
 */
@Override
public int hashCode() {
    return (int)getPrimaryKey();
}
/**
 * Human-readable dump of all 18 attributes, e.g. {uuid=..., docFileId=...}.
 * StringBundler capacity 37 = 2 segments per attribute + closing brace.
 */
@Override
public String toString() {
    StringBundler sb = new StringBundler(37);
    sb.append("{uuid=");
    sb.append(getUuid());
    sb.append(", docFileId=");
    sb.append(getDocFileId());
    sb.append(", userId=");
    sb.append(getUserId());
    sb.append(", groupId=");
    sb.append(getGroupId());
    sb.append(", companyId=");
    sb.append(getCompanyId());
    sb.append(", createDate=");
    sb.append(getCreateDate());
    sb.append(", modifiedDate=");
    sb.append(getModifiedDate());
    sb.append(", dossierId=");
    sb.append(getDossierId());
    sb.append(", dossierDocId=");
    sb.append(getDossierDocId());
    sb.append(", docTemplateId=");
    sb.append(getDocTemplateId());
    sb.append(", docFileVersionId=");
    sb.append(getDocFileVersionId());
    sb.append(", docFileName=");
    sb.append(getDocFileName());
    sb.append(", docFileType=");
    sb.append(getDocFileType());
    sb.append(", verifyStatus=");
    sb.append(getVerifyStatus());
    sb.append(", note=");
    sb.append(getNote());
    sb.append(", approveBy=");
    sb.append(getApproveBy());
    sb.append(", approveDate=");
    sb.append(getApproveDate());
    sb.append(", premier=");
    sb.append(getPremier());
    sb.append("}");
    return sb.toString();
}
/**
 * Serializes every attribute into the Liferay model-XML format, with each
 * value wrapped in a CDATA section under its column name.
 */
@Override
public String toXmlString() {
    StringBundler sb = new StringBundler(58);
    sb.append("<model><model-name>");
    sb.append("org.oep.core.dossiermgt.model.DocFile");
    sb.append("</model-name>");
    sb.append(
        "<column><column-name>uuid</column-name><column-value><![CDATA[");
    sb.append(getUuid());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>docFileId</column-name><column-value><![CDATA[");
    sb.append(getDocFileId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>userId</column-name><column-value><![CDATA[");
    sb.append(getUserId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>groupId</column-name><column-value><![CDATA[");
    sb.append(getGroupId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>companyId</column-name><column-value><![CDATA[");
    sb.append(getCompanyId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>createDate</column-name><column-value><![CDATA[");
    sb.append(getCreateDate());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>modifiedDate</column-name><column-value><![CDATA[");
    sb.append(getModifiedDate());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>dossierId</column-name><column-value><![CDATA[");
    sb.append(getDossierId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>dossierDocId</column-name><column-value><![CDATA[");
    sb.append(getDossierDocId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>docTemplateId</column-name><column-value><![CDATA[");
    sb.append(getDocTemplateId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>docFileVersionId</column-name><column-value><![CDATA[");
    sb.append(getDocFileVersionId());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>docFileName</column-name><column-value><![CDATA[");
    sb.append(getDocFileName());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>docFileType</column-name><column-value><![CDATA[");
    sb.append(getDocFileType());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>verifyStatus</column-name><column-value><![CDATA[");
    sb.append(getVerifyStatus());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>note</column-name><column-value><![CDATA[");
    sb.append(getNote());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>approveBy</column-name><column-value><![CDATA[");
    sb.append(getApproveBy());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>approveDate</column-name><column-value><![CDATA[");
    sb.append(getApproveDate());
    sb.append("]]></column-value></column>");
    sb.append(
        "<column><column-name>premier</column-name><column-value><![CDATA[");
    sb.append(getPremier());
    sb.append("]]></column-value></column>");
    sb.append("</model>");
    return sb.toString();
}
// Local snapshot of the DocFile entity state. Each persisted field is
// mirrored onto _docFileRemoteModel (when attached) by its setter.
private String _uuid;
private long _docFileId;
private long _userId;
private String _userUuid;
private long _groupId;
private long _companyId;
private Date _createDate;
private Date _modifiedDate;
private long _dossierId;
private long _dossierDocId;
private long _docTemplateId;
private long _docFileVersionId;
private String _docFileName;
private long _docFileType;
private int _verifyStatus;
private String _note;
private String _approveBy;
private Date _approveDate;
private int _premier;
// Service-side counterpart of this proxy; null when detached.
private BaseModel<?> _docFileRemoteModel;
private Class<?> _clpSerializerClass = org.oep.core.dossiermgt.service.ClpSerializer.class;
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.sql.impl.connector.kafka;
import com.hazelcast.jet.core.DAG;
import com.hazelcast.jet.core.Vertex;
import com.hazelcast.jet.kafka.KafkaProcessors;
import com.hazelcast.jet.sql.impl.connector.SqlConnector;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadata;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadataAvroResolver;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadataJavaResolver;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadataJsonResolver;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadataNullResolver;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadataResolver;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvMetadataResolvers;
import com.hazelcast.jet.sql.impl.connector.keyvalue.KvProcessors;
import com.hazelcast.jet.sql.impl.schema.MappingField;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.sql.impl.expression.Expression;
import com.hazelcast.sql.impl.schema.ConstantTableStatistics;
import com.hazelcast.sql.impl.schema.Table;
import com.hazelcast.sql.impl.schema.TableField;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static com.hazelcast.jet.Util.entry;
import static com.hazelcast.jet.core.Edge.between;
import static com.hazelcast.jet.core.EventTimePolicy.noEventTime;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Stream.concat;
public class KafkaSqlConnector implements SqlConnector {
public static final String TYPE_NAME = "Kafka";
private final KvMetadataResolvers metadataResolvers;
public KafkaSqlConnector() {
this.metadataResolvers = new KvMetadataResolvers(
new KvMetadataResolver[]{
KvMetadataNullResolver.INSTANCE,
KvMetadataJavaResolver.INSTANCE,
KvMetadataJsonResolver.INSTANCE,
KvMetadataAvroResolver.INSTANCE
},
new KvMetadataResolver[]{
KvMetadataJavaResolver.INSTANCE,
KvMetadataJsonResolver.INSTANCE,
KvMetadataAvroResolver.INSTANCE
}
);
}
@Override
public String typeName() {
return TYPE_NAME;
}
@Override
public boolean isStream() {
return true;
}
@Nonnull @Override
public List<MappingField> resolveAndValidateFields(
@Nonnull NodeEngine nodeEngine,
@Nonnull Map<String, String> options,
@Nonnull List<MappingField> userFields
) {
return metadataResolvers.resolveAndValidateFields(userFields, options, nodeEngine);
}
@Nonnull @Override
public Table createTable(
@Nonnull NodeEngine nodeEngine,
@Nonnull String schemaName,
@Nonnull String tableName,
@Nonnull Map<String, String> options,
@Nonnull List<MappingField> resolvedFields
) {
String topicName = options.getOrDefault(OPTION_OBJECT_NAME, tableName);
KvMetadata keyMetadata = metadataResolvers.resolveMetadata(true, resolvedFields, options, null);
KvMetadata valueMetadata = metadataResolvers.resolveMetadata(false, resolvedFields, options, null);
List<TableField> fields = concat(keyMetadata.getFields().stream(), valueMetadata.getFields().stream())
.collect(toList());
return new KafkaTable(
this,
schemaName,
tableName,
fields,
new ConstantTableStatistics(0),
topicName,
options,
keyMetadata.getQueryTargetDescriptor(),
keyMetadata.getUpsertTargetDescriptor(),
valueMetadata.getQueryTargetDescriptor(),
valueMetadata.getUpsertTargetDescriptor()
);
}
@Override
public boolean supportsFullScanReader() {
return true;
}
@Nonnull @Override
public Vertex fullScanReader(
@Nonnull DAG dag,
@Nonnull Table table0,
@Nullable Expression<Boolean> predicate,
@Nonnull List<Expression<?>> projections
) {
KafkaTable table = (KafkaTable) table0;
Vertex vStart = dag.newVertex(
table.toString(),
KafkaProcessors.streamKafkaP(
table.kafkaConsumerProperties(),
record -> entry(record.key(), record.value()),
noEventTime(),
table.topicName()
)
);
Vertex vEnd = dag.newVertex(
"Project(" + table.toString() + ")",
KvProcessors.rowProjector(
table.paths(),
table.types(),
table.keyQueryDescriptor(),
table.valueQueryDescriptor(),
predicate,
projections
)
);
dag.edge(between(vStart, vEnd).isolated());
return vEnd;
}
@Override
public boolean supportsSink() {
return true;
}
@Override
public boolean supportsInsert() {
return true;
}
@Nonnull @Override
public Vertex sink(
@Nonnull DAG dag,
@Nonnull Table table0
) {
KafkaTable table = (KafkaTable) table0;
Vertex vStart = dag.newVertex(
"Project(" + table.toString() + ")",
KvProcessors.entryProjector(
table.paths(),
table.types(),
table.keyUpsertDescriptor(),
table.valueUpsertDescriptor()
)
);
Vertex vEnd = dag.newVertex(
table.toString(),
KafkaProcessors.<Entry<Object, Object>, Object, Object>writeKafkaP(
table.kafkaProducerProperties(),
table.topicName(),
Entry::getKey,
Entry::getValue,
true
)
);
dag.edge(between(vStart, vEnd));
return vStart;
}
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth2.dcr.endpoint.impl;
import org.mockito.Mock;
import org.osgi.framework.BundleContext;
import org.osgi.util.tracker.ServiceTracker;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.testng.PowerMockTestCase;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.wso2.carbon.base.MultitenantConstants;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.context.internal.OSGiDataHolder;
import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException;
import org.wso2.carbon.identity.application.common.model.ServiceProvider;
import org.wso2.carbon.identity.application.mgt.ApplicationManagementService;
import org.wso2.carbon.identity.oauth.OAuthAdminService;
import org.wso2.carbon.identity.oauth.dcr.exception.DCRMException;
import org.wso2.carbon.identity.oauth.dcr.internal.DCRDataHolder;
import org.wso2.carbon.identity.oauth.dcr.service.DCRMService;
import org.wso2.carbon.identity.oauth2.dcr.endpoint.Exceptions.DCRMEndpointException;
import org.wso2.carbon.identity.oauth2.dcr.endpoint.TestUtil;
import org.wso2.carbon.identity.oauth2.dcr.endpoint.dto.RegistrationRequestDTO;
import org.wso2.carbon.identity.oauth2.dcr.endpoint.dto.UpdateRequestDTO;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
import static org.mockito.Matchers.any;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;
import static org.testng.Assert.assertEquals;
/**
 * Exception-path tests for RegisterApiServiceImpl: each case drives the
 * endpoint with input that must fail and asserts that the resulting
 * DCRMEndpointException maps to HTTP 500. Fix: every try/catch now calls
 * Assert.fail(...) after the service invocation, so a test can no longer
 * pass silently when the expected exception is never thrown.
 */
@PrepareForTest({BundleContext.class, ServiceTracker.class, PrivilegedCarbonContext.class, DCRDataHolder.class,
        ApplicationManagementService.class, ServiceProvider.class, OAuthAdminService.class})
public class RegisterApiServiceImplExceptionTest extends PowerMockTestCase {

    private RegisterApiServiceImpl registerApiService = null;
    private DCRMService dcrmService = new DCRMService();

    @Mock
    BundleContext bundleContext;

    @Mock
    ServiceTracker serviceTracker;

    @Mock
    DCRDataHolder dataHolder;

    @Mock
    ApplicationManagementService applicationManagementService;

    @Mock
    ServiceProvider serviceProvider;

    @Mock
    OAuthAdminService oAuthAdminService;

    @BeforeMethod
    public void setUp() throws Exception {

        // Initializing variables.
        registerApiService = new RegisterApiServiceImpl();

        // Get OSGi service by starting the tenant flow.
        whenNew(ServiceTracker.class).withAnyArguments().thenReturn(serviceTracker);
        TestUtil.startTenantFlow(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME);
        Object[] services = new Object[1];
        services[0] = dcrmService;
        when(serviceTracker.getServices()).thenReturn(services);
        OSGiDataHolder.getInstance().setBundleContext(bundleContext);
    }

    @Test
    public void testDeleteApplicationClientException() throws Exception {

        try {
            registerApiService.deleteApplication("");
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testDeleteApplicationThrowableException() throws DCRMException {

        // Test for invalid client id.
        try {
            registerApiService.deleteApplication("ClientIDInvalid");
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testGetApplicationClientException() throws Exception {

        try {
            registerApiService.getApplication("");
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testGetApplicationThrowableException() throws DCRMException {

        // Test for invalid client id.
        try {
            registerApiService.getApplication("N2QqQluzQuL5X6CtM3KZwqzLQyyy");
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testRegisterApplicationClientException() throws DCRMException {

        List<String> granttypes = new ArrayList<>();
        granttypes.add("code");
        List<String> redirectUris = new ArrayList<>();
        redirectUris.add("https://op.certification.openid.net:60845/authz_cb");

        RegistrationRequestDTO registrationRequestDTO = new RegistrationRequestDTO();
        registrationRequestDTO.setClientName("Test App");
        registrationRequestDTO.setGrantTypes(granttypes);
        registrationRequestDTO.setRedirectUris(redirectUris);

        mockStatic(DCRDataHolder.class);
        when(DCRDataHolder.getInstance()).thenReturn(dataHolder);
        when(dataHolder.getApplicationManagementService()).thenReturn(applicationManagementService);

        try {
            registerApiService.registerApplication(registrationRequestDTO);
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testRegisterApplicationServerException() throws DCRMException, IdentityApplicationManagementException {

        List<String> granttypes = new ArrayList<>();
        granttypes.add("code");
        List<String> redirectUris = new ArrayList<>();
        redirectUris.add("https://op.certification.openid.net:60845/authz_cb");

        RegistrationRequestDTO registrationRequestDTO = new RegistrationRequestDTO();
        registrationRequestDTO.setClientName("Test App");
        registrationRequestDTO.setGrantTypes(granttypes);
        registrationRequestDTO.setRedirectUris(redirectUris);

        mockStatic(DCRDataHolder.class);
        when(DCRDataHolder.getInstance()).thenReturn(dataHolder);
        when(dataHolder.getApplicationManagementService()).thenReturn(applicationManagementService);
        when(applicationManagementService.getServiceProvider(any(String.class), any(String.class))).
                thenThrow(new IdentityApplicationManagementException("execption"));

        try {
            registerApiService.registerApplication(registrationRequestDTO);
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testRegisterApplicationThrowableException() throws DCRMException {

        // Test for an empty client name.
        RegistrationRequestDTO registrationRequestDTO = new RegistrationRequestDTO();
        registrationRequestDTO.setClientName("");

        try {
            registerApiService.registerApplication(registrationRequestDTO);
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testUpdateApplicationClientException() throws DCRMException {

        List<String> granttypes = new ArrayList<>();
        granttypes.add("code");
        List<String> redirectUris = new ArrayList<>();
        redirectUris.add("https://op.certification.openid.net:60845/authz_cb");

        UpdateRequestDTO updateRequestDTO = new UpdateRequestDTO();
        updateRequestDTO.setClientName("Test App");
        updateRequestDTO.setGrantTypes(granttypes);
        updateRequestDTO.setRedirectUris(redirectUris);

        mockStatic(DCRDataHolder.class);
        when(DCRDataHolder.getInstance()).thenReturn(dataHolder);
        when(dataHolder.getApplicationManagementService()).thenReturn(applicationManagementService);

        // Test when clientID is empty.
        try {
            registerApiService.updateApplication(updateRequestDTO, "");
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }

    @Test
    public void testUpdateApplicationThrowableException() throws DCRMException {

        // Test for invalid client id.
        UpdateRequestDTO updateRequestDTO = new UpdateRequestDTO();
        updateRequestDTO.setClientName("");

        try {
            registerApiService.updateApplication(updateRequestDTO, "ClientID");
            Assert.fail("Expected DCRMEndpointException was not thrown.");
        } catch (DCRMEndpointException e) {
            assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.TruncateOp;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormat.renameReservedPathsOnUpgrade;
import static org.apache.hadoop.util.Time.monotonicNow;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
import org.apache.hadoop.hdfs.protocol.LayoutVersion;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstructionContiguous;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.RollingUpgradeStartupOption;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AddBlockOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AddCacheDirectiveInfoOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AddCachePoolOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AddCloseOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AllocateBlockIdOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AllowSnapshotOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.AppendOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.BlockListUpdatingOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.CancelDelegationTokenOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.ClearNSQuotaOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.ConcatDeleteOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.CreateSnapshotOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.DeleteOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.DeleteSnapshotOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.DisallowSnapshotOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.GetDelegationTokenOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.MkdirOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.ModifyCacheDirectiveInfoOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.ModifyCachePoolOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.ReassignLeaseOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveCacheDirectiveInfoOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveCachePoolOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveXAttrOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenameOldOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenameOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenameSnapshotOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenewDelegationTokenOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RollingUpgradeOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetAclOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetGenstampV1Op;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetGenstampV2Op;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetNSQuotaOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetOwnerOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetPermissionsOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetQuotaOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetReplicationOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetStoragePolicyOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetXAttrOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SymlinkOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.TimesOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.UpdateBlocksOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.UpdateMasterKeyOp;
import org.apache.hadoop.hdfs.server.namenode.INode.BlocksMapUpdateInfo;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager.Lease;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress.Counter;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
import org.apache.hadoop.hdfs.util.Holder;
import org.apache.hadoop.util.ChunkedArrayList;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FSEditLogLoader {
static final Log LOG = LogFactory.getLog(FSEditLogLoader.class.getName());
// Minimum delay between successive "replaying edit log" progress messages
// while loading a long log (milliseconds).
static final long REPLAY_TRANSACTION_LOG_INTERVAL = 1000; // 1sec
// The in-memory namespace that decoded edit operations are applied to.
private final FSNamesystem fsNamesys;
// Highest transaction id applied so far; advanced as ops are replayed.
private long lastAppliedTxId;
/** Total number of end transactions loaded. */
private int totalEdits = 0;
/**
 * Creates a loader that applies edit log operations to the given
 * namesystem.
 *
 * @param fsNamesys the namespace to apply edits to
 * @param lastAppliedTxId highest transaction id already applied before
 *     this loader starts
 */
public FSEditLogLoader(FSNamesystem fsNamesys, long lastAppliedTxId) {
this.fsNamesys = fsNamesys;
this.lastAppliedTxId = lastAppliedTxId;
}
/**
 * Convenience overload: load edits with no startup option and no
 * recovery context, so any read or apply error is fatal.
 *
 * @param edits stream of edit log operations; closed by the callee
 * @param expectedStartingTxId transaction id expected for the first op
 * @return the number of edits applied
 */
long loadFSEdits(EditLogInputStream edits, long expectedStartingTxId)
throws IOException {
return loadFSEdits(edits, expectedStartingTxId, null, null);
}
/**
 * Load an edit log, and apply the changes to the in-memory structure
 * This is where we apply edits that we've been writing to disk all
 * along.
 *
 * @param edits input stream of serialized ops; always closed on return
 * @param expectedStartingTxId txid expected for the first op in the stream
 * @param startOpt startup option in effect; may be null
 * @param recovery non-null enables interactive recovery (skipping bad
 *     sections of the log); null means errors are fatal
 * @return the number of edits applied
 */
long loadFSEdits(EditLogInputStream edits, long expectedStartingTxId,
StartupOption startOpt, MetaRecoveryContext recovery) throws IOException {
StartupProgress prog = NameNode.getStartupProgress();
Step step = createStartupProgressStep(edits);
prog.beginStep(Phase.LOADING_EDITS, step);
// Hold the namesystem write lock for the whole replay so readers never
// observe a partially-applied log.
fsNamesys.writeLock();
try {
long startTime = monotonicNow();
FSImage.LOG.info("Start loading edits file " + edits.getName());
long numEdits = loadEditRecords(edits, false, expectedStartingTxId,
startOpt, recovery);
FSImage.LOG.info("Edits file " + edits.getName()
+ " of size " + edits.length() + " edits # " + numEdits
+ " loaded in " + (monotonicNow()-startTime)/1000 + " seconds");
return numEdits;
} finally {
// Close the stream before releasing the lock and ending the
// startup-progress step, even if loading failed.
edits.close();
fsNamesys.writeUnlock();
prog.endStep(Phase.LOADING_EDITS, step);
}
}
/**
 * Read ops from {@code in} and apply each one to the namespace until the
 * stream is exhausted, an unrecoverable error occurs, or a rollback /
 * recovery-stop request ends the replay early.
 *
 * @param in edit log stream positioned at the first op to read
 * @param closeOnExit whether to close {@code in} when done
 * @param expectedStartingTxId txid expected for the first op in the stream
 * @param startOpt startup option in effect; may be null
 * @param recovery non-null enables prompting/skipping on errors
 * @return the number of edits applied
 */
long loadEditRecords(EditLogInputStream in, boolean closeOnExit,
long expectedStartingTxId, StartupOption startOpt,
MetaRecoveryContext recovery) throws IOException {
FSDirectory fsDir = fsNamesys.dir;
// Per-opcode counters, dumped at debug level when loading finishes.
EnumMap<FSEditLogOpCodes, Holder<Integer>> opCounts =
new EnumMap<FSEditLogOpCodes, Holder<Integer>>(FSEditLogOpCodes.class);
if (LOG.isTraceEnabled()) {
LOG.trace("Acquiring write lock to replay edit log");
}
fsNamesys.writeLock();
fsDir.writeLock();
// Ring buffer of byte offsets of the most recently read ops, used to
// build a helpful error message if replay fails.
long recentOpcodeOffsets[] = new long[4];
Arrays.fill(recentOpcodeOffsets, -1);
long expectedTxId = expectedStartingTxId;
long numEdits = 0;
long lastTxId = in.getLastTxId();
long numTxns = (lastTxId - expectedStartingTxId) + 1;
StartupProgress prog = NameNode.getStartupProgress();
Step step = createStartupProgressStep(in);
prog.setTotal(Phase.LOADING_EDITS, step, numTxns);
Counter counter = prog.getCounter(Phase.LOADING_EDITS, step);
long lastLogTime = monotonicNow();
long lastInodeId = fsNamesys.dir.getLastInodeId();
try {
while (true) {
try {
FSEditLogOp op;
try {
op = in.readOp();
if (op == null) {
break;
}
} catch (Throwable e) {
// Handle a problem with our input
check203UpgradeFailure(in.getVersion(true), e);
String errorMessage =
formatEditLogReplayError(in, recentOpcodeOffsets, expectedTxId);
FSImage.LOG.error(errorMessage, e);
if (recovery == null) {
// We will only try to skip over problematic opcodes when in
// recovery mode.
throw new EditLogInputException(errorMessage, e, numEdits);
}
MetaRecoveryContext.editLogLoaderPrompt(
"We failed to read txId " + expectedTxId,
recovery, "skipping the bad section in the log");
in.resync();
continue;
}
recentOpcodeOffsets[(int)(numEdits % recentOpcodeOffsets.length)] =
in.getPosition();
if (op.hasTransactionId()) {
// Gaps may be tolerated (recovery prompts first when available);
// out-of-order edits are skipped entirely.
if (op.getTransactionId() > expectedTxId) {
MetaRecoveryContext.editLogLoaderPrompt("There appears " +
"to be a gap in the edit log. We expected txid " +
expectedTxId + ", but got txid " +
op.getTransactionId() + ".", recovery, "ignoring missing " +
" transaction IDs");
} else if (op.getTransactionId() < expectedTxId) {
MetaRecoveryContext.editLogLoaderPrompt("There appears " +
"to be an out-of-order edit in the edit log. We " +
"expected txid " + expectedTxId + ", but got txid " +
op.getTransactionId() + ".", recovery,
"skipping the out-of-order edit");
continue;
}
}
try {
if (LOG.isTraceEnabled()) {
LOG.trace("op=" + op + ", startOpt=" + startOpt
+ ", numEdits=" + numEdits + ", totalEdits=" + totalEdits);
}
long inodeId = applyEditLogOp(op, fsDir, startOpt,
in.getVersion(true), lastInodeId);
if (lastInodeId < inodeId) {
lastInodeId = inodeId;
}
} catch (RollingUpgradeOp.RollbackException e) {
// Propagate to the outer catch, which stops the replay loop.
throw e;
} catch (Throwable e) {
LOG.error("Encountered exception on operation " + op, e);
if (recovery == null) {
throw e instanceof IOException? (IOException)e: new IOException(e);
}
MetaRecoveryContext.editLogLoaderPrompt("Failed to " +
"apply edit log operation " + op + ": error " +
e.getMessage(), recovery, "applying edits");
}
// Now that the operation has been successfully decoded and
// applied, update our bookkeeping.
incrOpCount(op.opCode, opCounts, step, counter);
if (op.hasTransactionId()) {
lastAppliedTxId = op.getTransactionId();
expectedTxId = lastAppliedTxId + 1;
} else {
expectedTxId = lastAppliedTxId = expectedStartingTxId;
}
// log progress
if (op.hasTransactionId()) {
long now = monotonicNow();
if (now - lastLogTime > REPLAY_TRANSACTION_LOG_INTERVAL) {
long deltaTxId = lastAppliedTxId - expectedStartingTxId + 1;
int percent = Math.round((float) deltaTxId / numTxns * 100);
LOG.info("replaying edit log: " + deltaTxId + "/" + numTxns
+ " transactions completed. (" + percent + "%)");
lastLogTime = now;
}
}
numEdits++;
totalEdits++;
} catch (RollingUpgradeOp.RollbackException e) {
LOG.info("Stopped at OP_START_ROLLING_UPGRADE for rollback.");
break;
} catch (MetaRecoveryContext.RequestStopException e) {
MetaRecoveryContext.LOG.warn("Stopped reading edit log at " +
in.getPosition() + "/" + in.length());
break;
}
}
} finally {
fsNamesys.dir.resetLastInodeId(lastInodeId);
if(closeOnExit) {
in.close();
}
// Release locks in the reverse order they were taken.
fsDir.writeUnlock();
fsNamesys.writeUnlock();
if (LOG.isTraceEnabled()) {
LOG.trace("replaying edit log finished");
}
if (FSImage.LOG.isDebugEnabled()) {
dumpOpCounts(opCounts);
}
}
return numEdits;
}
// allocate and update last allocated inode id
private long getAndUpdateLastInodeId(long inodeIdFromOp, int logVersion,
    long lastInodeId) throws IOException {
  if (inodeIdFromOp != HdfsConstants.GRANDFATHER_INODE_ID) {
    // The op carries a concrete inode id. fsnamesys gets lastInodeId
    // initially from the fsimage, but the edit log may record more
    // recent allocations, so push the allocator forward when needed.
    if (inodeIdFromOp > lastInodeId) {
      fsNamesys.dir.resetLastInodeId(inodeIdFromOp);
    }
    return inodeIdFromOp;
  }
  // Legacy op with no inode id: this is only acceptable for layouts that
  // predate ADD_INODE_ID; otherwise the log entry is bogus.
  if (NameNodeLayoutVersion.supports(
      LayoutVersion.Feature.ADD_INODE_ID, logVersion)) {
    throw new IOException("The layout version " + logVersion
        + " supports inodeId but gave bogus inodeId");
  }
  return fsNamesys.dir.allocateNewInodeId();
}
/**
 * Decode and apply a single edit log operation to the in-memory namespace.
 *
 * @param op the operation to apply
 * @param fsDir the directory tree to mutate
 * @param startOpt startup option; consulted only for rolling-upgrade ops
 * @param logVersion layout version of the log being replayed
 * @param lastInodeId highest inode id seen so far by the caller
 * @return the inode id allocated or observed by this op, or
 *     HdfsConstants.GRANDFATHER_INODE_ID when the op involves none
 * @throws IOException if the op is invalid or cannot be applied
 */
@SuppressWarnings("deprecation")
private long applyEditLogOp(FSEditLogOp op, FSDirectory fsDir,
StartupOption startOpt, int logVersion, long lastInodeId) throws IOException {
long inodeId = HdfsConstants.GRANDFATHER_INODE_ID;
if (LOG.isTraceEnabled()) {
LOG.trace("replaying edit log: " + op);
}
// Ops that carry RPC ids are re-inserted into the retry cache so retried
// client calls are recognized after a restart or failover.
final boolean toAddRetryCache = fsNamesys.hasRetryCache() && op.hasRpcIds();
switch (op.opCode) {
case OP_ADD: {
AddCloseOp addCloseOp = (AddCloseOp)op;
final String path =
renameReservedPathsOnUpgrade(addCloseOp.path, logVersion);
if (FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug(op.opCode + ": " + path +
" numblocks : " + addCloseOp.blocks.length +
" clientHolder " + addCloseOp.clientName +
" clientMachine " + addCloseOp.clientMachine);
}
// There are 3 cases here:
// 1. OP_ADD to create a new file
// 2. OP_ADD to update file blocks
// 3. OP_ADD to open file for append (old append)
// See if the file already exists (persistBlocks call)
INodesInPath iip = fsDir.getINodesInPath(path, true);
INodeFile oldFile = INodeFile.valueOf(iip.getLastINode(), path, true);
if (oldFile != null && addCloseOp.overwrite) {
// This is OP_ADD with overwrite
FSDirDeleteOp.deleteForEditLog(fsDir, path, addCloseOp.mtime);
iip = INodesInPath.replace(iip, iip.length() - 1, null);
oldFile = null;
}
INodeFile newFile = oldFile;
if (oldFile == null) { // this is OP_ADD on a new file (case 1)
// versions > 0 support per file replication
// get name and replication
final short replication = fsNamesys.getBlockManager()
.adjustReplication(addCloseOp.replication);
assert addCloseOp.blocks.length == 0;
// add to the file tree
inodeId = getAndUpdateLastInodeId(addCloseOp.inodeId, logVersion, lastInodeId);
newFile = FSDirWriteFileOp.addFileForEditLog(fsDir, inodeId,
iip.getExistingINodes(), iip.getLastLocalName(),
addCloseOp.permissions, addCloseOp.aclEntries,
addCloseOp.xAttrs, replication, addCloseOp.mtime,
addCloseOp.atime, addCloseOp.blockSize, true,
addCloseOp.clientName, addCloseOp.clientMachine,
addCloseOp.storagePolicyId);
iip = INodesInPath.replace(iip, iip.length() - 1, newFile);
fsNamesys.leaseManager.addLease(addCloseOp.clientName, newFile.getId());
// add the op into retry cache if necessary
if (toAddRetryCache) {
HdfsFileStatus stat = FSDirStatAndListingOp.createFileStatusForEditLog(
fsNamesys.dir, path, HdfsFileStatus.EMPTY_NAME, newFile,
HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
false, iip);
fsNamesys.addCacheEntryWithPayload(addCloseOp.rpcClientId,
addCloseOp.rpcCallId, stat);
}
} else { // This is OP_ADD on an existing file (old append)
if (!oldFile.isUnderConstruction()) {
// This is case 3: a call to append() on an already-closed file.
if (FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug("Reopening an already-closed file " +
"for append");
}
LocatedBlock lb = fsNamesys.prepareFileForAppend(path, iip,
addCloseOp.clientName, addCloseOp.clientMachine, false, false,
false);
// add the op into retry cache if necessary
if (toAddRetryCache) {
HdfsFileStatus stat = FSDirStatAndListingOp.createFileStatusForEditLog(
fsNamesys.dir, path,
HdfsFileStatus.EMPTY_NAME, newFile,
HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED,
Snapshot.CURRENT_STATE_ID, false, iip);
fsNamesys.addCacheEntryWithPayload(addCloseOp.rpcClientId,
addCloseOp.rpcCallId, new LastBlockWithStatus(lb, stat));
}
}
}
// Fall-through for case 2.
// Regardless of whether it's a new file or an updated file,
// update the block list.
// Update the salient file attributes.
newFile.setAccessTime(addCloseOp.atime, Snapshot.CURRENT_STATE_ID);
newFile.setModificationTime(addCloseOp.mtime, Snapshot.CURRENT_STATE_ID);
updateBlocks(fsDir, addCloseOp, iip, newFile);
break;
}
case OP_CLOSE: {
AddCloseOp addCloseOp = (AddCloseOp)op;
final String path =
renameReservedPathsOnUpgrade(addCloseOp.path, logVersion);
if (FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug(op.opCode + ": " + path +
" numblocks : " + addCloseOp.blocks.length +
" clientHolder " + addCloseOp.clientName +
" clientMachine " + addCloseOp.clientMachine);
}
final INodesInPath iip = fsDir.getINodesInPath(path, true);
final INodeFile file = INodeFile.valueOf(iip.getLastINode(), path);
// Update the salient file attributes.
file.setAccessTime(addCloseOp.atime, Snapshot.CURRENT_STATE_ID);
file.setModificationTime(addCloseOp.mtime, Snapshot.CURRENT_STATE_ID);
updateBlocks(fsDir, addCloseOp, iip, file);
// Now close the file
if (!file.isUnderConstruction() &&
logVersion <= LayoutVersion.BUGFIX_HDFS_2991_VERSION) {
// There was a bug (HDFS-2991) in hadoop < 0.23.1 where OP_CLOSE
// could show up twice in a row. But after that version, this
// should be fixed, so we should treat it as an error.
throw new IOException(
"File is not under construction: " + path);
}
// One might expect that you could use removeLease(holder, path) here,
// but OP_CLOSE doesn't serialize the holder. So, remove the inode.
if (file.isUnderConstruction()) {
fsNamesys.leaseManager.removeLeases(Lists.newArrayList(file.getId()));
file.toCompleteFile(file.getModificationTime());
}
break;
}
case OP_APPEND: {
AppendOp appendOp = (AppendOp) op;
final String path = renameReservedPathsOnUpgrade(appendOp.path,
logVersion);
if (FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug(op.opCode + ": " + path +
" clientName " + appendOp.clientName +
" clientMachine " + appendOp.clientMachine +
" newBlock " + appendOp.newBlock);
}
INodesInPath iip = fsDir.getINodesInPath4Write(path);
INodeFile file = INodeFile.valueOf(iip.getLastINode(), path);
if (!file.isUnderConstruction()) {
LocatedBlock lb = fsNamesys.prepareFileForAppend(path, iip,
appendOp.clientName, appendOp.clientMachine, appendOp.newBlock,
false, false);
// add the op into retry cache if necessary
if (toAddRetryCache) {
HdfsFileStatus stat = FSDirStatAndListingOp.createFileStatusForEditLog(
fsNamesys.dir, path, HdfsFileStatus.EMPTY_NAME, file,
HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED,
Snapshot.CURRENT_STATE_ID, false, iip);
fsNamesys.addCacheEntryWithPayload(appendOp.rpcClientId,
appendOp.rpcCallId, new LastBlockWithStatus(lb, stat));
}
}
break;
}
case OP_UPDATE_BLOCKS: {
UpdateBlocksOp updateOp = (UpdateBlocksOp)op;
final String path =
renameReservedPathsOnUpgrade(updateOp.path, logVersion);
if (FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug(op.opCode + ": " + path +
" numblocks : " + updateOp.blocks.length);
}
INodesInPath iip = fsDir.getINodesInPath(path, true);
INodeFile oldFile = INodeFile.valueOf(iip.getLastINode(), path);
// Update in-memory data structures
updateBlocks(fsDir, updateOp, iip, oldFile);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(updateOp.rpcClientId, updateOp.rpcCallId);
}
break;
}
case OP_ADD_BLOCK: {
AddBlockOp addBlockOp = (AddBlockOp) op;
String path = renameReservedPathsOnUpgrade(addBlockOp.getPath(), logVersion);
if (FSNamesystem.LOG.isDebugEnabled()) {
FSNamesystem.LOG.debug(op.opCode + ": " + path +
" new block id : " + addBlockOp.getLastBlock().getBlockId());
}
INodeFile oldFile = INodeFile.valueOf(fsDir.getINode(path), path);
// add the new block to the INodeFile
addNewBlock(fsDir, addBlockOp, oldFile);
break;
}
case OP_SET_REPLICATION: {
SetReplicationOp setReplicationOp = (SetReplicationOp)op;
short replication = fsNamesys.getBlockManager().adjustReplication(
setReplicationOp.replication);
FSDirAttrOp.unprotectedSetReplication(fsDir, renameReservedPathsOnUpgrade(
setReplicationOp.path, logVersion), replication, null);
break;
}
case OP_CONCAT_DELETE: {
ConcatDeleteOp concatDeleteOp = (ConcatDeleteOp)op;
String trg = renameReservedPathsOnUpgrade(concatDeleteOp.trg, logVersion);
String[] srcs = new String[concatDeleteOp.srcs.length];
for (int i=0; i<srcs.length; i++) {
srcs[i] =
renameReservedPathsOnUpgrade(concatDeleteOp.srcs[i], logVersion);
}
INodesInPath targetIIP = fsDir.getINodesInPath4Write(trg);
INodeFile[] srcFiles = new INodeFile[srcs.length];
for (int i = 0; i < srcs.length; i++) {
INodesInPath srcIIP = fsDir.getINodesInPath4Write(srcs[i]);
srcFiles[i] = srcIIP.getLastINode().asFile();
}
FSDirConcatOp.unprotectedConcat(fsDir, targetIIP, srcFiles,
concatDeleteOp.timestamp);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(concatDeleteOp.rpcClientId,
concatDeleteOp.rpcCallId);
}
break;
}
case OP_RENAME_OLD: {
RenameOldOp renameOp = (RenameOldOp)op;
final String src = renameReservedPathsOnUpgrade(renameOp.src, logVersion);
final String dst = renameReservedPathsOnUpgrade(renameOp.dst, logVersion);
FSDirRenameOp.renameForEditLog(fsDir, src, dst, renameOp.timestamp);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(renameOp.rpcClientId, renameOp.rpcCallId);
}
break;
}
case OP_DELETE: {
DeleteOp deleteOp = (DeleteOp)op;
FSDirDeleteOp.deleteForEditLog(
fsDir, renameReservedPathsOnUpgrade(deleteOp.path, logVersion),
deleteOp.timestamp);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(deleteOp.rpcClientId, deleteOp.rpcCallId);
}
break;
}
case OP_MKDIR: {
MkdirOp mkdirOp = (MkdirOp)op;
inodeId = getAndUpdateLastInodeId(mkdirOp.inodeId, logVersion,
lastInodeId);
FSDirMkdirOp.mkdirForEditLog(fsDir, inodeId,
renameReservedPathsOnUpgrade(mkdirOp.path, logVersion),
mkdirOp.permissions, mkdirOp.aclEntries, mkdirOp.timestamp);
break;
}
case OP_SET_GENSTAMP_V1: {
SetGenstampV1Op setGenstampV1Op = (SetGenstampV1Op)op;
fsNamesys.getBlockIdManager().setGenerationStampV1(
setGenstampV1Op.genStampV1);
break;
}
case OP_SET_PERMISSIONS: {
SetPermissionsOp setPermissionsOp = (SetPermissionsOp)op;
FSDirAttrOp.unprotectedSetPermission(fsDir, renameReservedPathsOnUpgrade(
setPermissionsOp.src, logVersion), setPermissionsOp.permissions);
break;
}
case OP_SET_OWNER: {
SetOwnerOp setOwnerOp = (SetOwnerOp)op;
FSDirAttrOp.unprotectedSetOwner(
fsDir, renameReservedPathsOnUpgrade(setOwnerOp.src, logVersion),
setOwnerOp.username, setOwnerOp.groupname);
break;
}
case OP_SET_NS_QUOTA: {
SetNSQuotaOp setNSQuotaOp = (SetNSQuotaOp)op;
FSDirAttrOp.unprotectedSetQuota(
fsDir, renameReservedPathsOnUpgrade(setNSQuotaOp.src, logVersion),
setNSQuotaOp.nsQuota, HdfsConstants.QUOTA_DONT_SET, null);
break;
}
case OP_CLEAR_NS_QUOTA: {
ClearNSQuotaOp clearNSQuotaOp = (ClearNSQuotaOp)op;
FSDirAttrOp.unprotectedSetQuota(
fsDir, renameReservedPathsOnUpgrade(clearNSQuotaOp.src, logVersion),
HdfsConstants.QUOTA_RESET, HdfsConstants.QUOTA_DONT_SET, null);
break;
}
case OP_SET_QUOTA:
SetQuotaOp setQuotaOp = (SetQuotaOp) op;
FSDirAttrOp.unprotectedSetQuota(fsDir,
renameReservedPathsOnUpgrade(setQuotaOp.src, logVersion),
setQuotaOp.nsQuota, setQuotaOp.dsQuota, null);
break;
case OP_SET_QUOTA_BY_STORAGETYPE:
FSEditLogOp.SetQuotaByStorageTypeOp setQuotaByStorageTypeOp =
(FSEditLogOp.SetQuotaByStorageTypeOp) op;
FSDirAttrOp.unprotectedSetQuota(fsDir,
renameReservedPathsOnUpgrade(setQuotaByStorageTypeOp.src, logVersion),
HdfsConstants.QUOTA_DONT_SET, setQuotaByStorageTypeOp.dsQuota,
setQuotaByStorageTypeOp.type);
break;
case OP_TIMES: {
TimesOp timesOp = (TimesOp)op;
FSDirAttrOp.unprotectedSetTimes(
fsDir, renameReservedPathsOnUpgrade(timesOp.path, logVersion),
timesOp.mtime, timesOp.atime, true);
break;
}
case OP_SYMLINK: {
if (!FileSystem.areSymlinksEnabled()) {
throw new IOException("Symlinks not supported - please remove symlink before upgrading to this version of HDFS");
}
SymlinkOp symlinkOp = (SymlinkOp)op;
inodeId = getAndUpdateLastInodeId(symlinkOp.inodeId, logVersion,
lastInodeId);
final String path = renameReservedPathsOnUpgrade(symlinkOp.path,
logVersion);
final INodesInPath iip = fsDir.getINodesInPath(path, false);
FSDirSymlinkOp.unprotectedAddSymlink(fsDir, iip.getExistingINodes(),
iip.getLastLocalName(), inodeId, symlinkOp.value, symlinkOp.mtime,
symlinkOp.atime, symlinkOp.permissionStatus);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(symlinkOp.rpcClientId, symlinkOp.rpcCallId);
}
break;
}
case OP_RENAME: {
RenameOp renameOp = (RenameOp)op;
FSDirRenameOp.renameForEditLog(fsDir,
renameReservedPathsOnUpgrade(renameOp.src, logVersion),
renameReservedPathsOnUpgrade(renameOp.dst, logVersion),
renameOp.timestamp, renameOp.options);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(renameOp.rpcClientId, renameOp.rpcCallId);
}
break;
}
case OP_GET_DELEGATION_TOKEN: {
GetDelegationTokenOp getDelegationTokenOp
= (GetDelegationTokenOp)op;
fsNamesys.getDelegationTokenSecretManager()
.addPersistedDelegationToken(getDelegationTokenOp.token,
getDelegationTokenOp.expiryTime);
break;
}
case OP_RENEW_DELEGATION_TOKEN: {
RenewDelegationTokenOp renewDelegationTokenOp
= (RenewDelegationTokenOp)op;
fsNamesys.getDelegationTokenSecretManager()
.updatePersistedTokenRenewal(renewDelegationTokenOp.token,
renewDelegationTokenOp.expiryTime);
break;
}
case OP_CANCEL_DELEGATION_TOKEN: {
CancelDelegationTokenOp cancelDelegationTokenOp
= (CancelDelegationTokenOp)op;
fsNamesys.getDelegationTokenSecretManager()
.updatePersistedTokenCancellation(
cancelDelegationTokenOp.token);
break;
}
case OP_UPDATE_MASTER_KEY: {
UpdateMasterKeyOp updateMasterKeyOp = (UpdateMasterKeyOp)op;
fsNamesys.getDelegationTokenSecretManager()
.updatePersistedMasterKey(updateMasterKeyOp.key);
break;
}
case OP_REASSIGN_LEASE: {
ReassignLeaseOp reassignLeaseOp = (ReassignLeaseOp)op;
Lease lease = fsNamesys.leaseManager.getLease(
reassignLeaseOp.leaseHolder);
final String path =
renameReservedPathsOnUpgrade(reassignLeaseOp.path, logVersion);
INodeFile pendingFile = fsDir.getINode(path).asFile();
Preconditions.checkState(pendingFile.isUnderConstruction());
fsNamesys.reassignLeaseInternal(lease, reassignLeaseOp.newHolder,
pendingFile);
break;
}
case OP_START_LOG_SEGMENT:
case OP_END_LOG_SEGMENT: {
// no data in here currently.
break;
}
case OP_CREATE_SNAPSHOT: {
CreateSnapshotOp createSnapshotOp = (CreateSnapshotOp) op;
final String snapshotRoot =
renameReservedPathsOnUpgrade(createSnapshotOp.snapshotRoot,
logVersion);
INodesInPath iip = fsDir.getINodesInPath4Write(snapshotRoot);
String path = fsNamesys.getSnapshotManager().createSnapshot(iip,
snapshotRoot, createSnapshotOp.snapshotName);
if (toAddRetryCache) {
fsNamesys.addCacheEntryWithPayload(createSnapshotOp.rpcClientId,
createSnapshotOp.rpcCallId, path);
}
break;
}
case OP_DELETE_SNAPSHOT: {
DeleteSnapshotOp deleteSnapshotOp = (DeleteSnapshotOp) op;
BlocksMapUpdateInfo collectedBlocks = new BlocksMapUpdateInfo();
List<INode> removedINodes = new ChunkedArrayList<INode>();
final String snapshotRoot =
renameReservedPathsOnUpgrade(deleteSnapshotOp.snapshotRoot,
logVersion);
INodesInPath iip = fsDir.getINodesInPath4Write(snapshotRoot);
fsNamesys.getSnapshotManager().deleteSnapshot(iip,
deleteSnapshotOp.snapshotName,
new INode.ReclaimContext(fsNamesys.dir.getBlockStoragePolicySuite(),
collectedBlocks, removedINodes, null));
fsNamesys.removeBlocksAndUpdateSafemodeTotal(collectedBlocks);
collectedBlocks.clear();
fsNamesys.dir.removeFromInodeMap(removedINodes);
removedINodes.clear();
if (toAddRetryCache) {
fsNamesys.addCacheEntry(deleteSnapshotOp.rpcClientId,
deleteSnapshotOp.rpcCallId);
}
break;
}
case OP_RENAME_SNAPSHOT: {
RenameSnapshotOp renameSnapshotOp = (RenameSnapshotOp) op;
final String snapshotRoot =
renameReservedPathsOnUpgrade(renameSnapshotOp.snapshotRoot,
logVersion);
INodesInPath iip = fsDir.getINodesInPath4Write(snapshotRoot);
fsNamesys.getSnapshotManager().renameSnapshot(iip,
snapshotRoot, renameSnapshotOp.snapshotOldName,
renameSnapshotOp.snapshotNewName);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(renameSnapshotOp.rpcClientId,
renameSnapshotOp.rpcCallId);
}
break;
}
case OP_ALLOW_SNAPSHOT: {
AllowSnapshotOp allowSnapshotOp = (AllowSnapshotOp) op;
final String snapshotRoot =
renameReservedPathsOnUpgrade(allowSnapshotOp.snapshotRoot, logVersion);
fsNamesys.getSnapshotManager().setSnapshottable(
snapshotRoot, false);
break;
}
case OP_DISALLOW_SNAPSHOT: {
DisallowSnapshotOp disallowSnapshotOp = (DisallowSnapshotOp) op;
final String snapshotRoot =
renameReservedPathsOnUpgrade(disallowSnapshotOp.snapshotRoot,
logVersion);
fsNamesys.getSnapshotManager().resetSnapshottable(
snapshotRoot);
break;
}
case OP_SET_GENSTAMP_V2: {
SetGenstampV2Op setGenstampV2Op = (SetGenstampV2Op) op;
fsNamesys.getBlockIdManager().setGenerationStampV2(
setGenstampV2Op.genStampV2);
break;
}
case OP_ALLOCATE_BLOCK_ID: {
AllocateBlockIdOp allocateBlockIdOp = (AllocateBlockIdOp) op;
fsNamesys.getBlockIdManager().setLastAllocatedBlockId(
allocateBlockIdOp.blockId);
break;
}
case OP_ROLLING_UPGRADE_START: {
if (startOpt == StartupOption.ROLLINGUPGRADE) {
final RollingUpgradeStartupOption rollingUpgradeOpt
= startOpt.getRollingUpgradeStartupOption();
if (rollingUpgradeOpt == RollingUpgradeStartupOption.ROLLBACK) {
throw new RollingUpgradeOp.RollbackException();
}
}
// start rolling upgrade
final long startTime = ((RollingUpgradeOp) op).getTime();
fsNamesys.startRollingUpgradeInternal(startTime);
fsNamesys.triggerRollbackCheckpoint();
break;
}
case OP_ROLLING_UPGRADE_FINALIZE: {
final long finalizeTime = ((RollingUpgradeOp) op).getTime();
if (fsNamesys.isRollingUpgrade()) {
// Only do it when NN is actually doing rolling upgrade.
// We can get FINALIZE without corresponding START, if NN is restarted
// before this op is consumed and a new checkpoint is created.
fsNamesys.finalizeRollingUpgradeInternal(finalizeTime);
}
fsNamesys.getFSImage().updateStorageVersion();
fsNamesys.getFSImage().renameCheckpoint(NameNodeFile.IMAGE_ROLLBACK,
NameNodeFile.IMAGE);
break;
}
case OP_ADD_CACHE_DIRECTIVE: {
AddCacheDirectiveInfoOp addOp = (AddCacheDirectiveInfoOp) op;
CacheDirectiveInfo result = fsNamesys.
getCacheManager().addDirectiveFromEditLog(addOp.directive);
if (toAddRetryCache) {
Long id = result.getId();
fsNamesys.addCacheEntryWithPayload(op.rpcClientId, op.rpcCallId, id);
}
break;
}
case OP_MODIFY_CACHE_DIRECTIVE: {
ModifyCacheDirectiveInfoOp modifyOp =
(ModifyCacheDirectiveInfoOp) op;
fsNamesys.getCacheManager().modifyDirectiveFromEditLog(
modifyOp.directive);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(op.rpcClientId, op.rpcCallId);
}
break;
}
case OP_REMOVE_CACHE_DIRECTIVE: {
RemoveCacheDirectiveInfoOp removeOp =
(RemoveCacheDirectiveInfoOp) op;
fsNamesys.getCacheManager().removeDirective(removeOp.id, null);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(op.rpcClientId, op.rpcCallId);
}
break;
}
case OP_ADD_CACHE_POOL: {
AddCachePoolOp addOp = (AddCachePoolOp) op;
fsNamesys.getCacheManager().addCachePool(addOp.info);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(op.rpcClientId, op.rpcCallId);
}
break;
}
case OP_MODIFY_CACHE_POOL: {
ModifyCachePoolOp modifyOp = (ModifyCachePoolOp) op;
fsNamesys.getCacheManager().modifyCachePool(modifyOp.info);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(op.rpcClientId, op.rpcCallId);
}
break;
}
case OP_REMOVE_CACHE_POOL: {
RemoveCachePoolOp removeOp = (RemoveCachePoolOp) op;
fsNamesys.getCacheManager().removeCachePool(removeOp.poolName);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(op.rpcClientId, op.rpcCallId);
}
break;
}
case OP_SET_ACL: {
SetAclOp setAclOp = (SetAclOp) op;
FSDirAclOp.unprotectedSetAcl(fsDir, setAclOp.src, setAclOp.aclEntries,
true);
break;
}
case OP_SET_XATTR: {
SetXAttrOp setXAttrOp = (SetXAttrOp) op;
FSDirXAttrOp.unprotectedSetXAttrs(fsDir, setXAttrOp.src,
setXAttrOp.xAttrs,
EnumSet.of(XAttrSetFlag.CREATE,
XAttrSetFlag.REPLACE));
if (toAddRetryCache) {
fsNamesys.addCacheEntry(setXAttrOp.rpcClientId, setXAttrOp.rpcCallId);
}
break;
}
case OP_REMOVE_XATTR: {
RemoveXAttrOp removeXAttrOp = (RemoveXAttrOp) op;
FSDirXAttrOp.unprotectedRemoveXAttrs(fsDir, removeXAttrOp.src,
removeXAttrOp.xAttrs);
if (toAddRetryCache) {
fsNamesys.addCacheEntry(removeXAttrOp.rpcClientId,
removeXAttrOp.rpcCallId);
}
break;
}
case OP_TRUNCATE: {
TruncateOp truncateOp = (TruncateOp) op;
FSDirTruncateOp.unprotectedTruncate(fsNamesys, truncateOp.src,
truncateOp.clientName, truncateOp.clientMachine,
truncateOp.newLength, truncateOp.timestamp, truncateOp.truncateBlock);
break;
}
case OP_SET_STORAGE_POLICY: {
SetStoragePolicyOp setStoragePolicyOp = (SetStoragePolicyOp) op;
final String path = renameReservedPathsOnUpgrade(setStoragePolicyOp.path,
logVersion);
final INodesInPath iip = fsDir.getINodesInPath4Write(path);
FSDirAttrOp.unprotectedSetStoragePolicy(
fsDir, fsNamesys.getBlockManager(), iip,
setStoragePolicyOp.policyId);
break;
}
default:
throw new IOException("Invalid operation read " + op.opCode);
}
return inodeId;
}
/**
 * Build a diagnostic message for a replay failure, including the stream
 * position, the expected txid, and the offsets of recently read opcodes.
 * Note: sorts {@code recentOpcodeOffsets} in place.
 */
private static String formatEditLogReplayError(EditLogInputStream in,
    long recentOpcodeOffsets[], long txid) {
  StringBuilder msg = new StringBuilder()
      .append("Error replaying edit log at offset " + in.getPosition())
      .append(". Expected transaction ID was ").append(txid);
  if (recentOpcodeOffsets[0] != -1) {
    Arrays.sort(recentOpcodeOffsets);
    msg.append("\nRecent opcode offsets:");
    for (long offset : recentOpcodeOffsets) {
      if (offset == -1) {
        continue;  // unused slot in the ring buffer
      }
      msg.append(' ').append(offset);
    }
  }
  return msg.toString();
}
/**
 * Add a new block into the given INodeFile, first validating and
 * finalizing the file's previous last block as recorded by the op.
 *
 * @param fsDir the directory tree being updated
 * @param op OP_ADD_BLOCK carrying the penultimate and new last block
 * @param file the file the new block belongs to
 * @throws IOException if the op's penultimate block does not match the
 *     file's current last block
 */
private void addNewBlock(FSDirectory fsDir, AddBlockOp op, INodeFile file)
throws IOException {
BlockInfo[] oldBlocks = file.getBlocks();
Block pBlock = op.getPenultimateBlock();
Block newBlock= op.getLastBlock();
if (pBlock != null) { // the penultimate block is not null
Preconditions.checkState(oldBlocks != null && oldBlocks.length > 0);
// compare pBlock with the last block of oldBlocks
Block oldLastBlock = oldBlocks[oldBlocks.length - 1];
if (oldLastBlock.getBlockId() != pBlock.getBlockId()
|| oldLastBlock.getGenerationStamp() != pBlock.getGenerationStamp()) {
throw new IOException(
"Mismatched block IDs or generation stamps for the old last block of file "
+ op.getPath() + ", the old last block is " + oldLastBlock
+ ", and the block read from editlog is " + pBlock);
}
oldLastBlock.setNumBytes(pBlock.getNumBytes());
if (oldLastBlock instanceof BlockInfoUnderConstruction) {
// The former last block is now complete; finalize it and process any
// datanode messages queued while it was under construction.
fsNamesys.getBlockManager().forceCompleteBlock(file,
(BlockInfoUnderConstruction) oldLastBlock);
fsNamesys.getBlockManager().processQueuedMessagesForBlock(pBlock);
}
} else { // the penultimate block is null
Preconditions.checkState(oldBlocks == null || oldBlocks.length == 0);
}
// add the new block
BlockInfo newBI = new BlockInfoUnderConstructionContiguous(
newBlock, file.getPreferredBlockReplication());
fsNamesys.getBlockManager().addBlockCollection(newBI, file);
file.addBlock(newBI);
fsNamesys.getBlockManager().processQueuedMessagesForBlock(newBlock);
}
/**
 * Update in-memory data structures with new block information.
 *
 * Reconciles the file's current block list with the list recorded by the
 * op: updates sizes/gen-stamps of blocks in common, removes at most one
 * trailing block, or appends new trailing blocks.
 *
 * @param fsDir directory tree being updated
 * @param op the block-list-bearing op (OP_ADD/OP_CLOSE/OP_UPDATE_BLOCKS)
 * @param iip resolved path of the file
 * @param file the file whose block list is updated
 * @throws IOException on an id/gen-stamp mismatch or an illegal removal
 */
private void updateBlocks(FSDirectory fsDir, BlockListUpdatingOp op,
INodesInPath iip, INodeFile file) throws IOException {
// Update its block list
BlockInfo[] oldBlocks = file.getBlocks();
Block[] newBlocks = op.getBlocks();
String path = op.getPath();
// Are we only updating the last block's gen stamp.
boolean isGenStampUpdate = oldBlocks.length == newBlocks.length;
// First, update blocks in common
for (int i = 0; i < oldBlocks.length && i < newBlocks.length; i++) {
BlockInfo oldBlock = oldBlocks[i];
Block newBlock = newBlocks[i];
boolean isLastBlock = i == newBlocks.length - 1;
if (oldBlock.getBlockId() != newBlock.getBlockId() ||
(oldBlock.getGenerationStamp() != newBlock.getGenerationStamp() &&
!(isGenStampUpdate && isLastBlock))) {
throw new IOException("Mismatched block IDs or generation stamps, " +
"attempting to replace block " + oldBlock + " with " + newBlock +
" as block # " + i + "/" + newBlocks.length + " of " +
path);
}
oldBlock.setNumBytes(newBlock.getNumBytes());
boolean changeMade =
oldBlock.getGenerationStamp() != newBlock.getGenerationStamp();
oldBlock.setGenerationStamp(newBlock.getGenerationStamp());
if (oldBlock instanceof BlockInfoUnderConstruction &&
(!isLastBlock || op.shouldCompleteLastBlock())) {
changeMade = true;
fsNamesys.getBlockManager().forceCompleteBlock(file,
(BlockInfoUnderConstruction) oldBlock);
}
if (changeMade) {
// The state or gen-stamp of the block has changed. So, we may be
// able to process some messages from datanodes that we previously
// were unable to process.
fsNamesys.getBlockManager().processQueuedMessagesForBlock(newBlock);
}
}
if (newBlocks.length < oldBlocks.length) {
// We're removing a block from the file, e.g. abandonBlock(...)
if (!file.isUnderConstruction()) {
throw new IOException("Trying to remove a block from file " +
path + " which is not under construction.");
}
if (newBlocks.length != oldBlocks.length - 1) {
throw new IOException("Trying to remove more than one block from file "
+ path);
}
Block oldBlock = oldBlocks[oldBlocks.length - 1];
boolean removed = FSDirWriteFileOp.unprotectedRemoveBlock(
fsDir, path, iip, file, oldBlock);
if (!removed && !(op instanceof UpdateBlocksOp)) {
throw new IOException("Trying to delete non-existant block " + oldBlock);
}
} else if (newBlocks.length > oldBlocks.length) {
// We're adding blocks
for (int i = oldBlocks.length; i < newBlocks.length; i++) {
Block newBlock = newBlocks[i];
BlockInfo newBI;
if (!op.shouldCompleteLastBlock()) {
// TODO: shouldn't this only be true for the last block?
// what about an old-version fsync() where fsync isn't called
// until several blocks in?
newBI = new BlockInfoUnderConstructionContiguous(
newBlock, file.getPreferredBlockReplication());
} else {
// OP_CLOSE should add finalized blocks. This code path
// is only executed when loading edits written by prior
// versions of Hadoop. Current versions always log
// OP_ADD operations as each block is allocated.
newBI = new BlockInfoContiguous(newBlock,
file.getPreferredBlockReplication());
}
fsNamesys.getBlockManager().addBlockCollection(newBI, file);
file.addBlock(newBI);
fsNamesys.getBlockManager().processQueuedMessagesForBlock(newBlock);
}
}
}
private static void dumpOpCounts(
EnumMap<FSEditLogOpCodes, Holder<Integer>> opCounts) {
StringBuilder sb = new StringBuilder();
sb.append("Summary of operations loaded from edit log:\n ");
Joiner.on("\n ").withKeyValueSeparator("=").appendTo(sb, opCounts);
FSImage.LOG.debug(sb.toString());
}
private void incrOpCount(FSEditLogOpCodes opCode,
EnumMap<FSEditLogOpCodes, Holder<Integer>> opCounts, Step step,
Counter counter) {
Holder<Integer> holder = opCounts.get(opCode);
if (holder == null) {
holder = new Holder<Integer>(1);
opCounts.put(opCode, holder);
} else {
holder.held++;
}
counter.increment();
}
/**
* Throw appropriate exception during upgrade from 203, when editlog loading
* could fail due to opcode conflicts.
*/
private void check203UpgradeFailure(int logVersion, Throwable e)
throws IOException {
// 0.20.203 version version has conflicting opcodes with the later releases.
// The editlog must be emptied by restarting the namenode, before proceeding
// with the upgrade.
if (Storage.is203LayoutVersion(logVersion)
&& logVersion != HdfsServerConstants.NAMENODE_LAYOUT_VERSION) {
String msg = "During upgrade failed to load the editlog version "
+ logVersion + " from release 0.20.203. Please go back to the old "
+ " release and restart the namenode. This empties the editlog "
+ " and saves the namespace. Resume the upgrade after this step.";
throw new IOException(msg, e);
}
}
/**
* Find the last valid transaction ID in the stream.
* If there are invalid or corrupt transactions in the middle of the stream,
* validateEditLog will skip over them.
* This reads through the stream but does not close it.
*/
static EditLogValidation validateEditLog(EditLogInputStream in) {
long lastPos = 0;
long lastTxId = HdfsServerConstants.INVALID_TXID;
long numValid = 0;
FSEditLogOp op = null;
while (true) {
lastPos = in.getPosition();
try {
if ((op = in.readOp()) == null) {
break;
}
} catch (Throwable t) {
FSImage.LOG.warn("Caught exception after reading " + numValid +
" ops from " + in + " while determining its valid length." +
"Position was " + lastPos, t);
in.resync();
FSImage.LOG.warn("After resync, position is " + in.getPosition());
continue;
}
if (lastTxId == HdfsServerConstants.INVALID_TXID
|| op.getTransactionId() > lastTxId) {
lastTxId = op.getTransactionId();
}
numValid++;
}
return new EditLogValidation(lastPos, lastTxId, false);
}
static EditLogValidation scanEditLog(EditLogInputStream in) {
long lastPos = 0;
long lastTxId = HdfsServerConstants.INVALID_TXID;
long numValid = 0;
FSEditLogOp op = null;
while (true) {
lastPos = in.getPosition();
try {
if ((op = in.readOp()) == null) { // TODO
break;
}
} catch (Throwable t) {
FSImage.LOG.warn("Caught exception after reading " + numValid +
" ops from " + in + " while determining its valid length." +
"Position was " + lastPos, t);
in.resync();
FSImage.LOG.warn("After resync, position is " + in.getPosition());
continue;
}
if (lastTxId == HdfsServerConstants.INVALID_TXID
|| op.getTransactionId() > lastTxId) {
lastTxId = op.getTransactionId();
}
numValid++;
}
return new EditLogValidation(lastPos, lastTxId, false);
}
static class EditLogValidation {
private final long validLength;
private final long endTxId;
private final boolean hasCorruptHeader;
EditLogValidation(long validLength, long endTxId,
boolean hasCorruptHeader) {
this.validLength = validLength;
this.endTxId = endTxId;
this.hasCorruptHeader = hasCorruptHeader;
}
long getValidLength() { return validLength; }
long getEndTxId() { return endTxId; }
boolean hasCorruptHeader() { return hasCorruptHeader; }
}
  /**
   * Stream wrapper that keeps track of the current stream position.
   *
   * This stream also allows us to set a limit on how many bytes we can read
   * without getting an exception.
   */
  public static class PositionTrackingInputStream extends FilterInputStream
      implements StreamLimiter {
    // Absolute byte offset of the next read from the underlying stream.
    private long curPos = 0;
    // Position recorded by mark(); -1 means no mark is set.
    private long markPos = -1;
    // Absolute offset past which reads/skips throw; MAX_VALUE = no limit.
    private long limitPos = Long.MAX_VALUE;

    public PositionTrackingInputStream(InputStream is) {
      super(is);
    }

    /**
     * Fails fast if reading {@code amt} more bytes would cross the limit.
     * Note this checks the requested amount, not the amount actually read.
     */
    private void checkLimit(long amt) throws IOException {
      long extra = (curPos + amt) - limitPos;
      if (extra > 0) {
        throw new IOException("Tried to read " + amt + " byte(s) past " +
            "the limit at offset " + limitPos);
      }
    }

    @Override
    public int read() throws IOException {
      checkLimit(1);
      int ret = super.read();
      // Only advance on an actual byte; -1 is EOF.
      if (ret != -1) curPos++;
      return ret;
    }

    @Override
    public int read(byte[] data) throws IOException {
      checkLimit(data.length);
      int ret = super.read(data);
      // Advance by the number of bytes actually read, which may be fewer
      // than requested.
      if (ret > 0) curPos += ret;
      return ret;
    }

    @Override
    public int read(byte[] data, int offset, int length) throws IOException {
      checkLimit(length);
      int ret = super.read(data, offset, length);
      if (ret > 0) curPos += ret;
      return ret;
    }

    @Override
    public void setLimit(long limit) {
      // Limit is relative to the current position.
      limitPos = curPos + limit;
    }

    @Override
    public void clearLimit() {
      limitPos = Long.MAX_VALUE;
    }

    @Override
    public void mark(int limit) {
      super.mark(limit);
      markPos = curPos;
    }

    @Override
    public void reset() throws IOException {
      if (markPos == -1) {
        throw new IOException("Not marked!");
      }
      super.reset();
      // Rewind our bookkeeping and consume the mark: a second reset()
      // without an intervening mark() will fail.
      curPos = markPos;
      markPos = -1;
    }

    // Current absolute position in the stream.
    public long getPos() {
      return curPos;
    }

    @Override
    public long skip(long amt) throws IOException {
      // Same limit policy as reads, but the message reports the overshoot.
      long extra = (curPos + amt) - limitPos;
      if (extra > 0) {
        throw new IOException("Tried to skip " + extra + " bytes past " +
            "the limit at offset " + limitPos);
      }
      long ret = super.skip(amt);
      curPos += ret;
      return ret;
    }
  }
  /** @return the transaction ID of the last edit applied by this loader. */
  public long getLastAppliedTxId() {
    return lastAppliedTxId;
  }
/**
* Creates a Step used for updating startup progress, populated with
* information from the given edits. The step always includes the log's name.
* If the log has a known length, then the length is included in the step too.
*
* @param edits EditLogInputStream to use for populating step
* @return Step populated with information from edits
* @throws IOException thrown if there is an I/O error
*/
private static Step createStartupProgressStep(EditLogInputStream edits)
throws IOException {
long length = edits.length();
String name = edits.getCurrentStreamName();
return length != -1 ? new Step(name, length) : new Step(name);
}
}
| |
package org.ripple.bouncycastle.asn1;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import org.ripple.bouncycastle.util.Arrays;
/**
 * Class representing the ASN.1 OBJECT IDENTIFIER type.
 * <p>
 * An instance carries the OID in up to two forms: the dotted-decimal
 * {@link #identifier} string and the DER contents octets in {@link #body}.
 * Instances decoded from bytes hold both; instances built from a string
 * compute the body lazily in {@link #getBody()}.
 */
public class ASN1ObjectIdentifier
    extends ASN1Primitive
{
    // Dotted-decimal form, e.g. "1.2.840.113549".
    String identifier;

    // Cached DER contents octets; built lazily by getBody().
    private byte[] body;

    /**
     * return an OID from the passed in object
     * @param obj an ASN1ObjectIdentifier or an object that can be converted into one.
     * @throws IllegalArgumentException if the object cannot be converted.
     * @return an ASN1ObjectIdentifier instance, or null.
     */
    public static ASN1ObjectIdentifier getInstance(
        Object obj)
    {
        // null is passed straight through (documented: "or null").
        if (obj == null || obj instanceof ASN1ObjectIdentifier)
        {
            return (ASN1ObjectIdentifier)obj;
        }

        // Any encodable whose primitive form is already an OID.
        if (obj instanceof ASN1Encodable && ((ASN1Encodable)obj).toASN1Primitive() instanceof ASN1ObjectIdentifier)
        {
            return (ASN1ObjectIdentifier)((ASN1Encodable)obj).toASN1Primitive();
        }

        // Raw bytes are treated as a complete TLV encoding.
        if (obj instanceof byte[])
        {
            byte[] enc = (byte[])obj;
            try
            {
                return (ASN1ObjectIdentifier)fromByteArray(enc);
            }
            catch (IOException e)
            {
                throw new IllegalArgumentException("failed to construct object identifier from byte[]: " + e.getMessage());
            }
        }

        throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
    }

    /**
     * return an Object Identifier from a tagged object.
     *
     * @param obj the tagged object holding the object we want
     * @param explicit true if the object is meant to be explicitly
     *                 tagged false otherwise.
     * @throws IllegalArgumentException if the tagged object cannot
     *                                  be converted.
     * @return an ASN1ObjectIdentifier instance, or null.
     */
    public static ASN1ObjectIdentifier getInstance(
        ASN1TaggedObject obj,
        boolean explicit)
    {
        ASN1Primitive o = obj.getObject();

        // An implicitly tagged OID has lost its own tag, so it must be
        // rebuilt from the raw octets; the explicit case can delegate.
        if (explicit || o instanceof ASN1ObjectIdentifier)
        {
            return getInstance(o);
        }
        else
        {
            return ASN1ObjectIdentifier.fromOctetString(ASN1OctetString.getInstance(obj.getObject()).getOctets());
        }
    }

    // Largest accumulator value that can still absorb one more base-128
    // digit (shift left by 7, then add up to 0x7f) without overflowing a long.
    private static final long LONG_LIMIT = (Long.MAX_VALUE >> 7) - 0x7f;

    /**
     * Decodes BER/DER contents octets into dotted-decimal form.
     * Each subidentifier is a big-endian base-128 number whose non-final
     * octets have the high bit set; the first subidentifier combines the
     * first two arcs as X*40 + Y.
     */
    ASN1ObjectIdentifier(
        byte[] bytes)
    {
        StringBuffer objId = new StringBuffer();
        long value = 0;
        BigInteger bigValue = null;
        boolean first = true;

        for (int i = 0; i != bytes.length; i++)
        {
            int b = bytes[i] & 0xff;

            // Fast path: subidentifier still fits in a long.
            if (value <= LONG_LIMIT)
            {
                value += (b & 0x7f);
                if ((b & 0x80) == 0) // end of number reached
                {
                    if (first)
                    {
                        // Split the combined first subidentifier back into
                        // root arc (0, 1 or 2) and second arc.
                        if (value < 40)
                        {
                            objId.append('0');
                        }
                        else if (value < 80)
                        {
                            objId.append('1');
                            value -= 40;
                        }
                        else
                        {
                            objId.append('2');
                            value -= 80;
                        }
                        first = false;
                    }

                    objId.append('.');
                    objId.append(value);
                    value = 0;
                }
                else
                {
                    value <<= 7;
                }
            }
            else
            {
                // Slow path: value exceeded long range, continue in BigInteger.
                if (bigValue == null)
                {
                    bigValue = BigInteger.valueOf(value);
                }
                bigValue = bigValue.or(BigInteger.valueOf(b & 0x7f));
                if ((b & 0x80) == 0)
                {
                    if (first)
                    {
                        // A first subidentifier this large can only be
                        // under root arc 2.
                        objId.append('2');
                        bigValue = bigValue.subtract(BigInteger.valueOf(80));
                        first = false;
                    }
                    objId.append('.');
                    objId.append(bigValue);
                    bigValue = null;
                    value = 0;
                }
                else
                {
                    bigValue = bigValue.shiftLeft(7);
                }
            }
        }

        this.identifier = objId.toString();
        // Defensive copy: caller keeps ownership of its array.
        this.body = Arrays.clone(bytes);
    }

    /**
     * Create an OID based on the passed in String.
     *
     * @param identifier a string representation of an OID.
     */
    public ASN1ObjectIdentifier(
        String identifier)
    {
        if (identifier == null)
        {
            throw new IllegalArgumentException("'identifier' cannot be null");
        }
        if (!isValidIdentifier(identifier))
        {
            throw new IllegalArgumentException("string " + identifier + " not an OID");
        }

        this.identifier = identifier;
    }

    /**
     * Create an OID that creates a branch under the current one.
     *
     * @param branchID node numbers for the new branch.
     * @return the OID for the new created branch.
     */
    ASN1ObjectIdentifier(ASN1ObjectIdentifier oid, String branchID)
    {
        if (!isValidBranchID(branchID, 0))
        {
            throw new IllegalArgumentException("string " + branchID + " not a valid OID branch");
        }

        this.identifier = oid.getId() + "." + branchID;
    }

    /**
     * Return the OID as a string.
     *
     * @return the string representation of the OID carried by this object.
     */
    public String getId()
    {
        return identifier;
    }

    /**
     * Return an OID that creates a branch under the current one.
     *
     * @param branchID node numbers for the new branch.
     * @return the OID for the new created branch.
     */
    public ASN1ObjectIdentifier branch(String branchID)
    {
        return new ASN1ObjectIdentifier(this, branchID);
    }

    /**
     * Return true if this oid is an extension of the passed in branch, stem.
     *
     * @param stem the arc or branch that is a possible parent.
     * @return true if the branch is on the passed in stem, false otherwise.
     */
    public boolean on(ASN1ObjectIdentifier stem)
    {
        String id = getId(), stemId = stem.getId();
        // Requires a '.' right after the stem so "1.23" is not "on" "1.2".
        return id.length() > stemId.length() && id.charAt(stemId.length()) == '.' && id.startsWith(stemId);
    }

    /**
     * Writes one subidentifier in base-128, big-endian, setting the high
     * bit on every octet except the last.
     */
    private void writeField(
        ByteArrayOutputStream out,
        long fieldValue)
    {
        // 9 octets always suffice: 63 value bits / 7 bits per octet = 9.
        byte[] result = new byte[9];
        int pos = 8;
        result[pos] = (byte)((int)fieldValue & 0x7f);
        while (fieldValue >= (1L << 7))
        {
            fieldValue >>= 7;
            result[--pos] = (byte)((int)fieldValue & 0x7f | 0x80);
        }
        out.write(result, pos, 9 - pos);
    }

    /**
     * BigInteger variant of {@link #writeField(ByteArrayOutputStream, long)}
     * for subidentifiers that do not fit in a long.
     */
    private void writeField(
        ByteArrayOutputStream out,
        BigInteger fieldValue)
    {
        int byteCount = (fieldValue.bitLength() + 6) / 7;
        if (byteCount == 0)
        {
            out.write(0);
        }
        else
        {
            BigInteger tmpValue = fieldValue;
            byte[] tmp = new byte[byteCount];
            // Fill from the least significant end, 7 bits per octet,
            // continuation bit pre-set on every octet...
            for (int i = byteCount - 1; i >= 0; i--)
            {
                tmp[i] = (byte)((tmpValue.intValue() & 0x7f) | 0x80);
                tmpValue = tmpValue.shiftRight(7);
            }
            // ...then clear it on the final octet.
            tmp[byteCount - 1] &= 0x7f;
            out.write(tmp, 0, tmp.length);
        }
    }

    /**
     * Encodes the dotted-decimal identifier into contents octets,
     * packing the first two arcs into one subidentifier (X*40 + Y).
     */
    private void doOutput(ByteArrayOutputStream aOut)
    {
        OIDTokenizer tok = new OIDTokenizer(identifier);
        int first = Integer.parseInt(tok.nextToken()) * 40;

        String secondToken = tok.nextToken();
        // Up to 18 decimal digits is guaranteed to fit in a long.
        if (secondToken.length() <= 18)
        {
            writeField(aOut, first + Long.parseLong(secondToken));
        }
        else
        {
            writeField(aOut, new BigInteger(secondToken).add(BigInteger.valueOf(first)));
        }

        while (tok.hasMoreTokens())
        {
            String token = tok.nextToken();
            if (token.length() <= 18)
            {
                writeField(aOut, Long.parseLong(token));
            }
            else
            {
                writeField(aOut, new BigInteger(token));
            }
        }
    }

    // Lazily builds and caches the DER contents octets.
    // synchronized so concurrent callers see a consistent cache.
    protected synchronized byte[] getBody()
    {
        if (body == null)
        {
            ByteArrayOutputStream bOut = new ByteArrayOutputStream();

            doOutput(bOut);

            body = bOut.toByteArray();
        }

        return body;
    }

    // OBJECT IDENTIFIER always uses the primitive encoding.
    boolean isConstructed()
    {
        return false;
    }

    // Total TLV length: 1 tag octet + length octets + contents.
    int encodedLength()
        throws IOException
    {
        int length = getBody().length;

        return 1 + StreamUtil.calculateBodyLength(length) + length;
    }

    void encode(
        ASN1OutputStream out)
        throws IOException
    {
        byte[] enc = getBody();

        out.write(BERTags.OBJECT_IDENTIFIER);
        out.writeLength(enc.length);
        out.write(enc);
    }

    // Hash/equality are defined on the canonical string form.
    public int hashCode()
    {
        return identifier.hashCode();
    }

    boolean asn1Equals(
        ASN1Primitive o)
    {
        if (!(o instanceof ASN1ObjectIdentifier))
        {
            return false;
        }

        return identifier.equals(((ASN1ObjectIdentifier)o).identifier);
    }

    public String toString()
    {
        return getId();
    }

    /**
     * Validates the tail of an OID string from index {@code start}:
     * digits separated by single dots, ending in a digit.
     */
    private static boolean isValidBranchID(
        String branchID, int start)
    {
        boolean periodAllowed = false;

        int pos = branchID.length();
        while (--pos >= start)
        {
            char ch = branchID.charAt(pos);

            // TODO Leading zeroes?
            if ('0' <= ch && ch <= '9')
            {
                periodAllowed = true;
                continue;
            }

            if (ch == '.')
            {
                // Rejects ".." and a trailing '.' (scan is right-to-left).
                if (!periodAllowed)
                {
                    return false;
                }

                periodAllowed = false;
                continue;
            }

            return false;
        }

        return periodAllowed;
    }

    /**
     * Validates a complete OID string: single-digit root arc 0-2,
     * a dot, then a valid branch.
     */
    private static boolean isValidIdentifier(
        String identifier)
    {
        if (identifier.length() < 3 || identifier.charAt(1) != '.')
        {
            return false;
        }

        char first = identifier.charAt(0);
        if (first < '0' || first > '2')
        {
            return false;
        }

        return isValidBranchID(identifier, 2);
    }

    // Two-level interning cache indexed by the last two encoded octets;
    // collisions are handled by a short linear probe in fromOctetString.
    private static ASN1ObjectIdentifier[][] cache = new ASN1ObjectIdentifier[256][];

    /**
     * Builds an OID from contents octets, interning frequently used values.
     * Cache slots are chosen from the final two octets of the encoding;
     * on a slot mismatch two alternate slots are probed before giving up
     * and returning an uncached instance.
     */
    static ASN1ObjectIdentifier fromOctetString(byte[] enc)
    {
        // Too short to index the cache; decode directly.
        if (enc.length < 3)
        {
            return new ASN1ObjectIdentifier(enc);
        }

        int idx1 = enc[enc.length - 2] & 0xff;
        // in this case top bit is always zero
        int idx2 = enc[enc.length - 1] & 0x7f;

        ASN1ObjectIdentifier possibleMatch;

        synchronized (cache)
        {
            ASN1ObjectIdentifier[] first = cache[idx1];
            if (first == null)
            {
                first = cache[idx1] = new ASN1ObjectIdentifier[128];
            }

            possibleMatch = first[idx2];
            if (possibleMatch == null)
            {
                return first[idx2] = new ASN1ObjectIdentifier(enc);
            }

            if (Arrays.areEqual(enc, possibleMatch.getBody()))
            {
                return possibleMatch;
            }

            // First probe: advance the row index.
            idx1 = (idx1 + 1) & 0xff;
            first = cache[idx1];
            if (first == null)
            {
                first = cache[idx1] = new ASN1ObjectIdentifier[128];
            }

            possibleMatch = first[idx2];
            if (possibleMatch == null)
            {
                return first[idx2] = new ASN1ObjectIdentifier(enc);
            }

            if (Arrays.areEqual(enc, possibleMatch.getBody()))
            {
                return possibleMatch;
            }

            // Second probe: advance the column index within the same row.
            idx2 = (idx2 + 1) & 0x7f;
            possibleMatch = first[idx2];
            if (possibleMatch == null)
            {
                return first[idx2] = new ASN1ObjectIdentifier(enc);
            }
        }

        // Final equality check happens outside the lock; getBody() is
        // itself synchronized on the instance.
        if (Arrays.areEqual(enc, possibleMatch.getBody()))
        {
            return possibleMatch;
        }

        // All probed slots occupied by other OIDs: return uncached.
        return new ASN1ObjectIdentifier(enc);
    }
}
| |
package org.openprovenance.prov.template;
import java.util.Collection;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import javax.xml.datatype.XMLGregorianCalendar;
import org.openprovenance.prov.model.ActedOnBehalfOf;
import org.openprovenance.prov.model.Activity;
import org.openprovenance.prov.model.Agent;
import org.openprovenance.prov.model.AlternateOf;
import org.openprovenance.prov.model.Attribute;
import org.openprovenance.prov.model.DerivedByInsertionFrom;
import org.openprovenance.prov.model.DerivedByRemovalFrom;
import org.openprovenance.prov.model.DictionaryMembership;
import org.openprovenance.prov.model.Entity;
import org.openprovenance.prov.model.HadMember;
import org.openprovenance.prov.model.HasOther;
import org.openprovenance.prov.model.HasTime;
import org.openprovenance.prov.model.MentionOf;
import org.openprovenance.prov.model.Bundle;
import org.openprovenance.prov.model.ProvFactory;
import org.openprovenance.prov.model.ProvUtilities;
import org.openprovenance.prov.model.QualifiedName;
import org.openprovenance.prov.model.SpecializationOf;
import org.openprovenance.prov.model.Statement;
import org.openprovenance.prov.model.StatementAction;
import org.openprovenance.prov.model.StatementOrBundle;
import org.openprovenance.prov.model.TypedValue;
import org.openprovenance.prov.model.Used;
import org.openprovenance.prov.model.WasAssociatedWith;
import org.openprovenance.prov.model.WasAttributedTo;
import org.openprovenance.prov.model.WasDerivedFrom;
import org.openprovenance.prov.model.WasEndedBy;
import org.openprovenance.prov.model.WasGeneratedBy;
import org.openprovenance.prov.model.WasInfluencedBy;
import org.openprovenance.prov.model.WasInformedBy;
import org.openprovenance.prov.model.WasInvalidatedBy;
import org.openprovenance.prov.model.WasStartedBy;
import static org.openprovenance.prov.template.Expand.TMPL_NS;
import static org.openprovenance.prov.template.Expand.TMPL_PREFIX;
/**
 * Statement visitor that instantiates a PROV template statement-by-statement:
 * each doAction copies the incoming statement, substitutes template variables
 * (via {@link #setExpand}) in its identifier positions, expands variable
 * attributes, and appends the result to an internal list ({@link #getList()}).
 * Statements whose expansion is not implemented are left as TODO stubs.
 */
public class ExpandAction implements StatementAction {

    // Prefix/namespace used for generated uuid-based qualified names.
    public static final String UUID_PREFIX = "uuid";
    public static final String URN_UUID_NS = "urn:uuid:";

    final private ProvFactory pf;
    final private Expand expand;
    // Variable -> value substitution for qualified-name positions.
    final private Hashtable<QualifiedName, QualifiedName> env;
    final private ProvUtilities u;
    // Accumulates the expanded statements/bundles, in visit order.
    final private List<StatementOrBundle> ll=new LinkedList<StatementOrBundle>();
    // Expansion-order coordinates attached via addOrderAttribute.
    final private List<Integer> index;
    final private Bindings bindings;
    final private Groupings grp1;
    // Variable -> values substitution for attribute positions.
    final private Hashtable<QualifiedName, List<TypedValue>> env2;

    /**
     * @param pf        factory used to build all expanded statements
     * @param u         utilities used to set positional arguments reflectively
     * @param expand    owning expander (used for nested bundle expansion)
     * @param env       variable bindings for identifier positions
     * @param env2      variable bindings for attribute values
     * @param index     expansion-order coordinates for this pass
     * @param bindings1 bindings record, also receives generated uuids
     * @param grp1      groupings controlling nested expansion
     */
    public ExpandAction(ProvFactory pf, ProvUtilities u, Expand expand, Hashtable<QualifiedName, QualifiedName> env, Hashtable<QualifiedName, List<TypedValue>> env2, List<Integer> index, Bindings bindings1, Groupings grp1) {
        this.pf=pf;
        this.expand=expand;
        this.env=env;
        this.u=u;
        this.index=index;
        this.bindings=bindings1;
        this.grp1=grp1;
        this.env2=env2;
    }

    // Expand an Activity: id (position 0) and attributes.
    @Override
    public void doAction(Activity s) {
        Activity res=pf.newActivity(s.getId(), s.getStartTime(), s.getEndTime(), null);
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        boolean updated2=expandAttributes(s,res);
        boolean updated=updated1 || updated2;
        ll.add(res);
        // Only statements that actually changed get an order attribute.
        if (updated) addOrderAttribute(res);
    }

    // Expand a Used: id (0), activity (1), entity (2) and attributes.
    @Override
    public void doAction(Used s) {
        Used res=pf.newUsed(s.getId(),s.getActivity(), s.getEntity());
        res.setTime(s.getTime());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getActivity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName ag=res.getEntity();
        boolean updated3=setExpand(res, ag, 2);
        boolean updated4=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand a WasStartedBy: id (0), activity (1), trigger (2), starter (3).
    @Override
    public void doAction(WasStartedBy s) {
        WasStartedBy res=pf.newWasStartedBy(s.getId(),s.getActivity(), s.getTrigger(), s.getStarter());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getActivity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName ag=res.getTrigger();
        boolean updated3=setExpand(res, ag, 2);
        QualifiedName st=res.getStarter();
        boolean updated4=setExpand(res,st,3);
        boolean updated5=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4 || updated5;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand an Agent: id (0) and attributes.
    @Override
    public void doAction(Agent e) {
        Agent res=pf.newAgent(e.getId());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        boolean updated2=expandAttributes(e,res);
        boolean updated=updated1 || updated2;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand an AlternateOf: both alternates; no order attribute is added.
    @Override
    public void doAction(AlternateOf s) {
        AlternateOf res=pf.newAlternateOf(s.getAlternate1(), s.getAlternate2());
        QualifiedName alt1=res.getAlternate1();
        boolean updated0=setExpand(res, alt1, 0);
        QualifiedName alt2=res.getAlternate2();
        boolean updated1=setExpand(res, alt2, 1);
        @SuppressWarnings("unused")
        boolean updated=updated0 || updated1;
        ll.add(res);
        //if (updated) addOrderAttribute(res);
    }

    // Expand a WasAssociatedWith: id (0), activity (1), agent (2), plan (3).
    @Override
    public void doAction(WasAssociatedWith s) {
        WasAssociatedWith res=pf.newWasAssociatedWith(s.getId(),s.getActivity(), s.getAgent());
        res.setPlan(s.getPlan());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getActivity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName ag=res.getAgent();
        boolean updated3=setExpand(res, ag, 2);
        QualifiedName pl=res.getPlan();
        boolean updated4=setExpand(res, pl, 3);
        boolean updated5=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4|| updated5;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand a WasAttributedTo: id (0), entity (1), agent (2).
    @Override
    public void doAction(WasAttributedTo s) {
        WasAttributedTo res=pf.newWasAttributedTo(s.getId(),s.getEntity(), s.getAgent());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getEntity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName ag=res.getAgent();
        boolean updated3=setExpand(res, ag, 2);
        boolean updated4=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expansion not implemented: WasInfluencedBy is silently dropped.
    @Override
    public void doAction(WasInfluencedBy s) {
        // TODO Auto-generated method stub
    }

    // Expand an ActedOnBehalfOf: id (0), delegate (1), responsible (2), activity (3).
    @Override
    public void doAction(ActedOnBehalfOf s) {
        ActedOnBehalfOf res=pf.newActedOnBehalfOf(s.getId(),s.getDelegate(), s.getResponsible(), s.getActivity());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName del=res.getDelegate();
        boolean updated2=setExpand(res, del, 1);
        QualifiedName resp=res.getResponsible();
        boolean updated3=setExpand(res, resp, 2);
        QualifiedName act=res.getActivity();
        boolean updated4=setExpand(res, act, 3);
        boolean updated5=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4|| updated5;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand a WasDerivedFrom: id (0), generated (1), used (2), activity (3),
    // generation (4), usage (5).
    @Override
    public void doAction(WasDerivedFrom s) {
        WasDerivedFrom res=pf.newWasDerivedFrom(s.getId(), s.getGeneratedEntity(), s.getUsedEntity());
        res.setActivity(s.getActivity());
        res.setUsage(s.getUsage());
        res.setGeneration(s.getGeneration());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en2=res.getGeneratedEntity();
        boolean updated2=setExpand(res, en2, 1);
        QualifiedName en1=res.getUsedEntity();
        boolean updated3=setExpand(res, en1, 2);
        QualifiedName act=res.getActivity();
        boolean updated4=setExpand(res,act,3);
        QualifiedName gen=res.getGeneration();
        boolean updated5=setExpand(res,gen,4);
        QualifiedName use=res.getUsage();
        boolean updated6=setExpand(res,use,5);
        boolean updated7=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4 || updated5|| updated6|| updated7;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expansion not implemented: DictionaryMembership is silently dropped.
    @Override
    public void doAction(DictionaryMembership s) {
        // TODO Auto-generated method stub
    }

    // Expansion not implemented: DerivedByRemovalFrom is silently dropped.
    @Override
    public void doAction(DerivedByRemovalFrom s) {
        // TODO Auto-generated method stub
    }

    // Expand a WasEndedBy: id (0), activity (1), trigger (2), ender (3).
    @Override
    public void doAction(WasEndedBy s) {
        WasEndedBy res=pf.newWasEndedBy(s.getId(),s.getActivity(), s.getTrigger(), s.getEnder());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getActivity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName ag=res.getTrigger();
        boolean updated3=setExpand(res, ag, 2);
        QualifiedName st=res.getEnder();
        boolean updated4=setExpand(res,st,3);
        boolean updated5=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4 || updated5;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand an Entity: id (0) and attributes.
    @Override
    public void doAction(Entity e) {
        Entity res=pf.newEntity(e.getId());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        boolean updated2=expandAttributes(e,res);
        boolean updated=updated1 || updated2;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    /**
     * Copies attributes from srcStatement to dstStatement, expanding any
     * xsd:QName-typed attribute whose value is a template variable:
     * bound variables are replaced by their values, unbound vargen variables
     * get a fresh uuid, and other unbound variables are dropped.
     *
     * @return true if at least one variable attribute was substituted
     */
    public boolean expandAttributes(Statement srcStatement, Statement dstStatement) {
        boolean found=false;
        if (dstStatement instanceof HasOther) {
            Collection<Attribute> attributes=pf.getAttributes(srcStatement);
            Collection<Attribute> dstAttributes=new LinkedList<Attribute>();
            String xsdQNameUri = pf.getName().XSD_QNAME.getUri();
            for (Attribute attribute: attributes) {
                // Only xsd:QName-typed values can hold template variables.
                if (xsdQNameUri.equals(attribute.getType().getUri())) {
                    Object o=attribute.getValue();
                    if (o instanceof QualifiedName) {
                        QualifiedName qn1=(QualifiedName)o;
                        if (Expand.isVariable(qn1)) {
                            List<TypedValue> vals=env2.get(qn1);
                            if (vals==null) {
                                if (Expand.isGensymVariable(qn1)) {
                                    // Unbound vargen variable: mint a uuid value.
                                    dstAttributes.add(pf.newAttribute(attribute.getElementName(),
                                                                      getUUIDQualifiedName(),
                                                                      pf.getName().XSD_QNAME));
                                }
                                // if not a vargen, then simply drop this attribute
                                //dstAttributes.add(attribute);
                            } else {
                                found=true;
                                processTemplateAttributes(dstStatement,
                                                          dstAttributes,
                                                          attribute,
                                                          vals);
                            }
                        } else { // no variable here
                            dstAttributes.add(attribute);
                        }
                    } else { // not even a qualified name
                        dstAttributes.add(attribute);
                    }
                } else { //not xsd_qname
                    dstAttributes.add(attribute);
                }
            }
            pf.setAttributes((HasOther) dstStatement, dstAttributes);
        }
        return found;
    }

    /**
     * Emits one expanded attribute per bound value, routing the special
     * tmpl label/time/startTime/endTime attributes to the statement's
     * corresponding setters instead of the attribute list.
     */
    public void processTemplateAttributes(Statement dstStatement,
                                          Collection<Attribute> dstAttributes,
                                          Attribute attribute,
                                          List<TypedValue> vals) {
        for (TypedValue val: vals) {
            String elementName = attribute.getElementName().getUri();
            if (Expand.LABEL_URI.equals(elementName)) {
                dstAttributes.add(pf.newAttribute(pf.getName().PROV_LABEL,
                                                  val.getValue(),
                                                  val.getType()));
            } else
            if (Expand.TIME_URI.equals(elementName)) {
                if (dstStatement instanceof HasTime) {
                    ((HasTime)dstStatement).setTime(pf.newISOTime((String)val.getValue()));
                }
            } else
            if (Expand.STARTTIME_URI.equals(elementName)) {
                if (dstStatement instanceof Activity) {
                    ((Activity)dstStatement).setStartTime(pf.newISOTime((String)val.getValue()));
                }
            } else
            if (Expand.ENDTIME_URI.equals(elementName)) {
                if (dstStatement instanceof Activity) {
                    ((Activity)dstStatement).setEndTime(pf.newISOTime((String)val.getValue()));
                }
            } else {
                dstAttributes.add(pf.newAttribute(attribute.getElementName(),
                                                  val.getValue(),
                                                  val.getType()));
            }
        }
    }

    /** Mints a fresh urn:uuid qualified name for vargen variables. */
    public QualifiedName getUUIDQualifiedName() {
        UUID uuid=UUID.randomUUID();
        return pf.newQualifiedName(URN_UUID_NS, uuid.toString(), UUID_PREFIX);
    }

    /** Tags an expanded statement with its tmpl:order expansion coordinates. */
    public void addOrderAttribute(HasOther res) {
        res.getOther().add(pf.newOther(TMPL_NS, "order", TMPL_PREFIX, index, pf.getName().XSD_STRING));
    }

    /**
     * If id is a template variable, substitutes its bound value (or a fresh
     * uuid for an unbound vargen variable) into the given positional argument
     * of res.
     *
     * @return true only when an existing binding from env was applied
     *         (uuid generation records the binding but returns false)
     */
    private boolean setExpand(Statement res, QualifiedName id, int position) {
        if (Expand.isVariable(id)) {
            QualifiedName val=env.get(id);
            if (val!=null) {
                u.setter(res, position, val);
                return true;
            } else {
                if (Expand.isGensymVariable(id)) {
                    QualifiedName uuid=getUUIDQualifiedName();
                    u.setter(res,position,uuid);
                    // Remember the generated uuid so later references to the
                    // same vargen variable reuse it.
                    bindings.addVariable(id, uuid);
                }
            }
        }
        return false;
    }

    // Expand a WasGeneratedBy: id (0), entity (1), activity (2).
    @Override
    public void doAction(WasGeneratedBy s) {
        WasGeneratedBy res=pf.newWasGeneratedBy(s.getId(),s.getEntity(), s.getActivity());
        res.setTime(s.getTime());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getEntity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName act=res.getActivity();
        boolean updated3=setExpand(res, act, 2);
        boolean updated4=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand a WasInvalidatedBy: id (0), entity (1), activity (2).
    @Override
    public void doAction(WasInvalidatedBy s) {
        WasInvalidatedBy res=pf.newWasInvalidatedBy(s.getId(),s.getEntity(), s.getActivity());
        res.setTime(s.getTime());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName en=res.getEntity();
        boolean updated2=setExpand(res, en, 1);
        QualifiedName act=res.getActivity();
        boolean updated3=setExpand(res, act, 2);
        boolean updated4=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    // Expand a HadMember: collection (0) and a single member entity (1);
    // multi-member expansion is not supported.
    @Override
    public void doAction(HadMember s) {
        HadMember res=pf.newHadMember(s.getCollection(), s.getEntity());
        QualifiedName col=res.getCollection();
        boolean updated0=setExpand(res, col, 0);
        @SuppressWarnings("unused")
        List<QualifiedName> ent=res.getEntity();
        if (ent.size()>1) {
            throw new UnsupportedOperationException("can't expand HadMember with more than one members");
        }
        boolean updated1=setExpand(res, ent.get(0), 1);
        //.out.println("FIXME: to do , expand entities"); //FIXME
        @SuppressWarnings("unused")
        boolean updated=updated0||updated1 ;
        ll.add(res);
        //if (updated) addOrderAttribute(res);
        // TODO Auto-generated method stub
    }

    // Expansion not implemented: MentionOf is silently dropped.
    @Override
    public void doAction(MentionOf s) {
        // TODO Auto-generated method stub
    }

    // Expand a SpecializationOf: specific (0) and general (1) entities;
    // no order attribute is added.
    @Override
    public void doAction(SpecializationOf s) {
        SpecializationOf res=pf.newSpecializationOf(s.getSpecificEntity(), s.getGeneralEntity());
        QualifiedName spe=res.getSpecificEntity();
        boolean updated0=setExpand(res, spe, 0);
        QualifiedName gen=res.getGeneralEntity();
        boolean updated1=setExpand(res, gen, 1);
        @SuppressWarnings("unused")
        boolean updated=updated0 || updated1;
        ll.add(res);
        //if (updated) addOrderAttribute(res);
    }

    // Expansion not implemented: DerivedByInsertionFrom is silently dropped.
    @Override
    public void doAction(DerivedByInsertionFrom s) {
        // TODO Auto-generated method stub
    }

    // Expand a WasInformedBy: id (0), informed (1), informant (2).
    @Override
    public void doAction(WasInformedBy s) {
        WasInformedBy res=pf.newWasInformedBy(s.getId(),s.getInformed(), s.getInformant());
        QualifiedName id=res.getId();
        boolean updated1=setExpand(res, id, 0);
        QualifiedName a2=res.getInformed();
        boolean updated2=setExpand(res, a2, 1);
        QualifiedName a1=res.getInformant();
        boolean updated3=setExpand(res, a1, 2);
        boolean updated4=expandAttributes(s,res);
        boolean updated=updated1 || updated2 || updated3 || updated4;
        ll.add(res);
        if (updated) addOrderAttribute(res);
    }

    /**
     * Expands a bundle: recursively expands every contained statement, then
     * resolves the bundle's own identifier (bound value, fresh uuid for an
     * unbound vargen variable, or unchanged) and emits a new named bundle.
     */
    @Override
    public void doAction(Bundle bun, ProvUtilities provUtilities) {
        List<Statement> statements=bun.getStatement();
        List<Statement> newStatements=new LinkedList<Statement>();
        for (Statement s: statements) {
            for (StatementOrBundle sb: expand.expand(s, bindings, grp1)) {
                newStatements.add((Statement)sb);
            }
        }
        updateEnvironmentForBundleId(bun, bindings, env);
        QualifiedName newId;
        final QualifiedName bunId = bun.getId();
        if (Expand.isVariable(bunId)) {
            //System.out.println("===> bundle " + env + " " + bindings);
            QualifiedName val=env.get(bunId);
            if (val!=null) {
                newId=val;
            } else {
                if (Expand.isGensymVariable(bunId)) {
                    QualifiedName uuid=getUUIDQualifiedName();
                    newId=uuid;
                    bindings.addVariable(bunId, uuid);
                } else {
                    newId=bunId;
                }
            }
        } else {
            newId=bunId;
        }
        ll.add(pf.newNamedBundle(newId, newStatements));
    }

    /**
     * If the bundle's id is a template variable, records its single bound
     * value in env0. An unbound non-vargen variable is an error; a variable
     * with multiple values is an error (a bundle has exactly one id).
     */
    public void updateEnvironmentForBundleId(Bundle bun,
                                             Bindings bindings1,
                                             Hashtable<QualifiedName, QualifiedName> env0) {
        final QualifiedName id = bun.getId();
        if (Expand.isVariable(id)) {
            List<QualifiedName> vals=bindings1.getVariables().get(id);
            if (vals==null) {
                if (Expand.isGensymVariable(id)) {
                    // OK, we'll generate a uuid later
                } else {
                    throw new BundleVariableHasNoValue(id);
                }
            } else {
                if (vals.size()>1) {
                    throw new BundleVariableHasMultipleValues(id,vals);
                } else {
                    env0.put(id, vals.get(0));
                }
            }
        }
    }

    /** @return all statements and bundles produced so far, in visit order. */
    public List<StatementOrBundle> getList() {
        return ll;
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.navigation.ItemPresentation;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.PlatformIcons;
import com.intellij.util.Processor;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.PythonDialectsTokenSetProvider;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.codeInsight.typing.PyTypingTypeProvider;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.stubs.PyNamedParameterStub;
import com.jetbrains.python.psi.types.*;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
import java.util.stream.Collectors;
/**
 * PSI implementation of a named (non-tuple) Python function parameter.
 * Backed either by a {@link PyNamedParameterStub} or by the AST; accessors
 * prefer the stub where available so the AST does not have to be loaded.
 *
 * @author yole
 */
public class PyNamedParameterImpl extends PyBaseElementImpl<PyNamedParameterStub> implements PyNamedParameter {
public PyNamedParameterImpl(ASTNode astNode) {
super(astNode);
}
public PyNamedParameterImpl(final PyNamedParameterStub stub) {
this(stub, PyElementTypes.NAMED_PARAMETER);
}
public PyNamedParameterImpl(final PyNamedParameterStub stub, IStubElementType nodeType) {
super(stub, nodeType);
}
@Nullable
@Override
public String getName() {
// Prefer the stub to avoid AST loading; otherwise read the identifier token.
final PyNamedParameterStub stub = getStub();
if (stub != null) {
return stub.getName();
}
else {
ASTNode node = getNameIdentifierNode();
return node != null ? node.getText() : null;
}
}
@Override
public int getTextOffset() {
// Navigate to the identifier itself (e.g. for rename), not the whole parameter node.
ASTNode node = getNameIdentifierNode();
return node == null ? super.getTextOffset() : node.getTextRange().getStartOffset();
}
@Nullable
protected ASTNode getNameIdentifierNode() {
return getNode().findChildByType(PyTokenTypes.IDENTIFIER);
}
@Override
@Nullable
public PsiElement getNameIdentifier() {
final ASTNode node = getNameIdentifierNode();
return node == null ? null : node.getPsi();
}
@Override
@NotNull
public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
// Rename by swapping the identifier leaf; no-op if there is no identifier node.
final ASTNode oldNameIdentifier = getNameIdentifierNode();
if (oldNameIdentifier != null) {
final ASTNode nameElement = PyUtil.createNewName(this, name);
getNode().replaceChild(oldNameIdentifier, nameElement);
}
return this;
}
@Override
protected void acceptPyVisitor(PyElementVisitor pyVisitor) {
pyVisitor.visitPyNamedParameter(this);
}
@Override
public boolean isPositionalContainer() {
// True for a '*args'-style parameter (single '*' token before the name).
final PyNamedParameterStub stub = getStub();
if (stub != null) {
return stub.isPositionalContainer();
}
else {
return getNode().findChildByType(PyTokenTypes.MULT) != null;
}
}
@Override
public boolean isKeywordContainer() {
// True for a '**kwargs'-style parameter ('**' token before the name).
final PyNamedParameterStub stub = getStub();
if (stub != null) {
return stub.isKeywordContainer();
}
else {
return getNode().findChildByType(PyTokenTypes.EXP) != null;
}
}
@Override
public boolean isKeywordOnly() {
// A parameter is keyword-only if a vararg ('*args') or a bare '*' separator
// appears before it in the parameter list.
final PyParameterList parameters = getStubOrPsiParentOfType(PyParameterList.class);
if (parameters == null) {
return false;
}
boolean varargSeen = false;
for (PyParameter param : parameters.getParameters()) {
if (param == this) {
break;
}
final PyNamedParameter named = param.getAsNamed();
if ((named != null && named.isPositionalContainer()) || param instanceof PySingleStarParameter) {
varargSeen = true;
break;
}
}
return varargSeen;
}
@Override
@Nullable
public PyExpression getDefaultValue() {
// Fast negative answer from the stub; a positive answer still requires the AST
// because the stub does not store the expression itself.
final PyNamedParameterStub stub = getStub();
if (stub != null && !stub.hasDefaultValue()) {
return null;
}
ASTNode[] nodes = getNode().getChildren(PythonDialectsTokenSetProvider.INSTANCE.getExpressionTokens());
if (nodes.length > 0) {
return (PyExpression)nodes[0].getPsi();
}
return null;
}
@Override
public boolean hasDefaultValue() {
final PyNamedParameterStub stub = getStub();
if (stub != null) {
return stub.hasDefaultValue();
}
return getDefaultValue() != null;
}
@NotNull
@Override
public String getRepr(boolean includeDefaultValue, @Nullable TypeEvalContext context) {
return PyCallableParameterImpl.psi(this).getPresentableText(includeDefaultValue, context);
}
@Override
@Nullable
public PyType getArgumentType(@NotNull TypeEvalContext context) {
return PyCallableParameterImpl.psi(this).getArgumentType(context);
}
@Override
public PyAnnotation getAnnotation() {
return getStubOrPsiChild(PyElementTypes.ANNOTATION);
}
@Override
@NotNull
public Icon getIcon(final int flags) {
return PlatformIcons.PARAMETER_ICON;
}
@Override
@NotNull
public PyNamedParameter getAsNamed() {
return this;
}
@Override
@Nullable
public PyTupleParameter getAsTuple() {
return null; // we're not a tuple
}
@Override
@Nullable
public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
// Type inference strategies, tried in order:
// 1. registered PyTypeProvider extensions (annotations, type comments, stubs, ...);
// 2. 'self'/'cls' -> the containing class's type;
// 3. container parameters (**kwargs / *args) -> dict/tuple container types;
// 4. the default value's type (weakened for tuples), when the AST is available;
// 5. types of arguments passed at file-local call sites, when call context is allowed;
// 6. a structural type built from the attributes used on the parameter.
final PsiElement parent = getParentByStub();
if (parent instanceof PyParameterList) {
PyParameterList parameterList = (PyParameterList)parent;
PyFunction func = parameterList.getContainingFunction();
if (func != null) {
for (PyTypeProvider provider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
final Ref<PyType> resultRef = provider.getParameterType(this, func, context);
if (resultRef != null) {
// A non-null Ref means the provider made a decision, even if the type inside is null.
return resultRef.get();
}
}
if (isSelf()) {
// must be 'self' or 'cls'
final PyClass containingClass = func.getContainingClass();
if (containingClass != null) {
final PyFunction.Modifier modifier = func.getModifier();
return new PyClassTypeImpl(containingClass, modifier == PyFunction.Modifier.CLASSMETHOD);
}
}
if (isKeywordContainer()) {
return PyTypeUtil.toKeywordContainerType(this, null);
}
if (isPositionalContainer()) {
return PyTypeUtil.toPositionalContainerType(this, null);
}
if (context.maySwitchToAST(this)) {
// Infer from the default value, e.g. 'def f(x=0)' suggests int.
final PyExpression defaultValue = getDefaultValue();
if (defaultValue != null) {
final PyType type = context.getType(defaultValue);
if (type != null && !(type instanceof PyNoneType)) {
if (type instanceof PyTupleType) {
// Weaken tuple defaults: the actual argument need not be that exact tuple.
return PyUnionType.createWeakType(type);
}
return type;
}
}
}
// Guess the type from file-local calls
if (context.allowCallContext(this)) {
final List<PyType> types = new ArrayList<>();
final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
final PyCallableParameter parameter = PyCallableParameterImpl.psi(this);
processLocalCalls(
func, call -> {
// Collect the types of every argument that maps onto this parameter.
StreamEx
.of(call.multiMapArguments(resolveContext))
.flatCollection(mapping -> mapping.getMappedParameters().entrySet())
.filter(entry -> parameter.equals(entry.getValue()))
.map(Map.Entry::getKey)
.nonNull()
.map(context::getType)
.nonNull()
.forEach(types::add);
return true;
}
);
if (!types.isEmpty()) {
return PyUnionType.createWeakType(PyUnionType.union(types));
}
}
if (context.maySwitchToAST(this)) {
// Last resort: duck-type from the attributes accessed on the parameter.
final Set<String> attributes = collectUsedAttributes(context);
if (!attributes.isEmpty()) {
return new PyStructuralType(attributes, true);
}
}
}
}
return null;
}
@Override
public ItemPresentation getPresentation() {
return new PyElementPresentation(this);
}
/**
 * Collects the names of attributes used on this parameter within its scope, for
 * structural ("duck") typing. Also records {@code __len__} for {@code len(param)}
 * calls and {@code __iter__} when the parameter is iterated in a {@code for} loop.
 * Collection stops as soon as the parameter is reassigned, since attribute usages
 * after that point may refer to a different value.
 */
@NotNull
private Set<String> collectUsedAttributes(@NotNull final TypeEvalContext context) {
final Set<String> result = new LinkedHashSet<>();
final ScopeOwner owner = ScopeUtil.getScopeOwner(this);
final String name = getName();
final Ref<Boolean> parameterWasReassigned = Ref.create(false);
if (owner != null && name != null) {
owner.accept(new PyRecursiveElementVisitor() {
@Override
public void visitPyElement(PyElement node) {
if (parameterWasReassigned.get()) return;
// Do not descend into nested scopes; they have their own parameters/locals.
if (node instanceof ScopeOwner && node != owner) {
return;
}
if (node instanceof PyQualifiedExpression) {
final PyQualifiedExpression expr = (PyQualifiedExpression)node;
final PyExpression qualifier = expr.getQualifier();
if (qualifier != null) {
final String attributeName = expr.getReferencedName();
// For right-operator names (e.g. __radd__) the parameter is the right operand.
final PyExpression referencedExpr = node instanceof PyBinaryExpression && PyNames.isRightOperatorName(attributeName) ?
((PyBinaryExpression)node).getRightExpression() : qualifier;
if (referencedExpr != null) {
final PsiReference ref = referencedExpr.getReference();
if (ref != null && ref.isReferenceTo(PyNamedParameterImpl.this)) {
if (attributeName != null && !result.contains(attributeName)) {
result.add(attributeName);
}
}
}
}
else {
// Unqualified use: if the parameter is passed to a local call, inherit the
// structural attributes expected by the callee's matching parameter.
final PsiReference ref = expr.getReference();
if (ref != null && ref.isReferenceTo(PyNamedParameterImpl.this)) {
StreamEx.of(getParametersByCallArgument(expr, context))
.nonNull()
.map(parameter -> parameter.getType(context))
.select(PyStructuralType.class)
.forEach(type -> result.addAll(type.getAttributeNames()));
}
}
}
super.visitPyElement(node);
}
@Override
public void visitPyIfStatement(PyIfStatement node) {
if (parameterWasReassigned.get()) return;
// Only conditions are inspected; branch bodies are deliberately skipped
// (usages there may be guarded by type checks).
final PyExpression ifCondition = node.getIfPart().getCondition();
if (ifCondition != null) {
ifCondition.accept(this);
}
for (PyIfPart part : node.getElifParts()) {
final PyExpression elseIfCondition = part.getCondition();
if (elseIfCondition != null) {
elseIfCondition.accept(this);
}
}
}
@Override
public void visitPyCallExpression(PyCallExpression node) {
if (parameterWasReassigned.get()) return;
// len(param) with the builtin 'len' implies the parameter supports __len__.
Optional
.ofNullable(node.getCallee())
.filter(callee -> "len".equals(callee.getName()))
.map(PyExpression::getReference)
.map(PsiReference::resolve)
.filter(element -> PyBuiltinCache.getInstance(element).isBuiltin(element))
.ifPresent(
callable -> {
final PyReferenceExpression argument = node.getArgument(0, PyReferenceExpression.class);
if (argument != null && argument.getReference().isReferenceTo(PyNamedParameterImpl.this)) {
result.add(PyNames.LEN);
}
}
);
super.visitPyCallExpression(node);
}
@Override
public void visitPyForStatement(PyForStatement node) {
if (parameterWasReassigned.get()) return;
// 'for x in param' implies the parameter supports __iter__.
Optional
.of(node.getForPart())
.map(PyForPart::getSource)
.map(PyExpression::getReference)
.filter(reference -> reference.isReferenceTo(PyNamedParameterImpl.this))
.ifPresent(reference -> result.add(PyNames.ITER));
super.visitPyForStatement(node);
}
@Override
public void visitPyTargetExpression(PyTargetExpression node) {
if (parameterWasReassigned.get()) return;
// Assignment to the parameter: stop collecting; later usages may not apply.
if (node.getReference().isReferenceTo(PyNamedParameterImpl.this)) {
parameterWasReassigned.set(true);
}
else {
super.visitPyTargetExpression(node);
}
}
});
}
return result;
}
/**
 * Returns the callee parameters onto which {@code element} is mapped when it is used
 * as an argument in an enclosing call. Returns an empty list when {@code element} is
 * not an argument of that call, or when the callee is qualified by this very parameter
 * (which would make the mapping self-referential).
 */
@NotNull
private List<PyCallableParameter> getParametersByCallArgument(@NotNull PsiElement element, @NotNull TypeEvalContext context) {
final PyArgumentList argumentList = PsiTreeUtil.getParentOfType(element, PyArgumentList.class);
if (argumentList != null) {
boolean elementIsArgument = false;
for (PyExpression argument : argumentList.getArgumentExpressions()) {
if (PyPsiUtils.flattenParens(argument) == element) {
elementIsArgument = true;
break;
}
}
final PyCallExpression callExpression = argumentList.getCallExpression();
if (elementIsArgument && callExpression != null) {
final PyExpression callee = callExpression.getCallee();
if (callee instanceof PyReferenceExpression) {
final PyReferenceExpression calleeReferenceExpr = (PyReferenceExpression)callee;
final PyExpression firstQualifier = PyPsiUtils.getFirstQualifier(calleeReferenceExpr);
if (firstQualifier != null) {
final PsiReference ref = firstQualifier.getReference();
if (ref != null && ref.isReferenceTo(this)) {
// param.method(param, ...) -- skip, the callee depends on the parameter itself.
return Collections.emptyList();
}
}
}
final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
return callExpression.multiMapArguments(resolveContext)
.stream()
.flatMap(mapping -> mapping.getMappedParameters().entrySet().stream())
.filter(entry -> entry.getKey() == element)
.map(Map.Entry::getValue)
.collect(Collectors.toList());
}
}
return Collections.emptyList();
}
/**
 * Feeds every call to {@code function} found in its containing file to {@code processor},
 * stopping early when the processor returns {@code false}.
 */
private static void processLocalCalls(@NotNull PyFunction function, @NotNull Processor<PyCallExpression> processor) {
final PsiFile file = function.getContainingFile();
final String name = function.getName();
if (file != null && name != null) {
// Text search is faster than ReferencesSearch in LocalSearchScope
final String text = file.getText();
for (int pos = text.indexOf(name); pos != -1; pos = text.indexOf(name, pos + 1)) {
final PsiReference ref = file.findReferenceAt(pos);
if (ref != null && ref.isReferenceTo(function)) {
final PyCallExpression expr = PsiTreeUtil.getParentOfType(file.findElementAt(pos), PyCallExpression.class);
if (expr != null && !processor.process(expr)) {
return;
}
}
}
}
}
@Override
public String toString() {
return super.toString() + "('" + getName() + "')";
}
@NotNull
@Override
public SearchScope getUseScope() {
// A parameter can only be referenced inside its function (or, failing that, its file).
final ScopeOwner owner = ScopeUtil.getScopeOwner(this);
if (owner instanceof PyFunction) {
return new LocalSearchScope(owner);
}
return new LocalSearchScope(getContainingFile());
}
@Override
public boolean isSelf() {
// True when this is the implicit instance/class parameter ('self'/'cls'):
// the first parameter of a method in a class, unless the method is a staticmethod.
// __new__ is special-cased since it receives the class as its first argument.
if (isPositionalContainer() || isKeywordContainer()) {
return false;
}
PyFunction function = getStubOrPsiParentOfType(PyFunction.class);
if (function == null) {
return false;
}
final PyClass cls = function.getContainingClass();
final PyParameter[] parameters = function.getParameterList().getParameters();
if (cls != null && parameters.length > 0 && parameters[0] == this) {
if (PyNames.NEW.equals(function.getName())) {
return true;
}
final PyFunction.Modifier modifier = function.getModifier();
if (modifier != PyFunction.Modifier.STATICMETHOD) {
return true;
}
}
return false;
}
@Nullable
@Override
public PsiComment getTypeComment() {
// Scan siblings on the same physical line (stopping at a newline) for a
// '# type: ...' comment, skipping whitespace and a separating comma.
for (PsiElement next = getNextSibling(); next != null; next = next.getNextSibling()) {
if (next.textContains('\n')) break;
if (!(next instanceof PsiWhiteSpace)) {
if (",".equals(next.getText())) continue;
if (next instanceof PsiComment && PyTypingTypeProvider.getTypeCommentValue(next.getText()) != null) {
return (PsiComment)next;
}
break;
}
}
return null;
}
@Nullable
@Override
public String getTypeCommentAnnotation() {
// Stub first (no AST load); otherwise extract the annotation text from the comment.
final PyNamedParameterStub stub = getStub();
if (stub != null) {
return stub.getTypeComment();
}
final PsiComment comment = getTypeComment();
if (comment != null) {
return PyTypingTypeProvider.getTypeCommentValue(comment.getText());
}
return null;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ram.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the ListResourceSharePermissions operation: lists the RAM managed
 * permissions associated with a resource share, with standard token-based pagination.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ram-2018-01-04/ListResourceSharePermissions" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListResourceSharePermissionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

/**
 * The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">Amazon Resource Name
 * (ARN)</a> of the resource share whose associated permissions are retrieved.
 */
private String resourceShareArn;

/**
 * Pagination token. Set this to the <code>NextToken</code> value returned by the previous call to request the
 * next page of results; valid only when such a value was received.
 */
private String nextToken;

/**
 * Maximum number of results per response page. When omitted, an operation-specific default is used. If more
 * results exist, the response carries a non-null <code>NextToken</code> to pass into the next call; the service
 * may return fewer results than this maximum even when more are available.
 */
private Integer maxResults;

/**
 * Sets the ARN of the resource share whose associated permissions are retrieved.
 *
 * @param value the resource share ARN.
 */
public void setResourceShareArn(String value) {
this.resourceShareArn = value;
}

/**
 * @return the ARN of the resource share whose associated permissions are retrieved.
 */
public String getResourceShareArn() {
return this.resourceShareArn;
}

/**
 * Fluent variant of {@link #setResourceShareArn(String)}.
 *
 * @param value the resource share ARN.
 * @return this request, so calls can be chained.
 */
public ListResourceSharePermissionsRequest withResourceShareArn(String value) {
setResourceShareArn(value);
return this;
}

/**
 * Sets the pagination token from the previous call's <code>NextToken</code> response.
 *
 * @param value the pagination token.
 */
public void setNextToken(String value) {
this.nextToken = value;
}

/**
 * @return the pagination token from the previous call's <code>NextToken</code> response.
 */
public String getNextToken() {
return this.nextToken;
}

/**
 * Fluent variant of {@link #setNextToken(String)}.
 *
 * @param value the pagination token.
 * @return this request, so calls can be chained.
 */
public ListResourceSharePermissionsRequest withNextToken(String value) {
setNextToken(value);
return this;
}

/**
 * Sets the maximum number of results to include on each response page.
 *
 * @param value the page-size limit.
 */
public void setMaxResults(Integer value) {
this.maxResults = value;
}

/**
 * @return the maximum number of results to include on each response page.
 */
public Integer getMaxResults() {
return this.maxResults;
}

/**
 * Fluent variant of {@link #setMaxResults(Integer)}.
 *
 * @param value the page-size limit.
 * @return this request, so calls can be chained.
 */
public ListResourceSharePermissionsRequest withMaxResults(Integer value) {
setMaxResults(value);
return this;
}

/**
 * Returns a string representation of this object for testing and debugging. Sensitive data would be redacted
 * with a placeholder value.
 *
 * @return a string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
final StringBuilder buf = new StringBuilder("{");
if (getResourceShareArn() != null) {
buf.append("ResourceShareArn: ").append(getResourceShareArn()).append(",");
}
if (getNextToken() != null) {
buf.append("NextToken: ").append(getNextToken()).append(",");
}
if (getMaxResults() != null) {
buf.append("MaxResults: ").append(getMaxResults());
}
return buf.append("}").toString();
}

@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof ListResourceSharePermissionsRequest)) {
return false;
}
final ListResourceSharePermissionsRequest that = (ListResourceSharePermissionsRequest) obj;
return fieldEquals(that.getResourceShareArn(), getResourceShareArn())
&& fieldEquals(that.getNextToken(), getNextToken())
&& fieldEquals(that.getMaxResults(), getMaxResults());
}

/**
 * Null-safe field comparison: true when both values are null, or both are non-null and equal.
 */
private static boolean fieldEquals(Object left, Object right) {
return left == null ? right == null : left.equals(right);
}

@Override
public int hashCode() {
// Standard 31-based fold over the fields, in declaration order.
int result = 1;
for (Object field : new Object[] {getResourceShareArn(), getNextToken(), getMaxResults()}) {
result = 31 * result + (field == null ? 0 : field.hashCode());
}
return result;
}

@Override
public ListResourceSharePermissionsRequest clone() {
return (ListResourceSharePermissionsRequest) super.clone();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.work.batch;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.DrillBuf;
import java.io.EOFException;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.proto.BitData;
import org.apache.drill.exec.proto.ExecProtos;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.record.RawFragmentBatch;
import org.apache.drill.exec.store.LocalSyncableFileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
import org.apache.drill.shaded.guava.com.google.common.collect.Queues;
/**
* This implementation of RawBatchBuffer starts writing incoming batches to disk once the buffer size reaches a threshold.
* The order of the incoming buffers is maintained.
*/
public class SpoolingRawBatchBuffer extends BaseRawBatchBuffer<SpoolingRawBatchBuffer.RawFragmentBatchWrapper> {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SpoolingRawBatchBuffer.class);
private static String DRILL_LOCAL_IMPL_STRING = "fs.drill-local.impl";
private static final float STOP_SPOOLING_FRACTION = (float) 0.5;
public static final long ALLOCATOR_INITIAL_RESERVATION = 1*1024*1024;
public static final long ALLOCATOR_MAX_RESERVATION = 20L*1000*1000*1000;
/**
 * Lifecycle of the disk-spooling mechanism for this buffer.
 * Transitions are governed by {@code setSpoolingState}: NOT_SPOOLING is the initial
 * state and is never re-entered, and STOP_SPOOLING is terminal.
 */
private enum SpoolingState {
NOT_SPOOLING,
SPOOLING,
PAUSE_SPOOLING,
STOP_SPOOLING
}
private final BufferAllocator allocator;
private final long threshold;
private final int oppositeId;
private final int bufferIndex;
private volatile SpoolingState spoolingState;
private volatile long currentSizeInMemory = 0;
private volatile Spooler spooler;
private FileSystem fs;
private Path path;
private FSDataOutputStream outputStream;
/**
 * Creates a spooling batch buffer.
 *
 * @param context fragment execution context supplying configuration and a child allocator
 * @param fragmentCount number of incoming fragments feeding this buffer
 * @param oppositeId id of the sending side, used to name the spooler thread and spool file
 * @param bufferIndex index of this buffer within the receiver, also used in naming
 */
public SpoolingRawBatchBuffer(FragmentContext context, int fragmentCount, int oppositeId, int bufferIndex) {
super(context, fragmentCount);
this.allocator = context.getNewChildAllocator(
"SpoolingRawBatchBufer", 100, ALLOCATOR_INITIAL_RESERVATION, ALLOCATOR_MAX_RESERVATION);
// In-memory size threshold (bytes) above which incoming batches are spooled to disk.
this.threshold = context.getConfig().getLong(ExecConstants.SPOOLING_BUFFER_MEMORY);
this.oppositeId = oppositeId;
this.bufferIndex = bufferIndex;
this.bufferQueue = new SpoolingBufferQueue();
}
/**
 * Queue of batch wrappers backing this buffer. Each wrapper may hold its batch in
 * memory or refer to one that was spooled to disk; {@code get()} on the wrapper
 * produces the batch either way (hence the IOException on the poll/take methods).
 */
private class SpoolingBufferQueue implements BufferQueue<RawFragmentBatchWrapper> {
private final LinkedBlockingDeque<RawFragmentBatchWrapper> buffer = Queues.newLinkedBlockingDeque();
@Override
public void addOomBatch(RawFragmentBatch batch) {
// OOM batches are pushed to the front so the out-of-memory signal is consumed first.
RawFragmentBatchWrapper batchWrapper = new RawFragmentBatchWrapper(batch, true);
batchWrapper.setOutOfMemory(true);
buffer.addFirst(batchWrapper);
}
// Non-blocking poll; returns null when the queue is empty.
@Override
public RawFragmentBatch poll() throws IOException, InterruptedException {
RawFragmentBatchWrapper batchWrapper = buffer.poll();
if (batchWrapper != null) {
return batchWrapper.get();
}
return null;
}
// Blocks until a batch is available.
@Override
public RawFragmentBatch take() throws IOException, InterruptedException {
return buffer.take().get();
}
// Bounded-wait poll; returns null on timeout.
@Override
public RawFragmentBatch poll(long timeout, TimeUnit timeUnit) throws InterruptedException, IOException {
RawFragmentBatchWrapper batchWrapper = buffer.poll(timeout, timeUnit);
if (batchWrapper != null) {
return batchWrapper.get();
}
return null;
}
@Override
public boolean checkForOutOfMemory() {
// NOTE(review): peek() returns null on an empty queue, which would NPE here --
// presumably callers only invoke this when the queue is non-empty; confirm.
return buffer.peek().isOutOfMemory();
}
@Override
public int size() {
return buffer.size();
}
@Override
public boolean isEmpty() {
return buffer.size() == 0;
}
public void add(RawFragmentBatchWrapper batchWrapper) {
buffer.add(batchWrapper);
}
}
/**
 * Central state-transition gate. Two transitions are rejected: re-entering the initial
 * NOT_SPOOLING state, and leaving the terminal STOP_SPOOLING state.
 */
private synchronized void setSpoolingState(SpoolingState newState) {
  final boolean rejected = newState == SpoolingState.NOT_SPOOLING
      || spoolingState == SpoolingState.STOP_SPOOLING;
  if (!rejected) {
    spoolingState = newState;
  }
}
// True only while incoming batches are being handed to the spooler thread.
private boolean isCurrentlySpooling() {
  return spoolingState == SpoolingState.SPOOLING;
}

// Request spill-to-disk mode (no-op once the terminal STOP_SPOOLING state is reached).
private void startSpooling() {
  setSpoolingState(SpoolingState.SPOOLING);
}

// Suspend spilling; new batches stay in memory until the threshold is crossed again.
private void pauseSpooling() {
  setSpoolingState(SpoolingState.PAUSE_SPOOLING);
}

// True once spooling has been terminally stopped (buffer killed or closed).
private boolean isSpoolingStopped() {
  return spoolingState == SpoolingState.STOP_SPOOLING;
}

// Enter the terminal state; setSpoolingState accepts no transitions after this.
private void stopSpooling() {
  setSpoolingState(SpoolingState.STOP_SPOOLING);
}
/**
 * Picks one of the configured temporary spill directories uniformly at random.
 */
public String getDir() {
  final List<String> tempDirs = context.getConfig().getStringList(ExecConstants.TEMP_DIRECTORIES);
  final int choice = ThreadLocalRandom.current().nextInt(tempDirs.size());
  return tempDirs.get(choice);
}
/**
 * Lazily creates the spill file, its output stream and the background {@link Spooler}
 * thread. Safe to call repeatedly; only the first call has any effect.
 *
 * @throws IOException if the spill filesystem or file cannot be created
 */
private synchronized void initSpooler() throws IOException {
  if (spooler != null) {
    return;
  }
  Configuration conf = new Configuration();
  conf.set(FileSystem.FS_DEFAULT_NAME_KEY, context.getConfig().getString(ExecConstants.TEMP_FILESYSTEM));
  // Register Drill's local filesystem implementation; presumably it backs the hsync()
  // call made while spooling — TODO confirm against LocalSyncableFileSystem.
  conf.set(DRILL_LOCAL_IMPL_STRING, LocalSyncableFileSystem.class.getName());
  fs = FileSystem.get(conf);
  path = getPath();
  outputStream = fs.create(path);
  final String spoolingThreadName = QueryIdHelper.getExecutorThreadName(context.getHandle()).concat(
      ":Spooler-" + oppositeId + "-" + bufferIndex);
  spooler = new Spooler(spoolingThreadName);
  spooler.start();
}
/**
 * Adds an incoming batch to the buffer. If the buffer is currently in spooling mode the
 * batch is also handed to the spooler thread to be written to disk; otherwise it stays in
 * memory. Crossing the memory threshold switches the buffer into spooling mode for
 * subsequent batches.
 *
 * @param batch incoming batch from the sending fragment
 * @throws IOException if the spooler must be initialized and that fails
 */
@Override
protected void enqueueInner(RawFragmentBatch batch) throws IOException {
  assert batch.getHeader().getSendingMajorFragmentId() == oppositeId;
  logger.debug("Enqueue batch. Current buffer size: {}. Last batch: {}. Sending fragment: {}", bufferQueue.size(), batch.getHeader().getIsLastBatch(), batch.getHeader().getSendingMajorFragmentId());
  RawFragmentBatchWrapper wrapper;
  boolean spoolCurrentBatch = isCurrentlySpooling();
  // 'available' is false while the wrapper is destined for disk; readers block on it.
  wrapper = new RawFragmentBatchWrapper(batch, !spoolCurrentBatch);
  currentSizeInMemory += wrapper.getBodySize();
  if (spoolCurrentBatch) {
    if (spooler == null) {
      initSpooler();
    }
    spooler.addBatchForSpooling(wrapper);
  }
  bufferQueue.add(wrapper);
  // Threshold crossing only flips the mode for the NEXT batch; this one stays in memory.
  if (!spoolCurrentBatch && currentSizeInMemory > threshold) {
    logger.debug("Buffer size {} greater than threshold {}. Start spooling to disk", currentSizeInMemory, threshold);
    startSpooling();
  }
}
/**
 * Tears the buffer down on query cancellation: releases allocator memory and terminates
 * the spooler thread if one was ever started.
 */
@Override
public void kill(FragmentContext context) {
  allocator.close();
  final Spooler activeSpooler = spooler;
  if (activeSpooler != null) {
    activeSpooler.terminate();
  }
}
/**
 * Book-keeping after a batch is handed to the consumer: flags out-of-memory conditions,
 * updates in-memory size accounting, and pauses spooling once memory usage falls back
 * below STOP_SPOOLING_FRACTION of the threshold.
 */
@Override
protected void upkeep(RawFragmentBatch batch) {
  if (context.getAllocator().isOverLimit()) {
    outOfMemory.set(true);
  }
  DrillBuf body = batch.getBody();
  if (body != null) {
    currentSizeInMemory -= body.capacity();
  }
  if (isCurrentlySpooling() && currentSizeInMemory < threshold * STOP_SPOOLING_FRACTION) {
    logger.debug("buffer size {} less than {}x threshold. Stop spooling.", currentSizeInMemory, STOP_SPOOLING_FRACTION);
    pauseSpooling();
  }
  logger.debug("Got batch. Current buffer size: {}", bufferQueue.size());
}
/**
 * Shuts the buffer down: stops and joins the spooler thread, releases allocator memory,
 * closes the spill stream and (if configured) deletes the spill file.
 */
@Override
public void close() {
  if (spooler != null) {
    spooler.terminate();
    // FIX: the original swallowed InterruptedException without restoring the thread's
    // interrupt status. We still wait for the spooler to die (intentional best-effort
    // loop), but remember the interrupt and re-assert it once the thread has exited.
    boolean interrupted = false;
    while (spooler.isAlive()) {
      try {
        spooler.join();
      } catch (InterruptedException e) {
        interrupted = true;
        logger.warn("Interrupted while waiting for spooling thread to exit");
      }
    }
    if (interrupted) {
      Thread.currentThread().interrupt();
    }
  }
  allocator.close();
  try {
    if (outputStream != null) {
      outputStream.close();
    }
  } catch (IOException e) {
    // Best-effort cleanup: a failed close must not mask the shutdown path.
    logger.warn("Failed to cleanup I/O streams", e);
  }
  if (context.getConfig().getBoolean(ExecConstants.SPOOLING_BUFFER_DELETE)) {
    try {
      if (fs != null) {
        fs.delete(path, false);
        logger.debug("Deleted file {}", path.toString());
      }
    } catch (IOException e) {
      logger.warn("Failed to delete temporary files", e);
    }
  }
  super.close();
}
/**
 * Background daemon thread that drains its queue and writes each batch to the spill
 * stream. Failures are routed to the fragment executor state.
 */
private class Spooler extends Thread {

  private final LinkedBlockingDeque<RawFragmentBatchWrapper> spoolingQueue;
  private volatile boolean shouldContinue = true;

  public Spooler(String name) {
    setDaemon(true);
    setName(name);
    spoolingQueue = Queues.newLinkedBlockingDeque();
  }

  @Override
  public void run() {
    try {
      while (shouldContinue) {
        RawFragmentBatchWrapper batch;
        try {
          batch = spoolingQueue.take();
        } catch (InterruptedException e) {
          // terminate() interrupts us to break out of take(); only exit if asked to stop.
          if (shouldContinue) {
            continue;
          } else {
            break;
          }
        }
        try {
          batch.writeToStream(outputStream);
        } catch (IOException e) {
          context.getExecutorState().fail(e);
        }
      }
    } catch (Throwable e) {
      context.getExecutorState().fail(e);
    } finally {
      logger.info("Spooler thread exiting");
    }
  }

  public void addBatchForSpooling(RawFragmentBatchWrapper batchWrapper) {
    // FIX: the condition was inverted — batches were queued for spooling only AFTER
    // spooling had been terminally stopped, and were marked in-memory while spooling was
    // active. While spooling is live, hand the batch to the spooling queue; once stopped,
    // keep the batch in memory instead.
    if (!isSpoolingStopped()) {
      spoolingQueue.add(batchWrapper);
    } else {
      // will not spill this batch
      batchWrapper.available = true;
      batchWrapper.batch.sendOk();
      batchWrapper.latch.countDown();
    }
  }

  public void terminate() {
    stopSpooling();
    shouldContinue = false;
    // FIX: the original dereferenced a 'spoolingThread' field that was never assigned,
    // so terminate() always threw NullPointerException. The Spooler IS the spooling
    // thread, so interrupt ourselves directly (wakes the blocking take() above).
    if (isAlive()) {
      interrupt();
    }
  }
}
/**
 * Holds one incoming batch and mediates its possible round-trip to disk. While the batch
 * is in memory, {@code available} is true and {@link #get()} returns immediately; when the
 * batch is scheduled for spooling, consumers block on {@code latch} until the batch has
 * been written out and is then re-read from the spill file on demand.
 */
class RawFragmentBatchWrapper {
  private RawFragmentBatch batch;
  private volatile boolean available;
  private CountDownLatch latch;
  private volatile int bodyLength;
  private volatile boolean outOfMemory = false;
  // Byte offset of this batch's record in the spill file (-1 until written).
  private long start = -1;
  // Random marker written before the batch and verified on read to detect torn records.
  private long check;

  public RawFragmentBatchWrapper(RawFragmentBatch batch, boolean available) {
    Preconditions.checkNotNull(batch);
    this.batch = batch;
    this.available = available;
    // Consumers of a to-be-spooled batch wait on this latch until writeToStream runs.
    this.latch = new CountDownLatch(available ? 0 : 1);
    if (available) {
      batch.sendOk();
    }
  }

  public boolean isNull() {
    return batch == null;
  }

  /**
   * Returns the wrapped batch, blocking until it has been spooled and then reloading it
   * from disk if it is not currently in memory.
   */
  public RawFragmentBatch get() throws InterruptedException, IOException {
    if (available) {
      assert batch.getHeader() != null : "batch header null";
      return batch;
    } else {
      latch.await();
      readFromStream();
      available = true;
      return batch;
    }
  }

  /** Size in bytes of the batch body, or 0 for header-only batches. */
  public long getBodySize() {
    if (batch.getBody() == null) {
      return 0;
    }
    assert batch.getBody().readableBytes() >= 0;
    return batch.getBody().readableBytes();
  }

  /**
   * Spools this batch: writes the check marker, the delimited header and the body, syncs
   * the stream, releases the in-memory body and unblocks waiting consumers.
   */
  public void writeToStream(FSDataOutputStream stream) throws IOException {
    Stopwatch watch = Stopwatch.createStarted();
    available = false;
    check = ThreadLocalRandom.current().nextLong();
    start = stream.getPos();
    logger.debug("Writing check value {} at position {}", check, start);
    stream.writeLong(check);
    batch.getHeader().writeDelimitedTo(stream);
    ByteBuf buf = batch.getBody();
    if (buf != null) {
      bodyLength = buf.capacity();
    } else {
      bodyLength = 0;
    }
    if (bodyLength > 0) {
      buf.getBytes(0, stream, bodyLength);
    }
    stream.hsync();
    FileStatus status = fs.getFileStatus(path);
    long len = status.getLen();
    logger.debug("After spooling batch, stream at position {}. File length {}", stream.getPos(), len);
    batch.sendOk();
    latch.countDown();
    // FIX: elapsed time can legitimately be 0 us for a small batch; the rate computation
    // below then threw ArithmeticException (logger args are evaluated eagerly). Clamp to 1.
    long t = Math.max(1, watch.elapsed(TimeUnit.MICROSECONDS));
    logger.debug("Took {} us to spool {} to disk. Rate {} mb/s", t, bodyLength, bodyLength / t);
    if (buf != null) {
      buf.release();
    }
  }

  /**
   * Reads the batch back from the spill file, retrying with exponential backoff on EOF
   * (the file may not yet be fully visible), and verifying the check marker.
   */
  public void readFromStream() throws IOException, InterruptedException {
    long pos = start;
    boolean tryAgain = true;
    int duration = 0;
    while (tryAgain) {
      // Sometimes, the file isn't quite done writing when we attempt to read it. As such, we need to wait and retry.
      Thread.sleep(duration);
      try (final FSDataInputStream stream = fs.open(path);
          final DrillBuf buf = allocator.buffer(bodyLength)) {
        stream.seek(start);
        final long currentPos = stream.getPos();
        final long check = stream.readLong();
        pos = stream.getPos();
        assert check == this.check : String.format("Check values don't match: %d %d, Position %d", this.check, check, currentPos);
        Stopwatch watch = Stopwatch.createStarted();
        BitData.FragmentRecordBatch header = BitData.FragmentRecordBatch.parseDelimitedFrom(stream);
        pos = stream.getPos();
        assert header != null : "header null after parsing from stream";
        buf.writeBytes(stream, bodyLength);
        pos = stream.getPos();
        batch = new RawFragmentBatch(header, buf, null);
        available = true;
        latch.countDown();
        // FIX: clamp elapsed time to avoid division by zero in the rate calculation.
        long t = Math.max(1, watch.elapsed(TimeUnit.MICROSECONDS));
        logger.debug("Took {} us to read {} from disk. Rate {} mb/s", t, bodyLength, bodyLength / t);
        tryAgain = false;
      } catch (EOFException e) {
        FileStatus status = fs.getFileStatus(path);
        logger.warn("EOF reading from file {} at pos {}. Current file size: {}", path, pos, status.getLen());
        duration = Math.max(1, duration * 2);
        if (duration < 60000) {
          continue;
        } else {
          throw e;
        }
      } finally {
        if (tryAgain) {
          // we had a premature exit, release batch memory so we don't leak it.
          // NOTE(review): on the retry path 'batch' is still the pre-spool batch whose
          // body was already released in writeToStream — confirm this is not a
          // double-release of the same buffer.
          if (batch != null) {
            batch.getBody().release();
          }
        }
      }
    }
  }

  private boolean isOutOfMemory() {
    return outOfMemory;
  }

  private void setOutOfMemory(boolean outOfMemory) {
    this.outOfMemory = outOfMemory;
  }
}
/**
 * Builds a unique spill-file path of the form
 * {@code <dir>/<queryId>/<majorFragId>/<minorFragId>/<oppositeId>/<bufferIndex>}.
 */
private Path getPath() {
  final ExecProtos.FragmentHandle handle = context.getHandle();
  final String queryId = QueryIdHelper.getQueryId(handle.getQueryId());
  final String fileName = Joiner.on(Path.SEPARATOR).join(
      getDir(),
      queryId,
      handle.getMajorFragmentId(),
      handle.getMinorFragmentId(),
      oppositeId,
      bufferIndex);
  return new Path(fileName);
}
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4j.linalg.io;
import org.apache.commons.compress.compressors.FileNameUtil;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.nd4j.base.Preconditions;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.attribute.FileAttribute;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
 * A slightly upgraded version of Spring's {@code ClassPathResource}: resolves resources
 * on the classpath and, when a real {@link java.io.File} is required, can extract single
 * resources or whole directories out of the containing JAR.
 */
public class ClassPathResource extends AbstractFileResolvingResource {

    private final String path;
    private ClassLoader classLoader;
    private Class<?> clazz;

    /**
     * Create a resource for the given classpath location, resolved against the default
     * class loader.
     *
     * @param path resource location on the classpath
     */
    public ClassPathResource(String path) {
        this(path, (ClassLoader) null);
    }

    /**
     * Create a resource resolved against the given class loader (or the default one when
     * null). A leading "/" is stripped because class-loader lookups are always absolute.
     */
    public ClassPathResource(String path, ClassLoader classLoader) {
        Assert.notNull(path, "Path must not be null");
        String pathToUse = StringUtils.cleanPath(path);
        if (pathToUse.startsWith("/")) {
            pathToUse = pathToUse.substring(1);
        }
        this.path = pathToUse;
        this.classLoader = classLoader != null ? classLoader : ClassUtils.getDefaultClassLoader();
    }

    /**
     * Create a resource resolved relative to the given class (leading "/" kept, matching
     * {@link Class#getResource} semantics).
     */
    public ClassPathResource(String path, Class<?> clazz) {
        Assert.notNull(path, "Path must not be null");
        this.path = StringUtils.cleanPath(path);
        this.clazz = clazz;
    }

    protected ClassPathResource(String path, ClassLoader classLoader, Class<?> clazz) {
        this.path = StringUtils.cleanPath(path);
        this.classLoader = classLoader;
        this.clazz = clazz;
    }

    public final String getPath() {
        return this.path;
    }

    public final ClassLoader getClassLoader() {
        return this.classLoader != null ? this.classLoader : this.clazz.getClassLoader();
    }

    /**
     * Get the File.
     * If the file cannot be accessed directly (for example, it is in a JAR file), we will attempt to extract it from
     * the JAR and copy it to the temporary directory, using {@link #getTempFileFromArchive()}
     *
     * @return The File, or a temporary copy if it can not be accessed directly
     * @throws IOException
     */
    @Override
    public File getFile() throws IOException {
        try {
            return super.getFile();
        } catch (FileNotFoundException e) {
            //java.io.FileNotFoundException: class path resource [iris.txt] cannot be resolved to absolute file path because
            // it does not reside in the file system: jar:file:/.../dl4j-test-resources-0.9.2-SNAPSHOT.jar!/iris.txt
            return getTempFileFromArchive();
        }
    }

    /**
     * Get a temp file from the classpath.<br>
     * This is for resources where a file is needed and the classpath resource is in a jar file. The file is copied
     * to the default temporary directory, using {@link Files#createTempFile(String, String, FileAttribute[])}.
     * Consequently, the extracted file will have a different filename to the original one.
     *
     * @return the temp file
     * @throws IOException If an error occurs when files are being copied
     * @see #getTempFileFromArchive(File)
     */
    public File getTempFileFromArchive() throws IOException {
        return getTempFileFromArchive(null);
    }

    /**
     * Get a temp file from the classpath, and (optionally) place it in the specified directory<br>
     * Note that:<br>
     * - If the directory is not specified, the file is copied to the default temporary directory, using
     * {@link Files#createTempFile(String, String, FileAttribute[])}. Consequently, the extracted file will have a
     * different filename to the original one.<br>
     * - If the directory *is* specified, the file is copied directly - and the original filename is maintained
     *
     * @param rootDirectory May be null. If non-null, copy to the specified directory
     * @return the temp file
     * @throws IOException If an error occurs when files are being copied
     * @see #getTempFileFromArchive(File)
     */
    public File getTempFileFromArchive(File rootDirectory) throws IOException {
        File tmpFile;
        if (rootDirectory != null) {
            //Maintain original file names, as it's going in a directory...
            tmpFile = new File(rootDirectory, FilenameUtils.getName(path));
        } else {
            tmpFile = Files.createTempFile(FilenameUtils.getName(path), "tmp").toFile();
        }
        tmpFile.deleteOnExit();
        // FIX: the classpath InputStream was opened but never closed, leaking a file/JAR
        // handle on every call. try-with-resources closes both streams on all paths.
        try (InputStream is = getInputStream();
             BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(tmpFile))) {
            IOUtils.copy(is, bos);
            bos.flush();
        }
        return tmpFile;
    }

    /**
     * Extract the directory recursively to the specified location. Current ClassPathResource must point to
     * a directory.<br>
     * For example, if classpathresource points to "some/dir/", then the contents - not including the parent directory "dir" -
     * will be extracted or copied to the specified destination.<br>
     * @param destination Destination directory. Must exist
     */
    public void copyDirectory(File destination) throws IOException {
        Preconditions.checkState(destination.exists() && destination.isDirectory(), "Destination directory must exist and be a directory: %s", destination);
        URL url = this.getUrl();
        if (isJarURL(url)) {
            /*
                This is actually request for file, that's packed into jar. Probably the current one, but that doesn't matters.
             */
            InputStream stream = null;
            ZipFile zipFile = null;
            try {
                GetStreamFromZip getStreamFromZip = new GetStreamFromZip(url, path).invoke();
                ZipEntry entry = getStreamFromZip.getEntry();
                stream = getStreamFromZip.getStream();
                zipFile = getStreamFromZip.getZipFile();
                Preconditions.checkState(entry.isDirectory(), "Source must be a directory: %s", entry.getName());
                String pathNoSlash = this.path;
                if (pathNoSlash.endsWith("/") || pathNoSlash.endsWith("\\")) {
                    pathNoSlash = pathNoSlash.substring(0, pathNoSlash.length() - 1);
                }
                Enumeration<? extends ZipEntry> entries = zipFile.entries();
                while (entries.hasMoreElements()) {
                    ZipEntry e = entries.nextElement();
                    String name = e.getName();
                    if (name.startsWith(pathNoSlash) && name.length() > pathNoSlash.length() && (name.charAt(pathNoSlash.length()) == '/' || name.charAt(pathNoSlash.length()) == '\\')) { //second condition: to avoid "/dir/a/" and "/dir/abc/" both matching startsWith
                        String relativePath = name.substring(this.path.length());
                        File extractTo = new File(destination, relativePath);
                        if (e.isDirectory()) {
                            extractTo.mkdirs();
                        } else {
                            // FIX: zip entries are not guaranteed to be preceded by their
                            // directory entry; ensure the parent directory exists first.
                            File parent = extractTo.getParentFile();
                            if (parent != null) {
                                parent.mkdirs();
                            }
                            // FIX: the per-entry InputStream was never closed (leaked one
                            // handle per extracted file); close it with the output stream.
                            try (InputStream is = getInputStream(name, clazz, classLoader);
                                 BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(extractTo))) {
                                IOUtils.copy(is, bos);
                            }
                        }
                    }
                }
                stream.close();
                zipFile.close();
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                if (stream != null)
                    IOUtils.closeQuietly(stream);
                if (zipFile != null)
                    IOUtils.closeQuietly(zipFile);
            }
        } else {
            File source;
            try {
                source = new File(url.toURI());
            } catch (URISyntaxException e) {
                throw new IOException("Error converting URL to a URI - path may be invalid? Path=" + url);
            }
            Preconditions.checkState(source.isDirectory(), "Source must be a directory: %s", source);
            Preconditions.checkState(destination.exists() && destination.isDirectory(), "Destination must be a directory and must exist: %s", destination);
            FileUtils.copyDirectory(source, destination);
        }
    }

    /** Whether the resource can be resolved (not necessarily opened) on the classpath. */
    public boolean exists() {
        URL url;
        if (this.clazz != null) {
            url = this.clazz.getResource(this.path);
        } else {
            url = this.classLoader.getResource(this.path);
        }
        return url != null;
    }

    public InputStream getInputStream() throws IOException {
        return getInputStream(path, clazz, classLoader);
    }

    // Shared resolution helper; always hands back a buffered stream.
    private static InputStream getInputStream(String path, Class<?> clazz, ClassLoader classLoader) throws IOException {
        InputStream is;
        if (clazz != null) {
            is = clazz.getResourceAsStream(path);
        } else {
            is = classLoader.getResourceAsStream(path);
        }
        if (is == null) {
            throw new FileNotFoundException(path + " cannot be opened because it does not exist");
        } else {
            if (is instanceof BufferedInputStream)
                return is;
            return new BufferedInputStream(is);
        }
    }

    public URL getURL() throws IOException {
        URL url;
        if (this.clazz != null) {
            url = this.clazz.getResource(this.path);
        } else {
            url = this.classLoader.getResource(this.path);
        }
        if (url == null) {
            throw new FileNotFoundException(
                    this.getDescription() + " cannot be resolved to URL because it does not exist");
        } else {
            return url;
        }
    }

    public Resource createRelative(String relativePath) {
        String pathToUse = StringUtils.applyRelativePath(this.path, relativePath);
        return new ClassPathResource(pathToUse, this.classLoader, this.clazz);
    }

    public String getFilename() {
        return StringUtils.getFilename(this.path);
    }

    public String getDescription() {
        StringBuilder builder = new StringBuilder("class path resource [");
        String pathToUse = this.path;
        if (this.clazz != null && !pathToUse.startsWith("/")) {
            builder.append(ClassUtils.classPackageAsResourcePath(this.clazz));
            builder.append('/');
        }
        if (pathToUse.startsWith("/")) {
            pathToUse = pathToUse.substring(1);
        }
        builder.append(pathToUse);
        builder.append(']');
        return builder.toString();
    }

    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        } else if (!(obj instanceof ClassPathResource)) {
            return false;
        } else {
            ClassPathResource otherRes = (ClassPathResource) obj;
            return this.path.equals(otherRes.path) && ObjectUtils.nullSafeEquals(this.classLoader, otherRes.classLoader)
                    && ObjectUtils.nullSafeEquals(this.clazz, otherRes.clazz);
        }
    }

    public int hashCode() {
        return this.path.hashCode();
    }

    /**
     * Returns URL of the requested resource
     *
     * @return URL of the resource, if it's available in current Jar
     */
    private URL getUrl() {
        ClassLoader loader = null;
        try {
            loader = Thread.currentThread().getContextClassLoader();
        } catch (Exception e) {
            // do nothing
        }
        if (loader == null) {
            loader = ClassPathResource.class.getClassLoader();
        }
        URL url = loader.getResource(this.path);
        if (url == null) {
            // try to check for mis-used starting slash
            // TODO: see TODO below
            if (this.path.startsWith("/")) {
                url = loader.getResource(this.path.replaceFirst("[\\\\/]", ""));
                if (url != null)
                    return url;
            } else {
                // try to add slash, to make clear it's not an issue
                // TODO: change this mechanic to actual path purifier
                url = loader.getResource("/" + this.path);
                if (url != null)
                    return url;
            }
            throw new IllegalStateException("Resource '" + this.path + "' cannot be found.");
        }
        return url;
    }

    /**
     * Checks, if proposed URL is packed into archive.
     *
     * @param url URL to be checked
     * @return True, if URL is archive entry, False otherwise
     */
    private static boolean isJarURL(URL url) {
        String protocol = url.getProtocol();
        return "jar".equals(protocol) || "zip".equals(protocol) || "wsjar".equals(protocol)
                || "code-source".equals(protocol) && url.getPath().contains("!/");
    }

    /**
     * Helper that resolves a classpath resource to the ZipFile/ZipEntry/InputStream triple
     * of the archive containing it.
     */
    private class GetStreamFromZip {
        private URL url;
        private ZipFile zipFile;
        private ZipEntry entry;
        private InputStream stream;
        private String resourceName;

        public GetStreamFromZip(URL url, String resourceName) {
            this.url = url;
            this.resourceName = resourceName;
        }

        public URL getUrl() {
            return url;
        }

        public ZipFile getZipFile() {
            return zipFile;
        }

        public ZipEntry getEntry() {
            return entry;
        }

        public InputStream getStream() {
            return stream;
        }

        public GetStreamFromZip invoke() throws IOException {
            url = extractActualUrl(url);
            zipFile = new ZipFile(url.getFile());
            entry = zipFile.getEntry(this.resourceName);
            if (entry == null) {
                if (this.resourceName.startsWith("/")) {
                    entry = zipFile.getEntry(this.resourceName.replaceFirst("/", ""));
                    if (entry == null) {
                        throw new FileNotFoundException("Resource " + this.resourceName + " not found");
                    }
                } else
                    throw new FileNotFoundException("Resource " + this.resourceName + " not found");
            }
            stream = zipFile.getInputStream(entry);
            return this;
        }
    }

    /**
     * Extracts parent Jar URL from original ClassPath entry URL.
     *
     * @param jarUrl Original URL of the resource
     * @return URL of the Jar file, containing requested resource
     * @throws MalformedURLException
     */
    private URL extractActualUrl(URL jarUrl) throws MalformedURLException {
        String urlFile = jarUrl.getFile();
        int separatorIndex = urlFile.indexOf("!/");
        if (separatorIndex != -1) {
            String jarFile = urlFile.substring(0, separatorIndex);
            try {
                return new URL(jarFile);
            } catch (MalformedURLException var5) {
                if (!jarFile.startsWith("/")) {
                    jarFile = "/" + jarFile;
                }
                return new URL("file:" + jarFile);
            }
        } else {
            return jarUrl;
        }
    }
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.feel.codegen.feel11;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import ch.obermuhlner.math.big.BigDecimalMath;
import com.github.javaparser.StaticJavaParser;
import org.antlr.v4.runtime.tree.ParseTree;
import com.github.javaparser.JavaParser;
import com.github.javaparser.ast.Modifier;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.BooleanLiteralExpr;
import com.github.javaparser.ast.expr.LambdaExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.NullLiteralExpr;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.ReturnStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.UnknownType;
import org.kie.dmn.api.feel.runtime.events.FEELEvent;
import org.kie.dmn.api.feel.runtime.events.FEELEvent.Severity;
import org.kie.dmn.api.feel.runtime.events.FEELEventListener;
import org.kie.dmn.feel.lang.CompiledExpression;
import org.kie.dmn.feel.lang.CompilerContext;
import org.kie.dmn.feel.lang.EvaluationContext;
import org.kie.dmn.feel.lang.ast.BaseNode;
import org.kie.dmn.feel.lang.ast.ForExpressionNode;
import org.kie.dmn.feel.lang.ast.ForExpressionNode.ForIteration;
import org.kie.dmn.feel.lang.ast.QuantifiedExpressionNode;
import org.kie.dmn.feel.lang.ast.QuantifiedExpressionNode.QEIteration;
import org.kie.dmn.feel.lang.ast.QuantifiedExpressionNode.Quantifier;
import org.kie.dmn.feel.lang.impl.CompiledExpressionImpl;
import org.kie.dmn.feel.lang.impl.SilentWrappingEvaluationContextImpl;
import org.kie.dmn.feel.lang.types.BuiltInType;
import org.kie.dmn.feel.parser.feel11.ASTBuilderVisitor;
import org.kie.dmn.feel.parser.feel11.FEELParser;
import org.kie.dmn.feel.parser.feel11.FEEL_1_1Parser;
import org.kie.dmn.feel.runtime.FEELFunction;
import org.kie.dmn.feel.runtime.UnaryTest;
import org.kie.dmn.feel.runtime.events.ASTEventBase;
import org.kie.dmn.feel.runtime.events.InvalidParametersEvent;
import org.kie.dmn.feel.runtime.events.SyntaxErrorEvent;
import org.kie.dmn.feel.util.EvalHelper;
import org.kie.dmn.feel.util.Msg;
import static com.github.javaparser.StaticJavaParser.parseClassOrInterfaceType;
public class CompiledFEELSupport {
/** Entry point for compiled FEEL context literals: {@code {k1: v1, ...}}. */
public static ContextBuilder openContext(EvaluationContext ctx) {
    return new ContextBuilder(ctx);
}
/**
 * Fluent helper backing a FEEL context literal. Opens a new scope frame on creation so
 * each entry is both collected into the result map and made visible (by name) to the
 * expressions of subsequent entries; {@link #closeContext()} pops the frame.
 */
public static class ContextBuilder {

    private final Map<String, Object> entries = new HashMap<>();
    private final EvaluationContext evalCtx;

    public ContextBuilder(EvaluationContext evaluationContext) {
        this.evalCtx = evaluationContext;
        evaluationContext.enterFrame();
    }

    public ContextBuilder setEntry(String key, Object value) {
        entries.put(key, value);
        evalCtx.setValue(key, value);
        return this;
    }

    public Map<String, Object> closeContext() {
        evalCtx.exitFrame();
        return entries;
    }
}
/** Entry point for compiled FEEL filter expressions: {@code list[filter]}. */
public static FilterBuilder filter(EvaluationContext ctx, Object value) {
    return new FilterBuilder(ctx, value);
}
/**
 * Evaluates a FEEL filter expression against a value. Numeric filters select by 1-based
 * (or negative, from-the-end) index; any other filter is applied as a per-element
 * predicate, collecting the elements for which it evaluates to true.
 */
public static class FilterBuilder {

    private EvaluationContext ctx;
    // The filtered value; a non-list value is treated as a singleton list.
    private Object value;

    public FilterBuilder(EvaluationContext evaluationContext, Object value) {
        this.ctx = evaluationContext;
        this.value = value;
    }

    public Object with(Function<EvaluationContext, Object> filterExpression) {
        if (value == null) {
            return null;
        }
        List list = value instanceof List ? (List) value : Arrays.asList(value);
        Object f = filterExpression.apply(
                new SilentWrappingEvaluationContextImpl(ctx)); // I need to try evaluate filter first, ignoring errors; only if evaluation fails, or is not a Number, it delegates to try `evaluateExpressionsInContext`
        if (f instanceof Number) {
            return withIndex(f);
        }
        List results = new ArrayList();
        for (Object v : list) {
            try {
                ctx.enterFrame();
                // handle it as a predicate
                // Have the "item" variable set first, so to respect the DMN spec: The expression in square brackets can reference a list
                // element using the name item, unless the list element is a context that contains the key "item".
                ctx.setValue("item", v);
                // using Root object logic to avoid having to eagerly inspect all attributes.
                ctx.setRootObject(v);
                Object r = filterExpression.apply(ctx);
                // NOTE(review): relies on Boolean.TRUE reference identity; holds for
                // autoboxed/valueOf booleans but not for `new Boolean(true)` — confirm the
                // evaluators never produce the latter.
                if (r instanceof Boolean && r == Boolean.TRUE) {
                    results.add(v);
                }
            } catch (Exception e) {
                ctx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, Msg.createMessage(Msg.ERROR_EXECUTING_LIST_FILTER, filterExpression), null, e));
                return null;
            } finally {
                ctx.exitFrame();
            }
        }
        return results;
    }

    // 1-based positive index from the head, negative index from the tail (FEEL semantics);
    // a null index yields an empty list, out-of-bounds reports an error and yields null.
    private Object withIndex(Object filterIndex) {
        if (value == null) {
            return null;
        }
        List list = value instanceof List ? (List) value : Arrays.asList(value);
        if (filterIndex instanceof Number) {
            int i = ((Number) filterIndex).intValue();
            if (i > 0 && i <= list.size()) {
                return list.get(i - 1);
            } else if (i < 0 && Math.abs(i) <= list.size()) {
                return list.get(list.size() + i);
            } else {
                ctx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, Msg.createMessage(Msg.INDEX_OUT_OF_BOUND), null));
                return null;
            }
        } else if (filterIndex == null) {
            return Collections.emptyList();
        } else {
            ctx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, Msg.createMessage(Msg.ERROR_EXECUTING_LIST_FILTER, filterIndex), null));
            return null;
        }
    }
}
/** Entry point for compiled FEEL path expressions: {@code value.a.b}. */
public static PathBuilder path(EvaluationContext ctx, Object value) {
    return new PathBuilder(ctx, value);
}
/**
 * Resolves a (possibly chained) FEEL path expression against a value. Applied to a list,
 * the path is projected over each element and the non-null results are collected (DMN
 * list-projection semantics).
 */
public static class PathBuilder {

    private EvaluationContext ctx;
    private Object o;

    public PathBuilder(EvaluationContext evaluationContext, Object value) {
        this.ctx = evaluationContext;
        this.o = value;
    }

    public Object with(final String... names) {
        if (o instanceof List) {
            List list = (List) o;
            // list of contexts/elements as defined in the spec, page 114
            List results = new ArrayList();
            for (Object element : list) {
                Object r = fetchValue(element, names);
                if (r != null) {
                    results.add(r);
                }
            }
            return results;
        } else {
            return fetchValue(o, names);
        }
    }

    // Walks the chain of names left to right, mapping any resolution failure to null.
    private Object fetchValue(final Object o, final String... names) {
        Object result = o;
        for (String nr : names) {
            result = EvalHelper.getDefinedValue(result, nr)
                    .getValueResult()
                    .cata(err -> {
                        // no need to report error here, eg: [ {x:1, y:2}, {x:2} ].y results in [2] with no errors.
                        return null;
                    }, Function.identity());
        }
        return result;
    }
}
/** Entry point for compiled FEEL {@code for ... in ... return ...} expressions. */
public static ForBuilder ffor(EvaluationContext ctx) {
    return new ForBuilder(ctx);
}
public static class ForBuilder {
private EvaluationContext ctx;
private List<IterationContextCompiled> iterationContexts = new ArrayList<>();
public ForBuilder(EvaluationContext evaluationContext) {
this.ctx = evaluationContext;
}
public ForBuilder with(Function<EvaluationContext, Object> nameExpression, Function<EvaluationContext, Object> iterationExpression) {
iterationContexts.add(new IterationContextCompiled(nameExpression, iterationExpression));
return this;
}
public ForBuilder with(Function<EvaluationContext, Object> nameExpression,
Function<EvaluationContext, Object> iterationExpression,
Function<EvaluationContext, Object> rangeEndExpression) {
iterationContexts.add(new IterationContextCompiled(nameExpression, iterationExpression, rangeEndExpression));
return this;
}
public Object rreturn(Function<EvaluationContext, Object> expression) {
try {
ctx.enterFrame();
List results = new ArrayList();
ctx.setValue("partial", results);
ForIteration[] ictx = initializeContexts(ctx, iterationContexts);
while (ForExpressionNode.nextIteration(ctx, ictx)) {
Object result = expression.apply(ctx);
results.add(result);
}
return results;
} catch (EndpointOfRangeNotOfNumberException e) {
// ast error already reported
return null;
} finally {
ctx.exitFrame();
}
}
private ForIteration[] initializeContexts(EvaluationContext ctx, List<IterationContextCompiled> iterationContexts) {
ForIteration[] ictx = new ForIteration[iterationContexts.size()];
int i = 0;
for (IterationContextCompiled icn : iterationContexts) {
ictx[i] = createQuantifiedExpressionIterationContext(ctx, icn);
if (i < iterationContexts.size() - 1 && ictx[i].hasNextValue()) {
ForExpressionNode.setValueIntoContext(ctx, ictx[i]);
}
i++;
}
return ictx;
}
private ForIteration createQuantifiedExpressionIterationContext(EvaluationContext ctx, IterationContextCompiled icn) {
ForIteration fi = null;
String name = (String) icn.getName().apply(ctx);
Object result = icn.getExpression().apply(ctx);
Object rangeEnd = icn.getRangeEndExpr().apply(ctx);
if (rangeEnd == null) {
Iterable values = result instanceof Iterable ? (Iterable) result : Collections.singletonList(result);
fi = new ForIteration(name, values);
} else {
valueMustBeANumber(ctx, result);
BigDecimal start = (BigDecimal) result;
valueMustBeANumber(ctx, rangeEnd);
BigDecimal end = (BigDecimal) rangeEnd;
fi = new ForIteration(name, start, end);
}
return fi;
}
/**
 * Guard: reports an AST error and aborts the iteration (via the internal
 * exception) when a range endpoint is not a BigDecimal.
 */
private void valueMustBeANumber(EvaluationContext ctx, Object value) {
    if (value instanceof BigDecimal) {
        return;
    }
    ctx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, Msg.createMessage(Msg.VALUE_X_NOT_A_VALID_ENDPOINT_FOR_RANGE_BECAUSE_NOT_A_NUMBER, value), null));
    throw new EndpointOfRangeNotOfNumberException();
}
/**
 * Internal control-flow signal: a range endpoint evaluated to a non-number.
 * The corresponding AST error has already been reported before this is thrown,
 * so the catcher only has to abort and return null.
 */
private static class EndpointOfRangeNotOfNumberException extends RuntimeException {
    private static final long serialVersionUID = 1L;
}
}
/**
 * Compiled form of a single FEEL iteration context — either "name in
 * expression" or "name in start..end". Each part is a pre-compiled expression
 * evaluated lazily against an EvaluationContext; a missing range end is
 * represented by an expression that always yields null.
 */
public static class IterationContextCompiled {

    private final Function<EvaluationContext, Object> name;
    private final Function<EvaluationContext, Object> expression;
    private final Function<EvaluationContext, Object> rangeEndExpr;

    public IterationContextCompiled(Function<EvaluationContext, Object> name, Function<EvaluationContext, Object> expression) {
        // Delegate with a constant-null range end ("no range" marker).
        this(name, expression, (ctx) -> null);
    }

    public IterationContextCompiled(Function<EvaluationContext, Object> name, Function<EvaluationContext, Object> expression, Function<EvaluationContext, Object> rangeEndExpr) {
        this.name = name;
        this.expression = expression;
        this.rangeEndExpr = rangeEndExpr;
    }

    /** Expression producing the iteration variable's name. */
    public Function<EvaluationContext, Object> getName() {
        return name;
    }

    /** Expression producing the iterable (or range start). */
    public Function<EvaluationContext, Object> getExpression() {
        return expression;
    }

    /** Expression producing the range end, or null when not a range. */
    public Function<EvaluationContext, Object> getRangeEndExpr() {
        return rangeEndExpr;
    }
}
/**
 * Entry point used by compiled quantified expressions ("some"/"every"):
 * returns a builder on which iteration contexts are registered via with(...)
 * and the body is evaluated via satisfies(...).
 */
public static QuantBuilder quant(Quantifier quantOp, EvaluationContext ctx) {
    return new QuantBuilder(quantOp, ctx);
}
/**
 * Builder used by compiled FEEL quantified expressions
 * ("some ... satisfies" / "every ... satisfies"). Iteration contexts are
 * registered via {@link #with} and the boolean body is evaluated by
 * {@link #satisfies}.
 */
public static class QuantBuilder {
    // Fields are assigned exactly once; made final for immutability.
    private final Quantifier quantOp;
    private final EvaluationContext ctx;
    private final List<IterationContextCompiled> iterationContexts = new ArrayList<>();

    public QuantBuilder(Quantifier quantOp, EvaluationContext evaluationContext) {
        this.quantOp = quantOp;
        this.ctx = evaluationContext;
    }

    /** Registers one iteration context ("name in expression") for the quantifier. */
    public QuantBuilder with(Function<EvaluationContext, Object> nameExpression, Function<EvaluationContext, Object> iterationExpression) {
        iterationContexts.add(new IterationContextCompiled(nameExpression, iterationExpression));
        return this;
    }

    /**
     * Evaluates the quantified expression body over all iterations.
     *
     * @return the quantifier's positive result as soon as one iteration matches
     *         it, otherwise the quantifier's default value; null (with an error
     *         event) if the quantifier is neither SOME nor EVERY
     */
    public Object satisfies(Function<EvaluationContext, Object> expression) {
        if (quantOp == Quantifier.SOME || quantOp == Quantifier.EVERY) {
            return iterateContexts(ctx, iterationContexts, expression, quantOp);
        }
        // can never happen, but anyway:
        ctx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, Msg.createMessage(Msg.IS_NULL, "Quantifier"), null));
        return null;
    }

    // Runs the cartesian iteration inside a fresh scope frame, short-circuiting
    // as soon as the body yields the quantifier's positive test value.
    private Boolean iterateContexts(EvaluationContext ctx, List<IterationContextCompiled> iterationContexts, Function<EvaluationContext, Object> expression, Quantifier quantifier) {
        try {
            ctx.enterFrame();
            QEIteration[] ictx = initializeContexts(ctx, iterationContexts);
            while (QuantifiedExpressionNode.nextIteration(ctx, ictx)) {
                Boolean result = (Boolean) expression.apply(ctx);
                if (result != null && result.equals(quantifier.positiveTest())) {
                    return quantifier.positiveTest();
                }
            }
            return quantifier.defaultValue();
        } finally {
            ctx.exitFrame();
        }
    }

    // Builds one QEIteration per context, priming all but the last with a value.
    private QEIteration[] initializeContexts(EvaluationContext ctx, List<IterationContextCompiled> iterationContexts) {
        QEIteration[] ictx = new QEIteration[iterationContexts.size()];
        int i = 0;
        for (IterationContextCompiled icn : iterationContexts) {
            ictx[i] = createQuantifiedExpressionIterationContext(ctx, icn);
            if (i < ictx.length - 1) {
                // initialize all contexts except the very last one, as it will be initialized in the nextIteration() method
                QuantifiedExpressionNode.setValueIntoContext(ctx, ictx[i]);
            }
            i++;
        }
        return ictx;
    }

    // Evaluates one iteration context; a scalar result becomes a singleton list.
    private QEIteration createQuantifiedExpressionIterationContext(EvaluationContext ctx, IterationContextCompiled icn) {
        String name = (String) icn.getName().apply(ctx);
        Object result = icn.getExpression().apply(ctx);
        Iterable values = result instanceof Iterable ? (Iterable) result : Collections.singletonList(result);
        QEIteration qei = new QEIteration(name, values);
        return qei;
    }
}
/**
 * Invokes a FEEL function value produced at runtime.
 *
 * Dispatch: null reports a FUNCTION_NOT_FOUND error and yields null;
 * FEELFunction values are invoked reflectively with normalized parameters;
 * UnaryTest values are applied to a single operand; anything else silently
 * evaluates to null.
 */
public static Object invoke(EvaluationContext feelExprCtx, Object function, Object params) {
    if (function == null) {
        feelExprCtx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, Msg.createMessage(Msg.FUNCTION_NOT_FOUND, function), null));
        return null;
    }
    if (function instanceof FEELFunction) {
        Object[] invocationParams = toFunctionParams(params);
        FEELFunction f = (FEELFunction) function;
        if (function instanceof CompiledCustomFEELFunction) {
            CompiledCustomFEELFunction ff = (CompiledCustomFEELFunction) function;
            // A proper closure is invoked against its own captured evaluation
            // context instead of the caller's context.
            if (ff.isProperClosure()) {
                return ff.invokeReflectively(ff.getEvaluationContext(), invocationParams);
            }
        }
        return f.invokeReflectively(feelExprCtx, invocationParams);
    } else if (function instanceof UnaryTest) {
        // Assumes params is a List with at least one element here — TODO confirm
        // the compiler always emits a one-element parameter list for unary tests.
        return ((UnaryTest) function).apply(feelExprCtx, ((List) params).get(0));
    }
    // Not an invocable value: evaluates to null without reporting an event.
    return null;
}
/**
 * Normalizes a parameter payload into the Object[] used for reflective
 * invocation: a List is expanded into an array, an Object[] is passed through
 * unchanged, and any other value becomes a single-element array.
 */
private static Object[] toFunctionParams(Object params) {
    if (params instanceof List) {
        return ((List) params).toArray(new Object[]{});
    }
    if (params instanceof Object[]) {
        return (Object[]) params;
    }
    return new Object[]{params};
}
/**
 * Runtime hook emitted into generated classes for expressions that failed to
 * compile: reports the stored error message on the context and evaluates to
 * null. See {@code compiledErrorExpression} for where the call is generated.
 */
public static Object notifyCompilationError(EvaluationContext feelExprCtx, String message) {
    feelExprCtx.notifyEvt(() -> new ASTEventBase(Severity.ERROR, message, null));
    return null;
}
/**
 * Thin delegation to {@code EvalHelper.coerceNumber}; see that method for the
 * actual coercion rules applied to the value.
 */
public static Object coerceNumber(Object value) {
    return EvalHelper.coerceNumber(value);
}
/**
 * Generates a compilable class that reports a (compile-time) error at runtime.
 * The generated body is a call to {@code notifyCompilationError}, so the
 * expression always evaluates to null after emitting the error event.
 *
 * @param expression the original FEEL expression text (kept for reference in
 *        the generated class)
 * @param msg the compilation error message to report at evaluation time
 */
public static CompiledFEELExpression compiledError(String expression, String msg) {
    return new CompilerBytecodeLoader()
        .makeFromJPExpression(
            expression,
            compiledErrorExpression(msg),
            Collections.emptySet());
}
/**
 * Builds the DirectCompilerResult for a unary test that failed to compile:
 * a {@code public static final UnaryTest UT_EMPTY} field whose lambda reports
 * the compilation error and then evaluates to false, returned to the caller
 * wrapped in a {@code list(UT_EMPTY)} expression.
 */
public static DirectCompilerResult compiledErrorUnaryTest(String msg) {
    // Lambda: (feelExprCtx, left) -> { notifyCompilationError(...); return false; }
    LambdaExpr initializer = new LambdaExpr();
    initializer.setEnclosingParameters(true);
    initializer.addParameter(new Parameter(new UnknownType(), "feelExprCtx"));
    initializer.addParameter(new Parameter(new UnknownType(), "left"));
    Statement lambdaBody = new BlockStmt(new NodeList<>(
        new ExpressionStmt(compiledErrorExpression(msg)),
        new ReturnStmt(new BooleanLiteralExpr(false))
    ));
    initializer.setBody(lambdaBody);
    // Field declaration: public static final UnaryTest UT_EMPTY = <lambda>;
    String constantName = "UT_EMPTY";
    VariableDeclarator vd = new VariableDeclarator(parseClassOrInterfaceType(UnaryTest.class.getCanonicalName()), constantName);
    vd.setInitializer(initializer);
    FieldDeclaration fd = new FieldDeclaration();
    fd.setModifier(Modifier.publicModifier().getKeyword(), true);
    fd.setModifier(Modifier.staticModifier().getKeyword(), true);
    fd.setModifier(Modifier.finalModifier().getKeyword(), true);
    fd.addVariable(vd);
    fd.setJavadocComment(" FEEL unary test: - ");
    // Result expression handed back to the caller: list(UT_EMPTY)
    MethodCallExpr list = new MethodCallExpr(null, "list", new NodeList<>(new NameExpr(constantName)));
    DirectCompilerResult directCompilerResult = DirectCompilerResult.of(list, BuiltInType.LIST);
    // NOTE(review): "addFieldDesclaration" is the project API's own (misspelled) method name.
    directCompilerResult.addFieldDesclaration(fd);
    return directCompilerResult;
}
/**
 * Builds the AST for a call to
 * {@code CompiledFEELSupport.notifyCompilationError(feelExprCtx, msg)}, used
 * as the generated body of expressions that must report a compile-time error
 * when evaluated.
 */
public static MethodCallExpr compiledErrorExpression(String msg) {
    return new MethodCallExpr(
        new NameExpr("CompiledFEELSupport"),
        "notifyCompilationError",
        new NodeList<>(
            new NameExpr("feelExprCtx"),
            new StringLiteralExpr(msg)));
}
// thread-unsafe, but this is single-threaded so it's ok
public static class SyntaxErrorListener implements FEELEventListener {

    /** Most recent syntax/invalid-parameter event seen, or null if none occurred. */
    private FEELEvent event = null;

    @Override
    public void onEvent(FEELEvent evt) {
        boolean isSyntaxProblem =
                evt instanceof SyntaxErrorEvent || evt instanceof InvalidParametersEvent;
        if (isSyntaxProblem) {
            this.event = evt;
        }
    }

    /** True once at least one relevant error event has been observed. */
    public boolean isError() {
        return event != null;
    }

    /** The last relevant error event, or null. */
    public FEELEvent event() {
        return event;
    }
}
/**
 * FEEL exponentiation: delegates to BigDecimalMath.pow with DECIMAL128
 * precision (the context used by FEEL number arithmetic here).
 */
public static BigDecimal pow(BigDecimal l, BigDecimal r) {
    return BigDecimalMath.pow( l, r, MathContext.DECIMAL128 );
}
}
| |
/*
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.taobao.weex.devtools.inspector.protocol.module;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import com.taobao.weex.devtools.inspector.console.CLog;
import com.taobao.weex.devtools.inspector.domstorage.DOMStoragePeerManager;
import com.taobao.weex.devtools.inspector.domstorage.SharedPreferencesHelper;
import com.taobao.weex.devtools.inspector.jsonrpc.JsonRpcException;
import com.taobao.weex.devtools.inspector.jsonrpc.JsonRpcPeer;
import com.taobao.weex.devtools.inspector.jsonrpc.JsonRpcResult;
import com.taobao.weex.devtools.inspector.protocol.ChromeDevtoolsDomain;
import com.taobao.weex.devtools.inspector.protocol.ChromeDevtoolsMethod;
import com.taobao.weex.devtools.json.ObjectMapper;
import com.taobao.weex.devtools.json.annotation.JsonProperty;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Chrome DevTools "DOMStorage" domain backed by Android SharedPreferences.
 * The storage's securityOrigin doubles as the SharedPreferences file name.
 * Only localStorage is supported; non-localStorage (session) requests return
 * empty results or are no-ops.
 */
public class DOMStorage implements ChromeDevtoolsDomain {
  private final Context mContext;
  private final DOMStoragePeerManager mDOMStoragePeerManager;
  private final ObjectMapper mObjectMapper = new ObjectMapper();

  public DOMStorage(Context context) {
    mContext = context;
    mDOMStoragePeerManager = new DOMStoragePeerManager(context);
  }

  /** Starts pushing DOM storage change events to this peer. */
  @ChromeDevtoolsMethod
  public void enable(JsonRpcPeer peer, JSONObject params) {
    mDOMStoragePeerManager.addPeer(peer);
  }

  /** Stops pushing DOM storage change events to this peer. */
  @ChromeDevtoolsMethod
  public void disable(JsonRpcPeer peer, JSONObject params) {
    mDOMStoragePeerManager.removePeer(peer);
  }

  /**
   * Returns all key/value pairs of the addressed storage as string pairs.
   * Values are stringified via SharedPreferencesHelper; non-localStorage
   * storages yield an empty list.
   */
  @ChromeDevtoolsMethod
  public JsonRpcResult getDOMStorageItems(JsonRpcPeer peer, JSONObject params)
      throws JSONException {
    StorageId storage = mObjectMapper.convertValue(
        params.getJSONObject("storageId"),
        StorageId.class);

    ArrayList<List<String>> entries = new ArrayList<List<String>>();
    // securityOrigin is reused as the SharedPreferences file name.
    String prefTag = storage.securityOrigin;
    if (storage.isLocalStorage) {
      SharedPreferences prefs = mContext.getSharedPreferences(prefTag, Context.MODE_PRIVATE);
      for (Map.Entry<String, ?> prefsEntry : prefs.getAll().entrySet()) {
        // Each entry is serialized as a [key, value] pair of strings.
        ArrayList<String> entry = new ArrayList<String>(2);
        entry.add(prefsEntry.getKey());
        entry.add(SharedPreferencesHelper.valueToString(prefsEntry.getValue()));
        entries.add(entry);
      }
    }

    GetDOMStorageItemsResult result = new GetDOMStorageItemsResult();
    result.entries = entries;
    return result;
  }

  /**
   * Sets an existing key to a new value, parsing the string value into the
   * type of the current value. New keys are rejected (the stored type cannot
   * be inferred from a string alone); on any failure the DevTools UI is
   * re-synchronized with the actual stored state.
   */
  @ChromeDevtoolsMethod
  public void setDOMStorageItem(JsonRpcPeer peer, JSONObject params)
      throws JSONException, JsonRpcException {
    StorageId storage = mObjectMapper.convertValue(
        params.getJSONObject("storageId"),
        StorageId.class);
    String key = params.getString("key");
    String value = params.getString("value");
    if (storage.isLocalStorage) {
      SharedPreferences prefs = mContext.getSharedPreferences(
          storage.securityOrigin,
          Context.MODE_PRIVATE);
      Object existingValue = prefs.getAll().get(key);
      try {
        if (existingValue == null) {
          // Cannot add a new key: the target type is unknown.
          throw new DOMStorageAssignmentException(
              "Unsupported: cannot add new key " + key + " due to lack of type inference");
        } else {
          SharedPreferences.Editor editor = prefs.edit();
          try {
            // Parse the string into the existing value's type, then write it.
            assignByType(editor, key, SharedPreferencesHelper.valueFromString(value, existingValue));
            editor.apply();
          } catch (IllegalArgumentException e) {
            throw new DOMStorageAssignmentException(
                String.format(Locale.US,
                    "Type mismatch setting %s to %s (expected %s)",
                    key,
                    value,
                    existingValue.getClass().getSimpleName()));
          }
        }
      } catch (DOMStorageAssignmentException e) {
        // Surface the failure in the DevTools console.
        CLog.writeToConsole(
            mDOMStoragePeerManager,
            Console.MessageLevel.ERROR,
            Console.MessageSource.STORAGE,
            e.getMessage());

        // Force the DevTools UI to refresh with the old value again (it assumes that the set
        // operation succeeded). Note that we should be able to do this by throwing
        // JsonRpcException but the UI doesn't respect setDOMStorageItem failure.
        if (prefs.contains(key)) {
          mDOMStoragePeerManager.signalItemUpdated(
              storage,
              key,
              value,
              SharedPreferencesHelper.valueToString(existingValue));
        } else {
          mDOMStoragePeerManager.signalItemRemoved(storage, key);
        }
      }
    }
  }

  /** Removes a key from the addressed localStorage; no-op otherwise. */
  @ChromeDevtoolsMethod
  public void removeDOMStorageItem(JsonRpcPeer peer, JSONObject params) throws JSONException {
    StorageId storage = mObjectMapper.convertValue(
        params.getJSONObject("storageId"),
        StorageId.class);
    String key = params.getString("key");
    if (storage.isLocalStorage) {
      SharedPreferences prefs = mContext.getSharedPreferences(
          storage.securityOrigin,
          Context.MODE_PRIVATE);
      prefs.edit().remove(key).apply();
    }
  }

  /**
   * Writes a value with the Editor setter matching its runtime type; throws
   * IllegalArgumentException for unsupported types (caught by the caller and
   * reported as a type mismatch).
   */
  private static void assignByType(
      SharedPreferences.Editor editor,
      String key,
      Object value)
      throws IllegalArgumentException {
    if (value instanceof Integer) {
      editor.putInt(key, (Integer)value);
    } else if (value instanceof Long) {
      editor.putLong(key, (Long)value);
    } else if (value instanceof Float) {
      editor.putFloat(key, (Float)value);
    } else if (value instanceof Boolean) {
      editor.putBoolean(key, (Boolean)value);
    } else if (value instanceof String) {
      editor.putString(key, (String)value);
    } else if (value instanceof Set) {
      // Unchecked cast: SharedPreferences only ever stores Set<String>.
      putStringSet(editor, key, (Set<String>)value);
    } else {
      throw new IllegalArgumentException("Unsupported type=" + value.getClass().getName());
    }
  }

  // putStringSet requires API 11 (HONEYCOMB); isolated so the annotation scopes
  // the API requirement to this one call.
  @TargetApi(Build.VERSION_CODES.HONEYCOMB)
  private static void putStringSet(SharedPreferences.Editor editor, String key, Set<String> value) {
    editor.putStringSet(key, value);
  }

  /** Wire identifier of a storage area, as sent by the DevTools frontend. */
  public static class StorageId {
    @JsonProperty(required = true)
    public String securityOrigin;

    @JsonProperty(required = true)
    public boolean isLocalStorage;
  }

  /** Response payload for getDOMStorageItems: a list of [key, value] pairs. */
  private static class GetDOMStorageItemsResult implements JsonRpcResult {
    @JsonProperty(required = true)
    public List<List<String>> entries;
  }

  /** Event payload: all items of a storage were cleared. */
  public static class DomStorageItemsClearedParams {
    @JsonProperty(required = true)
    public StorageId storageId;
  }

  /** Event payload: a single item was removed. */
  public static class DomStorageItemRemovedParams {
    @JsonProperty(required = true)
    public StorageId storageId;

    @JsonProperty(required = true)
    public String key;
  }

  /** Event payload: a single item was added. */
  public static class DomStorageItemAddedParams {
    @JsonProperty(required = true)
    public StorageId storageId;

    @JsonProperty(required = true)
    public String key;

    @JsonProperty(required = true)
    public String newValue;
  }

  /** Event payload: a single item changed value. */
  public static class DomStorageItemUpdatedParams {
    @JsonProperty(required = true)
    public StorageId storageId;

    @JsonProperty(required = true)
    public String key;

    @JsonProperty(required = true)
    public String oldValue;

    @JsonProperty(required = true)
    public String newValue;
  }

  /**
   * Exception thrown internally when we fail to honor {@link #setDOMStorageItem}.
   */
  private static class DOMStorageAssignmentException extends Exception {
    public DOMStorageAssignmentException(String message) {
      super(message);
    }
  }
}
| |
package com.bazaarvoice.emodb.auth.apikey;
import com.bazaarvoice.emodb.auth.identity.AuthIdentityManager;
import com.bazaarvoice.emodb.auth.identity.CacheManagingAuthIdentityManager;
import com.bazaarvoice.emodb.auth.identity.InMemoryAuthIdentityManager;
import com.bazaarvoice.emodb.auth.permissions.MatchingPermissionResolver;
import com.bazaarvoice.emodb.auth.permissions.PermissionIDs;
import com.bazaarvoice.emodb.auth.permissions.PermissionManager;
import com.bazaarvoice.emodb.auth.shiro.GuavaCacheManager;
import com.bazaarvoice.emodb.auth.shiro.InvalidatableCacheManager;
import com.bazaarvoice.emodb.auth.shiro.RolePermissionSet;
import com.bazaarvoice.emodb.cachemgr.api.CacheRegistry;
import com.bazaarvoice.emodb.cachemgr.core.DefaultCacheRegistry;
import com.bazaarvoice.emodb.common.dropwizard.lifecycle.SimpleLifeCycleRegistry;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.apache.shiro.authz.Permission;
import org.apache.shiro.cache.Cache;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.util.LifecycleUtils;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.Collection;
import java.util.Set;
import static org.mockito.AdditionalMatchers.not;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
 * Tests for ApiKeyRealm covering role-permission caching behavior (including
 * cache flushes racing with permission reads) and permission checks both by
 * API key and by identity ID.
 *
 * NOTE(review): this class mixes JUnit lifecycle annotations (@Before/@Test)
 * with TestNG assertions; the TestNG (actual, expected, message) argument
 * order is relied on throughout and must not be "fixed" to the JUnit order.
 */
public class ApiKeyRealmTest {

    private AuthIdentityManager<ApiKey> _authIdentityManager;
    private PermissionManager _permissionManager;
    private ApiKeyRealm _underTest;

    @Before
    public void setup() {
        CacheRegistry cacheRegistry = new DefaultCacheRegistry(new SimpleLifeCycleRegistry(), new MetricRegistry());
        // Renamed from "_cacheManager": the underscore prefix is reserved for fields here.
        InvalidatableCacheManager cacheManager = new GuavaCacheManager(cacheRegistry);
        InMemoryAuthIdentityManager<ApiKey> authIdentityDAO = new InMemoryAuthIdentityManager<>();
        _authIdentityManager = new CacheManagingAuthIdentityManager<>(authIdentityDAO, cacheManager);
        _permissionManager = mock(PermissionManager.class);
        MatchingPermissionResolver permissionResolver = new MatchingPermissionResolver();
        when(_permissionManager.getPermissionResolver()).thenReturn(permissionResolver);
        _underTest = new ApiKeyRealm("ApiKeyRealm under test",
                cacheManager, _authIdentityManager, _permissionManager, null);
        LifecycleUtils.init(_underTest);
    }

    /** A role with an empty permission set resolves to empty permissions (not null). */
    @Test
    public void simpleEmpty() {
        assertNotNull(_underTest.getAvailableRolesCache(), "precondition: there is a cache");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.<Permission>newHashSet());
        Collection<Permission> resultPerms = _underTest.getRolePermissions("role");
        assertTrue(resultPerms.isEmpty(), "should be no permissions yet");
    }

    /** A resolved role permission is returned and populates the roles cache. */
    @Test
    public void simpleExists() {
        Cache<String, RolePermissionSet> cache = _underTest.getAvailableRolesCache();
        assertEquals(cache.size(), 0, "precondition: cache is empty");
        Permission p1 = mock(Permission.class);
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.newHashSet(p1));
        Collection<Permission> resultPerms = _underTest.getRolePermissions("role");
        assertNotNull(resultPerms.iterator().next(), "should have a permission");
        assertEquals(cache.size(), 1, "side effect: cache has an element");
    }

    /** After a cache clear, updated backing permissions are picked up and re-cached. */
    @Test
    public void simpleNewExists() {
        Cache<String, RolePermissionSet> cache = _underTest.getAvailableRolesCache();
        assertEquals(cache.size(), 0, "precondition: cache is empty");
        Permission p1 = mock(Permission.class);
        when(p1.toString()).thenReturn("p1");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.newHashSet(p1));
        Collection<Permission> resultPerms = _underTest.getRolePermissions("role");
        assertEquals(resultPerms.iterator().next(), p1, "should have the first permission we added");
        assertEquals(cache.size(), 1, "side effect: cache has one element");

        Permission p2 = mock(Permission.class);
        when(p2.toString()).thenReturn("p2");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.newHashSet(p2));
        cache.clear();
        Collection<Permission> resultPerms2 = _underTest.getRolePermissions("role");
        assertEquals(resultPerms2.iterator().next(), p2, "should have the second permission we added");
        assertEquals(cache.size(), 1, "side effect: cache still has one element");
        resultPerms2 = _underTest.getRolePermissions("role");
        assertEquals(resultPerms2.iterator().next(), p2, "should still have the second permission we added");
        assertEquals(cache.size(), 1, "side effect: cache still has one element");
    }

    /** A role whose permissions become empty caches the empty set (not a miss). */
    @Test
    public void simpleNowEmpty() {
        Cache<String, RolePermissionSet> cache = _underTest.getAvailableRolesCache();
        assertEquals(cache.size(), 0, "precondition: cache is empty");
        Permission p1 = mock(Permission.class);
        when(p1.toString()).thenReturn("p1");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.newHashSet(p1));
        Collection<Permission> resultPerms = _underTest.getRolePermissions("role");
        assertEquals(resultPerms.iterator().next(), p1, "should have the first permission we added");
        assertEquals(cache.size(), 1, "side effect: cache has one element");

        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.<Permission>newHashSet());
        cache.clear();
        resultPerms = _underTest.getRolePermissions("role");
        assertTrue(resultPerms.isEmpty(), "now should have empty");
        assertEquals(cache.size(), 1, "side effect: cache has empty permission");
    }

    /**
     * Consecutive backing reads returning different sets: the realm reflects the
     * latest read. (Mockito consecutive stubbing via varargs thenReturn.)
     */
    @Test
    public void pseudoConcurrentNewExists() {
        Cache<String, RolePermissionSet> cache = _underTest.getAvailableRolesCache();
        assertEquals(cache.size(), 0, "precondition: cache is empty");
        Permission p1 = mock(Permission.class);
        when(p1.toString()).thenReturn("p1");
        Permission p2 = mock(Permission.class);
        when(p2.toString()).thenReturn("p2");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role"))).thenReturn(Sets.newHashSet(p1), Sets.newHashSet(p2));
        Collection<Permission> resultPerms = _underTest.getRolePermissions("role");
        assertEquals(resultPerms.iterator().next(), p1, "should have the first permission we added");
        assertEquals(cache.size(), 1, "side effect: cache has one element");
        resultPerms = _underTest.getRolePermissions("role");
        assertEquals(resultPerms.iterator().next(), p2, "should have the last permission we added");
        assertEquals(cache.size(), 1, "side effect: cache has one element");
    }

    /** A cache flush between reads causes the second backing value to be re-cached. */
    @Test
    public void pseudoConcurrentNewThenCacheFlush() {
        Cache<String, RolePermissionSet> cache = _underTest.getAvailableRolesCache();
        assertEquals(cache.size(), 0, "precondition: cache is empty");
        Permission p1 = mock(Permission.class);
        when(p1.toString()).thenReturn("p1");
        Permission p2 = mock(Permission.class);
        when(p2.toString()).thenReturn("p2");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role")))
                .thenReturn(Sets.newHashSet(p1))
                .thenReturn(Sets.newHashSet(p2));
        Collection<Permission> resultPerms = _underTest.getRolePermissions("role");
        assertEquals(resultPerms.iterator().next(), p1, "should have the last permission we added");
        assertEquals(cache.size(), 1, "side effect: cache has one element");

        cache.clear();
        resultPerms = _underTest.getRolePermissions("role");
        assertEquals(resultPerms.iterator().next(), p2, "should again have the last permission we added");
        assertEquals(cache.size(), 1, "side effect: cache again has one element");
    }

    /**
     * A cache flush that happens DURING the backing read (simulated with a
     * thenAnswer that clears the cache) must not leave a stale value cached.
     */
    @Test
    public void pseudoConcurrentNewAndCacheFlush() {
        final Cache<String, RolePermissionSet> cache = _underTest.getAvailableRolesCache();
        assertEquals(cache.size(), 0, "precondition: cache is empty");
        final Permission p1 = mock(Permission.class);
        when(p1.toString()).thenReturn("p1");
        final Permission p2 = mock(Permission.class);
        when(p2.toString()).thenReturn("p2");
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role")))
                .thenReturn(Sets.newHashSet(p1))
                .thenAnswer(new Answer<Set<Permission>>() {
                    @Override
                    public Set<Permission> answer(InvocationOnMock invocationOnMock) throws Throwable {
                        // Simulate a concurrent flush mid-read.
                        cache.clear();
                        return Sets.newHashSet(p2);
                    }
                })
                .thenReturn(Sets.newHashSet(p2));

        Permission resultPerm = _underTest.getRolePermissions("role").iterator().next();
        assertEquals(resultPerm, p1, "should have permission p1");
        resultPerm = _underTest.getRolePermissions("role").iterator().next();
        assertEquals(resultPerm, p2, "should have permission p2");
        resultPerm = _underTest.getRolePermissions("role").iterator().next();
        assertEquals(resultPerm, p2, "should have permission p2");

        assertNotNull(cache.get("role"), "Cached value for role should have been present");
        assertEquals(cache.get("role").permissions(), ImmutableSet.of(p2), "Cached values incorrect");
    }

    /** Permission checks by identity ID populate the ID cache but not the API-key cache. */
    @Test
    public void testPermissionCheckById() {
        String id = _authIdentityManager.createIdentity("apikey0", new ApiKeyModification().addRoles("role0"));

        Permission rolePermission = mock(Permission.class);
        Permission positivePermission = mock(Permission.class);
        Permission negativePermission = mock(Permission.class);
        when(rolePermission.implies(positivePermission)).thenReturn(true);
        when(rolePermission.implies(not(eq(positivePermission)))).thenReturn(false);
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role0"))).thenReturn(ImmutableSet.of(rolePermission));

        // Verify the ID is not cached
        assertNull(_underTest.getIdAuthorizationCache().get(id));

        // Verify permission was granted
        assertTrue(_underTest.hasPermissionById(id, positivePermission));

        // Verify the ID was cached
        assertNotNull(_underTest.getIdAuthorizationCache().get(id));

        // Verify no API key information was cached
        assertTrue(_underTest.getAuthenticationCache().keys().isEmpty());

        // Verify permission is granted using the API key
        PrincipalCollection principals = _underTest.getAuthenticationInfo(new ApiKeyAuthenticationToken("apikey0")).getPrincipals();
        assertTrue(_underTest.isPermitted(principals, positivePermission));

        // Negative tests
        assertFalse(_underTest.hasPermissionById(id, negativePermission));
        assertFalse(_underTest.isPermitted(principals, negativePermission));
    }

    /** A check by API key warms the ID cache, and ID checks then succeed from cache. */
    @Test
    public void testCachedPermissionCheckById() {
        String id = _authIdentityManager.createIdentity("apikey0", new ApiKeyModification().addRoles("role0"));

        Permission rolePermission = mock(Permission.class);
        Permission positivePermission = mock(Permission.class);
        when(rolePermission.implies(positivePermission)).thenReturn(true);
        when(rolePermission.implies(not(eq(positivePermission)))).thenReturn(false);
        when(_permissionManager.getPermissions(PermissionIDs.forRole("role0"))).thenReturn(ImmutableSet.of(rolePermission));

        // Verify permission is granted using the API key
        PrincipalCollection principals = _underTest.getAuthenticationInfo(new ApiKeyAuthenticationToken("apikey0")).getPrincipals();
        assertTrue(_underTest.isPermitted(principals, positivePermission));

        // Verify the ID was cached
        assertNotNull(_underTest.getIdAuthorizationCache().get(id));

        // Verify permission was granted
        assertTrue(_underTest.hasPermissionById(id, positivePermission));
    }

    /** Unknown IDs are denied and the negative result is cached. */
    @Test
    public void testCachedPermissionCheckByInvalidId() {
        // Verify permission is not granted to a non-existing ID
        assertFalse(_underTest.hasPermissionById("id0", mock(Permission.class)));

        // Verify the ID was cached
        assertNotNull(_underTest.getIdAuthorizationCache().get("id0"));

        // Test again now that the authentication info is cached
        assertFalse(_underTest.hasPermissionById("id0", mock(Permission.class)));
    }
}
| |
package kademlia.operation;
import kademlia.message.Receiver;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import kademlia.KadConfiguration;
import kademlia.KadServer;
import kademlia.KademliaNode;
import kademlia.exceptions.RoutingException;
import kademlia.message.Message;
import kademlia.message.NodeLookupMessage;
import kademlia.message.NodeReplyMessage;
import kademlia.node.KeyComparator;
import kademlia.node.Node;
import kademlia.node.KademliaId;
/**
* Finds the K closest nodes to a specified identifier
* The algorithm terminates when it has gotten responses from the K closest nodes it has seen.
* Nodes that fail to respond are removed from consideration
*
* @author Joshua Kissoon
* @created 20140219
*/
public class NodeLookupOperation implements Operation, Receiver
{
/* Constants: lifecycle states a candidate node moves through in the nodes map */
private static final String UNASKED = "UnAsked";
private static final String AWAITING = "Awaiting";
private static final String ASKED = "Asked";
private static final String FAILED = "Failed";

private final KadServer server;
private final KademliaNode localNode;
private final KadConfiguration config;

private final Message lookupMessage; // Message sent to each peer

/* All candidate nodes seen so far, mapped to their lookup state; sorted by distance to the lookup target (see constructor) */
private final Map<Node, String> nodes;

/* Tracks messages in transit and awaiting reply */
private final Map<Integer, Node> messagesTransiting;

/* Used to sort nodes */
private final Comparator comparator; // NOTE(review): raw Comparator; holds a KeyComparator (see constructor)

/* Instance initializer: runs before the constructor body */
{
    messagesTransiting = new HashMap<>();
}
/**
 * @param server KadServer used for communication
 * @param localNode The local node making the communication
 * @param lookupId The ID for which to find nodes close to
 * @param config Operation configuration (timeouts, concurrency, K)
 */
public NodeLookupOperation(KadServer server, KademliaNode localNode, KademliaId lookupId, KadConfiguration config)
{
    this.server = server;
    this.localNode = localNode;
    this.config = config;

    this.lookupMessage = new NodeLookupMessage(localNode.getNode(), lookupId);

    /**
     * We initialize a TreeMap to store nodes.
     * This map will be sorted by which nodes are closest to the lookupId
     */
    this.comparator = new KeyComparator(lookupId);
    this.nodes = new TreeMap(this.comparator); // NOTE(review): raw TreeMap; generically TreeMap<Node, String>
}
/**
 * Runs the lookup: seeds candidates from the local routing table, then polls
 * askNodesorFinish() until the lookup completes or the configured operation
 * timeout elapses, and finally reports unresponsive contacts to the routing
 * table.
 *
 * @throws java.io.IOException
 * @throws kademlia.exceptions.RoutingException
 */
@Override
public synchronized void execute() throws IOException, RoutingException
{
    try
    {
        /* Set the local node as already asked */
        nodes.put(this.localNode.getNode(), ASKED);

        /**
         * We add all nodes here instead of the K-Closest because there may be the case that the K-Closest are offline
         * - The operation takes care of looking at the K-Closest.
         */
        this.addNodes(this.localNode.getRoutingTable().getAllNodes());

        /* If we haven't finished as yet, wait for a maximum of config.operationTimeout() time */
        int totalTimeWaited = 0;
        int timeInterval = 10; // We re-check every n milliseconds
        while (totalTimeWaited < this.config.operationTimeout())
        {
            if (!this.askNodesorFinish())
            {
                // Not finished: sleep one interval (wait() releases the monitor while sleeping).
                wait(timeInterval);
                totalTimeWaited += timeInterval;
            }
            else
            {
                break;
            }
        }

        /* Now after we've finished, we would have an idea of offline nodes, lets update our routing table */
        this.localNode.getRoutingTable().setUnresponsiveContacts(this.getFailedNodes());
    }
    catch (InterruptedException e)
    {
        // NOTE(review): interrupt status is not restored before rethrowing — consider Thread.currentThread().interrupt()
        throw new RuntimeException(e);
    }
}
/**
 * @return Up to K of the closest nodes seen that have already been
 *         successfully queried (state ASKED)
 */
public List<Node> getClosestNodes()
{
    return this.closestNodes(ASKED);
}
/**
 * Add nodes from this list to the set of nodes to lookup
 *
 * Nodes already known keep their current state; only genuinely new nodes are
 * recorded, in the UNASKED state.
 *
 * @param list The list from which to add nodes
 */
public void addNodes(List<Node> list)
{
    for (Node candidate : list)
    {
        /* Skip nodes we already track so their state is preserved */
        if (nodes.containsKey(candidate))
        {
            continue;
        }
        nodes.put(candidate, UNASKED);
    }
}
/**
 * Asks some of the K closest nodes seen but not yet queried.
 * Assures that no more than DefaultConfiguration.CONCURRENCY messages are in transit at a time
 *
 * This method should be called every time a reply is received or a timeout occurs.
 *
 * If all K closest nodes have been asked and there are no messages in transit,
 * the algorithm is finished.
 *
 * @return <code>true</code> if finished OR <code>false</code> otherwise
 */
private boolean askNodesorFinish() throws IOException
{
    /* If >= CONCURRENCY nodes are in transit, don't do anything */
    if (this.config.maxConcurrentMessagesTransiting() <= this.messagesTransiting.size())
    {
        return false;
    }

    /* Get unqueried nodes among the K closest seen that have not FAILED */
    List<Node> unasked = this.closestNodesNotFailed(UNASKED);

    if (unasked.isEmpty() && this.messagesTransiting.isEmpty())
    {
        /* We have no unasked nodes nor any messages in transit, we're finished! */
        return true;
    }

    /**
     * Send messages to nodes in the list;
     * making sure that no more than CONCURRENCY messages are in transit
     */
    for (int i = 0; (this.messagesTransiting.size() < this.config.maxConcurrentMessagesTransiting()) && (i < unasked.size()); i++)
    {
        Node n = (Node) unasked.get(i);
        // sendMessage returns a communication id used to correlate the eventual reply/timeout
        int comm = server.sendMessage(n, lookupMessage, this);

        this.nodes.put(n, AWAITING);
        this.messagesTransiting.put(comm, n);
    }

    /* We're not finished as yet, return false */
    return false;
}
/**
* @param status The status of the nodes to return
*
* @return The K closest nodes to the target lookupId given that have the specified status
*/
private List<Node> closestNodes(String status)
{
List<Node> closestNodes = new ArrayList<>(this.config.k());
int remainingSpaces = this.config.k();
for (Map.Entry e : this.nodes.entrySet())
{
if (status.equals(e.getValue()))
{
/* We got one with the required status, now add it */
closestNodes.add((Node) e.getKey());
if (--remainingSpaces == 0)
{
break;
}
}
}
return closestNodes;
}
/**
* Find The K closest nodes to the target lookupId given that have not FAILED.
* From those K, get those that have the specified status
*
* @param status The status of the nodes to return
*
* @return A List of the closest nodes
*/
private List<Node> closestNodesNotFailed(String status)
{
List<Node> closestNodes = new ArrayList<>(this.config.k());
int remainingSpaces = this.config.k();
for (Map.Entry<Node, String> e : this.nodes.entrySet())
{
if (!FAILED.equals(e.getValue()))
{
if (status.equals(e.getValue()))
{
/* We got one with the required status, now add it */
closestNodes.add(e.getKey());
}
if (--remainingSpaces == 0)
{
break;
}
}
}
return closestNodes;
}
    /**
     * Receives and handles an incoming NodeReplyMessage answering one of our
     * lookup messages.
     *
     * Side effects, in order: the replying node is inserted into the routing
     * table and marked ASKED, its in-flight entry is removed, the nodes it
     * returned are merged into the candidate map, and the lookup advances.
     *
     * @param incoming The received message; ignored unless it is a NodeReplyMessage
     * @param comm     Identifier of the communication this reply answers
     *
     * @throws java.io.IOException If sending follow-up lookup messages fails
     */
    @Override
    public synchronized void receive(Message incoming, int comm) throws IOException
    {
        if (!(incoming instanceof NodeReplyMessage))
        {
            /* Not sure why we get a message of a different type here... @todo Figure it out. */
            return;
        }

        /* We receive a NodeReplyMessage with a set of nodes, read this message */
        NodeReplyMessage msg = (NodeReplyMessage) incoming;

        /* Add the origin node to our routing table */
        Node origin = msg.getOrigin();
        this.localNode.getRoutingTable().insert(origin);

        /* Set that we've completed ASKing the origin node */
        this.nodes.put(origin, ASKED);

        /* Remove this msg from messagesTransiting since it's completed now */
        this.messagesTransiting.remove(comm);

        /* Add the received nodes to our nodes list to query */
        this.addNodes(msg.getNodes());
        this.askNodesorFinish();
    }
    /**
     * Handles a communication timeout: the node did not respond or the packet
     * was lost, so it is marked FAILED, the routing table is told the contact
     * is unresponsive, and the lookup is advanced with the remaining nodes.
     *
     * @param comm Identifier of the communication that timed out
     *
     * @throws java.io.IOException If sending follow-up lookup messages fails
     */
    @Override
    public synchronized void timeout(int comm) throws IOException
    {
        /* Get the node associated with this communication */
        Node n = this.messagesTransiting.get(comm);

        /* Unknown communication id (e.g. already answered) - nothing to do */
        if (n == null)
        {
            return;
        }

        /* Mark this node as failed and inform the routing table that it is unresponsive */
        this.nodes.put(n, FAILED);
        this.localNode.getRoutingTable().setUnresponsiveContact(n);
        this.messagesTransiting.remove(comm);

        this.askNodesorFinish();
    }
public List<Node> getFailedNodes()
{
List<Node> failedNodes = new ArrayList<>();
for (Map.Entry<Node, String> e : this.nodes.entrySet())
{
if (e.getValue().equals(FAILED))
{
failedNodes.add(e.getKey());
}
}
return failedNodes;
}
}
| |
/*
* Copyright 2014 Tagbangers, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wallride.web.support;
import com.mortennobel.imagescaling.AdvancedResizeOp;
import com.mortennobel.imagescaling.DimensionConstrain;
import com.mortennobel.imagescaling.ResampleFilters;
import com.mortennobel.imagescaling.ResampleOp;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.http.MediaType;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.HttpRequestHandler;
import org.springframework.web.bind.ServletRequestUtils;
import org.springframework.web.context.request.ServletWebRequest;
import org.springframework.web.servlet.HandlerMapping;
import org.springframework.web.servlet.support.WebContentGenerator;
import org.wallride.autoconfigure.WallRideProperties;
import org.wallride.domain.Media;
import org.wallride.service.MediaService;
import org.wallride.support.ExtendedResourceUtils;
import javax.imageio.ImageIO;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.Map;
/**
 * Serves {@link Media} content over HTTP: looks the media up by the "key"
 * URI template variable, optionally resizes images on the fly (caching the
 * resized copy next to the original), and streams the result to the client
 * with 404 / 304 handling.
 */
public class MediaHttpRequestHandler extends WebContentGenerator implements HttpRequestHandler, InitializingBean {

    // Fix: logger made final (it was a mutable static field).
    private static final Logger logger = LoggerFactory.getLogger(MediaHttpRequestHandler.class);

    private WallRideProperties wallRideProperties;

    private MediaService mediaService;

    private ResourceLoader resourceLoader;

    public void setWallRideProperties(WallRideProperties wallRideProperties) {
        this.wallRideProperties = wallRideProperties;
    }

    public void setMediaService(MediaService mediaService) {
        this.mediaService = mediaService;
    }

    public void setResourceLoader(ResourceLoader resourceLoader) {
        this.resourceLoader = resourceLoader;
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // No initialization required; collaborators are injected via setters.
    }

    /**
     * Handles a media request.
     *
     * Query parameters "w" (max width), "h" (max height) and "m" (ordinal of
     * {@link Media.ResizeMode}) control optional image resizing. Responds 404
     * when the media or its backing resource is missing, 304 when the client's
     * cached copy is still current. Non-image media is sent as an attachment.
     */
    @Override
    public void handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        checkAndPrepare(request, response, true);

        @SuppressWarnings("unchecked")
        Map<String, Object> pathVariables = (Map<String, Object>) request.getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE);
        String key = (String) pathVariables.get("key");

        Media media = mediaService.getMedia(key);
        if (media == null) {
            // Fix: guard against an unknown key. Previously this fell through and
            // dereferenced media, turning a missing entry into an HTTP 500.
            logger.debug("No media found for key [{}] - returning 404", key);
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        int width = ServletRequestUtils.getIntParameter(request, "w", 0);
        int height = ServletRequestUtils.getIntParameter(request, "h", 0);
        int mode = ServletRequestUtils.getIntParameter(request, "m", 0);

        Resource resource = readResource(media, width, height, Media.ResizeMode.values()[mode]);
        if (resource == null) {
            logger.debug("No matching resource found - returning 404");
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
        if (new ServletWebRequest(request, response).checkNotModified(resource.lastModified())) {
            logger.debug("Resource not modified - returning 304");
            return;
        }

        long length = resource.contentLength();
        if (length > Integer.MAX_VALUE) {
            throw new IOException("Resource content too long (beyond Integer.MAX_VALUE): " + resource);
        }
        response.setContentLength((int) length);
        response.setContentType(media.getMimeType());
        if (!"image".equals(MediaType.parseMediaType(media.getMimeType()).getType())) {
            // Offer non-image media as a download with its original (RFC 5987 encoded) file name.
            response.setHeader("Content-Disposition", "attachment;filename*=utf-8''" + URLEncoder.encode(media.getOriginalName(), "UTF-8"));
        }
        FileCopyUtils.copy(resource.getInputStream(), response.getOutputStream());
    }

    /**
     * Resolves the resource for the given media, resizing images on demand.
     * A resized copy is cached under "&lt;id&gt;.resized/&lt;w&gt;x&lt;h&gt;-&lt;mode&gt;"
     * and regenerated whenever the original is newer than the cached copy.
     *
     * @return the (possibly resized) resource, or {@code null} if the original does not exist
     */
    private Resource readResource(final Media media, final int width, final int height, final Media.ResizeMode mode) throws IOException {
        final Resource prefix = resourceLoader.getResource(wallRideProperties.getMediaLocation());
        final Resource resource = prefix.createRelative(media.getId());
        if (!resource.exists()) {
            return null;
        }

        Resource resized = resource;
        boolean doResize = (width > 0 || height > 0);
        if (doResize && "image".equals(MediaType.parseMediaType(media.getMimeType()).getType())) {
            resized = prefix.createRelative(String.format("%s.resized/%dx%d-%d",
                    media.getId(),
                    width, height, mode.ordinal()));
            if (!resized.exists() || resource.lastModified() > resized.lastModified()) {
                File temp = File.createTempFile(
                        getClass().getCanonicalName() + ".resized-",
                        "." + MediaType.parseMediaType(media.getMimeType()).getSubtype());
                temp.deleteOnExit();
                try {
                    resizeImage(resource, temp, width, height, mode);
                    ExtendedResourceUtils.write(resized, temp);
                }
                finally {
                    // Fix: delete the temp file even when resizing or writing throws;
                    // previously a failure leaked the temp file until JVM exit.
                    FileUtils.deleteQuietly(temp);
                }
            }
        }
        return resized;
    }

    /**
     * Resizes the image in {@code resource} into {@code file}.
     *
     * RESIZE scales down, preserving aspect ratio, to fit within width x height;
     * CROP scales so the image covers the box and then center-crops the overflow.
     * Non-positive dimensions are treated as unbounded.
     */
    private void resizeImage(Resource resource, File file, int width, int height, Media.ResizeMode mode) throws IOException {
        long startTime = System.currentTimeMillis();
        if (width <= 0) {
            width = Integer.MAX_VALUE;
        }
        if (height <= 0) {
            height = Integer.MAX_VALUE;
        }

        BufferedImage image = ImageIO.read(resource.getInputStream());
        ResampleOp resampleOp;
        BufferedImage resized;
        switch (mode) {
            case RESIZE:
                resampleOp = new ResampleOp(DimensionConstrain.createMaxDimension(width, height, true));
                resampleOp.setFilter(ResampleFilters.getLanczos3Filter());
                resampleOp.setUnsharpenMask(AdvancedResizeOp.UnsharpenMask.Normal);
                resized = resampleOp.filter(image, null);
                ImageIO.write(resized, StringUtils.getFilenameExtension(file.getName()), file);
                break;
            case CROP:
                // Scale so the image covers the requested box (the larger ratio wins)...
                float wr = (float) width / (float) image.getWidth();
                float hr = (float) height / (float) image.getHeight();
                float fraction = (wr > hr) ? wr : hr;
                if (fraction < 1) {
                    resampleOp = new ResampleOp(DimensionConstrain.createRelativeDimension(fraction));
                    resampleOp.setFilter(ResampleFilters.getLanczos3Filter());
                    resampleOp.setUnsharpenMask(AdvancedResizeOp.UnsharpenMask.Normal);
                    resized = resampleOp.filter(image, null);
                } else {
                    resized = image;
                }
                // ...then center-crop whichever dimension still overflows.
                if (resized.getWidth() > width) {
                    resized = resized.getSubimage((resized.getWidth() - width) / 2, 0, width, resized.getHeight());
                } else if (resized.getHeight() > height) {
                    resized = resized.getSubimage(0, (resized.getHeight() - height) / 2, resized.getWidth(), height);
                }
                ImageIO.write(resized, StringUtils.getFilenameExtension(file.getName()), file);
                break;
            default:
                throw new IllegalStateException();
        }
        long stopTime = System.currentTimeMillis();
        logger.debug("Resized image: time [{}ms]", stopTime - startTime);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.datastructures;
import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.datastructures.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.testframework.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import static org.apache.ignite.cache.CacheMode.*;
/**
 * Set failover tests: verifies that an IgniteSet backed by a PARTITIONED
 * cache stays usable (size/iterator/contains) while random cluster nodes
 * are being restarted by a background "killer" thread.
 */
public abstract class GridCacheSetFailoverAbstractSelfTest extends IgniteCollectionAbstractTest {
    /** Name of the distributed set under test. */
    private static final String SET_NAME = "testFailoverSet";

    /** Duration of the main load loop, in milliseconds. */
    private static final long TEST_DURATION = 60_000;

    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return 4;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // No-op: grids are started per-test in beforeTest().
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        // No-op: grids are stopped per-test in afterTest().
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        startGrids(gridCount());
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected CacheMode collectionCacheMode() {
        return PARTITIONED;
    }

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        // Leave a minute of headroom beyond the load loop itself.
        return TEST_DURATION + 60_000;
    }

    /**
     * Runs size/iterator/contains operations against the set while a
     * background thread keeps restarting random nodes, periodically
     * destroying and recreating the set.
     *
     * @throws Exception If failed.
     */
    @SuppressWarnings("WhileLoopReplaceableByForEach")
    public void testNodeRestart() throws Exception {
        IgniteSet<Integer> set = grid(0).set(SET_NAME, config(false));

        final int ITEMS = 10_000;

        Collection<Integer> items = new ArrayList<>(ITEMS);

        for (int i = 0; i < ITEMS; i++)
            items.add(i);

        set.addAll(items);

        assertEquals(ITEMS, set.size());

        AtomicBoolean stop = new AtomicBoolean();

        IgniteInternalFuture<?> killFut = startNodeKiller(stop);

        long stopTime = System.currentTimeMillis() + TEST_DURATION;

        try {
            ThreadLocalRandom rnd = ThreadLocalRandom.current();

            while (System.currentTimeMillis() < stopTime) {
                for (int i = 0; i < 10; i++) {
                    try {
                        int size = set.size();

                        // TODO: GG-7952, check for equality when GG-7952 fixed.
                        assertTrue(size > 0);
                    }
                    catch (IgniteException ignore) {
                        // No-op: size() may fail transiently while a node restarts.
                    }

                    try {
                        Iterator<Integer> iter = set.iterator();

                        int cnt = 0;

                        while (iter.hasNext()) {
                            assertNotNull(iter.next());

                            cnt++;
                        }

                        // TODO: GG-7952, check for equality when GG-7952 fixed.
                        assertTrue(cnt > 0);
                    }
                    catch (IgniteException ignore) {
                        // No-op: iteration may fail transiently while a node restarts.
                    }

                    // Values 0..ITEMS-1 were added; values >= ITEMS never were.
                    int val = rnd.nextInt(ITEMS);

                    assertTrue("Not contains: " + val, set.contains(val));

                    val = ITEMS + rnd.nextInt(ITEMS);

                    assertFalse("Contains: " + val, set.contains(val));
                }

                log.info("Remove set.");

                set.close();

                log.info("Create new set.");

                set = grid(0).set(SET_NAME, config(false));

                set.addAll(items);
            }
        }
        finally {
            // Always signal the killer thread to stop, even on assertion failure.
            stop.set(true);
        }

        killFut.get();

        set.close();

        if (false) { // TODO GG-8962: enable check when fixed.
            // Leak check: after closing the set, no set-item entries should remain
            // in any node's internal cache.
            int cnt = 0;

            Set<IgniteUuid> setIds = new HashSet<>();

            for (int i = 0; i < gridCount(); i++) {
                Iterator<GridCacheEntryEx> entries =
                    ((IgniteKernal)grid(i)).context().cache().internalCache().map().allEntries0().iterator();

                while (entries.hasNext()) {
                    GridCacheEntryEx entry = entries.next();

                    if (entry.hasValue()) {
                        cnt++;

                        if (entry.key() instanceof GridCacheSetItemKey) {
                            GridCacheSetItemKey setItem = (GridCacheSetItemKey)entry.key();

                            if (setIds.add(setItem.setId()))
                                log.info("Unexpected set item [setId=" + setItem.setId() +
                                    ", grid: " + grid(i).name() +
                                    ", entry=" + entry + ']');
                        }
                    }
                }
            }

            assertEquals("Found unexpected cache entries", 0, cnt);
        }
    }

    /**
     * Starts thread restarting random node.
     *
     * @param stop Stop flag.
     * @return Future completing when thread finishes.
     */
    private IgniteInternalFuture<?> startNodeKiller(final AtomicBoolean stop) {
        return GridTestUtils.runAsync(new Callable<Void>() {
            @Override public Void call() throws Exception {
                ThreadLocalRandom rnd = ThreadLocalRandom.current();

                while (!stop.get()) {
                    // Index starts at 1: node 0 is never killed because it owns
                    // the set handle the main test thread is using.
                    int idx = rnd.nextInt(1, gridCount());

                    U.sleep(rnd.nextLong(2000, 3000));

                    log.info("Killing node: " + idx);

                    stopGrid(idx);

                    U.sleep(rnd.nextLong(500, 1000));

                    startGrid(idx);
                }

                return null;
            }
        });
    }
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx;
import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import org.junit.*;
import org.mockito.*;
import rx.Observable.*;
import rx.exceptions.*;
import rx.functions.*;
import rx.observables.ConnectableObservable;
import rx.observers.*;
import rx.plugins.RxJavaHooks;
import rx.schedulers.*;
import rx.subjects.*;
import rx.subscriptions.BooleanSubscription;
public class ObservableTests {
    /** Mockito-managed observer shared by tests that verify emission callbacks. */
    @Mock
    Observer<Integer> w;

    /** Predicate selecting even integers; shared by the takeFirst/first tests. */
    private static final Func1<Integer, Boolean> IS_EVEN = new Func1<Integer, Boolean>() {
        @Override
        public Boolean call(Integer value) {
            return value % 2 == 0;
        }
    };

    /** Initializes the {@code @Mock} fields before each test. */
    @Before
    public void before() {
        MockitoAnnotations.initMocks(this);
    }
    /** Observable.from(array) should emit each element in order. */
    @Test
    public void fromArray() {
        String[] items = new String[] { "one", "two", "three" };
        assertEquals(new Integer(3), Observable.from(items).count().toBlocking().single());
        assertEquals("two", Observable.from(items).skip(1).take(1).toBlocking().single());
        assertEquals("three", Observable.from(items).takeLast(1).toBlocking().single());
    }

    /** Observable.from(iterable) should emit each element in order. */
    @Test
    public void fromIterable() {
        ArrayList<String> items = new ArrayList<String>();
        items.add("one");
        items.add("two");
        items.add("three");

        assertEquals(new Integer(3), Observable.from(items).count().toBlocking().single());
        assertEquals("two", Observable.from(items).skip(1).take(1).toBlocking().single());
        assertEquals("three", Observable.from(items).takeLast(1).toBlocking().single());
    }

    /** Observable.just with three arguments should behave like from(array). */
    @Test
    public void fromArityArgs3() {
        Observable<String> items = Observable.just("one", "two", "three");

        assertEquals(new Integer(3), items.count().toBlocking().single());
        assertEquals("two", items.skip(1).take(1).toBlocking().single());
        assertEquals("three", items.takeLast(1).toBlocking().single());
    }

    /** Observable.just with one argument should emit exactly that item. */
    @Test
    public void fromArityArgs1() {
        Observable<String> items = Observable.just("one");

        assertEquals(new Integer(1), items.count().toBlocking().single());
        assertEquals("one", items.takeLast(1).toBlocking().single());
    }
    /** A hand-rolled OnSubscribe should deliver its items and completion to a subscriber. */
    @Test
    public void testCreate() {
        Observable<String> observable = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(Subscriber<? super String> Observer) {
                Observer.onNext("one");
                Observer.onNext("two");
                Observer.onNext("three");
                Observer.onCompleted();
            }
        });

        @SuppressWarnings("unchecked")
        Observer<String> observer = mock(Observer.class);
        observable.subscribe(observer);

        verify(observer, times(1)).onNext("one");
        verify(observer, times(1)).onNext("two");
        verify(observer, times(1)).onNext("three");
        verify(observer, never()).onError(any(Throwable.class));
        verify(observer, times(1)).onCompleted();
    }

    /** count() on a non-empty source should emit exactly one total, then complete. */
    @Test
    public void testCountAFewItems() {
        Observable<String> observable = Observable.just("a", "b", "c", "d");

        observable.count().subscribe(w);

        // we should be called only once
        verify(w, times(1)).onNext(anyInt());
        verify(w).onNext(4);
        verify(w, never()).onError(any(Throwable.class));
        verify(w, times(1)).onCompleted();
    }

    /** count() on an empty source should emit zero, then complete. */
    @Test
    public void testCountZeroItems() {
        Observable<String> observable = Observable.empty();

        observable.count().subscribe(w);

        // we should be called only once
        verify(w, times(1)).onNext(anyInt());
        verify(w).onNext(0);
        verify(w, never()).onError(any(Throwable.class));
        verify(w, times(1)).onCompleted();
    }

    /** count() should pass an upstream error through without emitting a value. */
    @Test
    public void testCountError() {
        Observable<String> o = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(Subscriber<? super String> obsv) {
                obsv.onError(new RuntimeException());
            }
        });

        o.count().subscribe(w);

        verify(w, never()).onNext(anyInt());
        verify(w, never()).onCompleted();
        verify(w, times(1)).onError(any(RuntimeException.class));
    }
public void testTakeFirstWithPredicateOfSome() {
Observable<Integer> observable = Observable.just(1, 3, 5, 4, 6, 3);
observable.takeFirst(IS_EVEN).subscribe(w);
verify(w, times(1)).onNext(anyInt());
verify(w).onNext(4);
verify(w, times(1)).onCompleted();
verify(w, never()).onError(any(Throwable.class));
}
    /** takeFirst with a predicate nothing matches should just complete, emitting nothing. */
    @Test
    public void testTakeFirstWithPredicateOfNoneMatchingThePredicate() {
        Observable<Integer> observable = Observable.just(1, 3, 5, 7, 9, 7, 5, 3, 1);

        observable.takeFirst(IS_EVEN).subscribe(w);

        verify(w, never()).onNext(anyInt());
        verify(w, times(1)).onCompleted();
        verify(w, never()).onError(any(Throwable.class));
    }

    /** take(1) on a non-empty source should emit only the first item, then complete. */
    @Test
    public void testTakeFirstOfSome() {
        Observable<Integer> observable = Observable.just(1, 2, 3);

        observable.take(1).subscribe(w);

        verify(w, times(1)).onNext(anyInt());
        verify(w).onNext(1);
        verify(w, times(1)).onCompleted();
        verify(w, never()).onError(any(Throwable.class));
    }

    /** take(1) on an empty source should complete without emitting. */
    @Test
    public void testTakeFirstOfNone() {
        Observable<Integer> observable = Observable.empty();

        observable.take(1).subscribe(w);

        verify(w, never()).onNext(anyInt());
        verify(w, times(1)).onCompleted();
        verify(w, never()).onError(any(Throwable.class));
    }

    /** first() on an empty source errors with NoSuchElementException (unlike take(1)). */
    @Test
    public void testFirstOfNone() {
        Observable<Integer> observable = Observable.empty();

        observable.first().subscribe(w);

        verify(w, never()).onNext(anyInt());
        verify(w, never()).onCompleted();
        verify(w, times(1)).onError(isA(NoSuchElementException.class));
    }

    /** first(predicate) with no matching item errors with NoSuchElementException. */
    @Test
    public void testFirstWithPredicateOfNoneMatchingThePredicate() {
        Observable<Integer> observable = Observable.just(1, 3, 5, 7, 9, 7, 5, 3, 1);

        observable.first(IS_EVEN).subscribe(w);

        verify(w, never()).onNext(anyInt());
        verify(w, never()).onCompleted();
        verify(w, times(1)).onError(isA(NoSuchElementException.class));
    }
    /** reduce without a seed should emit a single accumulated value (1+2+3+4 = 10). */
    @Test
    public void testReduce() {
        Observable<Integer> observable = Observable.just(1, 2, 3, 4);

        observable.reduce(new Func2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer t1, Integer t2) {
                return t1 + t2;
            }
        }).subscribe(w);

        // we should be called only once
        verify(w, times(1)).onNext(anyInt());
        verify(w).onNext(10);
    }

    /**
     * A reduce should fail with an NoSuchElementException if done on an empty Observable.
     */
    @Test(expected = NoSuchElementException.class)
    public void testReduceWithEmptyObservable() {
        Observable<Integer> observable = Observable.range(1, 0);

        observable.reduce(new Func2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer t1, Integer t2) {
                return t1 + t2;
            }
        }).toBlocking().forEach(new Action1<Integer>() {
            @Override
            public void call(Integer t1) {
                // do nothing ... we expect an exception instead
            }
        });

        fail("Expected an exception to be thrown");
    }

    /**
     * A reduce on an empty Observable and a seed should just pass the seed through.
     *
     * This is confirmed at https://github.com/ReactiveX/RxJava/issues/423#issuecomment-27642456
     */
    @Test
    public void testReduceWithEmptyObservableAndSeed() {
        Observable<Integer> observable = Observable.range(1, 0);

        int value = observable.reduce(1, new Func2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer t1, Integer t2) {
                return t1 + t2;
            }
        }).toBlocking().last();

        assertEquals(1, value);
    }

    /** reduce with a seed should fold the seed into the accumulation (50+1+2+3+4 = 60). */
    @Test
    public void testReduceWithInitialValue() {
        Observable<Integer> observable = Observable.just(1, 2, 3, 4);

        observable.reduce(50, new Func2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer t1, Integer t2) {
                return t1 + t2;
            }
        }).subscribe(w);

        // we should be called only once
        verify(w, times(1)).onNext(anyInt());
        verify(w).onNext(60);
    }
    /** An exception thrown inside OnSubscribe should surface as onError, not onNext/onCompleted. */
    @Test
    public void testOnSubscribeFails() {
        @SuppressWarnings("unchecked")
        Observer<String> observer = mock(Observer.class);

        final RuntimeException re = new RuntimeException("bad impl");
        Observable<String> o = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(Subscriber<? super String> t1) {
                throw re;
            }
        });

        o.subscribe(observer);

        verify(observer, times(0)).onNext(anyString());
        verify(observer, times(0)).onCompleted();
        verify(observer, times(1)).onError(re);
    }

    /** materialize().dematerialize() should round-trip the original notifications unchanged. */
    @Test
    public void testMaterializeDematerializeChaining() {
        Observable<Integer> obs = Observable.just(1);
        Observable<Integer> chained = obs.materialize().dematerialize();

        @SuppressWarnings("unchecked")
        Observer<Integer> observer = mock(Observer.class);

        chained.subscribe(observer);

        verify(observer, times(1)).onNext(1);
        verify(observer, times(1)).onCompleted();
        verify(observer, times(0)).onError(any(Throwable.class));
    }
    /**
     * The error from the user provided Observer is not handled by the subscribe method try/catch.
     *
     * It is handled by the AtomicObserver that wraps the provided Observer.
     *
     * Here "three" is not parseable as an int, so onNext throws after two good
     * items; the wrapper routes that NumberFormatException to onError.
     *
     * Result: Passes (if AtomicObserver functionality exists)
     * @throws InterruptedException on interrupt
     */
    @Test
    public void testCustomObservableWithErrorInObserverAsynchronous() throws InterruptedException {
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicInteger count = new AtomicInteger();
        final AtomicReference<Throwable> error = new AtomicReference<Throwable>();

        // Emits on a background thread; the latch releases once emission finishes.
        Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(final Subscriber<? super String> observer) {
                final BooleanSubscription s = new BooleanSubscription();
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            if (!s.isUnsubscribed()) {
                                observer.onNext("1");
                                observer.onNext("2");
                                observer.onNext("three");
                                observer.onNext("4");
                                observer.onCompleted();
                            }
                        } finally {
                            latch.countDown();
                        }
                    }
                }).start();
            }
        }).subscribe(new Subscriber<String>() {
            @Override
            public void onCompleted() {
                System.out.println("completed");
            }

            @Override
            public void onError(Throwable e) {
                error.set(e);
                System.out.println("error");
                e.printStackTrace();
            }

            @Override
            public void onNext(String v) {
                int num = Integer.parseInt(v);
                System.out.println(num);
                // doSomething(num);
                count.incrementAndGet();
            }
        });

        // wait for async sequence to complete
        latch.await();

        // Only "1" and "2" were counted before "three" blew up in onNext.
        assertEquals(2, count.get());
        assertNotNull(error.get());
        if (!(error.get() instanceof NumberFormatException)) {
            fail("It should be a NumberFormatException");
        }
    }
    /**
     * The error from the user provided Observer is handled by the subscribe try/catch because this is synchronous
     *
     * Here "three" is not parseable as an int, so onNext throws after two good
     * items and the NumberFormatException is routed to onError.
     *
     * Result: Passes
     */
    @Test
    public void testCustomObservableWithErrorInObserverSynchronous() {
        final AtomicInteger count = new AtomicInteger();
        final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
        Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(Subscriber<? super String> observer) {
                observer.onNext("1");
                observer.onNext("2");
                observer.onNext("three");
                observer.onNext("4");
                observer.onCompleted();
            }
        }).subscribe(new Subscriber<String>() {
            @Override
            public void onCompleted() {
                System.out.println("completed");
            }

            @Override
            public void onError(Throwable e) {
                error.set(e);
                System.out.println("error");
                e.printStackTrace();
            }

            @Override
            public void onNext(String v) {
                int num = Integer.parseInt(v);
                System.out.println(num);
                // doSomething(num);
                count.incrementAndGet();
            }
        });

        // Only "1" and "2" were counted before "three" failed to parse.
        assertEquals(2, count.get());
        assertNotNull(error.get());
        if (!(error.get() instanceof NumberFormatException)) {
            fail("It should be a NumberFormatException");
        }
    }
    /**
     * The error from the user provided Observable is handled by the subscribe try/catch because this is synchronous
     *
     * The OnSubscribe itself throws after emitting two items; the exception
     * must be delivered to onError rather than propagating out of subscribe().
     *
     * Result: Passes
     */
    @Test
    public void testCustomObservableWithErrorInObservableSynchronous() {
        final AtomicInteger count = new AtomicInteger();
        final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
        Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(Subscriber<? super String> observer) {
                observer.onNext("1");
                observer.onNext("2");
                throw new NumberFormatException();
            }
        }).subscribe(new Subscriber<String>() {
            @Override
            public void onCompleted() {
                System.out.println("completed");
            }

            @Override
            public void onError(Throwable e) {
                error.set(e);
                System.out.println("error");
                e.printStackTrace();
            }

            @Override
            public void onNext(String v) {
                System.out.println(v);
                count.incrementAndGet();
            }
        });

        // Both emitted items arrived before the source threw.
        assertEquals(2, count.get());
        assertNotNull(error.get());
        if (!(error.get() instanceof NumberFormatException)) {
            fail("It should be a NumberFormatException");
        }
    }
    /**
     * takeLast(1).publish() should run the source once per connect() and
     * deliver the final item ("last") to every subscriber.
     */
    @Test
    public void testPublishLast() throws InterruptedException {
        final AtomicInteger count = new AtomicInteger();
        ConnectableObservable<String> connectable = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(final Subscriber<? super String> observer) {
                count.incrementAndGet();
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        observer.onNext("first");
                        observer.onNext("last");
                        observer.onCompleted();
                    }
                }).start();
            }
        }).takeLast(1).publish();

        // subscribe once
        final CountDownLatch latch = new CountDownLatch(2);
        Action1<String> subscriptionAction = new Action1<String>() {
            @Override
            public void call(String value) {
                assertEquals("last", value);
                latch.countDown();
            }
        };
        connectable.subscribe(subscriptionAction);

        // subscribe twice
        connectable.subscribe(subscriptionAction);

        Subscription subscription = connectable.connect();
        assertTrue(latch.await(1000, TimeUnit.MILLISECONDS));
        // The source's OnSubscribe ran exactly once despite two subscribers.
        assertEquals(1, count.get());
        subscription.unsubscribe();
    }
    /**
     * replay() should subscribe to the source once on connect() and replay
     * the cached value to subscribers arriving after emission.
     */
    @Test
    public void testReplay() throws InterruptedException {
        final AtomicInteger counter = new AtomicInteger();
        ConnectableObservable<String> o = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(final Subscriber<? super String> observer) {
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        counter.incrementAndGet();
                        observer.onNext("one");
                        observer.onCompleted();
                    }
                }).start();
            }
        }).replay();

        // we connect immediately and it will emit the value
        Subscription s = o.connect();
        try {
            // we then expect the following 2 subscriptions to get that same value
            final CountDownLatch latch = new CountDownLatch(2);

            // subscribe once
            o.subscribe(new Action1<String>() {
                @Override
                public void call(String v) {
                    assertEquals("one", v);
                    latch.countDown();
                }
            });

            // subscribe again
            o.subscribe(new Action1<String>() {
                @Override
                public void call(String v) {
                    assertEquals("one", v);
                    latch.countDown();
                }
            });

            if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
                fail("subscriptions did not receive values");
            }
            // The source ran only once; both subscribers got the replayed value.
            assertEquals(1, counter.get());
        } finally {
            s.unsubscribe();
        }
    }
    /**
     * cache() should run the source once (on first subscription) and deliver
     * the cached value to every later subscriber.
     */
    @Test
    public void testCache() throws InterruptedException {
        final AtomicInteger counter = new AtomicInteger();
        Observable<String> o = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(final Subscriber<? super String> observer) {
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        counter.incrementAndGet();
                        observer.onNext("one");
                        observer.onCompleted();
                    }
                }).start();
            }
        }).cache();

        // we then expect the following 2 subscriptions to get that same value
        final CountDownLatch latch = new CountDownLatch(2);

        // subscribe once
        o.subscribe(new Action1<String>() {
            @Override
            public void call(String v) {
                assertEquals("one", v);
                latch.countDown();
            }
        });

        // subscribe again
        o.subscribe(new Action1<String>() {
            @Override
            public void call(String v) {
                assertEquals("one", v);
                latch.countDown();
            }
        });

        if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
            fail("subscriptions did not receive values");
        }
        // The source ran exactly once despite two subscriptions.
        assertEquals(1, counter.get());
    }
    /**
     * cacheWithInitialCapacity(1) should behave like cache(): run the source
     * once and replay the cached value to all subscribers.
     */
    @Test
    public void testCacheWithCapacity() throws InterruptedException {
        final AtomicInteger counter = new AtomicInteger();
        Observable<String> o = Observable.create(new OnSubscribe<String>() {
            @Override
            public void call(final Subscriber<? super String> observer) {
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        counter.incrementAndGet();
                        observer.onNext("one");
                        observer.onCompleted();
                    }
                }).start();
            }
        }).cacheWithInitialCapacity(1);

        // we then expect the following 2 subscriptions to get that same value
        final CountDownLatch latch = new CountDownLatch(2);

        // subscribe once
        o.subscribe(new Action1<String>() {
            @Override
            public void call(String v) {
                assertEquals("one", v);
                latch.countDown();
            }
        });

        // subscribe again
        o.subscribe(new Action1<String>() {
            @Override
            public void call(String v) {
                assertEquals("one", v);
                latch.countDown();
            }
        });

        if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
            fail("subscriptions did not receive values");
        }
        // The source ran exactly once despite two subscriptions.
        assertEquals(1, counter.get());
    }
/**
 * https://github.com/ReactiveX/RxJava/issues/198
 *
 * Rx Design Guidelines 5.2
 *
 * "when calling the Subscribe method that only has an onNext argument, the OnError behavior will be
 * to rethrow the exception on the thread that the message comes out from the Observable.
 * The OnCompleted behavior in this case is to do nothing."
 */
@Test
public void testErrorThrownWithoutErrorHandlerSynchronous() {
    // Capture the rethrown exception rather than wrapping fail() in catch (Throwable):
    // the original pattern also caught fail()'s own AssertionError, so a missing
    // rethrow was reported as a confusing message mismatch instead of "expected exception".
    RuntimeException caught = null;
    try {
        Observable.error(new RuntimeException("failure")).subscribe(new Action1<Object>() {
            @Override
            public void call(Object t1) {
                // won't get anything
            }
        });
    } catch (RuntimeException e) {
        caught = e;
    }
    assertNotNull("expected exception", caught);
    // the rethrown exception carries the original failure message
    assertEquals("failure", caught.getMessage());
}
/**
 * https://github.com/ReactiveX/RxJava/issues/198
 *
 * Rx Design Guidelines 5.2
 *
 * "when calling the Subscribe method that only has an onNext argument, the OnError behavior will be
 * to rethrow the exception on the thread that the message comes out from the Observable.
 * The OnCompleted behavior in this case is to do nothing."
 *
 * @throws InterruptedException
 */
@Test
public void testErrorThrownWithoutErrorHandlerAsynchronous() throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Throwable> exception = new AtomicReference<Throwable>();
    Observable.create(new OnSubscribe<String>() {
        @Override
        public void call(final Subscriber<? super String> observer) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        observer.onError(new Error("failure"));
                    } catch (Throwable e) {
                        // without an onError handler it has to just throw on whatever thread invokes it
                        exception.set(e);
                    }
                    latch.countDown();
                }
            }).start();
        }
    }).subscribe(new Action1<String>() {
        @Override
        public void call(String t1) {
        }
    });
    // wait for exception; the original silently ignored the await() result, so a
    // timeout produced a misleading NPE-style failure on the asserts below instead
    assertTrue("timed out waiting for the error to be rethrown",
            latch.await(3000, TimeUnit.MILLISECONDS));
    assertNotNull(exception.get());
    assertEquals("failure", exception.get().getMessage());
}
@Test
public void testTakeWithErrorInObserver() {
    final AtomicInteger count = new AtomicInteger();
    final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    // "three" is not parseable; onNext throws, which must be routed to onError.
    Observable.just("1", "2", "three", "4").take(3).subscribe(new Subscriber<String>() {
        @Override
        public void onCompleted() {
            System.out.println("completed");
        }
        @Override
        public void onError(Throwable e) {
            error.set(e);
            System.out.println("error");
            e.printStackTrace();
        }
        @Override
        public void onNext(String v) {
            int num = Integer.parseInt(v);
            System.out.println(num);
            // doSomething(num);
            count.incrementAndGet();
        }
    });
    // only "1" and "2" are counted before the parse failure
    assertEquals(2, count.get());
    assertNotNull(error.get());
    // assertTrue replaces the manual if/fail and reports the actual type on failure
    assertTrue("It should be a NumberFormatException, was: " + error.get(),
            error.get() instanceof NumberFormatException);
}
@Test
public void testOfType() {
    // ofType(String.class) filters a mixed-type sequence down to its String elements.
    Observable<String> strings = Observable.just(1, "abc", false, 2L).ofType(String.class);
    @SuppressWarnings("unchecked")
    Observer<Object> o = mock(Observer.class);
    strings.subscribe(o);
    // only the single String element may come through
    verify(o, times(1)).onNext("abc");
    verify(o, never()).onNext(1);
    verify(o, never()).onNext(false);
    verify(o, never()).onNext(2L);
    verify(o, never()).onError(org.mockito.Matchers.any(Throwable.class));
    verify(o, times(1)).onCompleted();
}
@Test
public void testOfTypeWithPolymorphism() {
    // ofType(List.class) must match any List implementation, not just one concrete class.
    ArrayList<Integer> arrayList = new ArrayList<Integer>();
    arrayList.add(1);
    LinkedList<Integer> linkedList = new LinkedList<Integer>();
    linkedList.add(2);
    @SuppressWarnings("rawtypes")
    Observable<List> lists = Observable.<Object> just(arrayList, linkedList, "123").ofType(List.class);
    @SuppressWarnings("unchecked")
    Observer<Object> o = mock(Observer.class);
    lists.subscribe(o);
    // the non-List element is dropped; both List subtypes pass through
    verify(o, never()).onNext("123");
    verify(o, times(1)).onNext(arrayList);
    verify(o, times(1)).onNext(linkedList);
    verify(o, never()).onError(org.mockito.Matchers.any(Throwable.class));
    verify(o, times(1)).onCompleted();
}
// contains(): emits true once a matching element is seen.
@Test
public void testContains() {
Observable<Boolean> observable = Observable.just("a", "b", null).contains("b");
@SuppressWarnings("unchecked")
Observer<Object> observer = mock(Observer.class);
observable.subscribe(observer);
verify(observer, times(1)).onNext(true);
verify(observer, never()).onNext(false);
verify(observer, never()).onError(
org.mockito.Matchers.any(Throwable.class));
verify(observer, times(1)).onCompleted();
}
// contains(): emits false when the sought element never appears.
@Test
public void testContainsWithInexistence() {
Observable<Boolean> observable = Observable.just("a", "b", null).contains("c");
@SuppressWarnings("unchecked")
Observer<Object> observer = mock(Observer.class);
observable.subscribe(observer);
verify(observer, times(1)).onNext(false);
verify(observer, never()).onNext(true);
verify(observer, never()).onError(
org.mockito.Matchers.any(Throwable.class));
verify(observer, times(1)).onCompleted();
}
// contains(null): a null needle matches a null element in the sequence.
@Test
public void testContainsWithNull() {
Observable<Boolean> observable = Observable.just("a", "b", null).contains(null);
@SuppressWarnings("unchecked")
Observer<Object> observer = mock(Observer.class);
observable.subscribe(observer);
verify(observer, times(1)).onNext(true);
verify(observer, never()).onNext(false);
verify(observer, never()).onError(
org.mockito.Matchers.any(Throwable.class));
verify(observer, times(1)).onCompleted();
}
// contains() on an empty sequence completes with false.
@Test
public void testContainsWithEmptyObservable() {
Observable<Boolean> observable = Observable.<String> empty().contains("a");
@SuppressWarnings("unchecked")
Observer<Object> observer = mock(Observer.class);
observable.subscribe(observer);
verify(observer, times(1)).onNext(false);
verify(observer, never()).onNext(true);
verify(observer, never()).onError(
org.mockito.Matchers.any(Throwable.class));
verify(observer, times(1)).onCompleted();
}
@Test
public void testIgnoreElements() {
    // ignoreElements() suppresses every onNext and only propagates completion.
    @SuppressWarnings("unchecked")
    Observer<Integer> o = mock(Observer.class);
    Observable.just(1, 2, 3).ignoreElements().subscribe(o);
    verify(o, times(1)).onCompleted();
    verify(o, never()).onNext(any(Integer.class));
    verify(o, never()).onError(any(Throwable.class));
}
// subscribeOn(TestScheduler): nothing is delivered until virtual time is advanced.
@Test
public void testJustWithScheduler() {
TestScheduler scheduler = new TestScheduler();
Observable<Integer> observable = Observable.from(Arrays.asList(1, 2)).subscribeOn(scheduler);
@SuppressWarnings("unchecked")
Observer<Integer> observer = mock(Observer.class);
observable.subscribe(observer);
// trigger the scheduled subscription
scheduler.advanceTimeBy(1, TimeUnit.MILLISECONDS);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onNext(1);
inOrder.verify(observer, times(1)).onNext(2);
inOrder.verify(observer, times(1)).onCompleted();
inOrder.verifyNoMoreInteractions();
}
// startWith prepends its values before the source's own emissions, in order.
@Test
public void testStartWithWithScheduler() {
TestScheduler scheduler = new TestScheduler();
Observable<Integer> observable = Observable.just(3, 4).startWith(Arrays.asList(1, 2)).subscribeOn(scheduler);
@SuppressWarnings("unchecked")
Observer<Integer> observer = mock(Observer.class);
observable.subscribe(observer);
scheduler.advanceTimeBy(1, TimeUnit.MILLISECONDS);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onNext(1);
inOrder.verify(observer, times(1)).onNext(2);
inOrder.verify(observer, times(1)).onNext(3);
inOrder.verify(observer, times(1)).onNext(4);
inOrder.verify(observer, times(1)).onCompleted();
inOrder.verifyNoMoreInteractions();
}
// range(3, 4) on a scheduler emits the four consecutive values 3..6 in order.
@Test
public void testRangeWithScheduler() {
TestScheduler scheduler = new TestScheduler();
Observable<Integer> observable = Observable.range(3, 4, scheduler);
@SuppressWarnings("unchecked")
Observer<Integer> observer = mock(Observer.class);
observable.subscribe(observer);
scheduler.advanceTimeBy(1, TimeUnit.MILLISECONDS);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onNext(3);
inOrder.verify(observer, times(1)).onNext(4);
inOrder.verify(observer, times(1)).onNext(5);
inOrder.verify(observer, times(1)).onNext(6);
inOrder.verify(observer, times(1)).onCompleted();
inOrder.verifyNoMoreInteractions();
}
// mergeWith: both sources' values arrive (synchronous sources, so in subscription order here).
@Test
public void testMergeWith() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
Observable.just(1).mergeWith(Observable.just(2)).subscribe(ts);
ts.assertReceivedOnNext(Arrays.asList(1, 2));
}
// concatWith: the second source's values follow the first's completion.
@Test
public void testConcatWith() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
Observable.just(1).concatWith(Observable.just(2)).subscribe(ts);
ts.assertReceivedOnNext(Arrays.asList(1, 2));
}
// ambWith: only the first source to emit wins; here the synchronous left source.
@Test
public void testAmbWith() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
Observable.just(1).ambWith(Observable.just(2)).subscribe(ts);
ts.assertReceivedOnNext(Arrays.asList(1));
}
// flatMap into an error with no onError handler: subscribe() must surface the
// failure as OnErrorNotImplementedException.
@Test(expected = OnErrorNotImplementedException.class)
public void testSubscribeWithoutOnError() {
Observable<String> o = Observable.just("a", "b").flatMap(new Func1<String, Observable<String>>() {
@Override
public Observable<String> call(String s) {
return Observable.error(new Exception("test"));
}
});
o.subscribe();
}
@Test
public void testTakeWhileToList() {
    // Run the pipeline several times: takeWhile stops at the first FALSE,
    // toList collapses the result into a single emission, so doOnNext must
    // fire exactly once per run.
    final int runs = 3;
    final AtomicInteger sideEffects = new AtomicInteger();
    for (int run = 0; run < runs; run++) {
        Observable.just(Boolean.TRUE, Boolean.FALSE)
                .takeWhile(new Func1<Boolean, Boolean>() {
                    @Override
                    public Boolean call(Boolean value) {
                        return value;
                    }
                })
                .toList()
                .doOnNext(new Action1<List<Boolean>>() {
                    @Override
                    public void call(List<Boolean> booleans) {
                        sideEffects.incrementAndGet();
                    }
                })
                .subscribe();
    }
    assertEquals(runs, sideEffects.get());
}
// compose() applies a Transformer to the whole stream; here ints are mapped to their string form.
@Test
public void testCompose() {
TestSubscriber<String> ts = new TestSubscriber<String>();
Observable.just(1, 2, 3).compose(new Transformer<Integer, String>() {
@Override
public Observable<String> call(Observable<Integer> t1) {
return t1.map(new Func1<Integer, String>() {
@Override
public String call(Integer t1) {
return String.valueOf(t1);
}
});
}
}).subscribe(ts);
ts.assertTerminalEvent();
ts.assertNoErrors();
ts.assertReceivedOnNext(Arrays.asList("1", "2", "3"));
}
// Regression test for https://github.com/ReactiveX/RxJava/issues/1685: an unhandled
// error delivered on another scheduler must reach that thread's UncaughtExceptionHandler.
@Test
public void testErrorThrownIssue1685() throws Exception {
Subject<Object, Object> subject = ReplaySubject.create();
ExecutorService exec = Executors.newSingleThreadExecutor();
try {
final AtomicReference<Throwable> err = new AtomicReference<Throwable>();
Scheduler s = Schedulers.from(exec);
// install the handler on the single executor thread before scheduling work on it;
// get() blocks until the installation has run
exec.submit(new Runnable() {
@Override
public void run() {
Thread.currentThread().setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
err.set(e);
}
});
}
}).get();
subject.subscribe();
Observable.error(new RuntimeException("oops"))
.materialize()
.delay(1, TimeUnit.SECONDS, s)
.dematerialize()
.subscribe(subject);
subject.materialize().toBlocking().first();
// poll up to ~5s for the handler to observe the exception
for (int i = 0; i < 50 && err.get() == null; i++) {
Thread.sleep(100); // the uncaught exception comes after the terminal event reaches toBlocking
}
assertNotNull("UncaughtExceptionHandler didn't get anything.", err.get());
System.out.println("Done");
} finally {
exec.shutdownNow();
}
}
// empty() appears to return the same (equal) instance each time — presumably a
// shared singleton; TODO confirm against Observable.empty()'s implementation.
@Test
public void testEmptyIdentity() {
assertEquals(Observable.empty(), Observable.empty());
}
// NOTE(review): 'w' is a mock Observer field declared elsewhere in this class.
@Test
public void testEmptyIsEmpty() {
Observable.<Integer>empty().subscribe(w);
verify(w).onCompleted();
verify(w, never()).onNext(any(Integer.class));
verify(w, never()).onError(any(Throwable.class));
}
@Test // cf. https://github.com/ReactiveX/RxJava/issues/2599
public void testSubscribingSubscriberAsObserverMaintainsSubscriptionChain() {
TestSubscriber<Object> subscriber = new TestSubscriber<Object>();
Subscription subscription = Observable.just("event").subscribe((Observer<Object>) subscriber);
subscription.unsubscribe();
subscriber.assertUnsubscribed();
}
// forEach with only an onNext action surfaces errors as OnErrorNotImplementedException.
@Test(expected=OnErrorNotImplementedException.class)
public void testForEachWithError() {
Observable.error(new Exception("boo"))
//
.forEach(new Action1<Object>() {
@Override
public void call(Object t) {
//do nothing
}});
}
// forEach(null) is rejected eagerly with IllegalArgumentException, before subscription.
@Test(expected=IllegalArgumentException.class)
public void testForEachWithNull() {
Observable.error(new Exception("boo"))
//
.forEach(null);
}
// extend() hands the raw OnSubscribe to the supplied function; invoking it
// manually must behave like a normal subscription (value delivered, completed).
@Test
public void testExtend() {
final TestSubscriber<Object> subscriber = new TestSubscriber<Object>();
final Object value = new Object();
Observable.just(value).extend(new Func1<OnSubscribe<Object>,Object>(){
@Override
public Object call(OnSubscribe<Object> onSubscribe) {
onSubscribe.call(subscriber);
subscriber.assertNoErrors();
subscriber.assertCompleted();
subscriber.assertValue(value);
return subscriber.getOnNextEvents().get(0);
}});
}
// create(null): the failure surfaces at subscribe time as IllegalStateException.
@Test
public void nullOnSubscribe() {
Observable<Integer> source = Observable.create((OnSubscribe<Integer>)null);
try {
source.subscribe();
fail("Should have thrown IllegalStateException");
} catch (IllegalStateException ex) {
// expected
}
}
@Test
public void nullObserver() {
    // subscribe((Observer) null) must fail fast with a NullPointerException.
    Observable<Integer> source = Observable.just(1);
    try {
        source.subscribe((Observer<Integer>)null);
        // message fixed: the catch below expects NullPointerException, not IllegalStateException
        fail("Should have thrown NullPointerException");
    } catch (NullPointerException ex) {
        // expected
    }
}
@Test
public void nullSubscriber() {
    // subscribe((Subscriber) null) must fail fast with an IllegalArgumentException.
    Observable<Integer> source = Observable.just(1);
    try {
        source.subscribe((Subscriber<Integer>)null);
        // message fixed: the catch below expects IllegalArgumentException, not IllegalStateException
        fail("Should have thrown IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    }
}
// cache(int) — the deprecated capacity-hint overload — must still share a single
// upstream subscription among all subscribers, exactly like cache().
@SuppressWarnings("deprecation")
@Test
public void testCacheHint() throws InterruptedException {
final AtomicInteger counter = new AtomicInteger();
Observable<String> o = Observable.create(new OnSubscribe<String>() {
@Override
public void call(final Subscriber<? super String> observer) {
new Thread(new Runnable() {
@Override
public void run() {
counter.incrementAndGet();
observer.onNext("one");
observer.onCompleted();
}
}).start();
}
}).cache(1);
// we then expect the following 2 subscriptions to get that same value
final CountDownLatch latch = new CountDownLatch(2);
// subscribe once
o.subscribe(new Action1<String>() {
@Override
public void call(String v) {
assertEquals("one", v);
latch.countDown();
}
});
// subscribe again
o.subscribe(new Action1<String>() {
@Override
public void call(String v) {
assertEquals("one", v);
latch.countDown();
}
});
assertTrue("subscriptions did not receive values", latch.await(1000, TimeUnit.MILLISECONDS));
// the upstream was subscribed to exactly once
assertEquals(1, counter.get());
}
// Every null callback passed to subscribe(...) must be rejected eagerly with an
// IllegalArgumentException whose message names the offending parameter.
@Test
public void subscribeWithNull() {
Action1<Integer> onNext = Actions.empty();
Action1<Throwable> onError = Actions.empty();
Action0 onCompleted = Actions.empty();
try {
Observable.just(1).subscribe((Action1<Integer>)null);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onNext can not be null", ex.getMessage());
}
try {
Observable.just(1).subscribe(null, onError);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onNext can not be null", ex.getMessage());
}
try {
Observable.just(1).subscribe(null, onError, onCompleted);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onNext can not be null", ex.getMessage());
}
try {
Observable.just(1).subscribe(onNext, null);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onError can not be null", ex.getMessage());
}
try {
Observable.just(1).subscribe(onNext, null, onCompleted);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onError can not be null", ex.getMessage());
}
try {
Observable.just(1).subscribe(onNext, onError, null);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onComplete can not be null", ex.getMessage());
}
}
// Mirror of subscribeWithNull for the forEach(...) overloads: each null callback
// is rejected eagerly with a message naming the offending parameter.
@Test
public void forEachWithNull() {
Action1<Integer> onNext = Actions.empty();
Action1<Throwable> onError = Actions.empty();
Action0 onCompleted = Actions.empty();
try {
Observable.just(1).forEach((Action1<Integer>)null);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onNext can not be null", ex.getMessage());
}
try {
Observable.just(1).forEach(null, onError);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onNext can not be null", ex.getMessage());
}
try {
Observable.just(1).forEach(null, onError, onCompleted);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onNext can not be null", ex.getMessage());
}
try {
Observable.just(1).forEach(onNext, null);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onError can not be null", ex.getMessage());
}
try {
Observable.just(1).forEach(onNext, null, onCompleted);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onError can not be null", ex.getMessage());
}
try {
Observable.just(1).forEach(onNext, onError, null);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("onComplete can not be null", ex.getMessage());
}
}
// If the source throws while the subscriber is already unsubscribed, onError cannot
// be delivered; the error must be routed to the RxJavaHooks onError handler instead.
@Test
public void observableThrowsWhileSubscriberIsUnsubscribed() {
TestSubscriber<Object> ts = TestSubscriber.create();
ts.unsubscribe();
final List<Throwable> list = new ArrayList<Throwable>();
RxJavaHooks.setOnError(new Action1<Throwable>() {
@Override
public void call(Throwable t) {
list.add(t);
}
});
try {
new FailingObservable().subscribe(ts);
assertEquals(1, list.size());
assertEquals("Forced failure", list.get(0).getMessage());
} finally {
// always restore global hooks so other tests are unaffected
RxJavaHooks.reset();
}
}
// If onError itself throws inside a SafeSubscriber, subscribe must surface an
// OnErrorFailedException whose cause is the exception thrown by onError.
@Test
public void observableThrowsWhileOnErrorFails() {
Subscriber<Object> ts = new SafeSubscriber<Object>(new TestSubscriber<Object>()) {
@Override
public void onError(Throwable e) {
throw new TestException("Forced failure");
}
};
try {
new FailingObservable().subscribe(ts);
fail("Should have thrown OnErrorFailedException");
} catch (OnErrorFailedException ex) {
// expected
assertTrue(ex.getCause().toString(), ex.getCause() instanceof TestException);
assertEquals("Forced failure", ex.getCause().getMessage());
}
}
// Same scenario as above, but via unsafeSubscribe (no SafeSubscriber wrapping
// added by subscribe()); the outcome must be identical.
@Test
public void observableThrowsWhileOnErrorFailsUnsafe() {
Subscriber<Object> ts = new TestSubscriber<Object>() {
@Override
public void onError(Throwable e) {
throw new TestException("Forced failure");
}
};
try {
new FailingObservable().unsafeSubscribe(ts);
fail("Should have thrown OnErrorFailedException");
} catch (OnErrorFailedException ex) {
// expected
assertTrue(ex.getCause().toString(), ex.getCause() instanceof TestException);
assertEquals("Forced failure", ex.getCause().getMessage());
}
}
/** Test helper: an Observable whose OnSubscribe always throws TestException("Forced failure"). */
static final class FailingObservable extends Observable<Object> {
protected FailingObservable() {
super(new OnSubscribe<Object>() {
@Override
public void call(Subscriber<? super Object> t) {
throw new TestException("Forced failure");
}
});
}
}
// Exercises the 2- and 3-argument forEach overloads: the value, error, and
// completion callbacks must each fire for the matching source (just / error / empty).
@Test
public void forEachWithError() {
Action1<Throwable> onError = Actions.empty();
final List<Object> list = new ArrayList<Object>();
Observable.just(1).forEach(new Action1<Integer>() {
@Override
public void call(Integer t) {
list.add(t);
}
}, onError);
Observable.<Integer>error(new TestException()).forEach(new Action1<Integer>() {
@Override
public void call(Integer t) {
list.add(t);
}
}, new Action1<Throwable>() {
@Override
public void call(Throwable t) {
list.add(t);
}
});
Observable.<Integer>empty().forEach(new Action1<Integer>() {
@Override
public void call(Integer t) {
list.add(t);
}
}, new Action1<Throwable>() {
@Override
public void call(Throwable t) {
list.add(t);
}
}, new Action0() {
@Override
public void call() {
list.add(100);
}
});
// one value, one error, one completion marker — in that order
assertEquals(3, list.size());
assertEquals(1, list.get(0));
assertTrue(list.get(1).toString(), list.get(1) instanceof TestException);
assertEquals(100, list.get(2));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.tang.formats;
import org.apache.reef.tang.Configuration;
import org.apache.reef.tang.ConfigurationBuilder;
import org.apache.reef.tang.Injector;
import org.apache.reef.tang.Tang;
import org.apache.reef.tang.annotations.Name;
import org.apache.reef.tang.annotations.NamedParameter;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.tang.exceptions.BindException;
import org.apache.reef.tang.exceptions.ClassHierarchyException;
import org.apache.reef.tang.exceptions.InjectionException;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.util.Set;
/*
* Define a configuration module that explains how Foo should be injected.
*
* A configuration module is like a configuration builder, except that it
* is not language independent (it should be defined in the same jar / whatever
* as the stuff it configures, and it has a concept of variables that can be
* required or optional.
*
* If you call build() without setting the required variables (or if the
* configuration declares variables that it does not use), then it blows up
* in your face.
*
* Note that MyConfigurationModule does not actually subclass
* ConfigurationModule. Instead, it has a static final field that contains a
* configuration module, and some other ones that define the parameters, and
* their types.
*
* There are some *ahem* non-idiomatic java things going on here.
*
* Sorry about that; if you can find my java programming license, you can take it
* away. :)
*
* First, you need the " = new RequiredImpl<>()" after each parameter. This is
* because you need to pass something into set (other than null). References to
* live objects happen to be unique, so that works.
*
* Second, ConfigurationModule() is abstract, and all of its methods are defined
* as final. To instantiate it, you need to put the {}'s between the () and the
* .bind stuff. This is so I can call getClass().getEnclosingClass() in its
* constructor, and discover all those juicy configuration parameters that
* were assigned above it. On the bright side, if you forget the {}'s you get
* a compiler error. It used to be that you'd get a cryptic NPE from the
* classloader. Also, note that adding methods to ConfigurationModule() won't
* work. The bind calls implement immutability by using a secret final clone
 * method called deepCopy() that strips your subclass off, and uses an anonymous
* inner class instead.
*
*
*/
// Marker supertype used by the set-of-implementations tests below
// (SubA / SubB — presumably its implementations; declared elsewhere).
interface Super {
}
// Empty module builder; used to exercise bind-by-class-name error paths
// (see nonExistentStringBindOK / nonExistentStringBindNotOK).
final class MyBadConfigurationModule extends ConfigurationModuleBuilder {
}
// A well-formed module: one required implementation and one optional parameter,
// each used exactly once in the binds below.
final class MyConfigurationModule extends ConfigurationModuleBuilder {
// Tell us what implementation you want, or else!!
public static final RequiredImpl<TestConfigurationModule.Foo> THE_FOO = new RequiredImpl<>();
// If you want, you can change the fooness.
public static final OptionalParameter<Integer> FOO_NESS = new OptionalParameter<>();
public static final ConfigurationModule CONF = new MyConfigurationModule()
// This binds the above to tang configuration stuff. You can use parameters more than
// once, but you'd better use them all at least once, or I'll throw exceptions at you.
.bindImplementation(TestConfigurationModule.Foo.class, MyConfigurationModule.THE_FOO)
.bindNamedParameter(TestConfigurationModule.Fooness.class, MyConfigurationModule.FOO_NESS)
.build();
}
// Deliberately broken module: it declares FOO_NESS but never binds it, so
// initializing BAD_CONF throws (exercised by badConfTest).
final class MyMissingBindConfigurationModule extends ConfigurationModuleBuilder {
// Tell us what implementation you want, or else!!
public static final RequiredImpl<TestConfigurationModule.Foo> THE_FOO = new RequiredImpl<>();
// If you want, you can change the fooness.
public static final OptionalParameter<Integer> FOO_NESS = new OptionalParameter<>();
// This conf doesn't use FOO_NESS. Expect trouble below
public static final ConfigurationModule BAD_CONF = new MyMissingBindConfigurationModule()
.bindImplementation(TestConfigurationModule.Foo.class, THE_FOO)
.build();
}
public class TestConfigurationModule {
/*
* Toy class hierarchy: FooImpl implements Foo, has a Fooness named
* parameter that defaults to 42.
*/
@Rule
public ExpectedException thrown = ExpectedException.none();
@Test
public void smokeTest() throws BindException, InjectionException {
    // Here we set some configuration values. In true tang style,
    // you won't be able to set them more than once once the ConfigurationModule's
    // implementation is complete.
    final Configuration c = MyConfigurationModule.CONF
        .set(MyConfigurationModule.THE_FOO, FooImpl.class)
        .set(MyConfigurationModule.FOO_NESS, "" + 12)
        .build();
    final Foo f = Tang.Factory.getTang().newInjector(c).getInstance(Foo.class);
    // JUnit convention: expected value first, actual second (was reversed,
    // which produces misleading failure messages)
    Assert.assertEquals(12, f.getFooness());
}
@Test
public void smokeTestConfigFile() throws BindException, InjectionException, IOException {
    // Same as smokeTest, plus a serialization round trip through an Avro config file.
    final Configuration c = MyConfigurationModule.CONF
        .set(MyConfigurationModule.THE_FOO, FooImpl.class)
        .set(MyConfigurationModule.FOO_NESS, "" + 12)
        .build();
    final Foo f = Tang.Factory.getTang().newInjector(c).getInstance(Foo.class);
    // JUnit convention: expected value first (was reversed)
    Assert.assertEquals(12, f.getFooness());
    final File tempFile = File.createTempFile("TangTest", ".avroconf");
    // don't leave serialized configs behind in the temp directory
    tempFile.deleteOnExit();
    final AvroConfigurationSerializer serializer = new AvroConfigurationSerializer();
    serializer.toFile(c, tempFile);
    serializer.fromFile(tempFile);
}
@Test
public void omitOptionalTest() throws BindException, InjectionException {
    // Optional is optional: omitting FOO_NESS falls back to the Fooness default.
    final Configuration c = MyConfigurationModule.CONF
        .set(MyConfigurationModule.THE_FOO, FooImpl.class)
        .build();
    final Foo f = Tang.Factory.getTang().newInjector(c).getInstance(Foo.class);
    // JUnit convention: expected value first (was reversed); 42 is the documented default
    Assert.assertEquals(42, f.getFooness());
}
// Building without setting the required THE_FOO must fail with a BindException.
// Because CONF is a static field, the failure can arrive wrapped in an
// ExceptionInInitializerError; unwrap it so ExpectedException sees the cause.
@Test
public void omitRequiredTest() throws Throwable {
thrown.expect(BindException.class);
thrown.expectMessage("Attempt to build configuration before setting required option(s): { THE_FOO }");
try {
MyConfigurationModule.CONF
.set(MyConfigurationModule.FOO_NESS, "" + 12)
.build();
} catch (ExceptionInInitializerError e) {
throw e.getCause();
}
}
// A module that declares an option but never binds it must fail when its
// static BAD_CONF field is initialized; unwrap the initializer error.
@Test
public void badConfTest() throws Throwable {
thrown.expect(ClassHierarchyException.class);
thrown.expectMessage("Found declared options that were not used in binds: { FOO_NESS }");
try {
// Java's classloader semantics cause it to load a class when executing the
// first line that references the class in question.
@SuppressWarnings("unused")
Object o = MyMissingBindConfigurationModule.BAD_CONF;
} catch (ExceptionInInitializerError e) {
throw e.getCause();
}
}
// Binding an unknown class name is tolerated at bind time...
@Test
public void nonExistentStringBindOK() throws BindException, InjectionException {
new MyBadConfigurationModule().bindImplementation(Foo.class, "i.do.not.exist");
}
// ...but build() must reject the unknown class with a ClassHierarchyException.
@Test
public void nonExistentStringBindNotOK() throws BindException, InjectionException {
thrown.expect(ClassHierarchyException.class);
thrown.expectMessage("ConfigurationModule refers to unknown class: i.do.not.exist");
new MyBadConfigurationModule().bindImplementation(Foo.class, "i.do.not.exist").build();
}
@Test
public void multiBindTest() throws BindException, InjectionException {
    // THE_FOO is resolvable both as Foo and as Object; two separate injectors
    // are created, so the two resolved instances must be distinct.
    final Configuration c = MultiBindConfigurationModule.CONF
        .set(MultiBindConfigurationModule.THE_FOO, FooImpl.class)
        .set(MultiBindConfigurationModule.FOO_NESS, "" + 12)
        .build();
    final Foo f = Tang.Factory.getTang().newInjector(c).getInstance(Foo.class);
    final Foo g = (Foo) Tang.Factory.getTang().newInjector(c).getInstance(Object.class);
    // JUnit convention: expected value first (was reversed)
    Assert.assertEquals(12, f.getFooness());
    Assert.assertEquals(12, g.getFooness());
    // assertNotSame reports both references on failure, unlike assertFalse(f == g)
    Assert.assertNotSame(f, g);
}
// set() with a field belonging to a different module must be rejected.
@Test
public void foreignSetTest() throws Throwable {
thrown.expect(ClassHierarchyException.class);
thrown.expectMessage("Unknown Impl/Param when setting RequiredImpl. Did you pass in a field from some other module?");
try {
// Pass in something from the wrong module, watch it fail.
MultiBindConfigurationModule.CONF.set(MyConfigurationModule.THE_FOO, FooImpl.class);
} catch (ExceptionInInitializerError e) {
throw e.getCause();
}
}
// bind*() with a field belonging to a different module must be rejected too.
@Test
public void foreignBindTest() throws Throwable {
thrown.expect(ClassHierarchyException.class);
thrown.expectMessage("Unknown Impl/Param when binding RequiredImpl. Did you pass in a field from some other module?");
try {
// Pass in something from the wrong module, watch it fail.
new MyConfigurationModule().bindImplementation(Object.class, MultiBindConfigurationModule.THE_FOO);
} catch (ExceptionInInitializerError e) {
throw e.getCause();
}
}
@Test
public void singletonTest() throws BindException, InjectionException {
    // Within one Injector, repeated getInstance(Foo.class) must yield the same object.
    final Configuration c = new MyConfigurationModule()
        .bindImplementation(Foo.class, MyConfigurationModule.THE_FOO)
        .bindNamedParameter(Fooness.class, MyConfigurationModule.FOO_NESS)
        .build()
        .set(MyConfigurationModule.THE_FOO, FooImpl.class)
        .build();
    final Injector i = Tang.Factory.getTang().newInjector(c);
    // assertSame expresses the identity check directly and reports both objects
    // on failure, unlike assertTrue(a == b)
    Assert.assertSame(i.getInstance(Foo.class), i.getInstance(Foo.class));
}
@Test
public void immutablilityTest() throws BindException, InjectionException {
    // builder methods return copies; the original module is immutable
    final ConfigurationModule builder1 = MyConfigurationModule.CONF
        .set(MyConfigurationModule.THE_FOO, FooImpl.class);
    // assertNotSame reports both references on failure, unlike assertFalse(a == b)
    Assert.assertNotSame(MyConfigurationModule.CONF, builder1);
    final Configuration config1 = builder1.build();
    // reusable: CONF can be set again with a different implementation
    final Configuration config2 = MyConfigurationModule.CONF
        .set(MyConfigurationModule.THE_FOO, FooAltImpl.class)
        .build();
    // instantiation of each just to be sure everything is fine in this situation
    final Injector i1 = Tang.Factory.getTang().newInjector(config1);
    final Injector i2 = Tang.Factory.getTang().newInjector(config2);
    Assert.assertEquals(42, i1.getInstance(Foo.class).getFooness());
    Assert.assertEquals(7, i2.getInstance(Foo.class).getFooness());
}
@Test
public void setParamTest() throws BindException, InjectionException {
    // Setting the same set-valued parameter twice accumulates both values.
    final Configuration c = SetConfigurationModule.CONF
        .set(SetConfigurationModule.P, "a")
        .set(SetConfigurationModule.P, "b")
        .build();
    final Set<String> s = Tang.Factory.getTang().newInjector(c).getNamedInstance(SetName.class);
    // JUnit convention: expected value first (was reversed)
    Assert.assertEquals(2, s.size());
    Assert.assertTrue(s.contains("a"));
    Assert.assertTrue(s.contains("b"));
}
// Binding two classes to the same set-valued parameter injects one instance of each.
@Test
public void setClassTest() throws BindException, InjectionException {
Configuration c = SetClassConfigurationModule.CONF
.set(SetClassConfigurationModule.P, SubA.class)
.set(SetClassConfigurationModule.P, SubB.class)
.build();
Set<Super> s = Tang.Factory.getTang().newInjector(c).getNamedInstance(SetClass.class);
Assert.assertEquals(2, s.size());
// verify exactly one SubA and one SubB, and nothing else
boolean sawA = false, sawB = false;
for (Super sup : s) {
if (sup instanceof SubA) {
sawA = true;
} else if (sup instanceof SubB) {
sawB = true;
} else {
Assert.fail();
}
}
Assert.assertTrue(sawA && sawB);
}
// Same scenario as setClassTest, but the configuration is serialized to a string
// and re-parsed first; the set binding must survive the round trip.
@Test
public void setClassRoundTripTest() throws BindException, InjectionException {
Configuration c = SetClassConfigurationModule.CONF
.set(SetClassConfigurationModule.P, SubA.class)
.set(SetClassConfigurationModule.P, SubB.class)
.build();
ConfigurationBuilder cb = Tang.Factory.getTang().newConfigurationBuilder();
ConfigurationFile.addConfiguration(cb, ConfigurationFile.toConfigurationString(c));
Set<Super> s = Tang.Factory.getTang().newInjector(cb.build()).getNamedInstance(SetClass.class);
Assert.assertEquals(2, s.size());
// verify exactly one SubA and one SubB, and nothing else
boolean sawA = false, sawB = false;
for (Super sup : s) {
if (sup instanceof SubA) {
sawA = true;
} else if (sup instanceof SubB) {
sawB = true;
} else {
Assert.fail();
}
}
Assert.assertTrue(sawA && sawB);
}
  // StaticTimeSet (below) calls set() during static initialization;
  // assertStaticClean() is expected to reject that with a ClassHierarchyException.
  @Test(expected = ClassHierarchyException.class)
  public void errorOnStaticTimeSet() throws BindException, InjectionException {
    StaticTimeSet.CONF.assertStaticClean();
  }
  // Merging a module that was mutated at static-initialization time must fail too.
  @Test(expected = ClassHierarchyException.class)
  public void errorOnSetMerge() throws BindException, InjectionException {
    ConfigurationModuleBuilder b = new ConfigurationModuleBuilder() {
    };
    b.merge(StaticTimeSet.CONF);
  }
  /** Test service interface; implementations report their "fooness". */
  static interface Foo {
    public int getFooness();
  }
  /** Default Foo implementation: reports the injected Fooness value verbatim. */
  static class FooImpl implements Foo {
    private final int fooness;
    @Inject
    FooImpl(@Parameter(Fooness.class) int fooness) {
      this.fooness = fooness;
    }
    public int getFooness() {
      return this.fooness;
    }
  }
  /**
   * Alternate Foo implementation: accepts the injected fooness but always
   * reports 7, letting the tests tell the two bindings apart.
   */
  static class FooAltImpl implements Foo {
    @SuppressWarnings("unused")
    private final int fooness;
    @Inject
    FooAltImpl(@Parameter(Fooness.class) int fooness) {
      this.fooness = fooness;
    }
    public int getFooness() {
      // Intentionally constant — tests rely on 7 to distinguish this binding.
      return 7;
    }
  }
  /** Module that binds one RequiredImpl to two different interfaces at once. */
  public static final class MultiBindConfigurationModule extends ConfigurationModuleBuilder {
    // Tell us what implementation you want, or else!!
    public static final RequiredImpl<Foo> THE_FOO = new RequiredImpl<>();
    // If you want, you can change the fooness.
    public static final OptionalParameter<Integer> FOO_NESS = new OptionalParameter<>();
    public static final ConfigurationModule CONF = new MultiBindConfigurationModule()
        // This binds the above to tang configuration stuff. You can use parameters more than
        // once, but you'd better use them all at least once, or I'll throw exceptions at you.
        .bindImplementation(Foo.class, THE_FOO)
        .bindImplementation(Object.class, THE_FOO)
        .bindNamedParameter(Fooness.class, FOO_NESS)
        .build();
  }
  /** Named parameter controlling a Foo's fooness; defaults to 42. */
  @NamedParameter(default_value = "42")
  class Fooness implements Name<Integer> {
  }
}
/** Name for the injected Set of Strings populated via SetConfigurationModule. */
@NamedParameter
class SetName implements Name<Set<String>> {
}
/** Module contributing String entries to the {@link SetName} set parameter. */
class SetConfigurationModule extends ConfigurationModuleBuilder {
  /** Required entry; each set(P, ...) call adds one element to the set. */
  public static final RequiredParameter<String> P = new RequiredParameter<>();
  /** Reusable module instance binding P into the SetName set. */
  public static final ConfigurationModule CONF =
      new SetConfigurationModule().bindSetEntry(SetName.class, SetConfigurationModule.P).build();
}
/** Name for the injected Set of Super populated via SetClassConfigurationModule. */
@NamedParameter
class SetClass implements Name<Set<Super>> {
}
/** Module contributing Super implementations to the {@link SetClass} set parameter. */
class SetClassConfigurationModule extends ConfigurationModuleBuilder {
  /** Required entry; each set(P, ...) call adds one implementation class. */
  public static final RequiredParameter<Super> P = new RequiredParameter<>();
  /** Reusable module instance binding P into the SetClass set. */
  public static final ConfigurationModule CONF =
      new SetClassConfigurationModule().bindSetEntry(SetClass.class, SetClassConfigurationModule.P).build();
}
/** Injectable Super implementation used as a set element in the tests above. */
class SubA implements Super {
  @Inject
  public SubA() {
  }
}
/** Second injectable Super implementation, distinguishable from SubA. */
class SubB implements Super {
  @Inject
  public SubB() {
  }
}
// Deliberately broken fixture: calling set() on the built module during static
// initialization mutates it at class-load time, which the errorOnStaticTimeSet
// and errorOnSetMerge tests expect ConfigurationModule to reject.
class StaticTimeSet extends ConfigurationModuleBuilder {
  public static final OptionalImpl<Super> X = new OptionalImpl<>();
  public static final ConfigurationModule CONF = new StaticTimeSet()
      .bindImplementation(Super.class, X)
      .build()
      .set(X, SubA.class);
}
| |
package org.gs.events.components;
import java.util.Calendar;
import org.genericsystem.common.Generic;
import org.genericsystem.defaults.tools.RxJavaHelpers;
import org.genericsystem.reactor.Context;
import org.genericsystem.reactor.Tag;
import org.genericsystem.reactor.annotations.Attribute;
import org.genericsystem.reactor.annotations.BindText;
import org.genericsystem.reactor.annotations.Children;
import org.genericsystem.reactor.annotations.SetText;
import org.genericsystem.reactor.annotations.Style;
import org.genericsystem.reactor.context.TextBinding;
import org.genericsystem.reactor.gscomponents.HtmlTag.HtmlDiv;
import org.genericsystem.reactor.gscomponents.HtmlTag.HtmlInputText;
import org.genericsystem.reactor.gscomponents.HtmlTag.HtmlLabel;
import org.genericsystem.reactor.gscomponents.HtmlTag.HtmlP;
import org.gs.events.components.InputDate.DivContainer;
import org.gs.events.model.Date;
import org.gs.events.model.Date.Day;
import org.gs.events.model.Date.Month;
import org.gs.events.model.Date.Year;
import io.reactivex.Observable;
import javafx.beans.property.Property;
import javafx.collections.MapChangeListener;
/**
 * Free-text date picker composed of three inputs (yyyy / mm / dd).
 * Validation results are published through two context properties:
 * "error" (current validation message, null when valid) and "selected"
 * (the Generic of the currently entered date, or null).
 */
@Children(DivContainer.class)
public class InputDate extends HtmlDiv {
	@Override
	public void init() {
		createNewContextProperty("error");
		createNewContextProperty("selected");
	}

	/** Container holding the three inputs, the "/" separators and the error line. */
	@Children({ YearSelect.class, Slash1.class, MonthSelect.class, Slash2.class, DaySelect.class, ErrorMsg.class })
	public static class DivContainer extends HtmlDiv {
		@Override
		public void init() {
			addPrefixBinding(context -> {
				// Pre-select the Date instance this container is displaying.
				if (context.getGeneric().isInstanceOf(context.find(Date.class)))
					getContextProperty("selected", context.getParent()).setValue(context.getGeneric());
			});
		}
	}

	/** Four-digit year input; re-validates the whole date on every value change. */
	@Attribute(name = "maxlength", value = "4")
	@Style(name = "width", value = "32px")
	@Style(name = "display", value = "inline")
	@BindText(YEAR_TEXT.class)
	public static class YearSelect extends HtmlInputText {
		@Override
		public void init() {
			addPrefixBinding(context -> {
				this.getDomNodeAttributes(context).addListener((MapChangeListener<String, String>) change -> {
					if ("value".equals(change.getKey())) {
						if (change.wasAdded())
							getContextProperty("selected", context).setValue(checkDate(context, this.getParent()));
					}
				});
			});
		}
	}

	/** "/" separator between year and month. */
	@SetText(value = "/")
	@Style(name = "margin", value = "3px")
	public static class Slash1 extends HtmlLabel {
	}

	/** Normalizes a missing or empty attribute value to null. */
	private static String emptyToNull(String value) {
		return (value == null || value.isEmpty()) ? null : value;
	}

	/**
	 * Validates the three field values and materializes the matching
	 * Year/Month/Day generics. Returns the most precise Generic that could be
	 * built (day, then month, then year), or null when the input is incomplete
	 * or invalid. The "error" context property is set to a message on failure
	 * and cleared (set to null) on success.
	 */
	public static Generic checkDate(Context context, Tag tag) {
		// Bug fix: the original compared strings with ==, which tests object
		// identity rather than content, so empty fields were not reliably
		// normalized to null. emptyToNull() compares content.
		String yyyy = emptyToNull(tag.find(YearSelect.class).getDomNodeAttributes(context).get("value"));
		String mm = emptyToNull(tag.find(MonthSelect.class).getDomNodeAttributes(context).get("value"));
		String dd = emptyToNull(tag.find(DaySelect.class).getDomNodeAttributes(context).get("value"));
		if (yyyy != null) {
			try {
				int yr = Integer.parseInt(yyyy);
				Generic year = context.find(Year.class);
				Generic y = year.setInstance(yr);
				if (mm != null) {
					try {
						int mo = Integer.parseInt(mm);
						if (mo < 1 || mo > 12) {
							tag.getContextProperty("error", context)
									.setValue("The month must be an integer between 1 and 12");
							return null;
						} else {
							Generic month = context.find(Month.class);
							Generic m = month.setInstance(mo, y);
							if (dd != null) {
								try {
									int da = Integer.parseInt(dd);
									Calendar cal = Calendar.getInstance();
									cal.set(yr, mo - 1, 15); // the month starts with 0
									int maxVal = cal.getActualMaximum(Calendar.DAY_OF_MONTH);
									if (da >= 1 && da <= maxVal) {
										Generic day = context.find(Day.class);
										tag.getContextProperty("error", context).setValue(null);
										return day.setInstance(da, m);
									} else {
										tag.getContextProperty("error", context)
												.setValue("The day must be an integer between 1 and " + maxVal);
										return null;
									}
								} catch (Exception e) {
									tag.getContextProperty("error", context).setValue("The day must be an integer");
									return null;
								}
							} else {
								// Year + month only is a valid (partial) date.
								tag.getContextProperty("error", context).setValue(null);
								return m;
							}
						}
					} catch (Exception e) {
						tag.getContextProperty("error", context).setValue("The month must be an integer");
						return null;
					}
				} else {
					if (dd == null) {
						// Year alone is a valid (partial) date.
						tag.getContextProperty("error", context).setValue(null);
						return y;
					} else {
						tag.getContextProperty("error", context).setValue("The month is not set");
						return null;
					}
				}
			} catch (Exception e) {
				tag.getContextProperty("error", context).setValue("The year must be an integer");
				return null;
			}
		} else {
			if (mm != null || dd != null)
				tag.getContextProperty("error", context).setValue("The year is not set");
			else
				tag.getContextProperty("error", context).setValue(null);
			return null;
		}
	}

	/** Two-digit month input; re-validates the whole date on every value change. */
	@Attribute(name = "maxlength", value = "2")
	@Style(name = "width", value = "16px")
	@Style(name = "display", value = "inline")
	@BindText(MONTH_TEXT.class)
	public static class MonthSelect extends HtmlInputText {
		@Override
		public void init() {
			addPrefixBinding(context -> {
				this.getDomNodeAttributes(context).addListener((MapChangeListener<String, String>) change -> {
					if ("value".equals(change.getKey())) {
						if (change.wasAdded())
							getContextProperty("selected", context).setValue(checkDate(context, this.getParent()));
					}
				});
			});
		}
	}

	/** "/" separator between month and day. */
	@SetText(value = "/")
	@Style(name = "margin", value = "3px")
	public static class Slash2 extends HtmlLabel {
	}

	/** Two-digit day input; re-validates the whole date on every value change. */
	@Attribute(name = "maxlength", value = "2")
	@Style(name = "width", value = "16px")
	@Style(name = "display", value = "inline")
	@BindText(DAY_TEXT.class)
	public static class DaySelect extends HtmlInputText {
		@Override
		public void init() {
			addPrefixBinding(context -> {
				this.getDomNodeAttributes(context).addListener((MapChangeListener<String, String>) change -> {
					if ("value".equals(change.getKey())) {
						if (change.wasAdded())
							getContextProperty("selected", context).setValue(checkDate(context, this.getParent()));
					}
				});
			});
		}
	}

	/** Paragraph displaying the current validation error, if any. */
	@BindText(GENERIC_TEXT.class)
	public static class ErrorMsg extends HtmlP {
	}

	/** Text binding extracting the year from a Year, Month or Day generic. */
	public static class YEAR_TEXT implements TextBinding {
		@Override
		public Observable<String> apply(Context context, Tag tag) {
			Integer year = getYear(context.getGeneric(), context.find(Year.class), context.find(Month.class),
					context.find(Day.class));
			return Observable.just(year != null ? String.valueOf(year) : "");
		}

		// Day -> base(Month) -> base(Year); Month -> base(Year); Year -> itself.
		public static Integer getYear(Generic g, Generic year, Generic month, Generic day) {
			if (g.isInstanceOf(day))
				return (Integer) g.getBaseComponent().getBaseComponent().getValue();
			else if (g.isInstanceOf(month))
				return (Integer) g.getBaseComponent().getValue();
			else if (g.isInstanceOf(year))
				return (Integer) g.getValue();
			else
				return null;
		}
	}

	/** Text binding extracting the month from a Month or Day generic. */
	public static class MONTH_TEXT implements TextBinding {
		@Override
		public Observable<String> apply(Context context, Tag tag) {
			Integer month = getMonth(context.getGeneric(), context.find(Year.class), context.find(Month.class),
					context.find(Day.class));
			return Observable.just(month != null ? String.valueOf(month) : "");
		}

		public static Integer getMonth(Generic g, Generic year, Generic month, Generic day) {
			if (g.isInstanceOf(day))
				return (Integer) g.getBaseComponent().getValue();
			else if (g.isInstanceOf(month))
				return (Integer) g.getValue();
			else
				return null;
		}
	}

	/** Text binding extracting the day of month from a Day generic. */
	public static class DAY_TEXT implements TextBinding {
		@Override
		public Observable<String> apply(Context context, Tag tag) {
			Integer day = getDay(context.getGeneric(), context.find(Year.class), context.find(Month.class),
					context.find(Day.class));
			return Observable.just(day != null ? String.valueOf(day) : "");
		}

		public static Integer getDay(Generic g, Generic year, Generic month, Generic day) {
			if (g.isInstanceOf(day))
				return (Integer) g.getValue();
			else
				return null;
		}
	}

	/** Text binding rendering the "error" context property (empty when valid). */
	public static class GENERIC_TEXT implements TextBinding {
		@Override
		public Observable<String> apply(Context context, Tag tag) {
			Property<?> prop = tag.getContextProperty("error", context);
			return RxJavaHelpers.optionalValuesOf(prop).map(opt -> opt.isPresent() ? opt.get().toString() : "");
		}
	}
}
| |
package apoc.atomic;
import apoc.util.ArrayBackedList;
import apoc.util.TestUtil;
import apoc.util.Util;
import org.junit.*;
import org.neo4j.cypher.internal.frontend.v2_3.ast.functions.E;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.Transaction;
import org.neo4j.helpers.TransactionTemplate;
import org.neo4j.test.TestGraphDatabaseFactory;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static apoc.util.MapUtil.map;
import static apoc.util.TestUtil.testCall;
import static org.junit.Assert.assertEquals;
/**
* @author AgileLARUS
*
* @since 26-06-17
*/
/**
 * Tests for the apoc.atomic.* procedures (add, subtract, concat, insert,
 * remove, update) on node and relationship properties, including concurrent
 * invocations against an impermanent database.
 *
 * @author AgileLARUS
 *
 * @since 26-06-17
 */
public class AtomicTest {

    private GraphDatabaseService db;

    @Before public void setUp() throws Exception {
        db = new TestGraphDatabaseFactory().newImpermanentDatabase();
        TestUtil.registerProcedure(db, Atomic.class);
    }

    @After public void tearDown() {
        db.shutdown();
    }

    @Test
    public void testAddLong(){
        db.execute("CREATE (p:Person {name:'Tom',age: 40}) CREATE (c:Person {name:'John',age: 40}) CREATE (a:Person {name:'Anne',age: 22})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) RETURN n;").next().get("n");
        testCall(db, "CALL apoc.atomic.add({node},{property},{value})",map("node",node,"property","age","value",10), (r) -> {});
        assertEquals(50L, db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age"));
    }

    @Test
    public void testAddLongRelationship(){
        db.execute("CREATE (p:Person {name:'Tom',age: 40}) CREATE (c:Person {name:'John',age: 40}) CREATE (p)-[:KNOWS{since:1965}]->(c)");
        Relationship rel = (Relationship) db.execute("MATCH (n:Person {name:'Tom'})-[r:KNOWS]-(c) RETURN r;").next().get("r");
        testCall(db, "CALL apoc.atomic.add({rel},{property},{value},{times})",map("rel",rel,"property","since","value",10,"times",5), (r) -> {});
        assertEquals(1975L, db.execute("MATCH (n:Person {name:'Tom'})-[r:KNOWS]-(c) RETURN r.since as since;").next().get("since"));
    }

    @Test
    public void testAddDouble(){
        // 35.0 in the Cypher literal forces a double-typed property
        // (previously built via the deprecated new Double(35) constructor).
        db.execute("CREATE (p:Person {name:'Tom',age: 40}) CREATE (c:Person {name:'John',age: 35.0}) CREATE (a:Person {name:'Anne',age: 22})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'John'}) RETURN n;").next().get("n");
        testCall(db, "CALL apoc.atomic.add({node},{property},{value},{times})",map("node",node,"property","age","value",10,"times",5), (r) -> {});
        assertEquals(Double.valueOf(45), db.execute("MATCH (n:Person {name:'John'}) RETURN n.age as age;").next().get("age"));
    }

    @Test
    public void testSubLong(){
        db.execute("CREATE (p:Person {name:'Tom',age: 40}) CREATE (c:Person {name:'John',age: 35}) CREATE (a:Person {name:'Anne',age: 22})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'John'}) RETURN n;").next().get("n");
        testCall(db, "CALL apoc.atomic.subtract({node},{property},{value},{times})",map("node",node,"property","age","value",10,"times",5), (r) -> {});
        assertEquals(25L, db.execute("MATCH (n:Person {name:'John'}) RETURN n.age as age;").next().get("age"));
    }

    @Test
    public void testSubLongRelationship(){
        db.execute("CREATE (p:Person {name:'Tom',age: 40}) CREATE (c:Person {name:'John',age: 40}) CREATE (p)-[:KNOWS{since:1965}]->(c)");
        Relationship rel = (Relationship) db.execute("MATCH (n:Person {name:'Tom'})-[r:KNOWS]-(c) RETURN r;").next().get("r");
        testCall(db, "CALL apoc.atomic.subtract({rel},{property},{value},{times})",map("rel",rel,"property","since","value",10,"times",5), (r) -> {});
        assertEquals(1955L, db.execute("MATCH (n:Person {name:'Tom'})-[r:KNOWS]-(c) RETURN r.since as since;").next().get("since"));
    }

    @Test
    public void testConcat(){
        db.execute("CREATE (p:Person {name:'Tom',age: 35})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) RETURN n;").next().get("n");
        testCall(db, "CALL apoc.atomic.concat({node},{property},{value},{times})",map("node",node,"property","name","value","asson","times",5), (r) -> {});
        // If concat worked, the node is now named 'Tomasson'.
        assertEquals(35L, db.execute("MATCH (n:Person {name:'Tomasson'}) RETURN n.age as age;").next().get("age"));
    }

    @Test
    public void testConcatRelationship(){
        db.execute("CREATE (p:Person {name:'Angelo',age: 22}) CREATE (c:Company {name:'Larus'}) CREATE (p)-[:WORKS_FOR{role:\"software dev\"}]->(c)");
        Relationship rel = (Relationship) db.execute("MATCH (n:Person {name:'Angelo'})-[r:WORKS_FOR]-(c) RETURN r;").next().get("r");
        testCall(db, "CALL apoc.atomic.concat({rel},{property},{value},{times})",map("rel",rel,"property","role","value","eloper","times",5), (r) -> {});
        assertEquals("software developer", db.execute("MATCH (n:Person {name:'Angelo'})-[r:WORKS_FOR]-(c) RETURN r.role as role;").next().get("role"));
    }

    @Test
    public void testRemoveArrayValueLong(){
        db.execute("CREATE (p:Person {name:'Tom',age: [40,50,60]})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        testCall(db, "CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",1,"times",5), (r) -> {});
        // assertArrayEquals compares element-wise; the previous
        // assertEquals(Object[],Object[]) overload is deprecated.
        Assert.assertArrayEquals(Arrays.asList(40L, 60L).toArray(), new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray());
    }

    @Test
    public void testRemoveFirstElementArrayValueLong(){
        db.execute("CREATE (p:Person {name:'Tom',age: [40,50,60]})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        testCall(db, "CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",0,"times",5), (r) -> {});
        Assert.assertArrayEquals(Arrays.asList(50L, 60L).toArray(), new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray());
    }

    @Test
    public void testRemoveLastElementArrayValueLong(){
        db.execute("CREATE (p:Person {name:'Tom',age: [40,50,60]})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        testCall(db, "CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",2,"times",5), (r) -> {});
        Assert.assertArrayEquals(Arrays.asList(40L, 50L).toArray(), new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray());
    }

    @Test
    public void testRemoveLastItemArray(){
        db.execute("CREATE (p:Person {name:'Tom',age: [40]})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        testCall(db, "CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",0,"times",5), (r) -> {});
        Assert.assertArrayEquals(Arrays.asList().toArray(), new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray());
    }

    @Test(expected = RuntimeException.class)
    public void testRemoveOutOfArrayIndex(){
        db.execute("CREATE (p:Person {name:'Tom',age: [40,50,60]})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        // Position 5 is out of bounds; the procedure call must throw.
        testCall(db, "CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",5,"times",5), (r) -> {});
    }

    @Test(expected = RuntimeException.class)
    public void testRemoveEmptyArray(){
        db.execute("CREATE (p:Person {name:'Tom',age: []})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        // Removing from an empty array must throw.
        testCall(db, "CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",5,"times",5), (r) -> {});
    }

    @Test
    public void testInsertArrayValueLong(){
        db.execute("CREATE (p:Person {name:'Tom',age: 40})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        testCall(db, "CALL apoc.atomic.insert({node},{property},{position},{value},{times})",map("node",node,"property","age","position",2,"value",55L,"times",5), (r) -> {});
        Assert.assertArrayEquals(Arrays.asList(40L,55L).toArray(), new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray());
    }

    @Test
    public void testInsertArrayValueLongRelationship(){
        db.execute("CREATE (p:Person {name:'Tom',age: 40}) CREATE (c:Person {name:'John',age: 40}) CREATE (p)-[:KNOWS{since:[40,50,60]}]->(c)");
        Relationship rel = (Relationship) db.execute("MATCH (n:Person {name:'Tom'})-[r:KNOWS]-(c) RETURN r;").next().get("r");
        testCall(db, "CALL apoc.atomic.insert({rel},{property},{position},{value},{times})",map("rel",rel,"property","since","position",2,"value",55L,"times",5), (r) -> {});
        Assert.assertArrayEquals(Arrays.asList(40L, 50L, 55L, 60L).toArray(), new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'})-[r:KNOWS]-(c) RETURN r.since as since;").next().get("since")).toArray());
    }

    @Test
    public void testUpdateNode(){
        db.execute("CREATE (p:Person {name:'Tom',salary1: 1800, salary2:1500})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) RETURN n;").next().get("n");
        testCall(db, "CALL apoc.atomic.update({node},{property},{operation},{times})",map("node",node,"property","salary1","operation","n.salary1 + n.salary2","times",5), (r) -> {});
        assertEquals(3300L, db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.salary1 as salary;").next().get("salary"));
    }

    @Test
    public void testUpdateRel(){
        db.execute("CREATE (t:Person {name:'Tom'})-[:KNOWS {forYears:5}]->(m:Person {name:'Mary'})");
        Relationship rel = (Relationship) db.execute("MATCH (t:Person {name:'Tom'})-[r:KNOWS]->(m:Person {name:'Mary'}) RETURN r;").next().get("r");
        testCall(db, "CALL apoc.atomic.update({rel},{property},{operation},{times})",map("rel",rel,"property","forYears","operation","n.forYears *3 + n.forYears","times",5), (r) -> {});
        assertEquals(20L, db.execute("MATCH (t:Person {name:'Tom'})-[r:KNOWS]->(m:Person {name:'Mary'}) RETURN r.forYears as forYears;").next().get("forYears"));
    }

    @Test
    public void testConcurrentAdd() throws Exception {
        db.execute("CREATE (p:Person {name:'Tom',age: 40})");
        // Note: the tasks re-match the node by name, so no Node handle is needed here.
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        Runnable task = () -> {
            db.execute("MATCH (p:Person {name:'Tom'}) WITH p CALL apoc.atomic.add(p,'age',10, 5) YIELD oldValue, newValue RETURN *").next().get("newValue");
        };
        Runnable task2 = () -> {
            db.execute("MATCH (p:Person {name:'Tom'}) WITH p CALL apoc.atomic.add(p,'age',10, 5) YIELD oldValue, newValue RETURN *").next().get("newValue");
        };
        executorService.execute(task);
        executorService.execute(task2);
        executorService.shutdown();
        // Fail loudly on timeout instead of silently ignoring the result.
        Assert.assertTrue(executorService.awaitTermination(2, TimeUnit.SECONDS));
        assertEquals(60L, db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age"));
    }

    @Test
    public void testConcurrentSubtract() throws Exception {
        db.execute("CREATE (p:Person {name:'Tom',age: 40})");
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        Runnable task = () -> {
            db.execute("MATCH (p:Person {name:'Tom'}) WITH p CALL apoc.atomic.subtract(p,'age',10, 5) YIELD oldValue, newValue RETURN *").next().get("newValue");
        };
        Runnable task2 = () -> {
            db.execute("MATCH (p:Person {name:'Tom'}) WITH p CALL apoc.atomic.subtract(p,'age',10, 5) YIELD oldValue, newValue RETURN *").next().get("newValue");
        };
        executorService.execute(task);
        executorService.execute(task2);
        executorService.shutdown();
        Assert.assertTrue(executorService.awaitTermination(2, TimeUnit.SECONDS));
        assertEquals(20L, db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age"));
    }

    @Test
    public void testConcurrentConcat() throws Exception {
        db.execute("CREATE (p:Person {name:'Tom',age: 40})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) RETURN n;").next().get("n");
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        Runnable task = () -> {
            db.execute("CALL apoc.atomic.concat({node},{property},{value},{times})", map("node",node,"property","name","value","asson","times",5)).next().get("newValue");
        };
        Runnable task2 = () -> {
            db.execute("CALL apoc.atomic.concat({node},{property},{value},{times})", map("node",node,"property","name","value","s","times",5)).next().get("newValue");
        };
        executorService.execute(task);
        executorService.execute(task2);
        executorService.shutdown();
        Assert.assertTrue(executorService.awaitTermination(2, TimeUnit.SECONDS));
        // "Tom" + "asson" + "s" in either order is 9 characters.
        String name = db.execute("MATCH (n:Person) return n.name as name;").next().get("name").toString();
        assertEquals(9, name.length());
    }

    @Test
    public void testConcurrentInsert() throws InterruptedException {
        db.execute("CREATE (p:Person {name:'Tom',age: 40})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) RETURN n;").next().get("n");
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        Runnable task = () -> {
            db.execute("CALL apoc.atomic.insert({node},{property},{position},{value},{times})", map("node",node,"property","age","position",2,"value",41L,"times",5)).next().get("newValue");
        };
        Runnable task2 = () -> {
            db.execute("CALL apoc.atomic.insert({node},{property},{position},{value},{times})", map("node",node,"property","age","position",2,"value",42L,"times",5)).next().get("newValue");
        };
        executorService.execute(task);
        executorService.execute(task2);
        executorService.shutdown();
        Assert.assertTrue(executorService.awaitTermination(2, TimeUnit.SECONDS));
        assertEquals(3, new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray().length);
    }

    @Test
    public void testConcurrentRemove() throws InterruptedException {
        db.execute("CREATE (p:Person {name:'Tom',age: [40,50,60]}) CREATE (c:Person {name:'John',age: 40}) CREATE (a:Person {name:'Anne',age: 22})");
        Node node = (Node) db.execute("MATCH (n:Person {name:'Tom'}) return n;").next().get("n");
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        Runnable task = () -> {
            db.execute("CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",0,"times",5)).next().get("newValue");
        };
        Runnable task2 = () -> {
            db.execute("CALL apoc.atomic.remove({node},{property},{position},{times})",map("node",node,"property","age","position",1,"times",5)).next().get("newValue");
        };
        executorService.execute(task);
        executorService.execute(task2);
        executorService.shutdown();
        Assert.assertTrue(executorService.awaitTermination(10, TimeUnit.SECONDS));
        assertEquals(1 , new ArrayBackedList(db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.age as age;").next().get("age")).toArray().length);
    }

    @Test
    public void testConcurrentUpdate() throws Exception {
        db.execute("CREATE (p:Person {name:'Tom',salary1: 100, salary2: 100})");
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        Runnable task = () -> {
            db.execute("MATCH (p:Person {name:'Tom'}) WITH p CALL apoc.atomic.update(p,'salary1','n.salary1 - n.salary2',5) YIELD oldValue, newValue RETURN *").next().get("newValue");
        };
        Runnable task2 = () -> {
            db.execute("MATCH (p:Person {name:'Tom'}) WITH p CALL apoc.atomic.update(p,'salary1','n.salary1 + n.salary2',5) YIELD oldValue, newValue RETURN *").next().get("newValue");
        };
        executorService.execute(task);
        executorService.execute(task2);
        executorService.shutdown();
        Assert.assertTrue(executorService.awaitTermination(2, TimeUnit.SECONDS));
        // -100 then +100 (in either order) leaves salary1 unchanged.
        assertEquals(100L, db.execute("MATCH (n:Person {name:'Tom'}) RETURN n.salary1 as salary;").next().get("salary"));
    }
}
| |
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject.assistedinject;
import static com.google.inject.Asserts.assertContains;
import static com.google.inject.Asserts.assertEqualsBothWays;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import com.google.inject.AbstractModule;
import com.google.inject.ConfigurationException;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Stage;
import com.google.inject.TypeLiteral;
import com.google.inject.assistedinject.FactoryProvider2Test.Equals.ComparisonMethod;
import com.google.inject.assistedinject.FactoryProvider2Test.Equals.Impl;
import com.google.inject.internal.Annotations;
import com.google.inject.internal.InternalFlags;
import com.google.inject.matcher.Matchers;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@SuppressWarnings("deprecation")
@RunWith(JUnit4.class)
public class FactoryProvider2Test {
  /** Assisted-parameter values handed to the factories under test. */
  private enum Color {
    BLUE,
    GREEN,
    RED,
    GRAY,
    BLACK,
    ORANGE,
    PINK
  }
  @Test
  public void testAssistedFactory() {
    // engineSize comes from the injector binding; color comes from the
    // factory-method argument.
    Injector injector =
        Guice.createInjector(
            new AbstractModule() {
              @Override
              protected void configure() {
                bind(Double.class).toInstance(5.0d);
                bind(ColoredCarFactory.class)
                    .toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
              }
            });
    ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
    Mustang blueMustang = (Mustang) carFactory.create(Color.BLUE);
    assertEquals(Color.BLUE, blueMustang.color);
    assertEquals(5.0d, blueMustang.engineSize, 0.0);
    Mustang redMustang = (Mustang) carFactory.create(Color.RED);
    assertEquals(Color.RED, redMustang.color);
    assertEquals(5.0d, redMustang.engineSize, 0.0);
  }
  @Test
  public void testAssistedFactoryWithAnnotations() {
    // @Named bindings must be honored for the non-assisted constructor params.
    Injector injector =
        Guice.createInjector(
            new AbstractModule() {
              @Override
              protected void configure() {
                bind(int.class).annotatedWith(Names.named("horsePower")).toInstance(250);
                bind(int.class).annotatedWith(Names.named("modelYear")).toInstance(1984);
                bind(ColoredCarFactory.class)
                    .toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Camaro.class));
              }
            });
    ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
    Camaro blueCamaro = (Camaro) carFactory.create(Color.BLUE);
    assertEquals(Color.BLUE, blueCamaro.color);
    assertEquals(1984, blueCamaro.modelYear);
    assertEquals(250, blueCamaro.horsePower);
    Camaro redCamaro = (Camaro) carFactory.create(Color.RED);
    assertEquals(Color.RED, redCamaro.color);
    assertEquals(1984, redCamaro.modelYear);
    assertEquals(250, redCamaro.horsePower);
  }
public interface Car {}
  /** Assisted factory: Guice supplies everything except the Color. */
  interface ColoredCarFactory {
    Car create(Color color);
  }
  /** Car whose engineSize is injector-provided and whose color is assisted. */
  public static class Mustang implements Car {
    private final double engineSize;
    private final Color color;
    @Inject
    public Mustang(double engineSize, @Assisted Color color) {
      this.engineSize = engineSize;
      this.color = color;
    }
    public void drive() {}
  }
  /** Car mixing two @Named injector bindings with one assisted parameter. */
  public static class Camaro implements Car {
    private final int horsePower;
    private final int modelYear;
    private final Color color;
    @Inject
    public Camaro(
        @Named("horsePower") int horsePower,
        @Named("modelYear") int modelYear,
        @Assisted Color color) {
      this.horsePower = horsePower;
      this.modelYear = modelYear;
      this.color = color;
    }
  }
  /** Factory with two assisted parameters of different types. */
  interface SummerCarFactory {
    Car create(Color color, boolean convertable);
  }
  @Test
  public void testFactoryUsesInjectedConstructor() {
    // Corvette has two constructors; only the @Inject one may be invoked
    // (the other throws IllegalStateException on purpose).
    Injector injector =
        Guice.createInjector(
            new AbstractModule() {
              @Override
              protected void configure() {
                bind(float.class).toInstance(140f);
                bind(SummerCarFactory.class)
                    .toProvider(FactoryProvider.newFactory(SummerCarFactory.class, Corvette.class));
              }
            });
    SummerCarFactory carFactory = injector.getInstance(SummerCarFactory.class);
    Corvette redCorvette = (Corvette) carFactory.create(Color.RED, false);
    assertEquals(Color.RED, redCorvette.color);
    assertEquals(140f, redCorvette.maxMph, 0.0f);
    assertFalse(redCorvette.isConvertable);
  }
  /**
   * Car with a decoy constructor; the @Inject constructor interleaves
   * assisted and injector-provided parameters.
   */
  public static class Corvette implements Car {
    private boolean isConvertable;
    private Color color;
    private float maxMph;
    @SuppressWarnings("unused")
    public Corvette(Color color, boolean isConvertable) {
      // Must never be selected by the factory; see testFactoryUsesInjectedConstructor.
      throw new IllegalStateException("Not an @AssistedInject constructor");
    }
    @Inject
    public Corvette(@Assisted Color color, Float maxMph, @Assisted boolean isConvertable) {
      this.isConvertable = isConvertable;
      this.color = color;
      this.maxMph = maxMph;
    }
  }
/**
 * A factory method may declare more assisted parameters than the target
 * constructor consumes; the extras are simply dropped (Beetle ignores the
 * boolean).
 */
@Test
public void testConstructorDoesntNeedAllFactoryMethodArguments() {
  AbstractModule module =
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(SummerCarFactory.class)
              .toProvider(FactoryProvider.newFactory(SummerCarFactory.class, Beetle.class));
        }
      };
  SummerCarFactory summerCarFactory =
      Guice.createInjector(module).getInstance(SummerCarFactory.class);
  Beetle car = (Beetle) summerCarFactory.create(Color.RED, true);
  assertSame(Color.RED, car.color);
}
/** Car whose constructor uses only one of the factory's two assisted arguments. */
public static class Beetle implements Car {
private final Color color;
@Inject
public Beetle(@Assisted Color color) {
this.color = color;
}
}
/**
 * Factory-created instances also receive field injection ({@code name}) and
 * method injection ({@code setModel}), not just constructor injection.
 */
@Test
public void testMethodsAndFieldsGetInjected() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(String.class).toInstance("turbo");
bind(int.class).toInstance(911);
bind(double.class).toInstance(50000d);
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Porsche.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Porsche grayPorsche = (Porsche) carFactory.create(Color.GRAY);
assertEquals(Color.GRAY, grayPorsche.color);
assertEquals(50000d, grayPorsche.price, 0.0);
assertEquals(911, grayPorsche.model);
assertEquals("turbo", grayPorsche.name);
}
/** Car exercising constructor, field ({@code name}) and method ({@code setModel}) injection. */
public static class Porsche implements Car {
private final Color color;
private final double price;
private @Inject String name;
private int model;
@Inject
public Porsche(@Assisted Color color, double price) {
this.color = color;
this.price = price;
}
@Inject
void setModel(int model) {
this.model = model;
}
}
/** A constructor may take an injector-provided {@code Provider<T>} alongside assisted params. */
@Test
public void testProviderInjection() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(String.class).toInstance("trans am");
bind(ColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(ColoredCarFactory.class, Firebird.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Firebird blackFirebird = (Firebird) carFactory.create(Color.BLACK);
assertEquals(Color.BLACK, blackFirebird.color);
assertEquals("trans am", blackFirebird.modifiersProvider.get());
}
/** Car taking an injector-provided {@code Provider<String>} plus an assisted color. */
public static class Firebird implements Car {
private final Provider<String> modifiersProvider;
private final Color color;
@Inject
public Firebird(Provider<String> modifiersProvider, @Assisted Color color) {
this.modifiersProvider = modifiersProvider;
this.color = color;
}
}
/**
 * An assisted parameter may be injected as {@code @Assisted Provider<T>}; each
 * factory call gets its own provider bound to that call's argument.
 */
@Test
public void testAssistedProviderInjection() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(String.class).toInstance("trans am");
bind(ColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(ColoredCarFactory.class, Flamingbird.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Flamingbird flamingbird = (Flamingbird) carFactory.create(Color.BLACK);
assertEquals(Color.BLACK, flamingbird.colorProvider.get());
assertEquals("trans am", flamingbird.modifiersProvider.get());
Flamingbird flamingbird2 = (Flamingbird) carFactory.create(Color.RED);
assertEquals(Color.RED, flamingbird2.colorProvider.get());
assertEquals("trans am", flamingbird2.modifiersProvider.get());
// Make sure the original flamingbird is black still.
assertEquals(Color.BLACK, flamingbird.colorProvider.get());
}
/** Car receiving the assisted color indirectly through an {@code @Assisted Provider}. */
public static class Flamingbird implements Car {
private final Provider<String> modifiersProvider;
private final Provider<Color> colorProvider;
@Inject
public Flamingbird(
Provider<String> modifiersProvider, @Assisted Provider<Color> colorProvider) {
this.modifiersProvider = modifiersProvider;
this.colorProvider = colorProvider;
}
}
/** Parameterized bindings (via {@code TypeLiteral}) are resolvable from the constructed type. */
@Test
public void testTypeTokenInjection() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(new TypeLiteral<Set<String>>() {})
.toInstance(Collections.singleton("Flux Capacitor"));
bind(new TypeLiteral<Set<Integer>>() {}).toInstance(Collections.singleton(88));
bind(ColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(ColoredCarFactory.class, DeLorean.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
DeLorean deLorean = (DeLorean) carFactory.create(Color.GRAY);
assertEquals(Color.GRAY, deLorean.color);
assertEquals("Flux Capacitor", deLorean.features.iterator().next());
assertEquals(Integer.valueOf(88), deLorean.featureActivationSpeeds.iterator().next());
}
/** Car whose injected dependencies are generic collection types. */
public static class DeLorean implements Car {
private final Set<String> features;
private final Set<Integer> featureActivationSpeeds;
private final Color color;
@Inject
public DeLorean(
Set<String> extraFeatures, Set<Integer> featureActivationSpeeds, @Assisted Color color) {
this.features = extraFeatures;
this.featureActivationSpeeds = featureActivationSpeeds;
this.color = color;
}
}
/** Providers of parameterized types ({@code Provider<Set<String>>}) can be injected too. */
@Test
public void testTypeTokenProviderInjection() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(new TypeLiteral<Set<String>>() {}).toInstance(Collections.singleton("Datsun"));
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Z.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Z orangeZ = (Z) carFactory.create(Color.ORANGE);
assertEquals(Color.ORANGE, orangeZ.color);
assertEquals("Datsun", orangeZ.manufacturersProvider.get().iterator().next());
}
/** Car injecting a provider of a parameterized collection type. */
public static class Z implements Car {
private final Provider<Set<String>> manufacturersProvider;
private final Color color;
@Inject
public Z(Provider<Set<String>> manufacturersProvider, @Assisted Color color) {
this.manufacturersProvider = manufacturersProvider;
this.color = color;
}
}
/** Car with a private {@code @Inject} constructor, for the non-public-constructor test. */
public static class Prius implements Car {
final Color color;
@Inject
private Prius(@Assisted Color color) {
this.color = color;
}
}
/**
 * Assisted injection must work even when the {@code @Inject} constructor is
 * private (see {@link Prius}).
 */
@Test
public void testAssistInjectionInNonPublicConstructor() {
  Injector injector =
      Guice.createInjector(
          new AbstractModule() {
            @Override
            protected void configure() {
              bind(ColoredCarFactory.class)
                  .toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Prius.class));
            }
          });
  Prius prius = (Prius) injector.getInstance(ColoredCarFactory.class).create(Color.ORANGE);
  // Fixed: assertEquals takes (expected, actual); the arguments were reversed,
  // which produces a misleading failure message when the test breaks.
  assertEquals(Color.ORANGE, prius.color);
}
/** Car whose constructor always throws, to test exception propagation from factories. */
public static class ExplodingCar implements Car {
@Inject
public ExplodingCar(@SuppressWarnings("unused") @Assisted Color color) {
throw new IllegalStateException("kaboom!");
}
}
/** Runtime exceptions thrown by the constructor propagate unwrapped to the factory caller. */
@Test
public void testExceptionDuringConstruction() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(ColoredCarFactory.class, ExplodingCar.class));
}
});
try {
injector.getInstance(ColoredCarFactory.class).create(Color.ORANGE);
fail();
} catch (IllegalStateException e) {
assertEquals("kaboom!", e.getMessage());
}
}
/** Car whose constructor declares and throws a checked exception. */
public static class DefectiveCar implements Car {
@Inject
public DefectiveCar() throws ExplosionException {
throw new ExplosionException();
}
}
// Checked exception types used to exercise factory-method throws clauses.
public static class ExplosionException extends Exception {}
public static class FireException extends Exception {}
// Declares no exceptions, so it cannot rethrow DefectiveCar's checked exception.
public interface DefectiveCarFactoryWithNoExceptions {
Car createCar();
}
// Declares only FireException, still insufficient for DefectiveCar.
public interface DefectiveCarFactory {
Car createCar() throws FireException;
}
// Declares both exceptions; compatible with DefectiveCar's throws clause.
public interface CorrectDefectiveCarFactory {
Car createCar() throws FireException, ExplosionException;
}
/**
 * A checked exception thrown by the constructor is rethrown as-is by the
 * factory method when the factory declares it.
 */
@Test
public void testConstructorExceptionsAreThrownByFactory() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(CorrectDefectiveCarFactory.class)
.toProvider(
FactoryProvider.newFactory(
CorrectDefectiveCarFactory.class, DefectiveCar.class));
}
});
try {
injector.getInstance(CorrectDefectiveCarFactory.class).createCar();
fail();
} catch (FireException e) {
// Wrong exception type: the constructor throws ExplosionException only.
fail();
} catch (ExplosionException expected) {
}
}
/** Assisted type whose factory parameter uses a wildcard generic ({@code Collection<?>}). */
public static class WildcardCollection {
public interface Factory {
WildcardCollection create(Collection<?> items);
}
@Inject
public WildcardCollection(@SuppressWarnings("unused") @Assisted Collection<?> items) {}
}
/** Wildcard generics in the factory signature must not break assisted matching. */
@Test
public void testWildcardGenerics() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(WildcardCollection.Factory.class)
.toProvider(
FactoryProvider.newFactory(
WildcardCollection.Factory.class, WildcardCollection.class));
}
});
WildcardCollection.Factory factory = injector.getInstance(WildcardCollection.Factory.class);
// No assertions: success is simply that creation does not throw.
factory.create(Collections.emptyList());
}
/** Dependency with no explicit binding; resolvable just-in-time by the injector. */
public static class SteeringWheel {}
/** Car combining an implicitly bound dependency with an assisted parameter. */
public static class Fiat implements Car {
private final SteeringWheel steeringWheel;
private final Color color;
@Inject
public Fiat(SteeringWheel steeringWheel, @Assisted Color color) {
this.steeringWheel = steeringWheel;
this.color = color;
}
}
/**
 * The factory's target may depend on types with no explicit binding
 * (SteeringWheel); the injector creates them just-in-time.
 */
@Test
public void testFactoryWithImplicitBindings() {
  AbstractModule module =
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(ColoredCarFactory.class)
              .toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Fiat.class));
        }
      };
  ColoredCarFactory coloredCarFactory =
      Guice.createInjector(module).getInstance(ColoredCarFactory.class);
  Fiat fiat = (Fiat) coloredCarFactory.create(Color.GREEN);
  assertEquals(Color.GREEN, fiat.color);
  assertNotNull(fiat.steeringWheel);
}
/** A missing non-assisted dependency (Mustang's double) fails fast at injector creation. */
@Test
public void testFactoryFailsWithMissingBinding() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"No injectable constructor for type Double.",
"at FactoryProvider2Test$ColoredCarFactory.create(FactoryProvider2Test.java");
}
}
/** Same missing-binding failure must also be reported in Stage.TOOL. */
@Test
public void testFactoryFailsWithMissingBindingInToolStage() {
try {
Guice.createInjector(
Stage.TOOL,
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"No injectable constructor for type Double.",
"at FactoryProvider2Test$ColoredCarFactory.create(FactoryProvider2Test.java");
}
}
/** Object methods (equals/hashCode) on the generated factory behave sanely. */
@Test
public void testMethodsDeclaredInObject() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(5.0d);
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
assertEqualsBothWays(carFactory, carFactory);
}
/** Car receiving the assisted color via field-injected {@code @Assisted Provider}. */
static class Subaru implements Car {
@Inject @Assisted Provider<Color> colorProvider;
}
/**
 * Each factory invocation gets an independent provider of its assisted value;
 * later invocations must not affect earlier instances.
 */
@Test
public void testInjectingProviderOfParameter() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Subaru.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Subaru subaru = (Subaru) carFactory.create(Color.RED);
assertSame(Color.RED, subaru.colorProvider.get());
assertSame(Color.RED, subaru.colorProvider.get());
Subaru sedan = (Subaru) carFactory.create(Color.BLUE);
assertSame(Color.BLUE, sedan.colorProvider.get());
assertSame(Color.BLUE, sedan.colorProvider.get());
// and make sure the subaru is still red
assertSame(Color.RED, subaru.colorProvider.get());
}
/** A null assisted argument is allowed and surfaces as null from the provider. */
@Test
public void testInjectingNullParameter() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Subaru.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Subaru subaru = (Subaru) carFactory.create(null);
assertNull(subaru.colorProvider.get());
assertNull(subaru.colorProvider.get());
}
/** Invalid factory: Provider-typed factory-method parameters are disallowed. */
interface ProviderBasedColoredCarFactory {
Car createCar(Provider<Color> colorProvider, Provider<String> stringProvider);
Mustang createMustang(@Assisted("color") Provider<Color> colorProvider);
}
/**
 * Guice {@code Provider} parameters on factory methods are rejected; one error
 * is reported per offending parameter, plus the resulting missing binding.
 */
@Test
public void testAssistedProviderIsDisallowed() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ProviderBasedColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(
ProviderBasedColoredCarFactory.class, Subaru.class));
}
});
fail();
} catch (CreationException expected) {
assertEquals(expected.getMessage(), 4, expected.getErrorMessages().size());
// Assert each method individually, because JDK7 doesn't guarantee method ordering.
assertContains(
expected.getMessage(),
"A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [1] with key [Provider<FactoryProvider2Test$Color>"
+ " annotated with @Assisted("
+ Annotations.memberValueString("value", "color")
+ ")] on method"
+ " [FactoryProvider2Test$ProviderBasedColoredCarFactory.createMustang()]");
assertContains(
expected.getMessage(),
"A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [1] with key [Provider<FactoryProvider2Test$Color>] on"
+ " method [FactoryProvider2Test$ProviderBasedColoredCarFactory.createCar()]");
assertContains(
expected.getMessage(),
"A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [2] with key [Provider<String>] on method"
+ " [FactoryProvider2Test$ProviderBasedColoredCarFactory.createCar()]");
assertContains(
expected.getMessage(),
"No implementation for FactoryProvider2Test$ProviderBasedColoredCarFactory was bound.");
}
}
/** Invalid factory: javax.inject.Provider parameters are disallowed just like Guice's. */
interface JavaxProviderBasedColoredCarFactory {
Car createCar(
javax.inject.Provider<Color> colorProvider, javax.inject.Provider<String> stringProvider);
Mustang createMustang(@Assisted("color") javax.inject.Provider<Color> colorProvider);
}
/** Same as testAssistedProviderIsDisallowed, using javax.inject.Provider parameters. */
@Test
public void testAssistedJavaxProviderIsDisallowed() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(JavaxProviderBasedColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(
JavaxProviderBasedColoredCarFactory.class, Subaru.class));
}
});
fail();
} catch (CreationException expected) {
assertEquals(expected.getMessage(), 4, expected.getErrorMessages().size());
assertContains(
expected.getMessage(),
") A Provider may not be a type in a factory method of an AssistedInject.\n"
+ " Offending instance is parameter [1] with key"
+ " [Provider<FactoryProvider2Test$Color> annotated with @Assisted("
+ Annotations.memberValueString("value", "color")
+ ")]"
+ " on method"
+ " [FactoryProvider2Test$JavaxProviderBasedColoredCarFactory.createMustang()]");
assertContains(
expected.getMessage(),
") A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [1] with key [Provider<FactoryProvider2Test$Color>] on"
+ " method [FactoryProvider2Test$JavaxProviderBasedColoredCarFactory.createCar()]");
assertContains(
expected.getMessage(),
") A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [2] with key [Provider<String>] on method"
+ " [FactoryProvider2Test$JavaxProviderBasedColoredCarFactory.createCar()]");
assertContains(
expected.getMessage(),
"No implementation for FactoryProvider2Test$JavaxProviderBasedColoredCarFactory was"
+ " bound.");
}
}
/** Calling a factory before Guice initializes it must fail with a clear message. */
@Test
public void testFactoryUseBeforeInitialization() {
ColoredCarFactory carFactory =
FactoryProvider.newFactory(ColoredCarFactory.class, Subaru.class).get();
try {
carFactory.create(Color.RED);
fail();
} catch (IllegalStateException expected) {
assertContains(
expected.getMessage(),
"Factories.create() factories cannot be used until they're initialized by Guice.");
}
}
/** Factory whose return type is the concrete class itself, not the Car interface. */
interface MustangFactory {
Mustang create(Color color);
}
/**
 * Factories may return the concrete implementation type directly; no separate
 * "thatMakes()"-style target declaration is needed.
 */
@Test
public void testFactoryBuildingConcreteTypes() {
  AbstractModule module =
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(double.class).toInstance(5.0d);
          // note there is no 'thatMakes()' call here:
          bind(MustangFactory.class)
              .toProvider(FactoryProvider.newFactory(MustangFactory.class, Mustang.class));
        }
      };
  Mustang mustang =
      Guice.createInjector(module).getInstance(MustangFactory.class).create(Color.RED);
  assertSame(Color.RED, mustang.color);
  assertEquals(5.0d, mustang.engineSize, 0.0);
}
/** Aggregate whose field-injected members themselves need the assisted color. */
static class Fleet {
@Inject Mustang mustang;
@Inject Camaro camaro;
}
/** Factory producing the aggregate from a single assisted color. */
interface FleetFactory {
Fleet createFleet(Color color);
}
/** Assisted values flow transitively into dependencies of the constructed object. */
@Test
public void testInjectDeepIntoConstructedObjects() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(double.class).toInstance(5.0d);
bind(int.class).annotatedWith(Names.named("horsePower")).toInstance(250);
bind(int.class).annotatedWith(Names.named("modelYear")).toInstance(1984);
bind(FleetFactory.class)
.toProvider(FactoryProvider.newFactory(FleetFactory.class, Fleet.class));
}
});
FleetFactory fleetFactory = injector.getInstance(FleetFactory.class);
Fleet fleet = fleetFactory.createFleet(Color.RED);
assertSame(Color.RED, fleet.mustang.color);
assertEquals(5.0d, fleet.mustang.engineSize, 0.0);
assertSame(Color.RED, fleet.camaro.color);
assertEquals(250, fleet.camaro.horsePower);
assertEquals(1984, fleet.camaro.modelYear);
}
/** Factory disambiguating two same-typed assisted params with named @Assisted values. */
interface TwoToneCarFactory {
Car create(@Assisted("paint") Color paint, @Assisted("fabric") Color fabric);
}
/** Car whose fields are keyed by distinct @Assisted names. */
static class Maxima implements Car {
@Inject
@Assisted("paint")
Color paint;
@Inject
@Assisted("fabric")
Color fabric;
}
/** Two same-typed assisted params with different @Assisted names map independently. */
@Test
public void testDistinctKeys() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(TwoToneCarFactory.class)
.toProvider(FactoryProvider.newFactory(TwoToneCarFactory.class, Maxima.class));
}
});
TwoToneCarFactory factory = injector.getInstance(TwoToneCarFactory.class);
Maxima maxima = (Maxima) factory.create(Color.BLACK, Color.GRAY);
assertSame(Color.BLACK, maxima.paint);
assertSame(Color.GRAY, maxima.fabric);
}
/** Invalid factory: two parameters share the same @Assisted("paint") key. */
interface DoubleToneCarFactory {
Car create(@Assisted("paint") Color paint, @Assisted("paint") Color morePaint);
}
/** Duplicate assisted keys on one factory method are rejected at injector creation. */
@Test
public void testDuplicateKeys() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(DoubleToneCarFactory.class)
.toProvider(FactoryProvider.newFactory(DoubleToneCarFactory.class, Maxima.class));
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"FactoryProvider2Test$Color annotated with @Assisted("
+ Annotations.memberValueString("value", "paint")
+ ") was bound multiple times.");
}
}
/**
 * AOP method interception applies to assisted-injected instances; requires
 * bytecode generation, so the test is skipped when that flag is off.
 */
@Test
public void testMethodInterceptorsOnAssistedTypes() {
assumeTrue(InternalFlags.isBytecodeGenEnabled());
final AtomicInteger invocationCount = new AtomicInteger();
final MethodInterceptor interceptor =
new MethodInterceptor() {
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
invocationCount.incrementAndGet();
return methodInvocation.proceed();
}
};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(Matchers.any(), Matchers.any(), interceptor);
bind(Double.class).toInstance(5.0d);
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
}
});
ColoredCarFactory factory = injector.getInstance(ColoredCarFactory.class);
Mustang mustang = (Mustang) factory.create(Color.GREEN);
// Construction itself must not trigger the interceptor; only method calls do.
assertEquals(0, invocationCount.get());
mustang.drive();
assertEquals(1, invocationCount.get());
}
/**
 * Our factories aren't reusable across injectors. Although this behaviour isn't something we
 * like, I have a test case to make sure the error message is pretty.
 */
@Test
public void testFactoryReuseErrorMessageIsPretty() {
final Provider<ColoredCarFactory> factoryProvider =
FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class);
// First injector claims the factory provider.
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(5.0d);
bind(ColoredCarFactory.class).toProvider(factoryProvider);
}
});
// Second injector must fail with a readable reuse error.
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(5.0d);
bind(ColoredCarFactory.class).toProvider(factoryProvider);
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(), "Factories.create() factories may only be used in one Injector!");
}
}
/** Factory-method parameters may only carry @Assisted; @Named there is rejected eagerly. */
@Test
public void testNonAssistedFactoryMethodParameter() {
try {
FactoryProvider.newFactory(NamedParameterFactory.class, Mustang.class);
fail();
} catch (ConfigurationException expected) {
assertContains(
expected.getMessage(),
"Only @Assisted is allowed for factory parameters, but found @Named");
}
}
/** Invalid factory: uses @Named on a factory-method parameter instead of @Assisted. */
interface NamedParameterFactory {
Car create(@Named("seats") int seats, double engineSize);
}
/** The synthetic default @Assisted instance must equal a real reflective @Assisted. */
@Test
public void testDefaultAssistedAnnotation() throws NoSuchFieldException {
Assisted plainAssisted =
Subaru.class.getDeclaredField("colorProvider").getAnnotation(Assisted.class);
assertEqualsBothWays(FactoryProvider2.DEFAULT_ANNOTATION, plainAssisted);
assertEquals(FactoryProvider2.DEFAULT_ANNOTATION.toString(), plainAssisted.toString());
}
/** Generic factory whose type parameter selects the produced Car subtype. */
interface GenericColoredCarFactory<T extends Car> {
T create(Color color);
}
/**
 * A single generic factory interface can be bound at multiple parameterizations
 * (Mustang, Camaro) via TypeLiteral-based newFactory.
 */
@Test
public void testGenericAssistedFactory() {
final TypeLiteral<GenericColoredCarFactory<Mustang>> mustangTypeLiteral =
new TypeLiteral<GenericColoredCarFactory<Mustang>>() {};
final TypeLiteral<GenericColoredCarFactory<Camaro>> camaroTypeLiteral =
new TypeLiteral<GenericColoredCarFactory<Camaro>>() {};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(5.0d);
bind(int.class).annotatedWith(Names.named("horsePower")).toInstance(250);
bind(int.class).annotatedWith(Names.named("modelYear")).toInstance(1984);
bind(mustangTypeLiteral)
.toProvider(
FactoryProvider.newFactory(
mustangTypeLiteral, TypeLiteral.get(Mustang.class)));
bind(camaroTypeLiteral)
.toProvider(
FactoryProvider.newFactory(
camaroTypeLiteral, TypeLiteral.get(Camaro.class)));
}
});
GenericColoredCarFactory<Mustang> mustangFactory =
injector.getInstance(Key.get(mustangTypeLiteral));
GenericColoredCarFactory<Camaro> camaroFactory =
injector.getInstance(Key.get(camaroTypeLiteral));
Mustang blueMustang = mustangFactory.create(Color.BLUE);
assertEquals(Color.BLUE, blueMustang.color);
assertEquals(5.0d, blueMustang.engineSize, 0.0);
Camaro redCamaro = camaroFactory.create(Color.RED);
assertEquals(Color.RED, redCamaro.color);
assertEquals(1984, redCamaro.modelYear);
assertEquals(250, redCamaro.horsePower);
}
/** Generic product type for the insurance-factory tests below. */
@SuppressWarnings("unused")
public interface Insurance<T extends Car> {}
/** Insurance for Mustangs; limit comes from the @Named("lowLimit") binding. */
public static class MustangInsurance implements Insurance<Mustang> {
private final double premium;
private final double limit;
@SuppressWarnings("unused")
private Mustang car;
@Inject
public MustangInsurance(
@Named("lowLimit") double limit, @Assisted Mustang car, @Assisted double premium) {
this.premium = premium;
this.limit = limit;
this.car = car;
}
public void sell() {}
}
/** Insurance for Camaros; limit comes from the @Named("highLimit") binding. */
public static class CamaroInsurance implements Insurance<Camaro> {
private final double premium;
private final double limit;
@SuppressWarnings("unused")
private Camaro car;
@Inject
public CamaroInsurance(
@Named("highLimit") double limit, @Assisted Camaro car, @Assisted double premium) {
this.premium = premium;
this.limit = limit;
this.car = car;
}
public void sell() {}
}
/** Non-generic factory for Mustang insurance. */
public interface MustangInsuranceFactory {
public Insurance<Mustang> create(Mustang car, double premium);
}
/** Non-generic factory for Camaro insurance. */
public interface CamaroInsuranceFactory {
public Insurance<Camaro> create(Camaro car, double premium);
}
/** Two concrete factory interfaces can coexist, each with its own @Named limit. */
@Test
public void testAssistedFactoryForConcreteType() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).annotatedWith(Names.named("lowLimit")).toInstance(50000.0d);
bind(Double.class).annotatedWith(Names.named("highLimit")).toInstance(100000.0d);
bind(MustangInsuranceFactory.class)
.toProvider(
FactoryProvider.newFactory(
MustangInsuranceFactory.class, MustangInsurance.class));
bind(CamaroInsuranceFactory.class)
.toProvider(
FactoryProvider.newFactory(
CamaroInsuranceFactory.class, CamaroInsurance.class));
}
});
MustangInsuranceFactory mustangInsuranceFactory =
injector.getInstance(MustangInsuranceFactory.class);
CamaroInsuranceFactory camaroInsuranceFactory =
injector.getInstance(CamaroInsuranceFactory.class);
Mustang mustang = new Mustang(5000d, Color.BLACK);
MustangInsurance mustangPolicy =
(MustangInsurance) mustangInsuranceFactory.create(mustang, 800.0d);
assertEquals(800.0d, mustangPolicy.premium, 0.0);
assertEquals(50000.0d, mustangPolicy.limit, 0.0);
Camaro camaro = new Camaro(3000, 1967, Color.BLUE);
CamaroInsurance camaroPolicy = (CamaroInsurance) camaroInsuranceFactory.create(camaro, 800.0d);
assertEquals(800.0d, camaroPolicy.premium, 0.0);
assertEquals(100000.0d, camaroPolicy.limit, 0.0);
}
/** Generic insurance factory parameterized by car type. */
public interface InsuranceFactory<T extends Car> {
public Insurance<T> create(T car, double premium);
}
/** One generic factory interface bound at two parameterizations via TypeLiteral. */
@Test
public void testAssistedFactoryForParameterizedType() {
final TypeLiteral<InsuranceFactory<Mustang>> mustangInsuranceFactoryType =
new TypeLiteral<InsuranceFactory<Mustang>>() {};
final TypeLiteral<InsuranceFactory<Camaro>> camaroInsuranceFactoryType =
new TypeLiteral<InsuranceFactory<Camaro>>() {};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).annotatedWith(Names.named("lowLimit")).toInstance(50000.0d);
bind(Double.class).annotatedWith(Names.named("highLimit")).toInstance(100000.0d);
bind(mustangInsuranceFactoryType)
.toProvider(
FactoryProvider.newFactory(
mustangInsuranceFactoryType, TypeLiteral.get(MustangInsurance.class)));
bind(camaroInsuranceFactoryType)
.toProvider(
FactoryProvider.newFactory(
camaroInsuranceFactoryType, TypeLiteral.get(CamaroInsurance.class)));
}
});
InsuranceFactory<Mustang> mustangInsuranceFactory =
injector.getInstance(Key.get(mustangInsuranceFactoryType));
InsuranceFactory<Camaro> camaroInsuranceFactory =
injector.getInstance(Key.get(camaroInsuranceFactoryType));
Mustang mustang = new Mustang(5000d, Color.BLACK);
MustangInsurance mustangPolicy =
(MustangInsurance) mustangInsuranceFactory.create(mustang, 800.0d);
assertEquals(800.0d, mustangPolicy.premium, 0.0);
assertEquals(50000.0d, mustangPolicy.limit, 0.0);
Camaro camaro = new Camaro(3000, 1967, Color.BLUE);
CamaroInsurance camaroPolicy = (CamaroInsurance) camaroInsuranceFactory.create(camaro, 800.0d);
assertEquals(800.0d, camaroPolicy.premium, 0.0);
assertEquals(100000.0d, camaroPolicy.limit, 0.0);
}
/** Generic implementation whose assisted parameter is the type variable T itself. */
public static class AutoInsurance<T extends Car> implements Insurance<T> {
private final double premium;
private final double limit;
private final T car;
@Inject
public AutoInsurance(double limit, @Assisted T car, @Assisted double premium) {
this.limit = limit;
this.car = car;
this.premium = premium;
}
public void sell() {}
}
/** Assisted parameters typed by a type variable resolve via the factory's TypeLiteral. */
@Test
public void testAssistedFactoryForTypeVariableParameters() {
final TypeLiteral<InsuranceFactory<Camaro>> camaroInsuranceFactoryType =
new TypeLiteral<InsuranceFactory<Camaro>>() {};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(50000.0d);
bind(camaroInsuranceFactoryType)
.toProvider(
FactoryProvider.newFactory(
camaroInsuranceFactoryType,
new TypeLiteral<AutoInsurance<Camaro>>() {}));
}
});
InsuranceFactory<Camaro> camaroInsuranceFactory =
injector.getInstance(Key.get(camaroInsuranceFactoryType));
Camaro camaro = new Camaro(3000, 1967, Color.BLUE);
AutoInsurance<?> camaroPolicy =
(AutoInsurance<?>) camaroInsuranceFactory.create(camaro, 800.0d);
assertEquals(800.0d, camaroPolicy.premium, 0.0);
assertEquals(50000.0d, camaroPolicy.limit, 0.0);
assertEquals(camaro, camaroPolicy.car);
}
/**
 * An injected Injector inside an assisted instance resolves the assisted key
 * per-instance (Segway looks Color up through the injector at call time).
 */
@Test
public void testInjectingAndUsingInjector() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Segway.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Segway green = (Segway) carFactory.create(Color.GREEN);
assertSame(Color.GREEN, green.getColor());
assertSame(Color.GREEN, green.getColor());
Segway pink = (Segway) carFactory.create(Color.PINK);
assertSame(Color.PINK, pink.getColor());
assertSame(Color.PINK, pink.getColor());
// A later creation must not change an earlier instance's resolved color.
assertSame(Color.GREEN, green.getColor());
}
/** Binding the same factory key twice to equivalent providers is tolerated. */
@Test
public void testDuplicateAssistedFactoryBinding() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(5.0d);
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Mustang.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Mustang blueMustang = (Mustang) carFactory.create(Color.BLUE);
assertEquals(Color.BLUE, blueMustang.color);
assertEquals(5.0d, blueMustang.engineSize, 0.0);
Mustang redMustang = (Mustang) carFactory.create(Color.RED);
assertEquals(Color.RED, redMustang.color);
assertEquals(5.0d, redMustang.engineSize, 0.0);
}
/**
 * Types whose factory method is literally named {@code equals}, to test that a
 * factory method may shadow {@code Object.equals} without confusing generation.
 */
public interface Equals {
enum ComparisonMethod {
SHALLOW,
DEEP;
}
interface Factory {
// Deliberately named 'equals' — must be treated as a factory method.
Equals equals(Equals.ComparisonMethod comparisonMethod);
}
public static class Impl implements Equals {
private final double sigma;
private final ComparisonMethod comparisonMethod;
@AssistedInject
public Impl(double sigma, @Assisted ComparisonMethod comparisonMethod) {
this.sigma = sigma;
this.comparisonMethod = comparisonMethod;
}
}
}
/** A factory method named equals(...) dispatches as a factory call, not Object.equals. */
@Test
public void testFactoryMethodCalledEquals() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).toInstance(0.01d);
bind(Equals.Factory.class)
.toProvider(
FactoryProvider.newFactory(Equals.Factory.class, Equals.Impl.class));
}
});
Equals.Factory equalsFactory = injector.getInstance(Equals.Factory.class);
Equals.Impl shallowEquals = (Impl) equalsFactory.equals(ComparisonMethod.SHALLOW);
assertEquals(ComparisonMethod.SHALLOW, shallowEquals.comparisonMethod);
assertEquals(0.01d, shallowEquals.sigma, 0.0);
}
/** Car that resolves its assisted color lazily through an injected Injector. */
static class Segway implements Car {
@Inject Injector injector;
Color getColor() {
// Looks up the assisted Color under the synthetic default @Assisted key.
return injector.getInstance(Key.get(Color.class, FactoryProvider2.DEFAULT_ANNOTATION));
}
}
/** The exact assisted argument instance is what the constructor receives (same reference). */
@Test
public void testReturnValueMatchesParamValue() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
install(new FactoryModuleBuilder().build(Delegater.Factory.class));
}
});
Delegater delegate = new Delegater();
Delegater user = injector.getInstance(Delegater.Factory.class).create(delegate);
assertSame(delegate, user.delegate);
}
/** Type assisted with an instance of its own class, for reference-identity checks. */
static class Delegater {
interface Factory {
Delegater create(Delegater delegate);
}
private final Delegater delegate;
@Inject
Delegater(@Assisted Delegater delegater) {
this.delegate = delegater;
}
// Plain constructor for building the un-injected delegate in tests.
Delegater() {
this.delegate = null;
}
}
public abstract static class AbstractAssisted {
interface Factory<O extends AbstractAssisted, I extends CharSequence> {
O create(I string);
}
}
/** Subclass used only through the generic AbstractAssisted.Factory key (no own Factory type). */
static class ConcreteAssisted extends AbstractAssisted {
  @Inject
  ConcreteAssisted(@SuppressWarnings("unused") @Assisted String string) {}
}
/**
 * Subclass whose factory interfaces override the inherited generic create(String)
 * with a covariant return type (which makes the compiler emit bridge methods the
 * generated factory must forward correctly).
 */
static class ConcreteAssistedWithOverride extends AbstractAssisted {
  @AssistedInject
  ConcreteAssistedWithOverride(@SuppressWarnings("unused") @Assisted String string) {}

  @AssistedInject
  ConcreteAssistedWithOverride(@SuppressWarnings("unused") @Assisted StringBuilder sb) {}

  /** Overrides create(String) with a covariant return type only. */
  interface Factory extends AbstractAssisted.Factory<ConcreteAssistedWithOverride, String> {
    @Override
    ConcreteAssistedWithOverride create(String string);
  }

  /** Like Factory, but also adds a StringBuilder overload targeting the second constructor. */
  interface Factory2 extends AbstractAssisted.Factory<ConcreteAssistedWithOverride, String> {
    @Override
    ConcreteAssistedWithOverride create(String string);

    ConcreteAssistedWithOverride create(StringBuilder sb);
  }
}
/** Subclass relying solely on the inherited generic create(I) — no explicit override. */
static class ConcreteAssistedWithoutOverride extends AbstractAssisted {
  @Inject
  ConcreteAssistedWithoutOverride(@SuppressWarnings("unused") @Assisted String string) {}

  interface Factory extends AbstractAssisted.Factory<ConcreteAssistedWithoutOverride, String> {}
}
/** Public variant of ConcreteAssistedWithOverride: same override + overload, public visibility. */
public static class Public extends AbstractAssisted {
  @AssistedInject
  Public(@SuppressWarnings("unused") @Assisted String string) {}

  @AssistedInject
  Public(@SuppressWarnings("unused") @Assisted StringBuilder sb) {}

  public interface Factory extends AbstractAssisted.Factory<Public, String> {
    @Override
    Public create(String string);

    Public create(StringBuilder sb);
  }
}
// See https://github.com/google/guice/issues/904
/**
 * Exercises every combination of inherited / overridden / overloaded factory methods,
 * calling each through both the concrete factory type and its generic supertype, to
 * verify the generated implementation forwards bridge and default methods correctly.
 * The assertions are the create() calls themselves — a mis-dispatched bridge method
 * would throw.
 */
@Test
public void testGeneratedDefaultMethodsForwardCorrectly() {
  // Factory bound purely by its generic parameterized key.
  final Key<AbstractAssisted.Factory<ConcreteAssisted, String>> concreteKey =
      new Key<AbstractAssisted.Factory<ConcreteAssisted, String>>() {};
  Injector injector =
      Guice.createInjector(
          new AbstractModule() {
            @Override
            protected void configure() {
              install(
                  new FactoryModuleBuilder().build(ConcreteAssistedWithOverride.Factory.class));
              install(
                  new FactoryModuleBuilder().build(ConcreteAssistedWithOverride.Factory2.class));
              install(
                  new FactoryModuleBuilder()
                      .build(ConcreteAssistedWithoutOverride.Factory.class));
              install(new FactoryModuleBuilder().build(Public.Factory.class));
              install(new FactoryModuleBuilder().build(concreteKey));
            }
          });

  // Overridden create(String): call via concrete type and via generic supertype.
  ConcreteAssistedWithOverride.Factory factory1 =
      injector.getInstance(ConcreteAssistedWithOverride.Factory.class);
  factory1.create("foo");
  AbstractAssisted.Factory<ConcreteAssistedWithOverride, String> factory1Abstract = factory1;
  factory1Abstract.create("foo");

  // Override plus an extra StringBuilder overload.
  ConcreteAssistedWithOverride.Factory2 factory2 =
      injector.getInstance(ConcreteAssistedWithOverride.Factory2.class);
  factory2.create("foo");
  factory2.create(new StringBuilder("foo"));
  AbstractAssisted.Factory<ConcreteAssistedWithOverride, String> factory2Abstract = factory2;
  factory2Abstract.create("foo");

  // No override: only the inherited generic create(I) exists.
  ConcreteAssistedWithoutOverride.Factory factory3 =
      injector.getInstance(ConcreteAssistedWithoutOverride.Factory.class);
  factory3.create("foo");
  AbstractAssisted.Factory<ConcreteAssistedWithoutOverride, String> factory3Abstract = factory3;
  factory3Abstract.create("foo");

  // Public visibility variant.
  Public.Factory factory4 = injector.getInstance(Public.Factory.class);
  factory4.create("foo");
  factory4.create(new StringBuilder("foo"));
  AbstractAssisted.Factory<Public, String> factory4Abstract = factory4;
  factory4Abstract.create("foo");

  // Factory resolved through the generic key alone.
  AbstractAssisted.Factory<ConcreteAssisted, String> factory5 = injector.getInstance(concreteKey);
  factory5.create("foo");
}
}
| |
package org.multibit.viewsystem.swing;
import org.multibit.controller.Controller;
import org.multibit.utils.ImageLoader;
import org.multibit.viewsystem.DisplayHint;
import org.multibit.viewsystem.Viewable;
import org.multibit.viewsystem.swing.view.components.FontSizer;
import org.multibit.viewsystem.swing.view.panels.HelpContentsPanel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import javax.swing.plaf.TabbedPaneUI;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
/**
 * A {@link JTabbedPane} whose tabs are custom components: a title label plus an
 * optional close button. Selecting a tab notifies the MultiBit {@link Controller}
 * so the {@link Viewable} inside the tab can redraw itself.
 */
public class MultiBitTabbedPane extends JTabbedPane {

    private static final long serialVersionUID = 6530125716859367873L;

    private static final int CLOSE_ICON_WIDTH = 10;
    private static final int CLOSE_ICON_HEIGHT = 10;
    private static final int SEPARATION_DISTANCE = 2;

    private static final Logger log = LoggerFactory.getLogger(MultiBitTabbedPane.class);

    /** The small 'X' icon used on close buttons (a 10x10 png). */
    private ImageIcon closeTabIcon;

    /** Preferred size for the close buttons (icon size plus a small margin). */
    private Dimension closeButtonSize;

    /** Count of tabs ever added; kept for diagnostics, not read for logic. */
    private int tabCounter = 0;

    private final Controller controller;

    /** Self-reference so anonymous listeners can reach the pane unambiguously. */
    private final MultiBitTabbedPane thisTabbedPane;

    /** Global switch: when false, {@link #setSelectedIndex(int)} skips all view updates. */
    private static boolean enableUpdates = false;

    public MultiBitTabbedPane(final Controller controller) {
        thisTabbedPane = this;
        this.controller = controller;
        applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

        // Create an image icon of the small 'X' for use with a close
        // button on each tab. The png loaded is a 10x10 graphic.
        closeTabIcon = ImageLoader.createImageIcon(ImageLoader.CLOSE_TAB_ICON_FILE);

        // Create a Dimension that can be used to size the close buttons.
        closeButtonSize = new Dimension(CLOSE_ICON_WIDTH + SEPARATION_DISTANCE, CLOSE_ICON_HEIGHT + SEPARATION_DISTANCE);

        // Register so getToolTipText(MouseEvent) is consulted for per-tab tooltips.
        ToolTipManager.sharedInstance().registerComponent(this);
    }

    /**
     * Selects the tab and, when updates are enabled, tells the controller which view
     * is now showing and asks that view to redraw. Errors are logged, never thrown,
     * so tab switching cannot break the UI.
     */
    @Override
    public void setSelectedIndex(int index) {
        super.setSelectedIndex(index);

        if (!enableUpdates) {
            return;
        }
        log.debug("Set selected index = " + index);

        try {
            // Locate the Viewable inside the newly selected tab, if any.
            JPanel tabPanelComponent = (JPanel) getComponentAt(index);
            Viewable selectedView = null;
            if (tabPanelComponent != null) {
                Component[] childComponents = tabPanelComponent.getComponents();
                if (childComponents != null && childComponents.length > 0 && childComponents[0] instanceof Viewable) {
                    selectedView = (Viewable) childComponents[0];
                    if (controller.getCurrentView() == selectedView.getViewId()) {
                        // We are already displaying the correct tab.
                        // Just update the contents.
                        selectedView.displayView(DisplayHint.COMPLETE_REDRAW);
                        controller.fireDataChangedUpdateNow();
                    } else {
                        // Select the new tab, update the content.
                        controller.setCurrentView(selectedView.getViewId());
                        selectedView.displayView(DisplayHint.COMPLETE_REDRAW);

                        // Fire data change but suppress re-entrant view updates while doing so.
                        enableUpdates = false;
                        controller.fireDataChangedUpdateNow();
                        enableUpdates = true;
                    }
                }
            }

            // Refresh the tab's tooltip from the view it shows.
            Component tabComponent = getTabComponentAt(index);
            if (tabComponent instanceof JLabel && selectedView != null) {
                ((JLabel) tabComponent).setToolTipText(HelpContentsPanel.createTooltipText(selectedView.getViewTooltip()));
            }
        } catch (Throwable e) {
            // Do not let errors percolate out of tab display.
            log.error(e.getClass().getName() + " " + e.getMessage());
        }
    }

    /**
     * Returns the Viewable hosted by the currently selected tab, or null if there is
     * no selection or the tab does not contain a Viewable.
     */
    public Viewable getCurrentlyShownView() {
        JPanel tabComponent = (JPanel) getSelectedComponent();
        if (tabComponent != null) {
            Component[] childComponents = tabComponent.getComponents();
            if (childComponents != null && childComponents.length > 0 && childComponents[0] instanceof Viewable) {
                return (Viewable) childComponents[0];
            }
        }
        return null;
    }

    /** Adds a non-closeable tab with no tooltip. */
    @Override
    public void addTab(String title, Icon icon, Component component) {
        addTab(title, icon, "", component, false);
    }

    /** Adds a non-closeable tab with the given tooltip. */
    public void addTab(String title, Icon icon, String tooltip, Component component) {
        addTab(title, icon, tooltip, component, false);
    }

    /**
     * Adds a tab whose header is a custom panel (label plus, when {@code isCloseable},
     * a small close button that removes the tab and re-notifies the controller).
     */
    public void addTab(String title, Icon icon, String tooltip, Component component, boolean isCloseable) {
        final Component finalComponent = component;

        // Create a transparent panel that represents the tab header.
        JPanel tab = new JPanel(new GridBagLayout());
        tab.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
        GridBagConstraints constraints = new GridBagConstraints();
        tab.setOpaque(false);

        // Create a label for the tab title and icon.
        JLabel tabLabel = new JLabel(title);
        tabLabel.setFont(FontSizer.INSTANCE.getAdjustedDefaultFont());
        tabLabel.setIcon(icon);
        tabLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

        tabCounter++;

        constraints.fill = GridBagConstraints.NONE;
        constraints.gridx = 0;
        constraints.gridy = 0;
        constraints.gridwidth = 1;
        constraints.gridheight = 1;
        constraints.weightx = 0.8;
        constraints.weighty = 1;
        constraints.anchor = GridBagConstraints.CENTER;
        tab.add(tabLabel, constraints);

        if (isCloseable) {
            // Close button sized to just fit the 'X' icon; clicking it locates the
            // tab by component and removes it from the tabbed pane.
            JButton tabCloseButton = new JButton(closeTabIcon);
            tabCloseButton.setPreferredSize(closeButtonSize);
            tabCloseButton.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    int closeTabNumber = thisTabbedPane.indexOfComponent(finalComponent);
                    if (closeTabNumber < 0) {
                        // Tab has already been removed - nothing to do.
                        return;
                    }
                    thisTabbedPane.removeTabAt(closeTabNumber);

                    // Notify the controller of the newly shown view, if any tab remains.
                    JPanel selectedTab = (JPanel) thisTabbedPane.getSelectedComponent();
                    if (selectedTab == null) {
                        return;
                    }
                    Component[] components = selectedTab.getComponents();
                    if (components != null && components.length > 0 && components[0] instanceof Viewable) {
                        Viewable selectedView = (Viewable) components[0];
                        selectedView.displayView(DisplayHint.COMPLETE_REDRAW);
                        controller.displayView(selectedView.getViewId());
                    }
                }
            });

            // Small fixed-size spacer between the label and the close button.
            JPanel fill1 = new JPanel();
            fill1.setOpaque(false);
            fill1.setMinimumSize(new Dimension(4, 4));
            fill1.setPreferredSize(new Dimension(4, 4));
            fill1.setMaximumSize(new Dimension(4, 4));

            constraints.fill = GridBagConstraints.BOTH;
            constraints.gridx = 1;
            constraints.gridy = 0;
            constraints.gridwidth = 1;
            constraints.gridheight = 1;
            constraints.weightx = 0.05;
            constraints.weighty = 1;
            constraints.anchor = GridBagConstraints.CENTER;
            tab.add(fill1, constraints);

            constraints.fill = GridBagConstraints.NONE;
            constraints.gridx = 2;
            constraints.gridy = 0;
            constraints.gridwidth = 1;
            constraints.gridheight = 1;
            constraints.weightx = 0.2;
            constraints.weighty = 1;
            constraints.anchor = GridBagConstraints.BASELINE_TRAILING;
            tab.add(tabCloseButton, constraints);
        }

        // Add the tab with a null title, then install our custom panel as the
        // tab header component instead of the usual String/Icon combination.
        addTab(null, component);
        ToolTipManager.sharedInstance().unregisterComponent(tab);
        setTabComponentAt(getTabCount() - 1, tab);
    }

    /**
     * Per-tab tooltip: delegates to the Viewable hosted by the tab under the mouse.
     */
    @Override
    public String getToolTipText(MouseEvent e) {
        int index = ((TabbedPaneUI) ui).tabForCoordinate(this, e.getX(), e.getY());
        if (index != -1) {
            JComponent selectedTab = (JComponent) getComponentAt(index);
            Component[] components = selectedTab.getComponents();
            if (components != null && components.length > 0 && components[0] instanceof Viewable) {
                return HelpContentsPanel.createTooltipText(((Viewable) components[0]).getViewTooltip());
            }
        }
        return null;
    }

    /** Removes every tab. */
    public void removeAllTabs() {
        int tabCount = this.getTabCount();
        for (int i = 0; i < tabCount; i++) {
            this.removeTabAt(0);
        }
    }

    public static boolean isEnableUpdates() {
        return enableUpdates;
    }

    public static void setEnableUpdates(boolean enableUpdates) {
        MultiBitTabbedPane.enableUpdates = enableUpdates;
    }

    /** No border around the tabbed pane content. */
    @Override
    public Insets getInsets() {
        return new Insets(0, 0, 0, 0);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.TaskQueue;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLTask;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.table.create.like.CreateTableLikeDesc;
import org.apache.hadoop.hive.ql.ddl.table.drop.DropTableDesc;
import org.apache.hadoop.hive.ql.ddl.table.misc.properties.AlterTableSetPropertiesDesc;
import org.apache.hadoop.hive.ql.exec.StatsTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.ExportWork;
import org.apache.hadoop.hive.ql.session.SessionState;
/**
 * A subclass of the {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer} that just handles
 * acid export statements. It works by rewriting the acid export into insert statements into a temporary table,
 * and then export it from there.
 */
public class AcidExportSemanticAnalyzer extends RewriteSemanticAnalyzer {
  AcidExportSemanticAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  /** Entry point. Only TOK_EXPORT trees are accepted; anything else is a dispatch error. */
  protected void analyze(ASTNode tree) throws SemanticException {
    if (tree.getToken().getType() != HiveParser.TOK_EXPORT) {
      throw new RuntimeException("Asked to parse token " + tree.getName() + " in " +
          "AcidExportSemanticAnalyzer");
    }
    analyzeAcidExport(tree);
  }

  /**
   * Exporting an Acid table is more complicated than a flat table. It may contain delete events,
   * which can only be interpreted properly within the context of the table/metastore where they
   * were generated. It may also contain insert events that belong to transactions that aborted
   * where the same constraints apply.
   * In order to make the export artifact free of these constraints, the export does a
   * insert into tmpTable select * from &lt;export table&gt; to filter/apply the events in current
   * context and then export the tmpTable. This export artifact can now be imported into any
   * table on any cluster (subject to schema checks etc).
   * See {@link #analyzeAcidExport(ASTNode)}
   * @param tree Export statement
   * @return true if exporting an Acid table.
   */
  public static boolean isAcidExport(ASTNode tree) throws SemanticException {
    assert tree != null && tree.getToken() != null && tree.getToken().getType() == HiveParser.TOK_EXPORT;
    Tree tokTab = tree.getChild(0);
    assert tokTab != null && tokTab.getType() == HiveParser.TOK_TAB;
    Table tableHandle = null;
    try {
      tableHandle = getTable((ASTNode) tokTab.getChild(0), Hive.get(), false);
    } catch(HiveException ex) {
      throw new SemanticException(ex);
    }

    //tableHandle can be null if table doesn't exist
    return tableHandle != null && AcidUtils.isFullAcidTable(tableHandle);
  }

  /**
   * Builds a qualified, collision-resistant name (db.table_&lt;uuid&gt;) for the temp table
   * that will receive the filtered rows of {@code exportTable}.
   */
  private static String getTmptTableNameForExport(Table exportTable) {
    String tmpTableDb = exportTable.getDbName();
    // UUID with '-' replaced by '_' so the name stays a valid identifier.
    String tmpTableName = exportTable.getTableName() + "_" + UUID.randomUUID().toString().replace('-', '_');
    return Warehouse.getQualifiedName(tmpTableDb, tmpTableName);
  }

  /**
   * See {@link #isAcidExport(ASTNode)}
   * 1. create the temp table T
   * 2. compile 'insert into T select * from acidTable'
   * 3. compile 'export acidTable' (acidTable will be replaced with T during execution)
   * 4. create task to drop T
   *
   * Using a true temp (session level) table means it should not affect replication and the table
   * is not visible outside the Session that created it, for security.
   */
  private void analyzeAcidExport(ASTNode ast) throws SemanticException {
    assert ast != null && ast.getToken() != null && ast.getToken().getType() == HiveParser.TOK_EXPORT;
    ASTNode tableTree = (ASTNode)ast.getChild(0);
    assert tableTree != null && tableTree.getType() == HiveParser.TOK_TAB;
    ASTNode tokRefOrNameExportTable = (ASTNode) tableTree.getChild(0);
    Table exportTable = getTargetTable(tokRefOrNameExportTable);

    if (exportTable != null && (exportTable.isView() || exportTable.isMaterializedView())) {
      throw new SemanticException("Views and Materialized Views can not be exported.");
    }
    assert AcidUtils.isFullAcidTable(exportTable);

    //need to create the table "manually" rather than creating a task since it has to exist to
    // compile the insert into T...
    final String newTableName = getTmptTableNameForExport(exportTable); //this is db.table
    final TableName newTableNameRef = HiveTableName.of(newTableName);
    Map<String, String> tblProps = new HashMap<>();
    // The temp table is written as a NON-transactional table (flipped back just before export).
    tblProps.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, Boolean.FALSE.toString());
    String location;

    // for temporary tables we set the location to something in the session's scratch dir
    // it has the same life cycle as the tmp table
    try {
      // Generate a unique ID for temp table path.
      // This path will be fixed for the life of the temp table.
      Path path = new Path(SessionState.getTempTableSpace(conf), UUID.randomUUID().toString());
      path = Warehouse.getDnsPath(path, conf);
      location = path.toString();
    } catch (MetaException err) {
      throw new SemanticException("Error while generating temp table path:", err);
    }

    CreateTableLikeDesc ctlt = new CreateTableLikeDesc(newTableName,
        false, true, null,
        null, location, null, null,
        tblProps,
        true, //important so we get an exception on name collision
        Warehouse.getQualifiedName(exportTable.getTTable()), false);
    Table newTable;
    try {
      ReadEntity dbForTmpTable = new ReadEntity(db.getDatabase(exportTable.getDbName()));
      inputs.add(dbForTmpTable); //so the plan knows we are 'reading' this db - locks, security...
      // Create the temp table NOW (execute the DDL task inline) so the rewritten
      // insert statement below can be compiled against it.
      DDLTask createTableTask = (DDLTask) TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), ctlt), conf);
      createTableTask.setConf(conf); //above get() doesn't set it
      Context context = new Context(conf);
      createTableTask.initialize(null, null, new TaskQueue(context), context);
      createTableTask.execute();
      newTable = db.getTable(newTableName);
    } catch(HiveException ex) {
      throw new SemanticException(ex);
    }

    //now generate insert statement
    //insert into newTableName select * from ts <where partition spec>
    StringBuilder rewrittenQueryStr = generateExportQuery(newTable.getPartCols(), tokRefOrNameExportTable, tableTree,
        newTableName);
    ReparseResult rr = parseRewrittenQuery(rewrittenQueryStr, ctx.getCmd());
    Context rewrittenCtx = rr.rewrittenCtx;
    rewrittenCtx.setIsUpdateDeleteMerge(false); //it's set in parseRewrittenQuery()
    ASTNode rewrittenTree = rr.rewrittenTree;
    try {
      useSuper = true;
      //newTable has to exist at this point to compile
      super.analyze(rewrittenTree, rewrittenCtx);
    } finally {
      useSuper = false;
    }

    //now we have the rootTasks set up for Insert ... Select
    removeStatsTasks(rootTasks);

    //now make an ExportTask from temp table
    /*analyzeExport() creates TableSpec which in turn tries to build
     "public List<Partition> partitions" by looking in the metastore to find Partitions matching
     the partition spec in the Export command. These of course don't exist yet since we've not
     ran the insert stmt yet!!!!!!!
      */
    Task<ExportWork> exportTask = ExportSemanticAnalyzer.analyzeExport(ast, newTableName, db, conf, inputs, outputs);

    // Add an alter table task to set transactional props
    // do it after populating temp table so that it's written as non-transactional table but
    // update props before export so that export archive metadata has these props. This way when
    // IMPORT is done for this archive and target table doesn't exist, it will be created as Acid.
    Map<String, String> mapProps = new HashMap<>();
    mapProps.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, Boolean.TRUE.toString());
    AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(newTableNameRef, null, null, false,
        mapProps, false, false, null);
    addExportTask(rootTasks, exportTask, TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));

    // Now make a task to drop temp table
    // {@link DropTableAnalyzer#analyzeInternal(ASTNode ast)}
    ReplicationSpec replicationSpec = new ReplicationSpec();
    DropTableDesc dropTblDesc = new DropTableDesc(newTableName, false, true, replicationSpec);
    Task<DDLWork> dropTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), dropTblDesc), conf);
    exportTask.addDependentTask(dropTask);
    markReadEntityForUpdate();

    if (ctx.isExplainPlan()) {
      try {
        //so that "explain" doesn't "leak" tmp tables
        // TODO: catalog
        db.dropTable(newTable.getDbName(), newTable.getTableName(), true, true, true);
      } catch(HiveException ex) {
        LOG.warn("Unable to drop " + newTableName + " due to: " + ex.getMessage(), ex);
      }
    }
  }

  /**
   * Generate
   * insert into newTableName select * from ts &lt;where partition spec&gt;
   * for EXPORT command.
   */
  private StringBuilder generateExportQuery(List<FieldSchema> partCols, ASTNode tokRefOrNameExportTable,
      ASTNode tableTree, String newTableName) throws SemanticException {
    StringBuilder rewrittenQueryStr = new StringBuilder("insert into ").append(newTableName);
    addPartitionColsToInsert(partCols, rewrittenQueryStr);
    rewrittenQueryStr.append(" select * from ").append(getFullTableNameForSQL(tokRefOrNameExportTable));
    //builds partition spec so we can build suitable WHERE clause
    TableSpec exportTableSpec = new TableSpec(db, conf, tableTree, false, true);
    if (exportTableSpec.getPartSpec() != null) {
      StringBuilder whereClause = null;
      int partColsIdx = -1; //keep track of corresponding col in partCols
      for (Map.Entry<String, String> ent : exportTableSpec.getPartSpec().entrySet()) {
        partColsIdx++;
        if (ent.getValue() == null) {
          continue; //partial spec
        }
        if (whereClause == null) {
          whereClause = new StringBuilder(" WHERE ");
        }
        if (whereClause.length() > " WHERE ".length()) {
          whereClause.append(" AND ");
        }
        // Quote the partition column and render its value with the correct type syntax.
        whereClause.append(HiveUtils.unparseIdentifier(ent.getKey(), conf))
            .append(" = ").append(genPartValueString(partCols.get(partColsIdx).getType(), ent.getValue()));
      }
      if (whereClause != null) {
        rewrittenQueryStr.append(whereClause);
      }
    }
    return rewrittenQueryStr;
  }

  /**
   * Makes the exportTask run after all other tasks of the "insert into T ..." are done.
   * Walks the task DAG; every leaf gets alterTable appended, with exportTask after it.
   */
  private void addExportTask(List<Task<?>> rootTasks,
      Task<ExportWork> exportTask, Task<DDLWork> alterTable) {
    for (Task<? extends Serializable> t : rootTasks) {
      if (t.getNumChild() <= 0) {
        //todo: ConditionalTask#addDependentTask(Task) doesn't do the right thing: HIVE-18978
        t.addDependentTask(alterTable);
        //this is a leaf so add exportTask to follow it
        alterTable.addDependentTask(exportTask);
      } else {
        addExportTask(t.getDependentTasks(), exportTask, alterTable);
      }
    }
  }

  /**
   * Detaches every StatsTask from its parents so no stats are gathered for the temp table.
   */
  private void removeStatsTasks(List<Task<?>> rootTasks) {
    List<Task<?>> statsTasks = findStatsTasks(rootTasks, null);
    if (statsTasks == null) {
      return;
    }
    for (Task<?> statsTask : statsTasks) {
      if (statsTask.getParentTasks() == null) {
        continue; //should never happen
      }
      // Copy the parent list first - removeDependentTask mutates it.
      for (Task<?> t : new ArrayList<>(statsTask.getParentTasks())) {
        t.removeDependentTask(statsTask);
      }
    }
  }

  /**
   * Recursively collects all StatsTasks reachable from {@code rootTasks}.
   * Returns null when none are found (accumulator is lazily created).
   */
  private List<Task<?>> findStatsTasks(
      List<Task<?>> rootTasks, List<Task<?>> statsTasks) {
    for (Task<? extends Serializable> t : rootTasks) {
      if (t instanceof StatsTask) {
        if (statsTasks == null) {
          statsTasks = new ArrayList<>();
        }
        statsTasks.add(t);
      }
      if (t.getDependentTasks() != null) {
        statsTasks = findStatsTasks(t.getDependentTasks(), statsTasks);
      }
    }
    return statsTasks;
  }
}
| |
//
// Copyright 2018 SenX S.A.S.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.plugins.tcp;
import com.google.common.base.Charsets;
import io.warp10.script.MemoryWarpScriptStack;
import io.warp10.script.WarpScriptStack.Macro;
import io.warp10.script.WarpScriptStopException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;
public class TCPManager extends Thread {
private static final Logger LOG = LoggerFactory.getLogger(TCPManager.class);
private static final int DEFAULT_RETRY = 30000;
private static final int DEFAULT_QSIZE = 1024;
private static final int DEFAULT_MAXMESSAGES = 1;
private static final int DEFAULT_MAXCONNECTIONS = 1;
private static final String DEFAULT_CHARSET = "UTF-8";
private static final String PARAM_MODE = "mode";
private static final String PARAM_RETRY = "retry";
private static final String PARAM_MACRO = "macro";
private static final String PARAM_PARALLELISM = "parallelism";
private static final String PARAM_PARTITIONER = "partitioner";
private static final String PARAM_QSIZE = "qsize";
private static final String PARAM_HOST = "host";
private static final String PARAM_PORT = "port";
private static final String PARAM_TIMEOUT = "timeout";
private static final String PARAM_MAXMESSAGES = "maxMessages";
private static final String PARAM_MAXCONNECTIONS = "maxConnections";
private static final String PARAM_CHARSET = "charset";
private final MemoryWarpScriptStack stack;
private final String mode;
private final long retry;
private final Macro macro;
private final Macro partitioner;
private final String host;
private final long timeout;
private final int maxMessages;
private final int maxConnections;
private final String charset;
private final int parallelism;
private final int port;
private Thread[] executors;
private boolean done;
private final String warpscript;
private final LinkedBlockingQueue<List<Object>>[] queues;
private ServerSocket serverSocket;
private ThreadPoolExecutor clientsExecutor;
private Socket clientSocket;
public TCPManager(Path p) throws Exception {
//
// Read content of mc2 file
//
ByteArrayOutputStream baos = new ByteArrayOutputStream();
InputStream in = new FileInputStream(p.toFile());
byte[] buf = new byte[8192];
while (true) {
int len = in.read(buf);
if (len < 0) {
break;
}
baos.write(buf, 0, len);
}
in.close();
warpscript = new String(baos.toByteArray(), Charsets.UTF_8);
stack = new MemoryWarpScriptStack(null, null, new Properties());
stack.maxLimits();
try {
stack.execMulti(warpscript);
} catch (Throwable t) {
t.printStackTrace();
LOG.error("Caught exception while loading '" + p.getFileName() + "'.", t);
}
Object top = stack.pop();
if (!(top instanceof Map)) {
throw new RuntimeException("TCP consumer spec must leave a configuration map on top of the stack.");
}
Map<Object, Object> config = (Map<Object, Object>) top;
//
// Extract parameters
//
mode = (String) config.get(PARAM_MODE);
retry = ((Number) config.getOrDefault(PARAM_RETRY, DEFAULT_RETRY)).longValue();
macro = (Macro) config.get(PARAM_MACRO);
partitioner = (Macro) config.get(PARAM_PARTITIONER);
host = String.valueOf(config.get(PARAM_HOST));
port = ((Number) config.get(PARAM_PORT)).intValue();
parallelism = ((Number) config.getOrDefault(PARAM_PARALLELISM, 1)).intValue();
timeout = ((Number) config.getOrDefault(PARAM_TIMEOUT, 0L)).longValue();
maxMessages = ((Number) config.getOrDefault(PARAM_MAXMESSAGES, DEFAULT_MAXMESSAGES)).intValue();
maxConnections = ((Number) config.getOrDefault(PARAM_MAXCONNECTIONS, DEFAULT_MAXCONNECTIONS)).intValue();
charset = String.valueOf(config.getOrDefault(PARAM_CHARSET, DEFAULT_CHARSET));
int qsize = ((Number) config.getOrDefault(PARAM_QSIZE, DEFAULT_QSIZE)).intValue();
if (null == partitioner) {
queues = new LinkedBlockingQueue[1];
queues[0] = new LinkedBlockingQueue<List<Object>>(qsize);
} else {
queues = new LinkedBlockingQueue[parallelism];
for (int i = 0; i < parallelism; i++) {
queues[i] = new LinkedBlockingQueue<List<Object>>(qsize);
}
}
initExecutors();
// Create server or client socket
if ("server".equals(mode)) {
serverSocket = new ServerSocket(port, 50, InetAddress.getByName(host));
} else if (!"client".equals(mode)) {
throw new RuntimeException("Mode must be either server or client.");
}
done = false;
clientsExecutor = new ThreadPoolExecutor(maxConnections, maxConnections, 30000L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(maxConnections), new NamedThreadFactory("Warp TCP Client for port " + port));
clientsExecutor.allowCoreThreadTimeOut(true);
setDaemon(true);
setName("[TCP Server for port " + port + "]");
start();
}
private void initExecutors() {
executors = new Thread[parallelism];
for (int i = 0; i < parallelism; i++) {
final MemoryWarpScriptStack stack = new MemoryWarpScriptStack(null, null, new Properties());
stack.maxLimits();
final LinkedBlockingQueue<List<Object>> queue = queues[Math.min(i, queues.length - 1)];
executors[i] = new Thread() {
@Override
public void run() {
while (true) {
try {
List<List<Object>> msgs = new ArrayList<List<Object>>();
if (timeout > 0) {
List<Object> msg = queue.poll(timeout, TimeUnit.MILLISECONDS);
if (null != msg) {
msgs.add(msg);
queue.drainTo(msgs, maxMessages);
}
} else {
List<Object> msg = queue.take();
msgs.add(msg);
queue.drainTo(msgs, maxMessages);
}
stack.clear();
if (0 < msgs.size()) {
stack.push(msgs);
} else {
stack.push(null);
}
stack.exec(macro);
} catch (InterruptedException e) {
return;
} catch (WarpScriptStopException wsse) {
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
executors[i].setName("[TCP Executor on port " + port + " #" + i + "]");
executors[i].setDaemon(true);
executors[i].start();
}
}
@Override
public void run() {
while (!done) {
try {
if (null != serverSocket) {
clientSocket = serverSocket.accept();
} else if (null == clientSocket || clientSocket.isClosed()) {
try {
clientSocket = new Socket(host, port);
} catch (SocketException | UnknownHostException e) {
// Retry later
LockSupport.parkNanos(retry * 1000000);
continue;
}
} else {
// Everything is fine, check later if it still is
LockSupport.parkNanos(retry * 1000000);
continue;
}
// Execute a new TCPClient with the new Socket
try {
clientsExecutor.execute(new TCPClient(clientSocket, partitioner, queues, charset));
} catch (RejectedExecutionException ree) {
// If there are too many connections, immediately close this one.
clientSocket.close();
}
} catch (SocketException se) {
// Closed socket
} catch (IOException e) {
LOG.error("Caught exception while receiving message", e);
}
}
clientsExecutor.shutdownNow();
try {
serverSocket.close();
} catch (Exception e) {
}
}
public void end() {
done = true;
if (null != serverSocket) {
try {
serverSocket.close();
} catch (IOException e) {
}
}
if (null != clientSocket) {
try {
clientSocket.close();
} catch (IOException e) {
}
}
for (Thread t: executors) {
t.interrupt();
}
}
/**
 * Returns the WarpScript code associated with this instance.
 */
public String getWarpScript() {
  return this.warpscript;
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.extractMethod;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.util.containers.HashSet;
import com.intellij.util.containers.IntArrayList;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
 * Wraps the {@link ControlFlow} built for a code fragment selected for the
 * "Extract Method" refactoring, and answers queries about that selection:
 * its exit statements/points, input and output variables, and variables used
 * before/inside/after the selected control-flow offset range
 * [{@code myFlowStart}, {@code myFlowEnd}).
 */
public class ControlFlowWrapper {
  private static final Logger LOG = Logger.getInstance(ControlFlowWrapper.class);
  private final ControlFlow myControlFlow;
  // Control-flow offsets delimiting the selected elements.
  private final int myFlowStart;
  private final int myFlowEnd;
  // True when the fragment has several exit points and the extracted method
  // must signal "exited" to the caller via a conditional exit.
  private boolean myGenerateConditionalExit;
  private Collection<PsiStatement> myExitStatements;
  private PsiStatement myFirstExitStatementCopy;
  private IntArrayList myExitPoints;

  /**
   * Builds the control flow for {@code codeFragment} and computes the offset
   * range covering {@code elements}.
   *
   * @throws PrepareFailedException if control-flow analysis is cancelled.
   */
  public ControlFlowWrapper(Project project, PsiElement codeFragment, PsiElement[] elements) throws PrepareFailedException {
    try {
      myControlFlow =
        ControlFlowFactory.getInstance(project).getControlFlow(codeFragment, new LocalsControlFlowPolicy(codeFragment), false, false);
    }
    catch (AnalysisCanceledException e) {
      throw new PrepareFailedException(RefactoringBundle.message("extract.method.control.flow.analysis.failed"), e.getErrorElement());
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug(myControlFlow.toString());
    }
    // Find the first selected element that actually has executable code
    // (getStartOffset returns a negative value for non-executable elements).
    int flowStart = -1;
    int index = 0;
    while (index < elements.length) {
      flowStart = myControlFlow.getStartOffset(elements[index]);
      if (flowStart >= 0) break;
      index++;
    }
    int flowEnd;
    if (flowStart < 0) {
      // no executable code
      flowStart = 0;
      flowEnd = 0;
    }
    else {
      // Scan backwards for the last element with a valid end offset.
      index = elements.length - 1;
      while (true) {
        flowEnd = myControlFlow.getEndOffset(elements[index]);
        if (flowEnd >= 0) break;
        index--;
      }
    }
    myFlowStart = flowStart;
    myFlowEnd = flowEnd;
    if (LOG.isDebugEnabled()) {
      LOG.debug("start offset:" + myFlowStart);
      LOG.debug("end offset:" + myFlowEnd);
    }
  }

  /** Copy of the first exit statement, set by {@link #areExitStatementsTheSame()}. */
  public PsiStatement getFirstExitStatementCopy() {
    return myFirstExitStatementCopy;
  }

  /**
   * Collects the exit points and exit statements of the selected fragment.
   *
   * @throws ExitStatementsNotSameException if the fragment has observable throw
   *         exit points, or multiple exit points whose statements differ.
   */
  public Collection<PsiStatement> prepareExitStatements(final @NotNull PsiElement[] elements,
                                                        final @NotNull PsiElement enclosingCodeFragment)
    throws ExitStatementsNotSameException {
    myExitPoints = new IntArrayList();
    myExitStatements = ControlFlowUtil
      .findExitPointsAndStatements(myControlFlow, myFlowStart, myFlowEnd, myExitPoints, ControlFlowUtil.DEFAULT_EXIT_STATEMENTS_CLASSES);
    if (ControlFlowUtil.hasObservableThrowExitPoints(myControlFlow, myFlowStart, myFlowEnd, elements, enclosingCodeFragment)) {
      throw new ExitStatementsNotSameException();
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("exit points:");
      for (int i = 0; i < myExitPoints.size(); i++) {
        LOG.debug("  " + myExitPoints.get(i));
      }
      LOG.debug("exit statements:");
      for (PsiStatement exitStatement : myExitStatements) {
        LOG.debug("  " + exitStatement);
      }
    }
    if (myExitPoints.isEmpty()) {
      // if the fragment never exits assume as if it exits in the end
      myExitPoints.add(myControlFlow.getEndOffset(elements[elements.length - 1]));
    }
    if (myExitPoints.size() != 1) {
      // Multiple exit points: a conditional exit is required, and all exit
      // statements must be equivalent for the extraction to proceed.
      myGenerateConditionalExit = true;
      areExitStatementsTheSame();
    }
    return myExitStatements;
  }

  /**
   * Verifies that all exit statements are structurally equivalent; stores a
   * copy of the first one in {@code myFirstExitStatementCopy}.
   */
  private void areExitStatementsTheSame() throws ExitStatementsNotSameException {
    if (myExitStatements.isEmpty()) {
      throw new ExitStatementsNotSameException();
    }
    PsiStatement first = null;
    for (PsiStatement statement : myExitStatements) {
      if (first == null) {
        first = statement;
        continue;
      }
      if (!PsiEquivalenceUtil.areElementsEquivalent(first, statement)) {
        throw new ExitStatementsNotSameException();
      }
    }
    myFirstExitStatementCopy = (PsiStatement)first.copy();
  }

  public boolean isGenerateConditionalExit() {
    return myGenerateConditionalExit;
  }

  public Collection<PsiStatement> getExitStatements() {
    return myExitStatements;
  }

  /** Thrown when the selected fragment's exit statements prevent extraction. */
  public static class ExitStatementsNotSameException extends Exception {}

  @NotNull
  public PsiVariable[] getOutputVariables() {
    return getOutputVariables(myGenerateConditionalExit);
  }

  /**
   * Returns the variables written inside the fragment that are visible to the
   * caller. When {@code collectVariablesAtExitPoints} is set, variables
   * referenced in the exit statements and written inside the fragment are
   * added as well. Result is sorted by position.
   */
  @NotNull
  public PsiVariable[] getOutputVariables(boolean collectVariablesAtExitPoints) {
    PsiVariable[] myOutputVariables = ControlFlowUtil.getOutputVariables(myControlFlow, myFlowStart, myFlowEnd, myExitPoints.toArray());
    if (collectVariablesAtExitPoints) {
      //variables declared in selected block used in return statements are to be considered output variables when extracting guard methods
      final Set<PsiVariable> outputVariables = new HashSet<>(Arrays.asList(myOutputVariables));
      for (PsiStatement statement : myExitStatements) {
        statement.accept(new JavaRecursiveElementVisitor() {
          @Override
          public void visitReferenceExpression(PsiReferenceExpression expression) {
            super.visitReferenceExpression(expression);
            final PsiElement resolved = expression.resolve();
            if (resolved instanceof PsiVariable) {
              final PsiVariable variable = (PsiVariable)resolved;
              if (isWrittenInside(variable)) {
                outputVariables.add(variable);
              }
            }
          }

          // True if any instruction in [myFlowStart, myFlowEnd) writes 'variable'.
          private boolean isWrittenInside(final PsiVariable variable) {
            final List<Instruction> instructions = myControlFlow.getInstructions();
            for (int i = myFlowStart; i < myFlowEnd; i++) {
              Instruction instruction = instructions.get(i);
              if (instruction instanceof WriteVariableInstruction && variable.equals(((WriteVariableInstruction)instruction).variable)) {
                return true;
              }
            }
            return false;
          }
        });
      }
      myOutputVariables = outputVariables.toArray(new PsiVariable[outputVariables.size()]);
    }
    Arrays.sort(myOutputVariables, PsiUtil.BY_POSITION);
    return myOutputVariables;
  }

  public boolean isReturnPresentBetween() {
    return ControlFlowUtil.returnPresentBetween(myControlFlow, myFlowStart, myFlowEnd);
  }

  /**
   * Removes from {@code inputVariables} every variable whose only references
   * inside the fragment occur within exit statements.
   */
  private void removeParametersUsedInExitsOnly(PsiElement codeFragment, List<PsiVariable> inputVariables) {
    LocalSearchScope scope = new LocalSearchScope(codeFragment);
    Variables:
    for (Iterator<PsiVariable> iterator = inputVariables.iterator(); iterator.hasNext();) {
      PsiVariable variable = iterator.next();
      for (PsiReference ref : ReferencesSearch.search(variable, scope)) {
        PsiElement element = ref.getElement();
        int elementOffset = myControlFlow.getStartOffset(element);
        if (elementOffset == -1) {
          // Reference not in the control flow: keep the variable.
          continue Variables;
        }
        if (elementOffset >= myFlowStart && elementOffset <= myFlowEnd) {
          // A use inside the fragment but outside exit statements keeps the variable.
          if (!isInExitStatements(element, myExitStatements)) continue Variables;
        }
      }
      iterator.remove();
    }
  }

  private static boolean isInExitStatements(PsiElement element, Collection<PsiStatement> exitStatements) {
    for (PsiStatement exitStatement : exitStatements) {
      if (PsiTreeUtil.isAncestor(exitStatement, element, false)) return true;
    }
    return false;
  }

  /**
   * Returns false only for a continue statement whose continued loop body ends
   * within the fragment (such a continue needs no replacement statement).
   */
  private boolean needExitStatement(final PsiStatement exitStatement) {
    if (exitStatement instanceof PsiContinueStatement) {
      //IDEADEV-11748
      PsiStatement statement = ((PsiContinueStatement)exitStatement).findContinuedStatement();
      if (statement == null) return true;
      if (statement instanceof PsiLoopStatement) statement = ((PsiLoopStatement)statement).getBody();
      int endOffset = myControlFlow.getEndOffset(statement);
      return endOffset > myFlowEnd;
    }
    return true;
  }

  /**
   * Returns the fragment's input variables (candidate method parameters),
   * sorted so that varargs variables go last.
   */
  public List<PsiVariable> getInputVariables(final PsiElement codeFragment, PsiElement[] elements, PsiVariable[] outputVariables) {
    final List<PsiVariable> inputVariables = ControlFlowUtil.getInputVariables(myControlFlow, myFlowStart, myFlowEnd);
    List<PsiVariable> myInputVariables;
    if (skipVariablesFromExitStatements(outputVariables)) {
      List<PsiVariable> inputVariableList = new ArrayList<>(inputVariables);
      removeParametersUsedInExitsOnly(codeFragment, inputVariableList);
      myInputVariables = inputVariableList;
    }
    else {
      // Drop variables that are declared inside the selected elements.
      List<PsiVariable> inputVariableList = new ArrayList<>(inputVariables);
      for (Iterator<PsiVariable> iterator = inputVariableList.iterator(); iterator.hasNext(); ) {
        PsiVariable variable = iterator.next();
        for (PsiElement element : elements) {
          if (PsiTreeUtil.isAncestor(element, variable, false)) {
            iterator.remove();
            break;
          }
        }
      }
      myInputVariables = inputVariableList;
    }
    //varargs variables go last, otherwise order is induced by original ordering
    Collections.sort(myInputVariables, (v1, v2) -> {
      if (v1.getType() instanceof PsiEllipsisType) {
        return 1;
      }
      if (v2.getType() instanceof PsiEllipsisType) {
        return -1;
      }
      return v1.getTextOffset() - v2.getTextOffset();
    });
    return myInputVariables;
  }

  /**
   * Replaces the exit statements inside {@code elements} with
   * {@code returnStatement} and returns a copy of the first exit statement
   * that still needs to be emitted after the extracted call (or null).
   */
  public PsiStatement getExitStatementCopy(PsiElement returnStatement,
                                           final PsiElement[] elements) {
    PsiStatement exitStatementCopy = null;
    // replace all exit-statements such as break's or continue's with appropriate return
    for (PsiStatement exitStatement : myExitStatements) {
      if (exitStatement instanceof PsiReturnStatement) {
        if (!myGenerateConditionalExit) continue;
      }
      else if (exitStatement instanceof PsiBreakStatement) {
        PsiStatement statement = ((PsiBreakStatement)exitStatement).findExitedStatement();
        if (statement == null) continue;
        // Skip breaks whose target statement lies entirely within the fragment.
        int startOffset = myControlFlow.getStartOffset(statement);
        int endOffset = myControlFlow.getEndOffset(statement);
        if (myFlowStart <= startOffset && endOffset <= myFlowEnd) continue;
      }
      else if (exitStatement instanceof PsiContinueStatement) {
        PsiStatement statement = ((PsiContinueStatement)exitStatement).findContinuedStatement();
        if (statement == null) continue;
        // Skip continues whose continued statement lies entirely within the fragment.
        int startOffset = myControlFlow.getStartOffset(statement);
        int endOffset = myControlFlow.getEndOffset(statement);
        if (myFlowStart <= startOffset && endOffset <= myFlowEnd) continue;
      }
      else {
        LOG.error(String.valueOf(exitStatement));
        continue;
      }
      // Remember where the exit statement sits inside 'elements' so the
      // replacement result can be written back to the same slot.
      int index = -1;
      for (int j = 0; j < elements.length; j++) {
        if (exitStatement.equals(elements[j])) {
          index = j;
          break;
        }
      }
      if (exitStatementCopy == null) {
        if (needExitStatement(exitStatement)) {
          exitStatementCopy = (PsiStatement)exitStatement.copy();
        }
      }
      PsiElement result = exitStatement.replace(returnStatement);
      if (index >= 0) {
        elements[index] = result;
      }
    }
    return exitStatementCopy;
  }

  /** Variables used from offset {@code start} to the end of the control flow. */
  public List<PsiVariable> getUsedVariables(int start) {
    return getUsedVariables(start, myControlFlow.getSize());
  }

  public List<PsiVariable> getUsedVariables(int start, int end) {
    return ControlFlowUtil.getUsedVariables(myControlFlow, start, end);
  }

  /** Variables initialized twice between {@code start} and the end of the control flow. */
  public Collection<ControlFlowUtil.VariableInfo> getInitializedTwice(int start) {
    return ControlFlowUtil.getInitializedTwice(myControlFlow, start, myControlFlow.getSize());
  }

  /** Variables used after the selected fragment. */
  public List<PsiVariable> getUsedVariables() {
    return getUsedVariables(myFlowEnd);
  }

  /** Variables used inside the fragment body (exit-only parameters may be removed). */
  public List<PsiVariable> getUsedVariablesInBody(PsiElement codeFragment, PsiVariable[] outputVariables) {
    final List<PsiVariable> variables = getUsedVariables(myFlowStart, myFlowEnd);
    if (skipVariablesFromExitStatements(outputVariables)) {
      removeParametersUsedInExitsOnly(codeFragment, variables);
    }
    return variables;
  }

  private boolean skipVariablesFromExitStatements(PsiVariable[] outputVariables) {
    return myGenerateConditionalExit && outputVariables.length == 0;
  }

  public Collection<ControlFlowUtil.VariableInfo> getInitializedTwice() {
    return getInitializedTwice(myFlowEnd);
  }

  public void setGenerateConditionalExit(boolean generateConditionalExit) {
    myGenerateConditionalExit = generateConditionalExit;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.functions.Function;
import org.apache.cassandra.cql3.functions.Functions;
import org.apache.cassandra.db.CounterColumn;
import org.apache.cassandra.db.ExpiringColumn;
import org.apache.cassandra.db.IColumn;
import org.apache.cassandra.db.context.CounterContext;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Represents the selection clause of a CQL3 SELECT statement: the list of
 * selected columns (possibly through writetime/ttl or function selectors) and
 * the machinery ({@link ResultSetBuilder}) to build the resulting rows.
 */
public abstract class Selection
{
    private static final Logger logger = LoggerFactory.getLogger(Selection.class);

    // Columns whose values must be fetched from the storage engine.
    private final List<CFDefinition.Name> columnsList;
    // Metadata (column specifications) of the result set as seen by the client.
    private final List<ColumnSpecification> metadata;
    // Whether row building must also record per-column timestamps / TTLs
    // (needed only when writetime()/ttl() selectors are present).
    private final boolean collectTimestamps;
    private final boolean collectTTLs;

    protected Selection(List<CFDefinition.Name> columnsList, List<ColumnSpecification> metadata, boolean collectTimestamps, boolean collectTTLs)
    {
        this.columnsList = columnsList;
        this.metadata = metadata;
        this.collectTimestamps = collectTimestamps;
        this.collectTTLs = collectTTLs;
    }

    /** Selection for {@code SELECT *}: every column defined by the CF. */
    public static Selection wildcard(CFDefinition cfDef)
    {
        List<CFDefinition.Name> all = new ArrayList<CFDefinition.Name>();
        for (CFDefinition.Name name : cfDef)
            all.add(name);
        return new SimpleSelection(all);
    }

    // A raw selector that is not a plain column identifier is a function-like
    // selector (writetime/ttl or a named function).
    private static boolean isUsingFunction(List<RawSelector> rawSelectors)
    {
        for (RawSelector rawSelector : rawSelectors)
        {
            if (!(rawSelector instanceof ColumnIdentifier))
                return true;
        }
        return false;
    }

    // Returns the index of 'name' in 'l', appending it first if absent.
    private static int addAndGetIndex(CFDefinition.Name name, List<CFDefinition.Name> l)
    {
        int idx = l.indexOf(name);
        if (idx < 0)
        {
            idx = l.size();
            l.add(name);
        }
        return idx;
    }

    /**
     * Builds a {@link Selector} for one raw selector, registering the columns
     * it needs in {@code names} and (when non-null) its result column in
     * {@code metadata}.
     *
     * @throws InvalidRequestException for undefined names or invalid usage.
     */
    private static Selector makeSelector(CFDefinition cfDef, RawSelector raw, List<CFDefinition.Name> names, List<ColumnSpecification> metadata) throws InvalidRequestException
    {
        if (raw instanceof ColumnIdentifier)
        {
            CFDefinition.Name name = cfDef.get((ColumnIdentifier)raw);
            if (name == null)
                throw new InvalidRequestException(String.format("Undefined name %s in selection clause", raw));
            if (metadata != null)
                metadata.add(name);
            return new SimpleSelector(name.toString(), addAndGetIndex(name, names), name.type);
        }
        else if (raw instanceof RawSelector.WritetimeOrTTL)
        {
            RawSelector.WritetimeOrTTL tot = (RawSelector.WritetimeOrTTL)raw;
            CFDefinition.Name name = cfDef.get(tot.id);
            if (name == null)
                throw new InvalidRequestException(String.format("Undefined name %s in selection clause", tot.id));
            // writetime()/ttl() only make sense on actual values, not PK parts or collections.
            if (name.kind != CFDefinition.Name.Kind.COLUMN_METADATA && name.kind != CFDefinition.Name.Kind.VALUE_ALIAS)
                throw new InvalidRequestException(String.format("Cannot use selection function %s on PRIMARY KEY part %s", tot.isWritetime ? "writeTime" : "ttl", name));
            if (name.type.isCollection())
                throw new InvalidRequestException(String.format("Cannot use selection function %s on collections", tot.isWritetime ? "writeTime" : "ttl"));
            if (metadata != null)
                metadata.add(makeWritetimeOrTTLSpec(cfDef, tot));
            return new WritetimeOrTTLSelector(name.toString(), addAndGetIndex(name, names), tot.isWritetime);
        }
        else
        {
            // Named function call: recursively build selectors for arguments
            // (argument selectors do not contribute to the result metadata).
            RawSelector.WithFunction withFun = (RawSelector.WithFunction)raw;
            List<Selector> args = new ArrayList<Selector>(withFun.args.size());
            for (RawSelector rawArg : withFun.args)
                args.add(makeSelector(cfDef, rawArg, names, null));

            AbstractType<?> returnType = Functions.getReturnType(withFun.functionName, cfDef.cfm.ksName, cfDef.cfm.cfName);
            if (returnType == null)
                throw new InvalidRequestException(String.format("Unknown function '%s'", withFun.functionName));
            ColumnSpecification spec = makeFunctionSpec(cfDef, withFun, returnType);
            Function fun = Functions.get(withFun.functionName, args, spec);
            if (metadata != null)
                metadata.add(spec);
            return new FunctionSelector(fun, args);
        }
    }

    // Result column spec for writetime() (bigint) or ttl() (int).
    private static ColumnSpecification makeWritetimeOrTTLSpec(CFDefinition cfDef, RawSelector.WritetimeOrTTL tot)
    {
        return new ColumnSpecification(cfDef.cfm.ksName,
                                       cfDef.cfm.cfName,
                                       new ColumnIdentifier(tot.toString(), true),
                                       tot.isWritetime ? LongType.instance : Int32Type.instance);
    }

    // Result column spec for a function call selector.
    private static ColumnSpecification makeFunctionSpec(CFDefinition cfDef, RawSelector.WithFunction fun, AbstractType<?> returnType) throws InvalidRequestException
    {
        if (returnType == null)
            throw new InvalidRequestException(String.format("Unknown function %s called in selection clause", fun.functionName));

        return new ColumnSpecification(cfDef.cfm.ksName,
                                       cfDef.cfm.cfName,
                                       new ColumnIdentifier(fun.toString(), true),
                                       returnType);
    }

    /**
     * Builds a Selection for an explicit selector list. Uses the cheaper
     * {@link SimpleSelection} when no function-like selector is involved.
     */
    public static Selection fromSelectors(CFDefinition cfDef, List<RawSelector> rawSelectors) throws InvalidRequestException
    {
        boolean usesFunction = isUsingFunction(rawSelectors);

        if (usesFunction)
        {
            List<CFDefinition.Name> names = new ArrayList<CFDefinition.Name>();
            List<ColumnSpecification> metadata = new ArrayList<ColumnSpecification>(rawSelectors.size());
            List<Selector> selectors = new ArrayList<Selector>(rawSelectors.size());
            boolean collectTimestamps = false;
            boolean collectTTLs = false;
            for (RawSelector rawSelector : rawSelectors)
            {
                Selector selector = makeSelector(cfDef, rawSelector, names, metadata);
                selectors.add(selector);
                if (selector instanceof WritetimeOrTTLSelector)
                {
                    // Only collect what is actually queried for.
                    collectTimestamps |= ((WritetimeOrTTLSelector)selector).isWritetime;
                    collectTTLs |= !((WritetimeOrTTLSelector)selector).isWritetime;
                }
            }
            return new SelectionWithFunctions(names, metadata, selectors, collectTimestamps, collectTTLs);
        }
        else
        {
            List<CFDefinition.Name> names = new ArrayList<CFDefinition.Name>(rawSelectors.size());
            for (RawSelector rawSelector : rawSelectors)
            {
                assert rawSelector instanceof ColumnIdentifier;
                CFDefinition.Name name = cfDef.get((ColumnIdentifier)rawSelector);
                if (name == null)
                    throw new InvalidRequestException(String.format("Undefined name %s in selection clause", rawSelector));
                names.add(name);
            }
            return new SimpleSelection(names);
        }
    }

    // Turns the fetched values of the current row into the output row.
    protected abstract List<ByteBuffer> handleRow(ResultSetBuilder rs) throws InvalidRequestException;

    /**
     * @return the list of CQL3 "regular" (the "COLUMN_METADATA" ones) column names to fetch.
     */
    public List<ColumnIdentifier> regularColumnsToFetch()
    {
        List<ColumnIdentifier> toFetch = new ArrayList<ColumnIdentifier>();
        for (CFDefinition.Name name : columnsList)
        {
            if (name.kind == CFDefinition.Name.Kind.COLUMN_METADATA)
                toFetch.add(name.name);
        }
        return toFetch;
    }

    /**
     * @return the list of CQL3 columns value this SelectionClause needs.
     */
    public List<CFDefinition.Name> getColumnsList()
    {
        return columnsList;
    }

    public ResultSetBuilder resultSetBuilder()
    {
        return new ResultSetBuilder();
    }

    // Counter columns store a counter context; expose their total as bytes.
    private static ByteBuffer value(IColumn c)
    {
        return (c instanceof CounterColumn)
             ? ByteBufferUtil.bytes(CounterContext.instance().total(c.value()))
             : c.value();
    }

    /**
     * Accumulates fetched column values row by row and produces the final
     * {@link ResultSet}.
     */
    public class ResultSetBuilder
    {
        private final ResultSet resultSet;

        /*
         * We'll build CQL3 row one by one.
         * The currentRow is the values for the (CQL3) columns we've fetched.
         * We also collect timestamps and ttls for the case where the writetime and
         * ttl functions are used. Note that we might collect timestamp and/or ttls
         * we don't care about, but since the array below are allocated just once,
         * it doesn't matter performance wise.
         */
        List<ByteBuffer> current;
        final long[] timestamps;
        final int[] ttls;

        private ResultSetBuilder()
        {
            this.resultSet = new ResultSet(metadata);
            this.timestamps = collectTimestamps ? new long[columnsList.size()] : null;
            this.ttls = collectTTLs ? new int[columnsList.size()] : null;
        }

        /** Adds a pre-computed value to the current row. */
        public void add(ByteBuffer v)
        {
            current.add(v);
        }

        /**
         * Adds a column to the current row (null for dead columns), recording
         * its timestamp and/or TTL when requested (-1 means "none").
         */
        public void add(IColumn c)
        {
            current.add(isDead(c) ? null : value(c));
            if (timestamps != null)
            {
                timestamps[current.size() - 1] = isDead(c) ? -1 : c.timestamp();
            }
            if (ttls != null)
            {
                int ttl = -1;
                if (!isDead(c) && c instanceof ExpiringColumn)
                    ttl = ((ExpiringColumn)c).getLocalDeletionTime() - (int) (System.currentTimeMillis() / 1000);
                ttls[current.size() - 1] = ttl;
            }
        }

        private boolean isDead(IColumn c)
        {
            return c == null || c.isMarkedForDelete();
        }

        /** Flushes the previous row (if any) and starts a fresh one. */
        public void newRow() throws InvalidRequestException
        {
            if (current != null)
                resultSet.addRow(handleRow(this));
            current = new ArrayList<ByteBuffer>(columnsList.size());
        }

        /** Flushes the last pending row and returns the completed result set. */
        public ResultSet build() throws InvalidRequestException
        {
            if (current != null)
            {
                resultSet.addRow(handleRow(this));
                current = null;
            }
            return resultSet;
        }
    }

    // Special cased selection for when no function is used (this save some allocations).
    private static class SimpleSelection extends Selection
    {
        public SimpleSelection(List<CFDefinition.Name> columnsList)
        {
            /*
             * In theory, even a simple selection could have multiple time the same column, so we
             * could filter those duplicate out of columnsList. But since we're very unlikely to
             * get much duplicate in practice, it's more efficient not to bother.
             */
            super(columnsList, new ArrayList<ColumnSpecification>(columnsList), false, false);
        }

        protected List<ByteBuffer> handleRow(ResultSetBuilder rs)
        {
            // Fetched values map 1:1 to output columns; return them directly.
            return rs.current;
        }
    }

    // Computes one output value of the row being built.
    private interface Selector extends AssignementTestable
    {
        public ByteBuffer compute(ResultSetBuilder rs) throws InvalidRequestException;
    }

    // Selects a plain column value by its index in the fetched row.
    private static class SimpleSelector implements Selector
    {
        private final String columnName;
        private final int idx;
        private final AbstractType<?> type;

        public SimpleSelector(String columnName, int idx, AbstractType<?> type)
        {
            this.columnName = columnName;
            this.idx = idx;
            this.type = type;
        }

        public ByteBuffer compute(ResultSetBuilder rs)
        {
            return rs.current.get(idx);
        }

        public boolean isAssignableTo(ColumnSpecification receiver)
        {
            return type.asCQL3Type().equals(receiver.type.asCQL3Type());
        }

        @Override
        public String toString()
        {
            return columnName;
        }
    }

    // Applies a function to the values computed by its argument selectors.
    private static class FunctionSelector implements Selector
    {
        private final Function fun;
        private final List<Selector> argSelectors;

        public FunctionSelector(Function fun, List<Selector> argSelectors)
        {
            this.fun = fun;
            this.argSelectors = argSelectors;
        }

        public ByteBuffer compute(ResultSetBuilder rs) throws InvalidRequestException
        {
            List<ByteBuffer> args = new ArrayList<ByteBuffer>(argSelectors.size());
            for (Selector s : argSelectors)
                args.add(s.compute(rs));
            return fun.execute(args);
        }

        public boolean isAssignableTo(ColumnSpecification receiver)
        {
            return fun.returnType().asCQL3Type().equals(receiver.type.asCQL3Type());
        }

        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder();
            sb.append(fun.name()).append("(");
            for (int i = 0; i < argSelectors.size(); i++)
            {
                if (i > 0)
                    sb.append(", ");
                sb.append(argSelectors.get(i));
            }
            return sb.append(")").toString();
        }
    }

    // Selects the writetime (timestamp) or TTL recorded for a column.
    private static class WritetimeOrTTLSelector implements Selector
    {
        private final String columnName;
        private final int idx;
        private final boolean isWritetime;

        public WritetimeOrTTLSelector(String columnName, int idx, boolean isWritetime)
        {
            this.columnName = columnName;
            this.idx = idx;
            this.isWritetime = isWritetime;
        }

        public ByteBuffer compute(ResultSetBuilder rs)
        {
            if (isWritetime)
            {
                // -1 marks a dead column: return null instead of a timestamp.
                long ts = rs.timestamps[idx];
                return ts >= 0 ? ByteBufferUtil.bytes(ts) : null;
            }

            int ttl = rs.ttls[idx];
            return ttl > 0 ? ByteBufferUtil.bytes(ttl) : null;
        }

        public boolean isAssignableTo(ColumnSpecification receiver)
        {
            return receiver.type.asCQL3Type().equals(isWritetime ? CQL3Type.Native.BIGINT : CQL3Type.Native.INT);
        }

        @Override
        public String toString()
        {
            return columnName;
        }
    }

    // Selection with at least one function-like selector: each output column
    // is computed by its selector from the fetched values.
    private static class SelectionWithFunctions extends Selection
    {
        private final List<Selector> selectors;

        public SelectionWithFunctions(List<CFDefinition.Name> columnsList, List<ColumnSpecification> metadata, List<Selector> selectors, boolean collectTimestamps, boolean collectTTLs)
        {
            super(columnsList, metadata, collectTimestamps, collectTTLs);
            this.selectors = selectors;
        }

        protected List<ByteBuffer> handleRow(ResultSetBuilder rs) throws InvalidRequestException
        {
            List<ByteBuffer> result = new ArrayList<ByteBuffer>();
            for (Selector selector : selectors)
            {
                result.add(selector.compute(rs));
            }
            return result;
        }
    }
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.io.watch;
import jodd.io.FileUtil;
import jodd.mutable.MutableLong;
import jodd.util.StringPool;
import jodd.util.Wildcard;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
/**
 * Polls a single directory on a {@link Timer} and notifies registered
 * {@link DirWatcherListener}s of created, modified and deleted files,
 * based on {@code File.lastModified()} snapshots.
 */
public class DirWatcher {

	/**
	 * Events that describes file change.
	 */
	public enum Event {
		CREATED,
		DELETED,
		MODIFIED
	}

	protected final File dir;
	// Snapshot of watched files and their last-modified timestamps.
	protected HashMap<File, MutableLong> map = new HashMap<>();
	// Number of entries in the directory at the last scan (accepted or not).
	protected int filesCount;
	protected List<DirWatcherListener> listeners = new ArrayList<>();
	// Wildcard patterns for file names; null means "watch everything".
	protected String[] patterns;

	/**
	 * Creates new watcher on specified directory.
	 * You can set file patterns {@link #monitor(String...) later}.
	 */
	public DirWatcher(String dir) {
		this(dir, null);
	}

	/**
	 * Creates new watched on specified directory with given set of
	 * wildcard patterns for file names.
	 */
	public DirWatcher(String dirName, String... patterns) {
		this.dir = new File(dirName);

		if (!dir.exists() || !dir.isDirectory()) {
			throw new DirWatcherException("Invalid watch dir: " + dirName);
		}

		this.patterns = patterns;
	}

	/**
	 * Initializes dir watcher by reading all files
	 * from watched folder.
	 */
	protected void init() {
		File[] filesArray = dir.listFiles();

		filesCount = 0;

		if (filesArray != null) {
			filesCount = filesArray.length;

			for (File file : filesArray) {
				if (!acceptFile(file)) {
					continue;
				}

				map.put(file, new MutableLong(file.lastModified()));
			}
		}
	}

	// ---------------------------------------------------------------- flags

	// Skip files whose name starts with a dot (hidden files).
	protected boolean ignoreDotFiles = true;
	// When true, do not pre-populate the snapshot: existing files fire CREATED.
	protected boolean startBlank = false;

	/**
	 * Enables or disables if dot files should be watched.
	 */
	public DirWatcher ignoreDotFiles(boolean ignoreDotFiles) {
		this.ignoreDotFiles = ignoreDotFiles;
		return this;
	}

	/**
	 * Defines if watcher should start blank and consider all present
	 * files as {@link jodd.io.watch.DirWatcher.Event#CREATED created}.
	 * By default all existing files will consider as existing ones.
	 */
	public DirWatcher startBlank(boolean startBlank) {
		this.startBlank = startBlank;
		return this;
	}

	/**
	 * Defines patterns to scan.
	 */
	public DirWatcher monitor(String... patterns) {
		this.patterns = patterns;
		return this;
	}

	// ---------------------------------------------------------------- accept

	/**
	 * Accepts if a file is going to be watched.
	 */
	protected boolean acceptFile(File file) {
		if (!file.isFile()) {
			return false;			// ignore non-files
		}

		String fileName = file.getName();

		if (ignoreDotFiles) {
			if (fileName.startsWith(StringPool.DOT)) {
				return false;		// ignore hidden files
			}
		}

		if (patterns == null) {
			return true;
		}

		return Wildcard.matchOne(fileName, patterns) != -1;
	}

	// ---------------------------------------------------------------- watch file

	// Optional "gate" file: scanning only happens after it is touched.
	protected File watchFile;
	protected long watchFileLastAccessTime;

	/**
	 * Enables usage of default watch file (".watch.ready").
	 */
	public DirWatcher useWatchFile() {
		return useWatchFile(".watch.ready");
	}

	/**
	 * Enables usage of provided watch file.
	 */
	public DirWatcher useWatchFile(String name) {
		watchFile = new File(dir, name);

		if (!watchFile.isFile() || !watchFile.exists()) {
			try {
				FileUtil.touch(watchFile);
			} catch (IOException ioex) {
				throw new DirWatcherException("Invalid watch file: " + name, ioex);
			}
		}

		watchFileLastAccessTime = watchFile.lastModified();

		return this;
	}

	// ---------------------------------------------------------------- timer

	protected Timer timer;

	/**
	 * Starts the watcher. No-op when already started.
	 */
	public void start(long pollingInterval) {
		if (timer == null) {
			if (!startBlank) {
				init();
			}
			timer = new Timer(true);
			timer.schedule(new WatchTask(), 0, pollingInterval);
		}
	}

	/**
	 * Stops the watcher. No-op when not started.
	 */
	public void stop() {
		if (timer != null) {
			timer.cancel();
			timer = null;
		}
	}

	// ---------------------------------------------------------------- timer

	/**
	 * One polling pass: diffs the directory listing against the snapshot
	 * and fires CREATED/MODIFIED/DELETED events.
	 */
	public class WatchTask extends TimerTask {
		// Guards against overlapping runs when a pass outlives the interval.
		protected boolean running;

		public final void run() {
			if (running) {
				// if one task takes too long, don't fire another one
				return;
			}
			running = true;

			if (watchFile != null) {
				// wait for watch file changes
				long last = watchFile.lastModified();

				if (last <= watchFileLastAccessTime) {
					running = false;
					return;
				}
				watchFileLastAccessTime = last;
			}

			// scan!

			File[] filesArray = dir.listFiles();

			if (filesArray == null) {
				running = false;
				return;
			}

			HashSet<File> deletedFiles = null;

			// check if there might be a delete file
			// NOTE(review): this heuristic only fires when the listing shrank, so a
			// simultaneous add+delete in one polling interval can miss the DELETED event.
			if (filesArray.length < filesCount) {
				deletedFiles = new HashSet<>(map.keySet());
			}

			filesCount = filesArray.length;

			// scan the files and check for modification/addition
			for (File file : filesArray) {
				if (!acceptFile(file)) {
					continue;
				}

				MutableLong currentTime = map.get(file);

				if (deletedFiles != null) {
					deletedFiles.remove(file);
				}

				long lastModified = file.lastModified();

				if (currentTime == null) {
					// new file
					map.put(file, new MutableLong(lastModified));
					onChange(file, Event.CREATED);
				}
				else if (currentTime.longValue() != lastModified) {
					// modified file
					currentTime.setValue(lastModified);
					onChange(file, Event.MODIFIED);
				}
			}

			// check for deleted files
			if (deletedFiles != null) {
				for (File deletedFile : deletedFiles) {
					map.remove(deletedFile);
					onChange(deletedFile, Event.DELETED);
				}
			}

			// stop running
			running = false;
		}
	}

	/**
	 * Triggers listeners on file change.
	 */
	protected void onChange(File file, Event event) {
		for (DirWatcherListener listener : listeners) {
			listener.onChange(file, event);
		}
	}

	// ---------------------------------------------------------------- listeners

	/**
	 * Registers {@link jodd.io.watch.DirWatcherListener listener}.
	 */
	public void register(DirWatcherListener dirWatcherListener) {
		if (!listeners.contains(dirWatcherListener)) {
			listeners.add(dirWatcherListener);
		}
	}

	/**
	 * Removes registered {@link jodd.io.watch.DirWatcherListener listener}.
	 */
	public void remove(DirWatcherListener dirWatcherListener) {
		listeners.remove(dirWatcherListener);
	}
}
| |
package com.example.wisebody.twelve.PagerMenu;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import com.example.wisebody.twelve.R;
import com.example.wisebody.twelve.User;
/**
 * Hosts the five main pages (active / limit / stardom / best / me) in a
 * ViewPager with custom tab icons, and forwards the logged-in {@link User}
 * to every page fragment via a shared argument bundle.
 */
public class MainActivity extends AppCompatActivity {
    User loginUser;
    Bundle args;
    int position;
    /** Incremented by each page fragment when it finishes loading; 5 == all ready. */
    static int loadcount;
    /** Background watcher started in onResume; null when not running. */
    Thread loadThread;

    private SectionsPagerAdapter mSectionsPagerAdapter;
    private ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // The login screen hands over the user; forward it to all fragments.
        loginUser = (User) getIntent().getSerializableExtra("loginUser");
        args = new Bundle();
        args.putSerializable("loginUser", loginUser);
        loadcount = 0;

        mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());

        mViewPager = (ViewPager) findViewById(R.id.container);
        mViewPager.setAdapter(mSectionsPagerAdapter);
        mViewPager.setEnabled(false);
        // Keep all five fragments alive so tab switches never recreate them.
        mViewPager.setOffscreenPageLimit(4);
        mViewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            }

            @Override
            public void onPageSelected(int position) {
                restFragment(position);
            }

            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });

        TabLayout tabLayout = (TabLayout) findViewById(R.id.tabs);
        tabLayout.setupWithViewPager(mViewPager);
        // setupWithViewPager creates five default text tabs; append five
        // icon-only custom tabs, then drop the five defaults from the front.
        addCustomTab(tabLayout, R.drawable.tab_active);
        addCustomTab(tabLayout, R.drawable.tab_limit);
        addCustomTab(tabLayout, R.drawable.tab_stardom);
        addCustomTab(tabLayout, R.drawable.tab_best);
        addCustomTab(tabLayout, R.drawable.tab_me);
        for (int i = 0; i < 5; i++) {
            tabLayout.removeTabAt(0);
        }
    }

    /** Inflates one custom tab with the given icon and appends it to the tab bar. */
    private void addCustomTab(TabLayout tabLayout, int iconRes) {
        View tab = getLayoutInflater().inflate(R.layout.customtab, null);
        tab.findViewById(R.id.icon).setBackgroundResource(iconRes);
        tabLayout.addTab(tabLayout.newTab().setCustomView(tab));
    }

    @Override
    public void onResume() {
        super.onResume();
        Log.d("resume", "resume");
        mViewPager.setEnabled(true);
        position = mViewPager.getCurrentItem();
        if (loadThread == null) {
            // Watch for all five fragments to finish loading, then re-enable
            // the pager. The original loop spun hot (no sleep) and mutated the
            // view from a background thread; fixed by sleeping between polls
            // and posting the view update back to the UI thread.
            loadThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    while (loadcount != 5) {
                        try {
                            Thread.sleep(50);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            return;
                        }
                    }
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            mViewPager.setEnabled(true);
                            position = mViewPager.getCurrentItem();
                        }
                    });
                    loadThread = null;
                }
            });
            loadThread.start();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
    }

    /**
     * Refreshes the page fragments after a tab change.
     * Updates each fragment's {@code shownpage} flag, re-runs any fragment
     * whose data must be reloaded, and hands the support action bar to the
     * stardom page (position 2) or the me page (any position past 3).
     * NOTE(review): {@code stardomFragment.shownpage} was never set in the
     * original code either; that asymmetry is preserved on purpose.
     */
    protected void restFragment(int curposition) {
        ActiveFragment activeFragment = (ActiveFragment) mSectionsPagerAdapter.instantiateItem(mViewPager, 0);
        LimitFragment limitFragment = (LimitFragment) mSectionsPagerAdapter.instantiateItem(mViewPager, 1);
        StardomFragment stardomFragment = (StardomFragment) mSectionsPagerAdapter.instantiateItem(mViewPager, 2);
        BestFragment bestFragment = (BestFragment) mSectionsPagerAdapter.instantiateItem(mViewPager, 3);
        MeFragment meFragment = (MeFragment) mSectionsPagerAdapter.instantiateItem(mViewPager, 4);

        boolean isMePage = curposition != 0 && curposition != 1
                && curposition != 2 && curposition != 3;

        // Only the selected page is flagged as shown.
        activeFragment.shownpage = (curposition == 0);
        limitFragment.shownpage = (curposition == 1);
        bestFragment.shownpage = (curposition == 3);
        meFragment.shownpage = isMePage;

        // Re-run any fragment that flagged itself as needing a data reload.
        if (activeFragment.loadData)
            activeFragment.restFragment();
        if (limitFragment.loadData)
            limitFragment.restFragment();
        if (bestFragment.loadData)
            bestFragment.restFragment();
        if (meFragment.loadData)
            meFragment.restFragment();

        // Action-bar ownership: only the stardom and me pages carry a toolbar.
        if (curposition == 2) {
            ((AppCompatActivity) meFragment.getActivity()).setSupportActionBar(null);
            ((AppCompatActivity) stardomFragment.getActivity()).setSupportActionBar(stardomFragment.toolbar);
            ((AppCompatActivity) stardomFragment.getActivity()).getSupportActionBar().setDisplayShowTitleEnabled(false);
        } else if (isMePage) {
            ((AppCompatActivity) stardomFragment.getActivity()).setSupportActionBar(null);
            ((AppCompatActivity) meFragment.getActivity()).setSupportActionBar(meFragment.toolbar);
            ((AppCompatActivity) meFragment.getActivity()).getSupportActionBar().setDisplayShowTitleEnabled(false);
        }
    }

    /** Lazily resolves and returns the main ViewPager. */
    public ViewPager getViewPager() {
        if (null == mViewPager) {
            mViewPager = (ViewPager) findViewById(R.id.container);
        }
        return mViewPager;
    }

    /**
     * Adapter supplying the five page fragments, each seeded with the shared
     * login-user argument bundle.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {
        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            Log.d("get", Integer.toString(position));
            Fragment fragment;
            if (position == 0) {
                fragment = new ActiveFragment();
            } else if (position == 1) {
                fragment = new LimitFragment();
            } else if (position == 2) {
                fragment = new StardomFragment();
            } else if (position == 3) {
                fragment = new BestFragment();
            } else {
                fragment = new MeFragment();
            }
            fragment.setArguments(args);
            return fragment;
        }

        @Override
        public int getCount() {
            return 5;
        }
    }
}
| |
/*
* Copyright (c) 2013, salesforce.com, inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the
* following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.sforce.ws.wsdl;
import com.sforce.ws.parser.XmlInputStream;
import com.sforce.ws.ConnectionException;
import javax.xml.namespace.QName;
import java.util.HashMap;
import java.util.Iterator;
/**
* This class represents a WSDL->definitions
*
* @author http://cheenath.com
* @version 1.0
* @since 1.0 Nov 5, 2005
*/
public class Definitions extends WsdlNode {
// private static boolean LOG = Verbose.isVerbose(Verbose.WSDL);
private Types types;
private HashMap<QName, Message> messages = new HashMap<QName, Message>();
private String targetNamespace;
private SfdcApiType apiType;
private PortType portType;
private Service service;
private Binding binding;
public Types getTypes() {
return types;
}
public String getTargetNamespace() {
return targetNamespace;
}
public SfdcApiType getApiType() {
return apiType;
}
public PortType getPortType() {
return portType;
}
public Binding getBinding() throws ConnectionException {
QName name = service.getPort().getBinding();
if (binding.getName().equals(name.getLocalPart()) && targetNamespace.equals(name.getNamespaceURI())) {
return binding;
} else {
throw new ConnectionException("Unable to find binding " + name + ". Found "
+ binding.getName() + " instead.");
}
}
public Service getService() {
return service;
}
public Message getMessage(QName name) throws ConnectionException {
Message message = messages.get(name);
if (message == null) {
throw new ConnectionException("No message found for:" + name);
}
return message;
}
void read(WsdlParser parser) throws WsdlParseException {
int eventType = parser.getEventType();
while (eventType != XmlInputStream.END_DOCUMENT) {
if (eventType == XmlInputStream.START_DOCUMENT) {
//} else if (eventType == XmlInputStream.END_DOCUMENT) {
} else if (eventType == XmlInputStream.START_TAG) {
String name = parser.getName();
String namespace = parser.getNamespace();
if (name != null && namespace != null) {
parse(name, namespace, parser);
}
} else if (eventType == XmlInputStream.END_TAG) {
} else if (eventType == XmlInputStream.TEXT) {
}
eventType = parser.next();
}
if (targetNamespace == null) {
throw new WsdlParseException("targetNamespace not specified in wsdl:definitions ");
}
if (binding == null) {
throw new WsdlParseException("Unable to find wsdl:binding in the specified wsdl");
}
if (portType == null) {
throw new WsdlParseException("Unable to find wsdl:portType in the specified wsdl");
}
if (service == null) {
throw new WsdlParseException("Unable to find wsdl:service in the specified wsdl");
}
try {
updateHeaderTypes();
} catch (ConnectionException e) {
throw new WsdlParseException("Failed to parse WSDL: " + e.getMessage(), e);
}
}
private void updateHeaderTypes() throws ConnectionException {
Iterator<Part> headers = getBinding().getAllHeaders();
while (headers.hasNext()) {
Part part = headers.next();
QName el = part.getElement();
if (getTypes() != null) {
Element element = getTypes().getElement(el);
if (element.isComplexType()) {
ComplexType ct = getTypes().getComplexType(element.getType());
ct.setHeader(true);
} else {
//no need to set header type for simple types
}
}
}
}
private void parse(String name, String namespace, WsdlParser parser) throws WsdlParseException {
if (WSDL_NS.equals(namespace)) {
if (DEFINITIONS.equals(name)) {
targetNamespace = parser.getAttributeValue(null, TARGET_NAME_SPACE);
apiType = SfdcApiType.getFromNamespace(targetNamespace);
} else if (TYPES.equals(name)) {
types = new Types();
types.read(parser);
} else if (MESSAGE.equals(name)) {
Message message = new Message(targetNamespace);
message.read(parser);
messages.put(message.getName(), message);
} else if (PORT_TYPE.equals(name)) {
if (portType != null) {
throw new WsdlParseException("Found more than one wsdl:portType. " +
"WSDL with multiple portType not supported");
}
portType = new PortType(this);
portType.read(parser);
} else if (BINDING.equals(name)) {
if (binding != null) {
throw new WsdlParseException("Found more than one wsdl:binding. " +
"WSDL with multiple binding not supported");
}
binding = new Binding(this);
binding.read(parser);
} else if (SERVICE.equals(name)) {
if (service != null) {
throw new WsdlParseException("Found more than one wsdl:service. " +
"WSDL with multiple service not supported");
}
service = new Service();
service.read(parser);
} else if (DOCUMENTATION.equals(name)) {
new Documentation().read(parser);
} else {
throw new WsdlParseException("Unknown element: " + name);
}
}
}
@Override
public String toString() {
return "Definitions{" +
"types=" + types +
", messages=" + messages +
", targetNamespace='" + targetNamespace + '\'' +
", portType=" + portType +
", service=" + service +
'}';
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* Written by Doug Lea with assistance from members of JCP JSR-166 Expert Group and released to the
* public domain, as explained at http://creativecommons.org/licenses/publicdomain
*/
/**
* Misc utilities in JSR166 performance tests
*/
package org.apache.geode.internal.util.concurrent.cm;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Misc utilities for JSR166 performance tests: a grab-bag of cheap,
 * deterministic pseudo-random "busy work" functions plus small RNG classes
 * used between synchronization points in the tests.
 */
class LoopHelpers {

  /** Shared RNG for callers that do not need per-thread state. */
  static final SimpleRandom staticRNG = new SimpleRandom();

  // Some mindless computation to do between synchronizations...

  /**
   * generates 32 bit pseudo-random numbers. Adapted from http://www.snippets.org
   */
  public static int compute1(int x) {
    int lo = 16807 * (x & 0xFFFF);
    int hi = 16807 * (x >>> 16);
    lo += (hi & 0x7FFF) << 16;
    if ((lo & 0x80000000) != 0) {
      lo &= 0x7fffffff;
      ++lo;
    }
    lo += hi >>> 15;
    if (lo == 0 || (lo & 0x80000000) != 0) {
      lo &= 0x7fffffff;
      ++lo;
    }
    return lo;
  }

  /**
   * Computes a linear congruential random number a random number of times.
   */
  public static int compute2(int x) {
    int loops = (x >>> 4) & 7; // 0..7 iterations, derived from the input
    while (loops-- > 0) {
      x = (x * 2147483647) % 16807;
    }
    return x;
  }

  /**
   * Yet another random number generator (Park-Miller style step).
   */
  public static int compute3(int x) {
    int t = (x % 127773) * 16807 - (x / 127773) * 2836;
    return (t > 0) ? t : t + 0x7fffffff;
  }

  /**
   * Yet another random number generator (linear congruential step).
   */
  public static int compute4(int x) {
    return x * 134775813 + 1;
  }

  /**
   * Yet another random number generator (multiply-with-carry style step).
   */
  public static int compute5(int x) {
    return 36969 * (x & 65535) + (x >> 16);
  }

  /**
   * Marsaglia xorshift (1, 3, 10)
   */
  public static int compute6(int seed) {
    seed ^= seed << 1;
    seed ^= seed >>> 3;
    seed ^= (seed << 10);
    return seed;
  }

  /**
   * Marsaglia xorshift (6, 21, 7)
   */
  public static int compute7(int y) {
    y ^= y << 6;
    y ^= y >>> 21;
    y ^= (y << 7);
    return y;
  }

  /**
   * Marsaglia xorshift for longs
   */
  public static long compute8(long x) {
    x ^= x << 13;
    x ^= x >>> 7;
    x ^= (x << 17);
    return x;
  }

  /** 32-bit xorshift RNG; each default-constructed instance gets a distinct seed. */
  public static class XorShift32Random {
    static final AtomicInteger seq = new AtomicInteger(8862213);
    int x = -1831433054;

    public XorShift32Random(int seed) {
      x = seed;
    }

    public XorShift32Random() {
      this((int) System.nanoTime() + seq.getAndAdd(129));
    }

    /** Returns the next value using the (6, 21, 7) xorshift step. */
    public int next() {
      x ^= x << 6;
      x ^= x >>> 21;
      x ^= (x << 7);
      return x;
    }
  }

  /** Multiplication-free RNG from Marsaglia "Xorshift RNGs" paper */
  public static class MarsagliaRandom {
    static final AtomicInteger seq = new AtomicInteger(3122688);
    int x;
    int y = 842502087;
    int z = -715159705;
    int w = 273326509;

    public MarsagliaRandom(int seed) {
      x = seed;
    }

    public MarsagliaRandom() {
      this((int) System.nanoTime() + seq.getAndAdd(129));
    }

    public int next() {
      int t = x ^ (x << 11);
      x = y;
      y = z;
      z = w;
      return w = (w ^ (w >>> 19) ^ (t ^ (t >>> 8)));
    }
  }

  /**
   * Unsynchronized version of java.util.Random algorithm.
   * Not thread-safe; intended for single-threaded test use.
   */
  public static class SimpleRandom {
    private static final long multiplier = 0x5DEECE66DL;
    private static final long addend = 0xBL;
    private static final long mask = (1L << 48) - 1;
    static final AtomicLong seq = new AtomicLong(-715159705);
    private long seed;

    SimpleRandom(long s) {
      seed = s;
    }

    SimpleRandom() {
      seed = System.nanoTime() + seq.getAndAdd(129);
    }

    public void setSeed(long s) {
      seed = s;
    }

    /** Returns the next non-negative 31-bit pseudo-random int. */
    public int next() {
      long nextseed = (seed * multiplier + addend) & mask;
      seed = nextseed;
      return ((int) (nextseed >>> 17)) & 0x7FFFFFFF;
    }
  }

  /**
   * Barrier action that records the first trigger as start time and any
   * later trigger as end time, for timing a barrier-synchronized phase.
   */
  public static class BarrierTimer implements Runnable {
    volatile boolean started;
    volatile long startTime;
    volatile long endTime;

    public void run() {
      long t = System.nanoTime();
      if (!started) {
        started = true;
        startTime = t;
      } else
        endTime = t;
    }

    public void clear() {
      started = false;
    }

    /** Elapsed nanoseconds between the first and the most recent trigger. */
    public long getTime() {
      return endTime - startTime;
    }
  }

  /**
   * Right-justifies {@code n} within a fixed-width field of spaces; numbers
   * wider than the field are returned unpadded.
   */
  public static String rightJustify(long n) {
    // There's probably a better way to do this...
    String field = " ";
    String num = Long.toString(n);
    if (num.length() >= field.length())
      return num;
    // StringBuilder: no synchronization needed here (was StringBuffer).
    StringBuilder b = new StringBuilder(field);
    b.replace(b.length() - num.length(), b.length(), num);
    return b.toString();
  }
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.masterdb.security.hibernate.cds;
import java.util.Map;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.financial.security.cds.CreditDefaultSwapIndexComponent;
import com.opengamma.masterdb.security.hibernate.ExternalIdBean;
/**
 * A Hibernate bean representation of {@link CreditDefaultSwapIndexComponent}.
 * Mutable, no-arg-constructible form used by the Hibernate security master.
 */
@BeanDefinition
public class CDSIndexComponentBean extends DirectBean {

  /**
   * The obligor identifier
   */
  @PropertyDefinition
  private ExternalIdBean _obligor;
  /**
   * The weight
   */
  @PropertyDefinition
  private Double _weight;
  /**
   * The bond ref id
   */
  @PropertyDefinition
  private ExternalIdBean _bondId;
  /**
   * The name
   */
  @PropertyDefinition
  private String _name;

  // NOTE: everything between the AUTOGENERATED markers below is produced by
  // the Joda-Beans code generator -- do not edit by hand; regenerate instead.
  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code CDSIndexComponentBean}.
   * @return the meta-bean, not null
   */
  public static CDSIndexComponentBean.Meta meta() {
    return CDSIndexComponentBean.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(CDSIndexComponentBean.Meta.INSTANCE);
  }

  @Override
  public CDSIndexComponentBean.Meta metaBean() {
    return CDSIndexComponentBean.Meta.INSTANCE;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the obligor identifier
   * @return the value of the property
   */
  public ExternalIdBean getObligor() {
    return _obligor;
  }

  /**
   * Sets the obligor identifier
   * @param obligor  the new value of the property
   */
  public void setObligor(ExternalIdBean obligor) {
    this._obligor = obligor;
  }

  /**
   * Gets the the {@code obligor} property.
   * @return the property, not null
   */
  public final Property<ExternalIdBean> obligor() {
    return metaBean().obligor().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the weight
   * @return the value of the property
   */
  public Double getWeight() {
    return _weight;
  }

  /**
   * Sets the weight
   * @param weight  the new value of the property
   */
  public void setWeight(Double weight) {
    this._weight = weight;
  }

  /**
   * Gets the the {@code weight} property.
   * @return the property, not null
   */
  public final Property<Double> weight() {
    return metaBean().weight().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the bond ref id
   * @return the value of the property
   */
  public ExternalIdBean getBondId() {
    return _bondId;
  }

  /**
   * Sets the bond ref id
   * @param bondId  the new value of the property
   */
  public void setBondId(ExternalIdBean bondId) {
    this._bondId = bondId;
  }

  /**
   * Gets the the {@code bondId} property.
   * @return the property, not null
   */
  public final Property<ExternalIdBean> bondId() {
    return metaBean().bondId().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the name
   * @return the value of the property
   */
  public String getName() {
    return _name;
  }

  /**
   * Sets the name
   * @param name  the new value of the property
   */
  public void setName(String name) {
    this._name = name;
  }

  /**
   * Gets the the {@code name} property.
   * @return the property, not null
   */
  public final Property<String> name() {
    return metaBean().name().createProperty(this);
  }

  //-----------------------------------------------------------------------
  @Override
  public CDSIndexComponentBean clone() {
    return JodaBeanUtils.cloneAlways(this);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      CDSIndexComponentBean other = (CDSIndexComponentBean) obj;
      return JodaBeanUtils.equal(getObligor(), other.getObligor()) &&
          JodaBeanUtils.equal(getWeight(), other.getWeight()) &&
          JodaBeanUtils.equal(getBondId(), other.getBondId()) &&
          JodaBeanUtils.equal(getName(), other.getName());
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(getObligor());
    hash = hash * 31 + JodaBeanUtils.hashCode(getWeight());
    hash = hash * 31 + JodaBeanUtils.hashCode(getBondId());
    hash = hash * 31 + JodaBeanUtils.hashCode(getName());
    return hash;
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(160);
    buf.append("CDSIndexComponentBean{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
      buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
  }

  protected void toString(StringBuilder buf) {
    buf.append("obligor").append('=').append(JodaBeanUtils.toString(getObligor())).append(',').append(' ');
    buf.append("weight").append('=').append(JodaBeanUtils.toString(getWeight())).append(',').append(' ');
    buf.append("bondId").append('=').append(JodaBeanUtils.toString(getBondId())).append(',').append(' ');
    buf.append("name").append('=').append(JodaBeanUtils.toString(getName())).append(',').append(' ');
  }

  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code CDSIndexComponentBean}.
   */
  public static class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code obligor} property.
     */
    private final MetaProperty<ExternalIdBean> _obligor = DirectMetaProperty.ofReadWrite(
        this, "obligor", CDSIndexComponentBean.class, ExternalIdBean.class);
    /**
     * The meta-property for the {@code weight} property.
     */
    private final MetaProperty<Double> _weight = DirectMetaProperty.ofReadWrite(
        this, "weight", CDSIndexComponentBean.class, Double.class);
    /**
     * The meta-property for the {@code bondId} property.
     */
    private final MetaProperty<ExternalIdBean> _bondId = DirectMetaProperty.ofReadWrite(
        this, "bondId", CDSIndexComponentBean.class, ExternalIdBean.class);
    /**
     * The meta-property for the {@code name} property.
     */
    private final MetaProperty<String> _name = DirectMetaProperty.ofReadWrite(
        this, "name", CDSIndexComponentBean.class, String.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "obligor",
        "weight",
        "bondId",
        "name");

    /**
     * Restricted constructor.
     */
    protected Meta() {
    }

    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case -1657678854:  // obligor
          return _obligor;
        case -791592328:  // weight
          return _weight;
        case -1383424194:  // bondId
          return _bondId;
        case 3373707:  // name
          return _name;
      }
      return super.metaPropertyGet(propertyName);
    }

    @Override
    public BeanBuilder<? extends CDSIndexComponentBean> builder() {
      return new DirectBeanBuilder<CDSIndexComponentBean>(new CDSIndexComponentBean());
    }

    @Override
    public Class<? extends CDSIndexComponentBean> beanType() {
      return CDSIndexComponentBean.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code obligor} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<ExternalIdBean> obligor() {
      return _obligor;
    }

    /**
     * The meta-property for the {@code weight} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<Double> weight() {
      return _weight;
    }

    /**
     * The meta-property for the {@code bondId} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<ExternalIdBean> bondId() {
      return _bondId;
    }

    /**
     * The meta-property for the {@code name} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> name() {
      return _name;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case -1657678854:  // obligor
          return ((CDSIndexComponentBean) bean).getObligor();
        case -791592328:  // weight
          return ((CDSIndexComponentBean) bean).getWeight();
        case -1383424194:  // bondId
          return ((CDSIndexComponentBean) bean).getBondId();
        case 3373707:  // name
          return ((CDSIndexComponentBean) bean).getName();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }

    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      switch (propertyName.hashCode()) {
        case -1657678854:  // obligor
          ((CDSIndexComponentBean) bean).setObligor((ExternalIdBean) newValue);
          return;
        case -791592328:  // weight
          ((CDSIndexComponentBean) bean).setWeight((Double) newValue);
          return;
        case -1383424194:  // bondId
          ((CDSIndexComponentBean) bean).setBondId((ExternalIdBean) newValue);
          return;
        case 3373707:  // name
          ((CDSIndexComponentBean) bean).setName((String) newValue);
          return;
      }
      super.propertySet(bean, propertyName, newValue, quiet);
    }

  }

  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
| |
package is.mpg.ruglan.data;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.preference.PreferenceManager;
import android.util.Log;
import android.content.SharedPreferences;
import android.text.format.Time;
import java.util.Date;
import is.mpg.ruglan.HomeActivity;
/**
* An interface for the database backend.
* @author Jon
*/
public class Dabbi {
SharedPreferences prefs;
SharedPreferences.Editor editor;
private Context context;
/**
 * @use a = new Dabbi(context)
 * @pre context is a valid non null android context
 * @post a is a pointer to a new Dabbi object with the
 *       context context.
 * @param context A non null android Context object.
 */
public Dabbi(Context context)
{
    this.context = context;
    // Cache the default shared preferences and keep one editor ready for writes.
    prefs = PreferenceManager.getDefaultSharedPreferences(context);
    editor = prefs.edit();
}
/**
 * @use a = new Dabbi()
 * @post a is a pointer to a new Dabbi object with the
 *       context of HomeActivity.
 */
public Dabbi()
{
    // Convenience constructor: fall back to the context exposed by HomeActivity.
    this(HomeActivity.getContext());
}
/**
 * Inserts the given events into the CALEVENTS table, assigning each distinct
 * event name a color row in the COLORS table the first time it is seen.
 *
 * @use addCalEvents(events)
 * @pre events is an array of valid CalEvent elements.
 * @post The CalEvents contained in events have been
 *       added to the database and leaves it in a consistent state.
 *
 * @param calEvents An array of CalEvents to be added.
 */
public void addCalEvents(CalEvent[] calEvents)
{
    Log.d("Dabbi", "Adding " + calEvents.length + " events...");
    if (context == null)
    {
        Log.w("Dabbi", "warning Null context");
        return;
    }
    rDataBase DB = new rDataBase(context);
    SQLiteDatabase qdb = DB.getWritableDatabase();
    if (qdb == null)
    {
        Log.w("Dabbi", "warning Null database pointer");
        return;
    }
    for (CalEvent event : calEvents)
    {
        // Check if the event is in the color table and if not add it to it.
        Cursor result = qdb.rawQuery("SELECT color FROM COLORS WHERE name = ?",
                new String[]{event.getName()});
        if (result.getCount() == 0)
        {
            // Next color value = current maximum + 1 (0 when the table is empty).
            Cursor maxColorResult = qdb.rawQuery("SELECT MAX(color) FROM COLORS", null);
            int newColorValue = 0;
            if (maxColorResult.getCount() != 0)
            {
                maxColorResult.moveToFirst();
                newColorValue = maxColorResult.getInt(0) + 1;
            }
            maxColorResult.close(); // was leaked in the original code
            ContentValues colorValues = new ContentValues();
            colorValues.put("name", event.getName());
            colorValues.put("color", newColorValue);
            qdb.insert("COLORS", null, colorValues);
            Log.d("Dabbi", event.getName() + " got color " + newColorValue);
        }
        result.close(); // was leaked in the original code
        // Timestamps are stored as unix seconds (Date works in milliseconds).
        ContentValues values = new ContentValues();
        values.put("name", event.getName());
        values.put("description", event.getDescription());
        values.put("location", event.getLocation());
        values.put("start", event.getStart().getTime() / 1000);
        values.put("finish", event.getEnd().getTime() / 1000);
        values.put("hidden", event.isHidden() ? "1" : "0");
        qdb.insert("CALEVENTS", null, values);
    }
    qdb.close();
    Log.d("Dabbi", "Adding done.");
}
/**
 * A function to get the color value of an event.
 * @use a = getColor(b);
 * @pre b is a name of an event that is in the database.
 * @post a is the color value of b.
 * @param name The event name we want the color for.
 * @return The color value of name or -1 in case of failure.
 */
public int getColor(String name)
{
    if (context == null)
    {
        Log.w("Dabbi", "warning Null context");
        return -1;
    }
    rDataBase DB = new rDataBase(context);
    SQLiteDatabase qdb = DB.getWritableDatabase();
    Cursor result = qdb.rawQuery("SELECT color FROM COLORS WHERE name = ?",
            new String[]{name});
    try
    {
        // Original code called getInt(0) on a possibly-empty cursor, which
        // throws instead of honoring the documented -1 failure value.
        if (!result.moveToFirst())
        {
            return -1;
        }
        return result.getInt(0);
    }
    finally
    {
        // Cursor and database were leaked in the original code.
        result.close();
        qdb.close();
    }
}
/**
 * Fetches every stored event whose start time falls between two dates.
 *
 * @use events = getCalEvents(start,end)
 * @pre start and end are valid Date objects.
 * @post events contains all the CalEvents contained in
 *       the database that begin between start and end.
 *
 * @param start A Date object that is the earliest date we want to look at.
 * @param end A Date object that is the latest time an CalEvent
 *            can start at so it is included in the return value.
 */
public CalEvent[] getCalEvents(Date start, Date end)
{
    // Stored timestamps are unix seconds; Date works in milliseconds.
    long fromSeconds = start.getTime() / 1000;
    long toSeconds = end.getTime() / 1000;
    String sql = "SELECT * FROM CALEVENTS WHERE start BETWEEN ? AND ?";
    return getCalEventsForQuery(sql, new String[]{
            Long.toString(fromSeconds),
            Long.toString(toSeconds)});
}
/**
* @use events = getAllCalEvents()
* @post events contains all the CalEvents contained in the database
*/
public CalEvent[] getAllCalEvents()
{
String query ="SELECT * FROM CALEVENTS";
String queryArgs[] = null;
return getCalEventsForQuery(query, queryArgs);
}
/**
* Deletes all the events in the events table of the database.
* @use a = clearEventsTable();
* @pre
* @post The events table in the database is now empty if a is true else
* something failed.
*/
boolean clearEventsTable()
{
try
{
rDataBase DB = new rDataBase(context);
SQLiteDatabase qdb = DB.getWritableDatabase();
try {
qdb.execSQL("DROP TABLE CALEVENTS");
} catch (Exception ex) {
Log.e("Failed to drop table CALEVENTS. " +
"Does the table even exist", ex.getMessage());
}
DB.executeSQLScript(qdb, "create.sql");
qdb.close();
return true;
}
catch(Exception e)
{
return false;
}
}
/**
* Runs a private method of the class if with the correct
* password.
* @use a = runPrivateMethod(password);
* @pre
* @post a is true if a method matched the password and it was run
* else a is false
*/
public boolean runPrivateMethod(String password)
{
if(password.equals("there is no cow level"))
{
return clearEventsTable();
}
return false;
}
/**
* Refreshes the events in the CALEVENTS table.
* @use refreshEventsTable(iCalUrl);
* @pre iCalUrl is a path to a valid URL to a valid iCal
* @post The CALEVENTS table in the database contains fresh data
* from the iCal url in the iCalUrl setting in the SETTINGS table.
*/
public void refreshEventsTable(String iCalUrl) throws Exception{
CalEvent [] calEvents;
Log.d("Dabbi","refreshing table");
try{
Log.d("Dabbi","Trying to fetch from parser");
calEvents = iCalParser.urlToCalEvents(iCalUrl);
} catch (Exception ex) {
Log.e("Dabbi", ex.getMessage());
calEvents = null;
}
if (calEvents == null){
Exception e = new Exception("iCal Parsing error");
throw e;
}
clearEventsTable();
addCalEvents(calEvents);
Time now = new Time();
now.setToNow();
String t = now.format3339(false);
String[] ts = t.split("T");
String [] ts1 = ts[1].split(":");
String tim = ts[0] + " " + ts1[0] + ":" + ts1[1];
editor.putString("lastUpdate", tim);
editor.commit();
}
public void refreshEventsTable() throws Exception
{
String iCalUrl = prefs.getString("iCalUrl","");
refreshEventsTable(iCalUrl);
}
/**
* @use CalEvent[] c = d.getCalEventsForRecurringEvents();
* @pre d is an instance of Dabbi.
* @return A list of CalEvents representing
* recurring events, on weekly basis
*/
public CalEvent[] getCalEventsForRecurringEvents() {
int secondsInAWeek = 604800;
String query = "SELECT * FROM CALEVENTS "
+"GROUP BY name, location, start % ? "
+"ORDER BY description, start";
String queryArgs[] = new String[]{Integer.toString(secondsInAWeek)};
return getCalEventsForQuery(query, queryArgs);
}
/**
* @use String[] names = d.getCalEventsNames();
* @pre d is an instance of Dabbi.
* @return A list of event names in Dabbi.
*/
public String[] getCalEventsNames() {
rDataBase DB = new rDataBase(context);
SQLiteDatabase qdb = DB.getWritableDatabase();
if(qdb == null)
{
return new String[0];
}
Cursor result = qdb.rawQuery("SELECT name, max(start) AS ms FROM CALEVENTS " +
"GROUP BY name " +
"ORDER BY ms", null);
//Iterate over the result.
String[] names = new String[result.getCount()];
result.moveToFirst();
int i = 0;
while(!result.isAfterLast())
{
names[i] = result.getString(0);
i++;
result.moveToNext();
}
qdb.close();
return names;
}
/**
* @param event is an instance of CalEvent
* @return A list of CalEvents of same type as event, i.e. has the same
* name and location and their start times are at the same time each week.
*/
public CalEvent[] getEventsLike(CalEvent event) {
rDataBase DB = new rDataBase(context);
SQLiteDatabase qdb = DB.getWritableDatabase();
if(qdb == null)
{
return new CalEvent[0];
}
int secondsInAWeek = 604800;
String query = "SELECT * FROM CALEVENTS "
+ "WHERE name=? AND location=? "
+ "AND (start-?)%?=0";
String queryArgs[] = new String[]{
event.getName(),
event.getLocation(),
Long.toString(
event.getStart().getTime()/1000),
Integer.toString(secondsInAWeek)
};
return getCalEventsForQuery(query,queryArgs);
}
public void changeHiddenForEventsLike(CalEvent event, Boolean hidden) {
rDataBase DB = new rDataBase(context);
SQLiteDatabase qdb = DB.getWritableDatabase();
int secondsInAWeek = 604800;
qdb.execSQL("UPDATE CALEVENTS "
+ "SET hidden=? "
+ "WHERE name=? AND location=? "
+ "AND (start-?)%?=0",
new String[]{
hidden? "1" : "0",
event.getName(),
event.getLocation(),
Long.toString(
event.getStart().getTime()/1000),
Integer.toString(secondsInAWeek)
});
}
private CalEvent[] getCalEventsForQuery(String query, String[] queryArgs)
{
rDataBase DB = new rDataBase(context);
SQLiteDatabase qdb = DB.getWritableDatabase();
if(qdb == null)
{
return new CalEvent[0];
}
Cursor result = qdb.rawQuery(query,queryArgs);
//Iterate over the result.
CalEvent[] events = new CalEvent[result.getCount()];
result.moveToFirst();
int i = 0;
while(!result.isAfterLast())
{
CalEvent tmpEvent = new CalEvent(result.getString(0),
result.getString(1),result.getString(2)
,new Date(Long.parseLong(result.getString(3))*1000)
,new Date(Long.parseLong(result.getString(4))*1000),
result.getInt(5)>0);
events[i] = tmpEvent;
i++;
result.moveToNext();
}
qdb.close();
return events;
}
}
| |
/*
* Copyright (c) 2010-2020. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.modelling.command.inspection;
import org.axonframework.commandhandling.CommandMessage;
import org.axonframework.commandhandling.NoHandlerForCommandException;
import org.axonframework.common.Assert;
import org.axonframework.common.AxonConfigurationException;
import org.axonframework.eventhandling.DomainEventMessage;
import org.axonframework.eventhandling.EventBus;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.eventhandling.GenericDomainEventMessage;
import org.axonframework.eventhandling.GenericEventMessage;
import org.axonframework.messaging.DefaultInterceptorChain;
import org.axonframework.messaging.Message;
import org.axonframework.messaging.MetaData;
import org.axonframework.messaging.annotation.MessageHandlingMember;
import org.axonframework.messaging.unitofwork.CurrentUnitOfWork;
import org.axonframework.messaging.unitofwork.UnitOfWork;
import org.axonframework.modelling.command.Aggregate;
import org.axonframework.modelling.command.AggregateInvocationException;
import org.axonframework.modelling.command.AggregateLifecycle;
import org.axonframework.modelling.command.ApplyMore;
import org.axonframework.modelling.command.Repository;
import org.axonframework.modelling.command.RepositoryProvider;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static java.lang.String.format;
/**
* Implementation of the {@link Aggregate} interface that allows for an aggregate root to be a POJO with annotations on
* its Command and Event Handler methods.
* <p>
* This wrapper ensures that aggregate members can use the {@link AggregateLifecycle#apply(Object)} method in a static
* context, as long as access to the instance is done via the {@link #execute(Consumer)} or {@link #invoke(Function)}
* methods.
*
* @param <T> The type of the aggregate root object
* @see AggregateLifecycle#apply(Object)
* @see AggregateLifecycle#markDeleted()
*/
public class AnnotatedAggregate<T> extends AggregateLifecycle implements Aggregate<T>, ApplyMore {
    // Model describing the aggregate structure and its annotated handlers.
    private final AggregateModel<T> inspector;
    // Supplies repositories when this aggregate spawns other aggregates; may be null.
    private final RepositoryProvider repositoryProvider;
    // Tasks deferred while an apply() is in progress; drained in FIFO order afterwards.
    private final Queue<Runnable> delayedTasks = new LinkedList<>();
    // Bus on which applied events are also published externally; may be null.
    private final EventBus eventBus;
    private T aggregateRoot;
    // True while an event is being applied to the aggregate's own handlers;
    // nested apply() calls are queued in delayedTasks instead of recursing.
    private boolean applying = false;
    private boolean executingDelayedTasks = false;
    private boolean isDeleted = false;
    // null: sequence numbering disabled; -1: enabled, but no event published yet.
    private Long lastKnownSequence;
    /**
     * Initialize an Aggregate instance for the given {@code aggregateRoot}, described by the given
     * {@code aggregateModel} that will publish events to the given {@code eventBus}.
     *
     * @param aggregateRoot The aggregate root instance
     * @param model         The model describing the aggregate structure
     * @param eventBus      The Event Bus to publish generated events on
     */
    protected AnnotatedAggregate(T aggregateRoot, AggregateModel<T> model, EventBus eventBus) {
        this(aggregateRoot, model, eventBus, null);
    }
    /**
     * Initialize an Aggregate instance for the given {@code aggregateRoot}, described by the given
     * {@code aggregateModel} that will publish events to the given {@code eventBus}.
     *
     * @param aggregateRoot      The aggregate root instance
     * @param model              The model describing the aggregate structure
     * @param eventBus           The Event Bus to publish generated events on
     * @param repositoryProvider Provides repositories for specific aggregate types
     */
    protected AnnotatedAggregate(T aggregateRoot,
                                 AggregateModel<T> model,
                                 EventBus eventBus,
                                 RepositoryProvider repositoryProvider) {
        this(model, eventBus, repositoryProvider);
        this.aggregateRoot = aggregateRoot;
    }
    /**
     * Initialize an Aggregate instance for the given {@code aggregateRoot}, described by the given
     * {@code aggregateModel} that will publish events to the given {@code eventBus}.
     *
     * @param inspector The AggregateModel that describes the aggregate
     * @param eventBus  The Event Bus to publish generated events on
     */
    protected AnnotatedAggregate(AggregateModel<T> inspector, EventBus eventBus) {
        this(inspector, eventBus, null);
    }
    /**
     * Initialize an Aggregate instance for the given {@code aggregateRoot}, described by the given
     * {@code aggregateModel} that will publish events to the given {@code eventBus}.
     *
     * @param inspector          The AggregateModel that describes the aggregate
     * @param eventBus           The Event Bus to publish generated events on
     * @param repositoryProvider Provides repositories for specific aggregate types
     */
    protected AnnotatedAggregate(AggregateModel<T> inspector,
                                 EventBus eventBus,
                                 RepositoryProvider repositoryProvider) {
        this.inspector = inspector;
        this.eventBus = eventBus;
        this.repositoryProvider = repositoryProvider;
    }
    /**
     * Initialize an aggregate created by the given {@code aggregateFactory} which is described in the given
     * {@code aggregateModel}. The given {@code eventBus} is used to publish events generated by the aggregate.
     *
     * @param aggregateFactory The factory to create the aggregate root instance with
     * @param aggregateModel   The model describing the aggregate structure
     * @param eventBus         The EventBus to publish events on
     * @param <T>              The type of the Aggregate root
     * @return An Aggregate instance, fully initialized
     *
     * @throws Exception when an error occurs creating the aggregate root instance
     */
    public static <T> AnnotatedAggregate<T> initialize(Callable<T> aggregateFactory, AggregateModel<T> aggregateModel,
                                                       EventBus eventBus)
            throws Exception {
        return initialize(aggregateFactory, aggregateModel, eventBus, false);
    }
    /**
     * Initialize an aggregate created by the given {@code aggregateFactory} which is described in the given
     * {@code aggregateModel}. The given {@code eventBus} is used to publish events generated by the aggregate.
     *
     * @param aggregateFactory   The factory to create the aggregate root instance with
     * @param aggregateModel     The model describing the aggregate structure
     * @param eventBus           The EventBus to publish events on
     * @param repositoryProvider Provides repositories for specific aggregate types
     * @param <T>                The type of the Aggregate root
     * @return An Aggregate instance, fully initialized
     *
     * @throws Exception when an error occurs creating the aggregate root instance
     */
    public static <T> AnnotatedAggregate<T> initialize(Callable<T> aggregateFactory,
                                                       AggregateModel<T> aggregateModel,
                                                       EventBus eventBus,
                                                       RepositoryProvider repositoryProvider) throws Exception {
        return initialize(aggregateFactory, aggregateModel, eventBus, repositoryProvider, false);
    }
    /**
     * Initialize an aggregate created by the given {@code aggregateFactory} which is described in the given
     * {@code aggregateModel}. The given {@code eventBus} is used to publish events generated by the aggregate.
     *
     * @param aggregateFactory  The factory to create the aggregate root instance with
     * @param aggregateModel    The model describing the aggregate structure
     * @param eventBus          The EventBus to publish events on
     * @param generateSequences Whether to generate sequence numbers on events published from this aggregate
     * @param <T>               The type of the Aggregate root
     * @return An Aggregate instance, fully initialized
     *
     * @throws Exception when an error occurs creating the aggregate root instance
     */
    public static <T> AnnotatedAggregate<T> initialize(Callable<T> aggregateFactory,
                                                       AggregateModel<T> aggregateModel,
                                                       EventBus eventBus,
                                                       boolean generateSequences) throws Exception {
        return initialize(aggregateFactory, aggregateModel, eventBus, null, generateSequences);
    }
    /**
     * Initialize an aggregate created by the given {@code aggregateFactory} which is described in the given
     * {@code aggregateModel}. The given {@code eventBus} is used to publish events generated by the aggregate.
     *
     * @param aggregateFactory   The factory to create the aggregate root instance with
     * @param aggregateModel     The model describing the aggregate structure
     * @param eventBus           The EventBus to publish events on
     * @param repositoryProvider Provides repositories for specific aggregate types
     * @param generateSequences  Whether to generate sequence numbers on events published from this aggregate
     * @param <T>                The type of the Aggregate root
     * @return An Aggregate instance, fully initialized
     *
     * @throws Exception when an error occurs creating the aggregate root instance
     */
    public static <T> AnnotatedAggregate<T> initialize(Callable<T> aggregateFactory,
                                                       AggregateModel<T> aggregateModel,
                                                       EventBus eventBus,
                                                       RepositoryProvider repositoryProvider,
                                                       boolean generateSequences) throws Exception {
        AnnotatedAggregate<T> aggregate =
                new AnnotatedAggregate<>(aggregateModel, eventBus, repositoryProvider);
        // Sequencing must be enabled before the root is registered, so the
        // creation event (applied during registerRoot) is already numbered.
        if (generateSequences) {
            aggregate.initSequence();
        }
        aggregate.registerRoot(aggregateFactory);
        return aggregate;
    }
    /**
     * Initialize an aggregate with the given {@code aggregateRoot} which is described in the given
     * {@code aggregateModel}. The given {@code eventBus} is used to publish events generated by the aggregate.
     *
     * @param aggregateRoot  The aggregate root instance
     * @param aggregateModel The model describing the aggregate structure
     * @param eventBus       The EventBus to publish events on
     * @param <T>            The type of the Aggregate root
     * @return An Aggregate instance, fully initialized
     */
    public static <T> AnnotatedAggregate<T> initialize(T aggregateRoot,
                                                       AggregateModel<T> aggregateModel,
                                                       EventBus eventBus) {
        return initialize(aggregateRoot, aggregateModel, eventBus, null);
    }
    /**
     * Initialize an aggregate with the given {@code aggregateRoot} which is described in the given
     * {@code aggregateModel}. The given {@code eventBus} is used to publish events generated by the aggregate.
     *
     * @param aggregateRoot      The aggregate root instance
     * @param aggregateModel     The model describing the aggregate structure
     * @param eventBus           The EventBus to publish events on
     * @param repositoryProvider Provides repositories for specific aggregate types
     * @param <T>                The type of the Aggregate root
     * @return An Aggregate instance, fully initialized
     */
    public static <T> AnnotatedAggregate<T> initialize(T aggregateRoot,
                                                       AggregateModel<T> aggregateModel,
                                                       EventBus eventBus,
                                                       RepositoryProvider repositoryProvider) {
        return new AnnotatedAggregate<>(aggregateRoot, aggregateModel, eventBus, repositoryProvider);
    }
    /**
     * Enable sequences on this Aggregate, causing it to emit DomainEventMessages, starting at sequence 0. Each Event
     * applied will increase the sequence, allowing to trace each event back to the Aggregate instance that published
     * it, in the order published.
     */
    public void initSequence() {
        // -1 so that the first published event gets sequence number 0 (see createMessage).
        initSequence(-1);
    }
    /**
     * Enable sequences on this Aggregate, causing it to emit DomainEventMessages based on the given
     * {@code lastKnownSequenceNumber}. Each Event applied will increase the sequence, allowing to trace each event
     * back to the Aggregate instance that published it, in the order published.
     *
     * @param lastKnownSequenceNumber The sequence number to pass into the next event published
     */
    public void initSequence(long lastKnownSequenceNumber) {
        this.lastKnownSequence = lastKnownSequenceNumber;
    }
    /**
     * Registers the aggregate root created by the given {@code aggregateFactory} with this aggregate. Applies any
     * delayed events that have not been applied to the aggregate yet.
     * <p>
     * This method is commonly called while an aggregate is being initialized.
     *
     * @param aggregateFactory the factory to create the aggregate root
     * @throws Exception if the aggregate factory fails to create the aggregate root
     */
    protected void registerRoot(Callable<T> aggregateFactory) throws Exception {
        this.aggregateRoot = executeWithResult(aggregateFactory);
        // Any apply() calls made before the root existed were queued; run them now.
        execute(() -> {
            while (!delayedTasks.isEmpty()) {
                delayedTasks.remove().run();
            }
        });
    }
    @Override
    public String type() {
        return inspector.type();
    }
    @Override
    public Object identifier() {
        return inspector.getIdentifier(aggregateRoot);
    }
    @Override
    public Long version() {
        return inspector.getVersion(aggregateRoot);
    }
    /**
     * Returns the last sequence of any event published, or {@code null} if no events have been published yet. If
     * sequences aren't enabled for this Aggregate, this method will also return {@code null}.
     *
     * @return the last sequence of any event published, or {@code null} if no events have been published yet
     */
    public Long lastSequence() {
        // -1 is the "enabled but nothing published" sentinel set by initSequence().
        return lastKnownSequence == -1 ? null : lastKnownSequence;
    }
    @Override
    protected boolean getIsLive() {
        return true;
    }
    @Override
    protected <R> Aggregate<R> doCreateNew(Class<R> aggregateType, Callable<R> factoryMethod) throws Exception {
        // Spawning another aggregate requires a repository to register it in.
        if (repositoryProvider == null) {
            throw new AxonConfigurationException(format(
                    "Since repository provider is not provided, we cannot spawn a new aggregate for %s",
                    aggregateType.getName()));
        }
        Repository<R> repository = repositoryProvider.repositoryFor(aggregateType);
        if (repository == null) {
            throw new IllegalStateException(format("There is no configured repository for %s",
                                                   aggregateType.getName()));
        }
        return repository.newInstance(factoryMethod);
    }
    @Override
    public <R> R invoke(Function<T, R> invocation) {
        try {
            return executeWithResult(() -> invocation.apply(aggregateRoot));
        } catch (RuntimeException e) {
            // Runtime exceptions propagate as-is; only checked ones are wrapped.
            throw e;
        } catch (Exception e) {
            throw new AggregateInvocationException("Exception occurred while invoking an aggregate", e);
        }
    }
    @Override
    public void execute(Consumer<T> invocation) {
        execute(() -> invocation.accept(aggregateRoot));
    }
    @Override
    public boolean isDeleted() {
        return isDeleted;
    }
    @SuppressWarnings("unchecked")
    @Override
    public Class<? extends T> rootType() {
        return (Class<? extends T>) aggregateRoot.getClass();
    }
    @Override
    protected void doMarkDeleted() {
        this.isDeleted = true;
    }
    /**
     * Publish an event to the aggregate root and its entities first and external event handlers (using the given
     * event bus) later.
     *
     * @param msg the event message to publish
     */
    protected void publish(EventMessage<?> msg) {
        // Track the sequence of domain events so lastSequence() stays current.
        if (msg instanceof DomainEventMessage) {
            lastKnownSequence = ((DomainEventMessage) msg).getSequenceNumber();
        }
        // Apply to the aggregate's own handlers before external publication.
        inspector.publish(msg, aggregateRoot);
        publishOnEventBus(msg);
    }
    /**
     * Publish an event to external event handlers using the given event bus.
     *
     * @param msg the event message to publish
     */
    protected void publishOnEventBus(EventMessage<?> msg) {
        // eventBus may legitimately be null (no external publication configured).
        if (eventBus != null) {
            eventBus.publish(msg);
        }
    }
    @SuppressWarnings("unchecked")
    @Override
    public Object handle(Message<?> message) throws Exception {
        // Dispatch to the command- or event-specific handler based on message type.
        Callable<Object> messageHandling;
        if (message instanceof CommandMessage) {
            messageHandling = () -> handle((CommandMessage) message);
        } else if (message instanceof EventMessage) {
            messageHandling = () -> handle((EventMessage) message);
        } else {
            throw new IllegalArgumentException("Unsupported message type: " + message.getClass());
        }
        // executeWithResult installs the AggregateLifecycle context around the call.
        return executeWithResult(messageHandling);
    }
    @SuppressWarnings("unchecked")
    private Object handle(CommandMessage<?> commandMessage) throws Exception {
        // Collect the command handler interceptors declared on this aggregate type.
        List<AnnotatedCommandHandlerInterceptor<? super T>> interceptors =
                inspector.commandHandlerInterceptors((Class<? extends T>) aggregateRoot.getClass())
                         .map(chi -> new AnnotatedCommandHandlerInterceptor<>(chi, aggregateRoot))
                         .collect(Collectors.toList());
        // Find the first handler able to handle this command, or fail loudly.
        MessageHandlingMember<? super T> handler = inspector.commandHandlers((Class<? extends T>) aggregateRoot.getClass())
                                                            .filter(mh -> mh.canHandle(commandMessage))
                                                            .findFirst()
                                                            .orElseThrow(() -> new NoHandlerForCommandException(format("No handler available to handle command [%s]", commandMessage.getCommandName())));
        Object result;
        if (interceptors.isEmpty()) {
            result = handler.handle(commandMessage, aggregateRoot);
        } else {
            // Run the handler at the end of the interceptor chain, inside the current Unit of Work.
            result = new DefaultInterceptorChain<>(
                    (UnitOfWork<CommandMessage<?>>) CurrentUnitOfWork.get(),
                    interceptors,
                    m -> handler.handle(commandMessage, aggregateRoot)
            ).proceed();
        }
        return result;
    }
    private Object handle(EventMessage<?> eventMessage) {
        inspector.publish(eventMessage, aggregateRoot);
        return null;
    }
    @Override
    protected <P> ApplyMore doApply(P payload, MetaData metaData) {
        if (!applying && aggregateRoot != null) {
            // Mark the fact that we are applying, so any nested apply() calls
            // are queued in delayedTasks rather than published re-entrantly.
            applying = true;
            try {
                publish(createMessage(payload, metaData));
            } finally {
                applying = false;
            }
            // Drain tasks queued during the publication above; the re-entrancy
            // guard prevents a nested drain when a task itself calls apply().
            if (!executingDelayedTasks) {
                executingDelayedTasks = true;
                try {
                    while (!delayedTasks.isEmpty()) {
                        delayedTasks.remove().run();
                    }
                } finally {
                    executingDelayedTasks = false;
                    // Also drop any remaining tasks if one of them threw.
                    delayedTasks.clear();
                }
            }
        } else {
            // Either mid-apply or the root does not exist yet: defer this apply.
            delayedTasks.add(() -> doApply(payload, metaData));
        }
        return this;
    }
    /**
     * Creates an {@link EventMessage} with given {@code payload} and {@code metaData}.
     *
     * @param payload  payload of the resulting message
     * @param metaData metadata of the resulting message
     * @param <P>      the payload type
     * @return the resulting message
     */
    protected <P> EventMessage<P> createMessage(P payload, MetaData metaData) {
        // With sequencing enabled, emit a DomainEventMessage carrying type,
        // identifier and the next sequence number; otherwise a plain event.
        if (lastKnownSequence != null) {
            String type = inspector.declaredType(rootType())
                                   .orElse(rootType().getSimpleName());
            long seq = lastKnownSequence + 1;
            String id = identifierAsString();
            if (id == null) {
                // Only the creation event (sequence 0) may be published before the
                // identifier is set; resolve the identifier lazily in that case.
                Assert.state(seq == 0,
                             () -> "The aggregate identifier has not been set. It must be set at the latest when applying the creation event");
                return new LazyIdentifierDomainEventMessage<>(type, seq, payload, metaData);
            }
            return new GenericDomainEventMessage<>(type, identifierAsString(), seq, payload, metaData);
        }
        return new GenericEventMessage<>(payload, metaData);
    }
    /**
     * Get the annotated aggregate instance. Note that this method should probably never be used in normal use. If you
     * need to operate on the aggregate use {@link #invoke(Function)} or {@link #execute(Consumer)} instead.
     *
     * @return the aggregate instance
     */
    public T getAggregateRoot() {
        return aggregateRoot;
    }
    @Override
    public ApplyMore andThenApply(Supplier<?> payloadOrMessageSupplier) {
        return andThen(() -> applyMessageOrPayload(payloadOrMessageSupplier.get()));
    }
    @Override
    public ApplyMore andThen(Runnable runnable) {
        // Defer while an apply is in progress or the root is not yet created;
        // otherwise run immediately.
        if (applying || aggregateRoot == null) {
            delayedTasks.add(runnable);
        } else {
            runnable.run();
        }
        return this;
    }
    /**
     * Apply a new event message to the aggregate and then publish this message to external systems. If the given {@code
     * payloadOrMessage} is an instance of a {@link Message} an event message is applied with the payload and metadata
     * of the given message, otherwise an event message is applied with given payload and empty metadata.
     *
     * @param payloadOrMessage defines the payload and optionally metadata to apply to the aggregate
     */
    protected void applyMessageOrPayload(Object payloadOrMessage) {
        if (payloadOrMessage instanceof Message) {
            Message message = (Message) payloadOrMessage;
            apply(message.getPayload(), message.getMetaData());
        } else if (payloadOrMessage != null) {
            // A null supplier result is silently ignored (nothing to apply).
            apply(payloadOrMessage, MetaData.emptyInstance());
        }
    }
    // Domain event message whose aggregate identifier is not known at creation
    // time (creation event applied before the identifier is set); the identifier
    // is resolved lazily from the enclosing aggregate on each access.
    private class LazyIdentifierDomainEventMessage<P> extends GenericDomainEventMessage<P> {
        private static final long serialVersionUID = -1624446038982565972L;
        public LazyIdentifierDomainEventMessage(String type, long seq, P payload, MetaData metaData) {
            super(type, null, seq, payload, metaData);
        }
        @Override
        public String getAggregateIdentifier() {
            return identifierAsString();
        }
        @Override
        public GenericDomainEventMessage<P> withMetaData(Map<String, ?> newMetaData) {
            String identifier = identifierAsString();
            if (identifier != null) {
                // NOTE(review): this branch passes getMetaData() and appears to
                // ignore newMetaData entirely -- confirm this is intentional.
                return new GenericDomainEventMessage<>(getType(), getAggregateIdentifier(), getSequenceNumber(),
                                                       getPayload(), getMetaData(), getIdentifier(), getTimestamp());
            } else {
                return new LazyIdentifierDomainEventMessage<>(getType(), getSequenceNumber(), getPayload(),
                                                              MetaData.from(newMetaData));
            }
        }
        @Override
        public GenericDomainEventMessage<P> andMetaData(Map<String, ?> additionalMetaData) {
            String identifier = identifierAsString();
            if (identifier != null) {
                return new GenericDomainEventMessage<>(getType(), getAggregateIdentifier(), getSequenceNumber(),
                                                       getPayload(), getMetaData(), getIdentifier(), getTimestamp())
                        .andMetaData(additionalMetaData);
            } else {
                return new LazyIdentifierDomainEventMessage<>(getType(), getSequenceNumber(), getPayload(),
                                                              getMetaData().mergedWith(additionalMetaData));
            }
        }
    }
}
| |
// $Id: GraphDisplayPanel.java,v 1.2 2003/10/07 21:46:05 idgay Exp $
/* tab:4
* "Copyright (c) 2000-2003 The Regents of the University of California.
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software and its
* documentation for any purpose, without fee, and without written agreement is
* hereby granted, provided that the above copyright notice, the following
* two paragraphs and the author appear in all copies of this software.
*
* IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT
* OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF
* CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS."
*
* Copyright (c) 2002-2003 Intel Corporation
* All rights reserved.
*
* This file is distributed under the terms in the attached INTEL-LICENSE
* file. If you do not find these files, copies can be found by writing to
* Intel Research Berkeley, 2150 Shattuck Avenue, Suite 1300, Berkeley, CA,
* 94704. Attention: Intel License Inquiry.
*/
/**
* @author Wei Hong
* @author modified for tinydb
*/
//*******************************************************
//*******************************************************
//This class is essentially a JPanel with the paint() function overridden
//IT cycles through the nodes, edges, nodePainters, edgePainters, and screenPainters
//and tells them all to draw on its graphics object
//Then I add some code for scaling and zooming the screen
//and converting from node coordinates to screen coordinates
//*******************************************************
//*******************************************************
package net.tinyos.surge;
import net.tinyos.surge.util.*;
import javax.swing.*;
import java.beans.*;
import java.awt.*;
import java.util.*;
public class GraphDisplayPanel extends javax.swing.JPanel
{
    //the following are member variables needed only for painting
    //(classic double buffering: draw into an off-screen image, then blit it)
    protected Image doubleBufferImage;
    protected Dimension doubleBufferImageSize;
    protected Graphics doubleBufferGraphic;
    protected boolean fitToScreenAutomatically = false;
    //the following are needed for scaling and zooming
    //NOTE(review): the defaults (534, -101, -65) look empirically chosen; the
    //Scale*CoordToScreenCoord helpers that consume them are defined further
    //down in this class -- confirm the exact mapping there.
    protected double xScale=534;
    protected double xScaleIntercept = -101;
    protected double yScale = 534;
    protected double yScaleIntercept=-65;
    protected double xMargin=10, yMargin=10;//value of 10 -> margin width of 10% of the screensize
    protected boolean screenIsEmpty=true;
public GraphDisplayPanel()
{
//{{INIT_CONTROLS
setLayout(null);
Insets ins = getInsets();
setSize(ins.left + ins.right + 430,ins.top + ins.bottom + 270);
//}}
//{{REGISTER_LISTENERS
//}}
}
    //----------------------------------------------------------------
    //PAINT
    //recall that paint() is called the first time the window
    //needs to be drawn, or if something damages the window,
    //and in this case by RefreshScreenNow()
    //Renders everything into an off-screen buffer, then blits the buffer to
    //the screen in one drawImage call to avoid flicker.
    public void paint(Graphics g)
    {
        super.paint(g);//first paint the panel normally
        //the following block of code is used
        //if this is the first time being drawn or if the window was resized
        //Otherwise we don't create a new buffer
        Dimension d = getSize();
        if ((doubleBufferImage == null) || (d.width != doubleBufferImageSize.width) || (d.height != doubleBufferImageSize.height))//if this is the first time being drawn or if the window was resized
        {
            doubleBufferImage = createImage(d.width, d.height);
            doubleBufferImageSize = d;
            // Release the graphics context of the previous (stale) buffer.
            if (doubleBufferGraphic != null) {
                doubleBufferGraphic.dispose();
            }
            doubleBufferGraphic = doubleBufferImage.getGraphics();
            doubleBufferGraphic.setFont(getFont());
        }
        // Clear the buffer to a white background before redrawing everything.
        doubleBufferGraphic.setColor(Color.white);
        doubleBufferGraphic.fillRect(0, 0, d.width, d.height);
        if( (fitToScreenAutomatically) || (screenIsEmpty))
        { //if the user wants all the nodes on the screen, or if these are the first nodes on the screen, fit all nodes to the visible area
            //FitToScreen();  -- disabled; currently a no-op
            ;
        }
        //draw things on the screen before any nodes or edges appear
        MainClass.displayManager.PaintUnderScreen(doubleBufferGraphic);
        //draw all the nodes (edges are painted afterwards, i.e. on top of them)
        DisplayManager.NodeInfo nodeDisplayInfo;
        int xCoord, yCoord, imageWidth, imageHeight;
        for(Enumeration nodes = MainClass.displayManager.GetNodeInfo(); nodes.hasMoreElements();)
        {
            nodeDisplayInfo = (DisplayManager.NodeInfo)nodes.nextElement();
            //figure out where to put the node on the screen
            xCoord = ScaleNodeXCoordToScreenCoord(MainClass.locationAnalyzer.GetX(nodeDisplayInfo.GetNodeNumber()));
            yCoord = ScaleNodeYCoordToScreenCoord(MainClass.locationAnalyzer.GetY(nodeDisplayInfo.GetNodeNumber()));
            //if that spot is not on the visible area, don't draw it at all
            //NOTE(review): only the right/bottom edges are tested; nodes with
            //negative coordinates are still drawn -- confirm this is intended.
            if( (xCoord > this.getSize().getWidth()) ||
                    (yCoord > this.getSize().getHeight()))
            {
                continue;
            }
            // MDW: Don't scale size of dots
            //imageWidth = (int)Math.max(20,xScale*nodeDisplayInfo.GetImageWidth()/100);
            //imageHeight = (int)Math.max(20,yScale*nodeDisplayInfo.GetImageHeight()/100);
            imageWidth = imageHeight = 20;
            // The node image is centered on (xCoord, yCoord).
            MainClass.displayManager.PaintAllNodes(nodeDisplayInfo.GetNodeNumber(), xCoord-imageWidth/2, yCoord-imageHeight/2, xCoord+imageWidth/2, yCoord+imageHeight/2, doubleBufferGraphic);
        }
        //draw all the edges
        DisplayManager.EdgeInfo edgeDisplayInfo;
        for(Enumeration edges = MainClass.displayManager.GetEdgeInfo(); edges.hasMoreElements();)
        {
            edgeDisplayInfo = (DisplayManager.EdgeInfo)edges.nextElement();
            //figure out the coordinates of the endpoints of the edge
            int x1 = ScaleNodeXCoordToScreenCoord(MainClass.locationAnalyzer.GetX(edgeDisplayInfo.GetSourceNodeNumber()));
            int y1 = ScaleNodeYCoordToScreenCoord(MainClass.locationAnalyzer.GetY(edgeDisplayInfo.GetSourceNodeNumber()));
            int x2 = ScaleNodeXCoordToScreenCoord(MainClass.locationAnalyzer.GetX(edgeDisplayInfo.GetDestinationNodeNumber()));
            int y2 = ScaleNodeYCoordToScreenCoord(MainClass.locationAnalyzer.GetY(edgeDisplayInfo.GetDestinationNodeNumber()));
            //		edgeDisplayInfo.paint(doubleBufferGraphic);
            MainClass.displayManager.PaintAllEdges(edgeDisplayInfo.GetSourceNodeNumber(), edgeDisplayInfo.GetDestinationNodeNumber(), x1, y1, x2, y2, doubleBufferGraphic);
        }
        //draw things over the entire display
        MainClass.displayManager.PaintOverScreen(doubleBufferGraphic);
        //Make everything that was drawn visible
        g.drawImage(doubleBufferImage, 0, 0, null);
    }
//END OF PAINT
//----------------------------------------------------------------
/**
 * AWT update hook: delegate straight to {@code paint} so the component is
 * redrawn without the default background clear (paint already renders into a
 * double buffer, so clearing first would only cause flicker).
 */
public void update(Graphics g) { paint(g); }
/**
 * Convenience overload that repaints immediately onto the given graphics
 * context by delegating to {@code paint}. NOTE(review): this does not
 * override {@code Component.repaint} (no such {@code Graphics} overload
 * exists there); it is a class-local helper.
 */
public void repaint(Graphics g) { paint(g); }
//*******************************************************
//*******************************************************
//the following code sets X and Y scaling factors for
//the X and Y coordinates of the nodes
//*******************************************************
//the norm is to set scaling factors to keep everything
//on the screen. However, if the user attempts to zoom
//the scaling factors can be larger and will be held
//constant
//*******************************************************
//*******************************************************
//----------------------------------------------------------------
//FIT TO SCREEN
//this function will set the scaling factors above such
//that all nodes are scaled to within the screen viewing area
public void FitToScreen() //do not synchronize
{
    // Recomputes xScale/yScale and the intercepts so that every node that
    // should be fitted lies inside the visible area, leaving a border of
    // 1/xMargin (resp. 1/yMargin) of the window size on each side.
    //
    // BUG FIX: the extrema were previously seeded with Double.MIN_VALUE,
    // which is the smallest POSITIVE double (~4.9e-324), so the "largest"
    // coordinate search was wrong whenever all coordinates were negative.
    // Seed with the infinities instead.
    double largestXCoord = Double.NEGATIVE_INFINITY;
    double smallestXCoord = Double.POSITIVE_INFINITY;
    double largestYCoord = Double.NEGATIVE_INFINITY;
    double smallestYCoord = Double.POSITIVE_INFINITY;
    double x, y;
    //find the largest and smallest coords over all nodes that participate in fitting
    DisplayManager.NodeInfo currentDisplayInfo;
    for(Enumeration nodes = MainClass.displayManager.GetNodeInfo(); nodes.hasMoreElements();)
    {
        currentDisplayInfo = (DisplayManager.NodeInfo)nodes.nextElement();
        //the very first nodes to appear are always forced onto the screen
        if(screenIsEmpty){currentDisplayInfo.SetFitOnScreen(true);}
        if( (currentDisplayInfo.GetDisplayThisNode() == true) && (currentDisplayInfo.GetFitOnScreen() == true) )
        {
            x = MainClass.locationAnalyzer.GetX(currentDisplayInfo.GetNodeNumber());
            y = MainClass.locationAnalyzer.GetY(currentDisplayInfo.GetNodeNumber());
            if(x > largestXCoord) {largestXCoord = x;}
            if(x < smallestXCoord){smallestXCoord = x;}
            if(y > largestYCoord){largestYCoord = y;}
            if(y < smallestYCoord){smallestYCoord = y;}
        }
    }
    //here we use the following equations to set the scaling factors:
    //  xScale*smallestXCoord + xScaleIntercept = margin
    //  xScale*largestXCoord  + xScaleIntercept = window.width - margin
    //And the same for the y scaling factors.
    //Note that I want a border of <Margin>% of the screen on both sides
    Dimension d = getSize();
    //Guard against a degenerate (zero-width) bounding box — zero or one
    //fitted node — which previously produced an infinite/NaN scale.
    double xRange = largestXCoord - smallestXCoord;
    double yRange = largestYCoord - smallestYCoord;
    if(xRange <= 0){xRange = 1;}
    if(yRange <= 0){yRange = 1;}
    xScale = (d.width-2*(d.width/xMargin))/xRange;
    yScale = (d.height-2*(d.height/yMargin))/yRange;
    xScale = Math.min(xScale, yScale);//use one scale on both axes so the layout is not warped
    yScale = xScale;
    xScaleIntercept = -xScale*smallestXCoord + d.width/xMargin;
    yScaleIntercept = -yScale*smallestYCoord + d.height/yMargin;
    screenIsEmpty = false;
    if(MainClass.displayManager.proprietaryNodeInfo.isEmpty())//no nodes at all: fall back to the identity mapping
    {
        screenIsEmpty = true;
        xScale = 1;
        yScale = 1;
        xScaleIntercept = 0;
        yScaleIntercept = 0;
    }
}
public int ScaleNodeXCoordToScreenCoord(double pXCoord)//do not synchronize
{ //take the local coordinate system of the nodes and show it as a graphical (pixel) x coordinate
    //The (int) narrowing conversion is exactly what Double.intValue() did,
    //without the deprecated "new Double(...)" boxing.
    return (int)(xScale*pXCoord + xScaleIntercept);
}
public int ScaleNodeYCoordToScreenCoord(double pYCoord) //do not synchronize
{ //take the local coordinate system of the nodes and show it as a graphical (pixel) y coordinate
    //The (int) narrowing conversion is exactly what Double.intValue() did,
    //without the deprecated "new Double(...)" boxing.
    return (int)(yScale*pYCoord + yScaleIntercept);
}
public Double ScaleScreenXCoordToNodeCoord(double pXCoord)//do not synchronize
{ //take a graphical (pixel) x coordinate and map it back into the nodes' local coordinate system
    //Double.valueOf replaces the deprecated "new Double(double)" constructor;
    //the boxed Double return type is preserved for existing callers.
    return Double.valueOf((pXCoord - xScaleIntercept)/xScale);
}
public Double ScaleScreenYCoordToNodeCoord(double pYCoord) //do not synchronize
{ //take a graphical (pixel) y coordinate and map it back into the nodes' local coordinate system
    //Double.valueOf replaces the deprecated "new Double(double)" constructor;
    //the boxed Double return type is preserved for existing callers.
    return Double.valueOf((pYCoord - yScaleIntercept)/yScale);
}
//------------------------------------------------------------------
//GET/SET
/** Returns whether the view refits all nodes to the window on every repaint. */
public boolean GetFitToScreenAutomatically()
{
    return this.fitToScreenAutomatically;
}
/** Enables or disables automatic refitting of all nodes to the window on repaint. */
public void SetFitToScreenAutomatically(boolean p)
{
    this.fitToScreenAutomatically = p;
}
/** Returns the current node-to-screen scaling factor for the x axis. */
public double GetXScale()
{
    return this.xScale;
}
/** Returns the current node-to-screen scaling factor for the y axis. */
public double GetYScale()
{
    return this.yScale;
}
//GET/SET
//------------------------------------------------------------------
//{{DECLARE_CONTROLS
//}}
}