repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
nikhilvibhav/camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FileToFtpsExplicitSSLWithoutClientAuthAndGlobalSSLContextParametersIT.java | 2315 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote.integration;
import org.apache.camel.CamelContext;
import org.apache.camel.SSLContextParametersAware;
import org.apache.camel.support.jsse.KeyStoreParameters;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.camel.support.jsse.TrustManagersParameters;
public class FileToFtpsExplicitSSLWithoutClientAuthAndGlobalSSLContextParametersIT
        extends FileToFtpsExplicitSSLWithoutClientAuthIT {

    /**
     * Builds the Camel context and registers a global {@link SSLContextParameters}
     * backed by the test keystore, then flags the ftps component so it picks the
     * global SSL configuration up instead of per-endpoint settings.
     */
    @Override
    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();

        // Trust material comes from the keystore bundled with the test module.
        KeyStoreParameters keyStore = new KeyStoreParameters();
        keyStore.setResource("server.jks");
        keyStore.setPassword("password");

        TrustManagersParameters trustManagers = new TrustManagersParameters();
        trustManagers.setKeyStore(keyStore);

        // NOTE(review): SSLv3 is intentional here — this IT exercises the legacy
        // explicit-SSL handshake against the embedded FTPS server.
        SSLContextParameters sslParams = new SSLContextParameters();
        sslParams.setSecureSocketProtocol("SSLv3");
        sslParams.setTrustManagers(trustManagers);

        // Register globally and tell the ftps component to use the global context.
        camelContext.setSSLContextParameters(sslParams);
        SSLContextParametersAware ftpsComponent
                = (SSLContextParametersAware) camelContext.getComponent("ftps");
        ftpsComponent.setUseGlobalSslContextParameters(true);

        return camelContext;
    }

    /**
     * Explicit-mode (implicit=false) FTPS endpoint; secure data channel defaults
     * are disabled so the globally registered SSL context drives the handshake.
     */
    @Override
    protected String getFtpUrl() {
        return "ftps://admin@localhost:{{ftp.server.port}}/tmp2/camel"
               + "?password=admin&initialDelay=2000"
               + "&disableSecureDataChannelDefaults=true&implicit=false&delete=true";
    }
}
| apache-2.0 |
mattcaldwell/incubator-geode | gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectDUnitTest.java | 27820 | /*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache.tier.sockets;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.client.PoolFactory;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.cache.client.internal.Connection;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
import com.gemstone.gemfire.cache.util.BridgeServer;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.distributed.internal.ServerLocation;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.BridgeServerImpl;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.VM;
/**
 * Tests for durable reconnect issue: a durable client that disconnects with
 * keepalive=true must, on reconnect, be wired back to the servers that still
 * host its subscription queue (primary plus redundant copies).
 *
 * @author Yogesh
 * @since 5.2
 */
public class DurableClientReconnectDUnitTest extends DistributedTestCase
{
  // Static state is per-JVM; in server VMs "cache" is the server cache, in the
  // controller VM it is the durable client's cache.
  private static Cache cache = null;
  private static VM server1 = null;
  private static VM server2 = null;
  private static VM server3 = null;
  private static VM server4 = null;
  // Client-side pool and one of its connections, populated by createCacheClient*().
  private static PoolImpl pool = null;
  private static Connection conn = null ;
  // Ports of the four bridge servers started in setUp().
  private static Integer PORT1 ;
  private static Integer PORT2 ;
  private static Integer PORT3 ;
  private static Integer PORT4 ;
  // host+port strings, formatted to match entries of pool.getRedundantNames().
  private static String SERVER1;
  private static String SERVER2;
  private static String SERVER3;
  private static String SERVER4;
  private static final String REGION_NAME = "DurableClientReconnectDUnitTest_region";
  // Helper instance used to invoke the non-static verification/close methods.
  private DurableClientReconnectDUnitTest instance = null ;

  /** constructor */
  public DurableClientReconnectDUnitTest(String name) {
    super(name);
  }

  public static void caseSetUp() throws Exception {
    DistributedTestCase.disconnectAllFromDS();
  }

  /**
   * Starts one bridge server in each of the four dunit VMs and records their
   * host+port identities for later redundancy checks.
   */
  @Override
  public void setUp() throws Exception
  {
    super.setUp();
    final Host host = Host.getHost(0);
    server1 = host.getVM(0);
    server2 = host.getVM(1);
    server3 = host.getVM(2);
    server4 = host.getVM(3);
    // start servers first
    PORT1 = ((Integer) server1.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
    PORT2 = ((Integer) server2.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
    PORT3 = ((Integer) server3.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
    PORT4 = ((Integer) server4.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
    SERVER1 = getServerHostName(host)+PORT1;
    SERVER2 = getServerHostName(host)+PORT2;
    SERVER3 = getServerHostName(host)+PORT3;
    SERVER4 = getServerHostName(host)+PORT4;
    //CacheServerTestUtil.disableShufflingOfEndpoints();
    // Keep endpoint shuffling enabled so reconnect must find the queue server itself.
    System.setProperty("gemfire.bridge.disableShufflingOfEndpoints", "false");
  }

  /** Redundancy 0, single server: reconnect must land on the same primary. */
  public void testDurableReconnectSingleServer() throws Exception
  {
    createCacheClientAndConnectToSingleServer(getServerHostName(Host.getHost(0)), 0);
    List redundantServers = pool.getRedundantNames();
    String primaryName = pool.getPrimaryName();
    assertTrue(redundantServers.isEmpty());
    closeCache(true); // keepalive=true: server retains the durable queue
    //Wait for server to cleanup client resources
    //temporary fix for bug 38345.
    pause(2000);
    createCacheClientAndConnectToSingleServer(getServerHostName(Host.getHost(0)), 0);
    List redundantServers2 = pool.getRedundantNames();
    String primaryName2 = pool.getPrimaryName();
    assertTrue(redundantServers2.isEmpty());
    assertTrue(primaryName2.equals(primaryName));
  }

  /** Same as above but with min-connections=0 on the pool. */
  public void testDurableReconnectSingleServerWithZeroConnPerServer() throws Exception
  {
    createCacheClientAndConnectToSingleServerWithZeroConnPerServer(getServerHostName(Host.getHost(0)), 0);
    List redundantServers = pool.getRedundantNames();
    String primaryName = pool.getPrimaryName();
    assertTrue(redundantServers.isEmpty());
    closeCache(true);
    createCacheClientAndConnectToSingleServerWithZeroConnPerServer(getServerHostName(Host.getHost(0)), 0);
    List redundantServers2 = pool.getRedundantNames();
    String primaryName2 = pool.getPrimaryName();
    assertTrue(redundantServers2.isEmpty());
    assertTrue(primaryName2.equals(primaryName));
  }

  /** Redundancy 0 against all four servers: reconnect must keep the same primary. */
  public void testDurableReconnectNonHA() throws Exception
  {
    createCacheClient(0);
    List redundantServers = pool.getRedundantNames();
    String primaryName = pool.getPrimaryName();
    assertTrue(redundantServers.isEmpty());
    closeCache(true);
    //Wait for server to cleanup client resources
    //temporary fix for bug 38345.
    pause(2000);
    createCacheClient(0);
    List redundantServers2 = pool.getRedundantNames();
    String primaryName2 = pool.getPrimaryName();
    assertTrue(redundantServers2.isEmpty());
    assertTrue(primaryName2.equals(primaryName));
  }

  /**
   * (R = 1 ) , four servers , all Servers are up, Check client reconnect to either of server having queue.
   * @throws Exception
   */
  public void testDurableReconnect() throws Exception
  {
    //create client cache and Send clientReady message
    createCacheClient();
    // Queue servers = primary + redundant copies.
    HashSet redundantServers = new HashSet(pool.getRedundantNames());
    redundantServers.add(pool.getPrimaryName());
    instance.determineAndVerfiyRedundantServers(redundantServers);
    instance.determineAndVerfiyNonRedundantServers(redundantServers);
    // Stop the durable client
    closeCache(true);
    //Wait for server to cleanup client resources
    //temporary fix for bug 38345.
    pause(2000);
    createCacheClient();
    HashSet redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
    redundantServersAfterReconnect.add(pool.getPrimaryName());
    instance.determineAndVerfiyRedundantServers(redundantServersAfterReconnect);
    instance.determineAndVerfiyNonRedundantServers(redundantServersAfterReconnect);
    // The queue-hosting server set must be identical after reconnect.
    assertTrue(redundantServers.equals(redundantServersAfterReconnect));
  }

  /**
   * Same server set after reconnect, but the primary role is expected to move
   * to the other queue server.
   */
  public void testDurableReconnect_DiffernetPrimary() throws Exception
  {
    //create client cache and Send clientReady message
    createCacheClient();
    HashSet redundantServers = new HashSet(pool.getRedundantNames());
    String primaryBefore = pool.getPrimaryName() ;
    redundantServers.add(primaryBefore);
    instance.determineAndVerfiyRedundantServers(redundantServers);
    instance.determineAndVerfiyNonRedundantServers(redundantServers);
    // Stop the durable client
    closeCache(true);
    //Wait for server to cleanup client resources
    //temporary fix for bug 38345.
    pause(2000);
    createCacheClient();
    HashSet redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
    String primaryAfter = pool.getPrimaryName() ;
    redundantServersAfterReconnect.add(primaryAfter);
    instance.determineAndVerfiyRedundantServers(redundantServersAfterReconnect);
    instance.determineAndVerfiyNonRedundantServers(redundantServersAfterReconnect);
    assertTrue(redundantServers.equals(redundantServersAfterReconnect));
    assertFalse(primaryBefore.equals(primaryAfter));
  }

  /**
   * One of the two queue servers is killed while the client is down; reconnect
   * must keep the surviving queue server and pick one new redundant server.
   */
  public void testDurableReconnectWithOneRedundantServerDown() throws Exception
  {
    //create client cache and Send clientReady message
    createCacheClient();
    List redundantServers = pool.getRedundantNames();
    // NOTE(review): mutates the list returned by getRedundantNames() — assumes a
    // mutable list; the BothRedundantServersDown test guards the empty case.
    redundantServers.add(pool.getPrimaryName());
    assertTrue(redundantServers.size()== 2);
    instance.determineAndVerfiyRedundantServers(redundantServers);
    instance.determineAndVerfiyNonRedundantServers(redundantServers);
    // Stop the durable client
    closeCache(true);
    Object serverArray[] = redundantServers.toArray();
    String rServer1 = (String)serverArray[0];
    String rServer2 = (String)serverArray[1];
    instance.closeServer(rServer1);
    createCacheClient();
    List redundantServersAfterReconnect = pool.getRedundantNames();
    redundantServersAfterReconnect.add(pool.getPrimaryName());
    instance.determineAndVerfiyRedundantServers(redundantServersAfterReconnect);
    // History = current queue servers plus the one we killed; none of the
    // remaining servers may still claim a durable queue.
    List redundantServersHistory = new ArrayList();
    redundantServersHistory.addAll(redundantServersAfterReconnect);
    redundantServersHistory.add(rServer1);
    instance.determineAndVerfiyNonRedundantServers(redundantServersHistory);
    assertFalse(redundantServers.equals(redundantServersAfterReconnect));
    assertTrue(redundantServersAfterReconnect.size()== 2);
    assertFalse(redundantServersAfterReconnect.contains(rServer1));
    assertTrue(redundantServersAfterReconnect.contains(rServer2));
  }

  /**
   * Both queue servers are killed while the client is down; reconnect must
   * build a brand-new queue pair on the two surviving servers.
   */
  public void testDurableReconnectWithBothRedundantServersDown() throws Exception
  {
    //create client cache and Send clientReady message
    createCacheClient();
    List redundantServers = pool.getRedundantNames();
    redundantServers.add(pool.getPrimaryName());
    assertTrue(redundantServers.size()== 2);
    instance.determineAndVerfiyRedundantServers(redundantServers);
    instance.determineAndVerfiyNonRedundantServers(redundantServers);
    // Stop the durable client
    closeCache(true);
    Object serverArray[] = redundantServers.toArray();
    String rServer1 = (String)serverArray[0];
    String rServer2 = (String)serverArray[1];
    instance.closeServer(rServer1);
    instance.closeServer(rServer2);
    createCacheClient();
    List redundantServersAfterReconnect = pool.getRedundantNames();
    // getRedundantNames() may hand back an immutable empty list here.
    if (redundantServersAfterReconnect.isEmpty()) {
      redundantServersAfterReconnect = new LinkedList();
    }
    redundantServersAfterReconnect.add(pool.getPrimaryName());
    List redundantServersHistory = new ArrayList();
    redundantServersHistory.addAll(redundantServersAfterReconnect);
    redundantServersHistory.add(rServer1);
    redundantServersHistory.add(rServer2);
    instance.determineAndVerfiyNonRedundantServers(redundantServersHistory);
    assertFalse(redundantServers.equals(redundantServersAfterReconnect));
    assertTrue(redundantServersAfterReconnect.size()== 2);
    assertFalse(redundantServersAfterReconnect.contains(rServer1));
    assertFalse(redundantServersAfterReconnect.contains(rServer2));
  }

  /**
   * Both NON-queue servers are killed; reconnect must still find exactly the
   * original queue pair. Transient SocketExceptions are expected while the
   * dead servers are probed.
   */
  public void testDurableReconnectWithBothNonRedundantServersDown() throws Exception
  {
    //create client cache and Send clientReady message
    createCacheClient();
    HashSet redundantServers = new HashSet(pool.getRedundantNames());
    redundantServers.add(pool.getPrimaryName());
    assertTrue(redundantServers.size()== 2);
    instance.determineAndVerfiyRedundantServers(redundantServers);
    instance.determineAndVerfiyNonRedundantServers(redundantServers);
    // Stop the durable client
    closeCache(true);
    // Compute the two servers that do NOT host the durable queue.
    Set nonRedundantSet = new HashSet();
    nonRedundantSet.add(SERVER1);
    nonRedundantSet.add(SERVER2);
    nonRedundantSet.add(SERVER3);
    nonRedundantSet.add(SERVER4);
    nonRedundantSet.removeAll(redundantServers);
    Object serverArray[] = nonRedundantSet.toArray();
    String rServer1 = (String)serverArray[0];
    String rServer2 = (String)serverArray[1];
    // can see sporadic socket closed exceptions
    final ExpectedException expectedEx = addExpectedException(
        SocketException.class.getName());
    instance.closeServer(rServer1);
    instance.closeServer(rServer2);
    createCacheClient();
    HashSet redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
    redundantServersAfterReconnect.add(pool.getPrimaryName());
    List redundantServersHistory = new ArrayList();
    redundantServersHistory.addAll(redundantServersAfterReconnect);
    redundantServersHistory.add(rServer1);
    redundantServersHistory.add(rServer2);
    instance.determineAndVerfiyNonRedundantServers(redundantServersHistory);
    expectedEx.remove();
    assertTrue(redundantServers.equals(redundantServersAfterReconnect));
    assertTrue(redundantServersAfterReconnect.size()== 2);
    assertFalse("redundantServersAfterReconnect contains " + rServer1, redundantServersAfterReconnect.contains(rServer1));
    assertFalse("redundantServersAfterReconnect contains " + rServer2, redundantServersAfterReconnect.contains(rServer2));
  }

  /** This test checks a problem found in bug 39332
   * 1. Durable client disconnects
   * 2. Durable client comes back, creates a create to server connection but not a queue connection
   * 3. Durable client disconnects again
   * 4. Durable client connects
   * 5. Eventually, all of the durable clients connections are closed because the durable expiration timer task created
   * in step 1 is never cancelled.
   */
  public void testBug39332() {
    //create client cache and Send clientReady message
    // durableClientTimeout=20s so an un-cancelled expiration task would fire
    // inside the 25s wait at the end of the test.
    createCacheClient(2, 20);
    HashSet redundantServers = new HashSet(pool.getRedundantNames());
    redundantServers.add(pool.getPrimaryName());
    instance.determineAndVerfiyRedundantServers(redundantServers);
    instance.determineAndVerfiyNonRedundantServers(redundantServers);
    getLogWriter().info("TEST - Durable client initialially has servers " + redundantServers);
    getLogWriter().info("TEST - Closing durable client for the first time");
    // Stop the durable client
    closeCache(true);
    getLogWriter().info("TEST - Durable client closed for the first time");
    //Wait for server to cleanup client resources
    //temporary fix for bug 38345.
    pause(2000);
    getLogWriter().info("TEST - Creating the durable client with one fewer servers");
    //We recreate the durable client, but this
    //Time we won't have it create any queues
    createCacheClient(2, 20, false);
    HashSet redundantServers2 = new HashSet(pool.getRedundantNames());
    redundantServers2.add(pool.getPrimaryName());
    getLogWriter().info("TEST - Durable client created again, now with servers " + redundantServers2);
    Host host = Host.getHost(0);
    //Make sure we create client to server connections to all of the servers
    pool.acquireConnection(new ServerLocation(getServerHostName(host), PORT1.intValue()));
    pool.acquireConnection(new ServerLocation(getServerHostName(host), PORT2.intValue()));
    pool.acquireConnection(new ServerLocation(getServerHostName(host), PORT3.intValue()));
    pool.acquireConnection(new ServerLocation(getServerHostName(host), PORT4.intValue()));
    getLogWriter().info("TEST - All pool connections are now aquired");
    closeCache(true);
    getLogWriter().info("TEST - closed durable client for the second time");
    //Wait for server to cleanup client resources
    //temporary fix for bug 38345.
    pause(2000);
    getLogWriter().info("TEST - creating durable client for the third time");
    //Now we should connect to all of the servers we were originally connected to
    createCacheClient(2, 20);
    HashSet redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
    redundantServersAfterReconnect.add(pool.getPrimaryName());
    getLogWriter().info("TEST - durable client created for the third time, now with servers " + redundantServersAfterReconnect);
    instance.determineAndVerfiyRedundantServers(redundantServersAfterReconnect);
    instance.determineAndVerfiyNonRedundantServers(redundantServersAfterReconnect);
    assertEquals(redundantServers, redundantServersAfterReconnect);
    //Now we wait to make sure the durable client expiration task isn't fired.
    pause(25000);
    getLogWriter().info("TEST - Finished waiting for durable client expiration task");
    redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
    redundantServersAfterReconnect.add(pool.getPrimaryName());
    instance.determineAndVerfiyRedundantServers(redundantServersAfterReconnect);
    instance.determineAndVerfiyNonRedundantServers(redundantServersAfterReconnect);
    assertEquals(redundantServers, redundantServersAfterReconnect);
  }

  /** Asserts every pre-reconnect queue server is still a queue server afterwards. */
  private static void verifyRedundantServers(Set redundantServers , Set redundantServersAfterReconnect) {
    try{
      Iterator iter = redundantServers.iterator();
      while(iter.hasNext()){
        Object endpointName = iter.next();
        assertTrue(redundantServersAfterReconnect.contains(endpointName));
      }
    }catch (Exception e){
      fail("test failed due to" , e);
    }
  }

  /** Runs in a server VM: waits until this server hosts zero client proxies. */
  private static void verifyNoDurableClientOnServer() {
    try{
      checkNumberOfClientProxies(0);
    }catch (Exception e){
      fail("test failed due to" , e);
    }
  }

  /**
   * Runs in a server VM: creates a replicated region and starts a bridge
   * server on a random available port.
   *
   * @return the port the bridge server is listening on
   */
  public static Integer createServerCache() throws Exception
  {
    Properties props = new Properties();
    new DurableClientReconnectDUnitTest("temp").createCache(props);
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.setDataPolicy(DataPolicy.REPLICATE);
    RegionAttributes attrs = factory.create();
    Region r = cache.createRegion(REGION_NAME, attrs);
    assertNotNull(r);
    BridgeServer server1 = cache.addBridgeServer();
    int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
    server1.setPort(port);
    server1.setNotifyBySubscription(true);
    server1.start();
    return new Integer(server1.getPort());
  }

  /** For each named server, verify it hosts the durable client's proxy. */
  private void determineAndVerfiyRedundantServers(Collection redundantServers) {
    if(redundantServers.contains(SERVER1)){
      server1.invoke(DurableClientReconnectDUnitTest.class, "verifyDurableClientOnServer");
    }
    if(redundantServers.contains(SERVER2)){
      server2.invoke(DurableClientReconnectDUnitTest.class, "verifyDurableClientOnServer");
    }
    if (redundantServers.contains(SERVER3)){
      server3.invoke(DurableClientReconnectDUnitTest.class, "verifyDurableClientOnServer");
    }
    if(redundantServers.contains(SERVER4)){
      server4.invoke(DurableClientReconnectDUnitTest.class, "verifyDurableClientOnServer");
    }
  }

  /** For each server NOT in the given set, verify it hosts no durable proxy. */
  private void determineAndVerfiyNonRedundantServers(Collection redundantServers) {
    if(!redundantServers.contains(SERVER1)){
      server1.invoke(DurableClientReconnectDUnitTest.class, "verifyNoDurableClientOnServer");
    }
    if(!redundantServers.contains(SERVER2)){
      server2.invoke(DurableClientReconnectDUnitTest.class, "verifyNoDurableClientOnServer");
    }
    if (!redundantServers.contains(SERVER3)){
      server3.invoke(DurableClientReconnectDUnitTest.class, "verifyNoDurableClientOnServer");
    }
    if(!redundantServers.contains(SERVER4)){
      server4.invoke(DurableClientReconnectDUnitTest.class, "verifyNoDurableClientOnServer");
    }
  }

  /** Shuts down the cache in whichever server VM matches the host+port string. */
  private void closeServer(String server) {
    if(server.equals(SERVER1)){
      server1.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    }
    if(server.equals(SERVER2)){
      server2.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    }
    if (server.equals(SERVER3)){
      server3.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    }
    if(server.equals(SERVER4)){
      server4.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    }
  }

  /** Runs in a server VM: exactly one proxy, durable, with the expected id. */
  private static void verifyDurableClientOnServer() {
    try{
      checkNumberOfClientProxies(1);
      CacheClientProxy proxy = getClientProxy();
      assertNotNull(proxy);
      // Verify that it is durable and its properties are correct
      assertTrue(proxy.isDurable());
      assertEquals("DurableClientReconnectDUnitTest_client", proxy.getDurableId());
      //    assertEquals(60, proxy.getDurableTimeout());
    }catch (Exception e){
      fail("test failed due to" , e);
    }
  }

  /** Returns this server's single CacheClientProxy, or null if there is none. */
  private static CacheClientProxy getClientProxy() {
    // Get the CacheClientNotifier
    CacheClientNotifier notifier = getBridgeServer().getAcceptor()
        .getCacheClientNotifier();
    // Get the CacheClientProxy or not (if proxy set is empty)
    CacheClientProxy proxy = null;
    Iterator i = notifier.getClientProxies().iterator();
    if (i.hasNext()) {
      proxy = (CacheClientProxy) i.next();
    }
    return proxy;
  }

  /** Polls (up to 15s) until the proxy count matches, to absorb cleanup lag. */
  private static void checkNumberOfClientProxies(final int expected) {
    WaitCriterion ev = new WaitCriterion() {
      public boolean done() {
        return expected == getNumberOfClientProxies();
      }
      public String description() {
        return null;
      }
    };
    DistributedTestCase.waitForCriterion(ev, 15 * 1000, 200, true);
  }

  protected static int getNumberOfClientProxies() {
    return getBridgeServer().getAcceptor().getCacheClientNotifier()
        .getClientProxies().size();
  }

  /** Returns the first (and only) bridge server of this VM's cache. */
  private static BridgeServerImpl getBridgeServer() {
    BridgeServerImpl bridgeServer = (BridgeServerImpl) cache.getBridgeServers().iterator().next();
    assertNotNull(bridgeServer);
    return bridgeServer;
  }

  /**
   * Creates the static cache for this VM. The connect/disconnect/reconnect
   * dance forces a fresh DistributedSystem with the given properties.
   */
  private void createCache(Properties props)
  {try{
    DistributedSystem ds = getSystem(props);
    assertNotNull(ds);
    ds.disconnect();
    ds = getSystem(props);
    cache = CacheFactory.create(ds);
    assertNotNull(cache);
  } catch(Exception e){
    fail("test failed due to " , e );
  }
  }

  private void createCacheClient()
  {
    createCacheClient(1);
  }

  /** Pool factory preconfigured with all four server endpoints. */
  protected PoolFactory getPoolFactory() {
    Host host = Host.getHost(0);
    PoolFactory factory = PoolManager.createFactory()
        .addServer(getServerHostName(host), PORT1.intValue())
        .addServer(getServerHostName(host), PORT2.intValue())
        .addServer(getServerHostName(host), PORT3.intValue())
        .addServer(getServerHostName(host), PORT4.intValue());
    return factory;
  }

  private void createCacheClient(int redundancyLevel) {
    createCacheClient(redundancyLevel, 60);
  }

  private void createCacheClient(int redundancyLevel, final int durableClientTimeout){
    createCacheClient(redundancyLevel, durableClientTimeout, true);
  }

  /**
   * Creates the durable client cache, its pool against all four servers, the
   * test region, and sends readyForEvents. Also (re)sets {@code instance},
   * {@code pool} and {@code conn}.
   *
   * @param redundancyLevel subscription redundancy for the pool
   * @param durableClientTimeout durable timeout in seconds
   * @param queueEnabled false to skip creating a subscription queue (bug 39332)
   */
  private void createCacheClient(int redundancyLevel, final int durableClientTimeout, boolean queueEnabled){
    try{
      final String durableClientId = "DurableClientReconnectDUnitTest_client";
      Properties props = getClientDistributedSystemProperties(durableClientId,durableClientTimeout );
      instance = new DurableClientReconnectDUnitTest("temp");
      instance.createCache(props);
      //    Host host = Host.getHost(0);
      PoolImpl p = (PoolImpl) getPoolFactory()
          .setSubscriptionEnabled(queueEnabled)
          .setReadTimeout(10000)
          //    .setRetryInterval(2000)
          .setSubscriptionRedundancy(redundancyLevel)
          .create("DurableClientReconnectDUnitTestPool");
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setPoolName(p.getName());
      RegionAttributes attrs = factory.create();
      Region r = cache.createRegion(REGION_NAME, attrs);
      assertNotNull(r);
      pool = p;
      conn = pool.acquireConnection();
      assertNotNull(conn);
      cache.readyForEvents();
    }catch(Exception e){
      fail("test failed due to " , e );
    }
  }

  /** Like createCacheClient but the pool knows only SERVER1's endpoint. */
  private void createCacheClientAndConnectToSingleServer(String host, int redundancyLevel){
    try{
      final String durableClientId = "DurableClientReconnectDUnitTest_client";
      final int durableClientTimeout = 60; // keep the client alive for 60 seconds
      Properties props = getClientDistributedSystemProperties(durableClientId,durableClientTimeout );
      instance = new DurableClientReconnectDUnitTest("temp");
      instance.createCache(props);
      PoolImpl p = (PoolImpl)PoolManager.createFactory()
          .addServer(host, PORT1.intValue())
          .setSubscriptionEnabled(true)
          .setReadTimeout(10000)
          //    .setRetryInterval(2000)
          .setSubscriptionRedundancy(redundancyLevel)
          .create("DurableClientReconnectDUnitTestPool");
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setPoolName(p.getName());
      RegionAttributes attrs = factory.create();
      Region r = cache.createRegion(REGION_NAME, attrs);
      assertNotNull(r);
      pool = p;
      conn = pool.acquireConnection();
      assertNotNull(conn);
      cache.readyForEvents();
    }catch(Exception e){
      fail("test failed due to " , e );
    }
  }

  /** Single-server variant with minConnections=0 on the pool. */
  private void createCacheClientAndConnectToSingleServerWithZeroConnPerServer(String host, int redundancyLevel){
    try{
      final String durableClientId = "DurableClientReconnectDUnitTest_client";
      final int durableClientTimeout = 60; // keep the client alive for 60 seconds
      Properties props = getClientDistributedSystemProperties(durableClientId,durableClientTimeout );
      instance = new DurableClientReconnectDUnitTest("temp");
      instance.createCache(props);
      PoolImpl p = (PoolImpl)PoolManager.createFactory()
          .addServer(host, PORT1.intValue())
          .setSubscriptionEnabled(true)
          .setReadTimeout(10000)
          //    .setRetryInterval(2000)
          .setMinConnections(0)
          .setSubscriptionRedundancy(redundancyLevel)
          .create("DurableClientReconnectDUnitTestPool");
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setPoolName(p.getName());
      RegionAttributes attrs = factory.create();
      Region r = cache.createRegion(REGION_NAME, attrs);
      assertNotNull(r);
      pool = p;
      conn = pool.acquireConnection();
      assertNotNull(conn);
      cache.readyForEvents();
    }catch(Exception e){
      fail("test failed due to " , e );
    }
  }

  /** Distributed-system properties marking this member as a durable client. */
  private Properties getClientDistributedSystemProperties(
      String durableClientId, int durableClientTimeout) {
    Properties properties = new Properties();
    properties.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
    properties.setProperty(DistributionConfig.LOCATORS_NAME, "");
    properties.setProperty(DistributionConfig.DURABLE_CLIENT_ID_NAME, durableClientId);
    properties.setProperty(DistributionConfig.DURABLE_CLIENT_TIMEOUT_NAME, String.valueOf(durableClientTimeout));
    return properties;
  }

  @Override
  public void tearDown2() throws Exception {
    super.tearDown2();
    // close the clients first
    closeCache();
    // then close the servers
    server1.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    server2.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    server3.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
    server4.invoke(DurableClientReconnectDUnitTest.class, "closeCache");
  }

  /**
   * Closes this VM's cache.
   *
   * @param keepAlive true to keep the durable subscription queue on the server
   */
  public void closeCache(boolean keepAlive)
  {
    if (cache != null && !cache.isClosed()) {
      cache.close(keepAlive);
      cache.getDistributedSystem().disconnect();
    }
  }

  /** Non-durable close: drops the queue; also invoked remotely in server VMs. */
  public static void closeCache()
  {
    if (cache != null && !cache.isClosed()) {
      cache.close();
      cache.getDistributedSystem().disconnect();
    }
  }
}
| apache-2.0 |
Darsstar/framework | compatibility-server/src/main/java/com/vaadin/v7/data/util/converter/DateToLongConverter.java | 2390 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.v7.data.util.converter;
import java.util.Date;
import java.util.Locale;
/**
 * A converter that converts from {@link Long} to {@link Date} and back.
 * The model side is the epoch-millisecond value; the presentation side is the
 * corresponding {@link Date}. {@code null} passes through in both directions.
 *
 * @author Vaadin Ltd
 * @since 7.0
 */
@Deprecated
public class DateToLongConverter implements Converter<Date, Long> {

    /**
     * Converts a presentation {@link Date} to its epoch-millisecond model
     * value. Returns {@code null} for a {@code null} input.
     */
    @Override
    public Long convertToModel(Date value, Class<? extends Long> targetType,
            Locale locale) {
        return value == null ? null : value.getTime();
    }

    /**
     * Converts an epoch-millisecond model value to a presentation
     * {@link Date}. Returns {@code null} for a {@code null} input.
     *
     * @throws ConversionException
     *             if the requested target type is not exactly {@link Date}
     */
    @Override
    public Date convertToPresentation(Long value,
            Class<? extends Date> targetType, Locale locale) {
        // Only the exact Date class is supported as a presentation type;
        // subclasses of Date are rejected here (reference comparison).
        if (targetType != getPresentationType()) {
            throw new ConversionException(
                    "Converter only supports " + getPresentationType().getName()
                            + " (targetType was " + targetType.getName() + ")");
        }
        return value == null ? null : new Date(value);
    }

    @Override
    public Class<Long> getModelType() {
        return Long.class;
    }

    @Override
    public Class<Date> getPresentationType() {
        return Date.class;
    }
}
| apache-2.0 |
ruks/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/gen/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/MediationPoliciesApi.java | 4988 | package org.wso2.carbon.apimgt.rest.api.publisher.v1;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ErrorDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.MediationListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.MediationPoliciesApiService;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.impl.MediationPoliciesApiServiceImpl;
import org.wso2.carbon.apimgt.api.APIManagementException;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.inject.Inject;
import io.swagger.annotations.*;
import java.io.InputStream;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.apache.cxf.jaxrs.ext.multipart.Multipart;
import java.util.Map;
import java.util.List;
import javax.validation.constraints.*;
@Path("/mediation-policies")
@Api(description = "the mediation-policies API")
public class MediationPoliciesApi {
@Context MessageContext securityContext;
MediationPoliciesApiService delegate = new MediationPoliciesApiServiceImpl();
    /**
     * GET /mediation-policies — lists all global-level mediation policies,
     * paginated via {@code limit}/{@code offset}. Generated JAX-RS stub: all
     * real work is delegated to {@code MediationPoliciesApiServiceImpl};
     * do not add logic here, it will be overwritten on regeneration.
     */
    @GET
    @Produces({ "application/json" })
    @ApiOperation(value = "Get all global level mediation policies ", notes = "This operation provides you a list of available all global level mediation policies. ", response = MediationListDTO.class, authorizations = {
        @Authorization(value = "OAuth2Security", scopes = {
            @AuthorizationScope(scope = "apim:api_view", description = "View API"),
            @AuthorizationScope(scope = "apim:api_manage", description = "Manage all API related operations"),
            @AuthorizationScope(scope = "apim:mediation_policy_view", description = "View mediation policies"),
            @AuthorizationScope(scope = "apim:mediation_policy_manage", description = "Update and delete mediation policies")
        })
    }, tags={ "Global Mediation Policies", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "OK. List of mediation policies is returned. ", response = MediationListDTO.class),
        @ApiResponse(code = 304, message = "Not Modified. Empty body because the client has already the latest version of the requested resource (Will be supported in future). ", response = Void.class),
        @ApiResponse(code = 406, message = "Not Acceptable. The requested media type is not supported.", response = ErrorDTO.class) })
    public Response getAllGlobalMediationPolicies( @ApiParam(value = "Maximum size of resource array to return. ", defaultValue="25") @DefaultValue("25") @QueryParam("limit") Integer limit, @ApiParam(value = "Starting point within the complete list of items qualified. ", defaultValue="0") @DefaultValue("0") @QueryParam("offset") Integer offset, @ApiParam(value = "-Not supported yet-") @QueryParam("query") String query, @ApiParam(value = "Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource. " )@HeaderParam("If-None-Match") String ifNoneMatch) throws APIManagementException{
        return delegate.getAllGlobalMediationPolicies(limit, offset, query, ifNoneMatch, securityContext);
    }
@GET
@Path("/{mediationPolicyId}/content")
@Produces({ "application/json" })
@ApiOperation(value = "Download a Global Mediation Policy", notes = "This operation can be used to download a particular global mediation policy. ", response = Void.class, authorizations = {
@Authorization(value = "OAuth2Security", scopes = {
@AuthorizationScope(scope = "apim:api_view", description = "View API"),
@AuthorizationScope(scope = "apim:api_manage", description = "Manage all API related operations"),
@AuthorizationScope(scope = "apim:mediation_policy_view", description = "View mediation policies"),
@AuthorizationScope(scope = "apim:mediation_policy_manage", description = "Update and delete mediation policies")
})
}, tags={ "Global Mediation Policy" })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "OK. Mediation policy returned. ", response = Void.class),
@ApiResponse(code = 304, message = "Not Modified. Empty body because the client has already the latest version of the requested resource (Will be supported in future). ", response = Void.class),
@ApiResponse(code = 404, message = "Not Found. The specified resource does not exist.", response = ErrorDTO.class) })
public Response getGlobalMediationPolicyContent(@ApiParam(value = "Mediation policy Id ",required=true) @PathParam("mediationPolicyId") String mediationPolicyId, @ApiParam(value = "Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource. " )@HeaderParam("If-None-Match") String ifNoneMatch) throws APIManagementException{
return delegate.getGlobalMediationPolicyContent(mediationPolicyId, ifNoneMatch, securityContext);
}
}
| apache-2.0 |
tkopczynski/camel | components/camel-docker/src/test/java/org/apache/camel/component/docker/headers/PullImageCmdHeaderTest.java | 2736 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.docker.headers;
import java.util.Map;
import com.github.dockerjava.api.command.PullImageCmd;
import com.github.dockerjava.core.command.PullImageResultCallback;
import org.apache.camel.component.docker.DockerConstants;
import org.apache.camel.component.docker.DockerOperation;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
/**
* Validates Pull Image Request headers are applied properly
*/
public class PullImageCmdHeaderTest extends BaseDockerHeaderTest<PullImageCmd> {

    /** Mocked pull-image command returned by the mocked docker client. */
    @Mock
    private PullImageCmd mockObject;

    /** Mocked callback returned by {@code exec()}; completion is stubbed below. */
    @Mock
    private PullImageResultCallback callback;

    /**
     * Sends a message with repository/tag/registry headers and verifies each
     * header value is applied to the corresponding PullImageCmd setter.
     */
    @Test
    public void pullImageHeaderTest() {

        String repository = "docker/empty";
        String tag = "1.0";
        String registry = "registry";

        Map<String, Object> headers = getDefaultParameters();
        headers.put(DockerConstants.DOCKER_REPOSITORY, repository);
        headers.put(DockerConstants.DOCKER_TAG, tag);
        headers.put(DockerConstants.DOCKER_REGISTRY, registry);

        template.sendBodyAndHeaders("direct:in", "", headers);

        Mockito.verify(dockerClient, Mockito.times(1)).pullImageCmd(repository);
        Mockito.verify(mockObject, Mockito.times(1)).withTag(Matchers.eq(tag));
        Mockito.verify(mockObject, Mockito.times(1)).withRegistry(Matchers.eq(registry));

    }

    @Override
    protected void setupMocks() {
        Mockito.when(dockerClient.pullImageCmd(Matchers.anyString())).thenReturn(mockObject);
        Mockito.when(mockObject.exec(Mockito.anyObject())).thenReturn(callback);
        try {
            Mockito.when(callback.awaitCompletion()).thenReturn(callback);
        } catch (InterruptedException e) {
            // Do not swallow the interrupt: restore the thread's interrupted
            // status and fail fast — a half-stubbed mock would otherwise make
            // the test fail later in a confusing way.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while stubbing awaitCompletion()", e);
        }
    }

    @Override
    protected DockerOperation getOperation() {
        return DockerOperation.PULL_IMAGE;
    }

}
| apache-2.0 |
tom1120/ninja | zhaoyi/src/main/java/filters/LoggerFilter.java | 1399 | /**
* Copyright (C) 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package filters;
import ninja.Context;
import ninja.Filter;
import ninja.FilterChain;
import ninja.Result;
import org.slf4j.Logger;
import com.google.inject.Inject;
/**
* This is just a demo for a filter. This filter just logs a request in level
* info. Be inspired and use your own filter.
*
* Filters can be attached to classes or methods via @FilterWith(LoggerFilter.class)
*
* @author ra
*
*/
public class LoggerFilter implements Filter {

    /** Logger injected by Guice; used to record each incoming request path. */
    private final Logger logger;

    @Inject
    public LoggerFilter(Logger logger) {
        this.logger = logger;
    }

    /**
     * Logs the request path at INFO level, then continues the filter chain.
     *
     * @param chain   the remaining filter chain to invoke
     * @param context the current request context
     * @return the result produced by the rest of the chain
     */
    @Override
    public Result filter(FilterChain chain, Context context) {
        // SLF4J parameterized logging: avoids string concatenation when INFO
        // is disabled; the rendered message is identical to the concatenated form.
        logger.info("Got request from : {}", context.getRequestPath());

        return chain.next(context);
    }
}
| apache-2.0 |
mike-jumper/incubator-guacamole-client | guacamole-common/src/main/java/org/apache/guacamole/protocol/FilteredGuacamoleWriter.java | 3385 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.protocol;
import org.apache.guacamole.GuacamoleException;
import org.apache.guacamole.GuacamoleServerException;
import org.apache.guacamole.io.GuacamoleWriter;
/**
* GuacamoleWriter which applies a given GuacamoleFilter to observe or alter
* all written instructions. Instructions may also be dropped or denied by
* the filter.
*/
public class FilteredGuacamoleWriter implements GuacamoleWriter {

    /**
     * The underlying GuacamoleWriter that receives instructions which pass
     * the filter.
     */
    private final GuacamoleWriter downstream;

    /**
     * The filter consulted before each instruction is forwarded downstream.
     */
    private final GuacamoleFilter instructionFilter;

    /**
     * Parser which reassembles raw character data into instructions so they
     * can be handed to the filter.
     */
    private final GuacamoleParser parser = new GuacamoleParser();

    /**
     * Wraps the given GuacamoleWriter, applying the given filter to all written
     * instructions. Future writes will only write instructions which pass
     * the filter.
     *
     * @param writer The GuacamoleWriter to wrap.
     * @param filter The filter which dictates which instructions are written,
     *               and how.
     */
    public FilteredGuacamoleWriter(GuacamoleWriter writer, GuacamoleFilter filter) {
        this.downstream = writer;
        this.instructionFilter = filter;
    }

    @Override
    public void write(char[] chunk, int offset, int length) throws GuacamoleException {

        int pos = offset;
        int remaining = length;

        // Consume the entire chunk, emitting each complete instruction
        // through the filter as soon as the parser produces it
        while (remaining > 0) {

            // Feed as much of the remaining data into the parser as it accepts
            for (int consumed = parser.append(chunk, pos, remaining);
                     consumed != 0;
                     consumed = parser.append(chunk, pos, remaining)) {
                pos += consumed;
                remaining -= consumed;
            }

            // The parser stopped consuming without a complete instruction
            if (!parser.hasNext())
                throw new GuacamoleServerException("Filtered write() contained an incomplete instruction.");

            // Forward the parsed instruction through the filter
            writeInstruction(parser.next());

        }

    }

    @Override
    public void write(char[] chunk) throws GuacamoleException {
        write(chunk, 0, chunk.length);
    }

    @Override
    public void writeInstruction(GuacamoleInstruction instruction) throws GuacamoleException {

        // Forward only if the filter does not drop the instruction
        GuacamoleInstruction result = instructionFilter.filter(instruction);
        if (result != null)
            downstream.writeInstruction(result);

    }

}
| apache-2.0 |
trekawek/jackrabbit-oak | oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ValidNamesTest.java | 11132 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Map;
import java.util.UUID;
import javax.jcr.ItemExistsException;
import javax.jcr.NamespaceException;
import javax.jcr.Node;
import javax.jcr.PathNotFoundException;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import org.apache.jackrabbit.oak.fixture.NodeStoreFixture;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Maps;
public class ValidNamesTest extends AbstractRepositoryTest {
private static final String TEST_NODE = "test_node";
private static final String TEST_PATH = '/' + TEST_NODE;
private static final Map<NodeStoreFixture, NodeStore> STORES = Maps.newConcurrentMap();
private Repository repo;
private Session session;
private Node testNode;
private String unmappedNsPrefix;
private String testPrefix;
private String testNsUri;
private static char[] SURROGATE_PAIR = Character.toChars(0x1f4a9);
public ValidNamesTest(NodeStoreFixture fixture) {
super(fixture);
}
@Before
public void setup() throws NamespaceException, RepositoryException {
repo = createRepository(fixture);
session = repo.login(getAdminCredentials());
Node root = session.getRootNode();
testNode = root.addNode(TEST_NODE);
session.save();
StringBuilder t = new StringBuilder();
for (String prefix : session.getNamespacePrefixes()) {
int l = t.length();
if (prefix.length() > l) {
t.append((char) (prefix.charAt(l) ^ 1));
} else {
t.append('x');
}
}
unmappedNsPrefix = t.toString();
for (String p : testNode.getSession().getNamespacePrefixes()) {
if (p.length() != 0) {
String u = testNode.getSession().getNamespaceURI(p);
if (u.contains(":")) {
testPrefix = p;
testNsUri = u;
}
}
}
assertNotNull(testPrefix);
assertNotNull(testNsUri);
}
@After
public void tearDown() throws RepositoryException {
if (session != null) {
session.removeItem(TEST_PATH);
session.save();
session.logout();
}
if (repo != null) {
dispose(repo);
}
}
@AfterClass
public static void disposeStores() throws Exception {
for (Map.Entry<NodeStoreFixture, NodeStore> e : STORES.entrySet()) {
e.getKey().dispose(e.getValue());
}
STORES.clear();
}
@Test
public void testSimple() {
nameTest("foo");
}
// TODO: questionable exception
@Test
public void testDot() {
unsupportedNameTest(".", ItemExistsException.class);
}
@Test
public void testDotFoo() {
nameTest(".foo");
}
// TODO: questionable exception
@Test
public void testDotDot() {
unsupportedNameTest("..", ItemExistsException.class);
}
@Test
public void testDotDotFoo() {
nameTest("..foo");
}
@Test
public void testTrailingDot() {
nameTest("foo.");
}
// TODO: questionable exception
@Test
public void testLeadingBlank() {
unsupportedNameTest(" foo", RepositoryException.class);
}
// TODO: questionable exception
@Test
public void testTrailingBlank() {
unsupportedNameTest("foo ", RepositoryException.class);
}
// TODO: questionable exception
@Test
public void testEnclosedSlash() {
unsupportedNameTest("foo/bar", PathNotFoundException.class);
}
// TODO: questionable exception
@Test
public void testEnclosedPipe() {
unsupportedNameTest("foo|bar", PathNotFoundException.class);
}
// TODO: questionable exception
@Test
public void testEnclosedStar() {
unsupportedNameTest("foo*bar", PathNotFoundException.class);
}
// TODO: questionable exception
@Test
public void testEnclosedOpenBracket() {
unsupportedNameTest("foo[bar", PathNotFoundException.class);
}
// TODO: questionable exception
@Test
public void testEnclosedCloseBracket() {
unsupportedNameTest("foo]bar", PathNotFoundException.class);
}
// TODO: questionable exception
@Test
public void testLeadingColon() {
unsupportedNameTest(":foo", RepositoryException.class);
}
// TODO: questionable exception
@Test
public void testEnclosedUnmappedNsColon() {
unsupportedNameTest(unmappedNsPrefix + ":bar", RepositoryException.class);
}
// TODO seems to be a bug
@Test
public void testEmptyNameInCurlys() throws RepositoryException {
Node n = nameTest("{}foo");
assertEquals("foo", n.getName());
}
@Test
public void testSingleEnclosedOpenCurly() {
nameTest("foo{bar");
}
@Test
public void testSingleEnclosedCloseCurly() {
nameTest("foo}bar");
}
@Test
public void testValidLocalNameInCurlys() throws RepositoryException {
Node n = nameTest("{foo}bar");
assertEquals("{foo}bar", n.getName());
}
// TODO: questionable exception
@Test
public void testNonUriInCurlys() {
unsupportedNameTest("{/}bar", RepositoryException.class);
}
@Test
public void testValidNamespaceUriInCurlys() throws RepositoryException {
Node n = nameTest("{" + testNsUri + "}foo");
assertEquals(testPrefix + ":foo", n.getName());
}
// TODO: questionable exception
@Test
public void testValidNamespaceUriInCurlysWrongPlace() {
unsupportedNameTest("x{" + testNsUri + "}foo", RepositoryException.class);
}
// TODO: questionable exception
@Test
public void testValidNamespaceUriInCurlysNoLocalName() {
unsupportedNameTest("{" + testNsUri + "}", RepositoryException.class);
}
// TODO this should actually pass
@Test
public void testQualifiedNameWithUnmappedNsUri() {
String ns = "urn:uuid:" + UUID.randomUUID().toString();
unsupportedNameTest("{" + ns + "}foo", RepositoryException.class);
}
@Test
public void testEnclosedPercent() {
nameTest("foo%bar");
}
@Test
public void testEnclosedBlank() {
nameTest("foo bar");
}
@Test
public void testEnclosedTab() {
unsupportedNameTest("foo\tbar", RepositoryException.class);
}
@Test
public void testEnclosedLf() {
unsupportedNameTest("foo\nbar", RepositoryException.class);
}
@Test
public void testEnclosedCr() {
unsupportedNameTest("foo\rbar", RepositoryException.class);
}
@Test
public void testEnclosedNonXMLChars() {
// see OAK-7270
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("rdb"));
// see https://www.w3.org/TR/xml/#NT-Char
for (int c = 0; c < 32; c++) {
if (!(c == 0x9 || c == 0xa || c == 0xd)) {
unsupportedNameTest("foo" + ((char) c) + "bar", RepositoryException.class);
}
}
}
@Test
public void testEnclosedNonBreakingSpace() {
nameTest("foo\u00a0bar");
}
@Test
public void testEnclosedIdeographicSpace() {
nameTest("foo\u3000bar");
}
@Test
public void testUnpairedHighSurrogateEnd() {
// see OAK-5506
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("segment"));
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("rdb"));
nameTest("foo" + SURROGATE_PAIR[0]);
}
@Test
public void testUnpairedLowSurrogateStart() {
// see OAK-5506
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("segment"));
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("rdb"));
nameTest(SURROGATE_PAIR[1] + "foo");
}
@Test
public void testUnpairedSurrogateInside() {
// see OAK-5506
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("segment"));
// see OAK-7270
org.junit.Assume.assumeFalse(super.fixture.toString().toLowerCase().contains("rdb"));
nameTest("foo" + SURROGATE_PAIR[0] + "bar");
nameTest("foo" + SURROGATE_PAIR[1] + "bar");
}
@Test
public void testSurrogate() {
nameTest("foo" + new String(SURROGATE_PAIR));
}
private Node nameTest(String nodeName) {
try {
Node n = testNode.addNode(nodeName);
testNode.getSession().save();
Node p = testNode.getSession().getNode(n.getPath());
assertTrue("nodes should be the same", p.isSame(n));
assertEquals("paths should be equal", p.getPath(), n.getPath());
return p;
} catch (RepositoryException ex) {
fail(ex.getMessage());
return null;
}
}
private void unsupportedNameTest(String nodeName, Class<? extends RepositoryException> clazz) {
try {
testNode.addNode(nodeName);
testNode.getSession().save();
fail("should have failed with " + clazz);
}
catch (RepositoryException ex) {
assertTrue("should have failed with " + clazz + ", but got " + ex.getClass(), clazz.isAssignableFrom(ex.getClass()));
}
}
private Repository createRepository(NodeStoreFixture fixture) throws RepositoryException
{
NodeStore ns = null;
for (Map.Entry<NodeStoreFixture, NodeStore> e : STORES.entrySet()) {
if (e.getKey().getClass().equals(fixture.getClass())) {
ns = e.getValue();
}
}
if (ns == null) {
ns = createNodeStore(fixture);
STORES.put(fixture, ns);
}
return createRepository(ns);
}
} | apache-2.0 |
hgschmie/presto | presto-main/src/main/java/io/prestosql/connector/system/NodeSystemTable.java | 3955 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.connector.system;
import io.prestosql.metadata.AllNodes;
import io.prestosql.metadata.InternalNode;
import io.prestosql.metadata.InternalNodeManager;
import io.prestosql.metadata.NodeState;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.connector.ConnectorTableMetadata;
import io.prestosql.spi.connector.ConnectorTransactionHandle;
import io.prestosql.spi.connector.InMemoryRecordSet;
import io.prestosql.spi.connector.InMemoryRecordSet.Builder;
import io.prestosql.spi.connector.RecordCursor;
import io.prestosql.spi.connector.SchemaTableName;
import io.prestosql.spi.connector.SystemTable;
import io.prestosql.spi.predicate.TupleDomain;
import javax.inject.Inject;
import java.util.Locale;
import java.util.Set;
import static io.prestosql.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder;
import static io.prestosql.metadata.NodeState.ACTIVE;
import static io.prestosql.metadata.NodeState.INACTIVE;
import static io.prestosql.metadata.NodeState.SHUTTING_DOWN;
import static io.prestosql.spi.connector.SystemTable.Distribution.SINGLE_COORDINATOR;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType;
import static java.util.Objects.requireNonNull;
/**
 * System table exposing the cluster's nodes (runtime.nodes): identifier,
 * HTTP URI, version, coordinator flag, and current state.
 */
public class NodeSystemTable
        implements SystemTable
{
    public static final SchemaTableName NODES_TABLE_NAME = new SchemaTableName("runtime", "nodes");

    public static final ConnectorTableMetadata NODES_TABLE = tableMetadataBuilder(NODES_TABLE_NAME)
            .column("node_id", createUnboundedVarcharType())
            .column("http_uri", createUnboundedVarcharType())
            .column("node_version", createUnboundedVarcharType())
            .column("coordinator", BOOLEAN)
            .column("state", createUnboundedVarcharType())
            .build();

    private final InternalNodeManager nodeManager;

    @Inject
    public NodeSystemTable(InternalNodeManager nodeManager)
    {
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
    }

    @Override
    public Distribution getDistribution()
    {
        // Node information is only available on the coordinator.
        return SINGLE_COORDINATOR;
    }

    @Override
    public ConnectorTableMetadata getTableMetadata()
    {
        return NODES_TABLE;
    }

    @Override
    public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession session, TupleDomain<Integer> constraint)
    {
        Builder rows = InMemoryRecordSet.builder(NODES_TABLE);

        // Emit one row per node, grouped by lifecycle state.
        AllNodes allNodes = nodeManager.getAllNodes();
        appendNodes(rows, allNodes.getActiveNodes(), ACTIVE);
        appendNodes(rows, allNodes.getInactiveNodes(), INACTIVE);
        appendNodes(rows, allNodes.getShuttingDownNodes(), SHUTTING_DOWN);

        return rows.build().cursor();
    }

    /** Appends one row per node, labeled with the given state. */
    private void appendNodes(Builder rows, Set<InternalNode> nodes, NodeState state)
    {
        // State label is loop-invariant; compute it once.
        String stateLabel = state.toString().toLowerCase(Locale.ENGLISH);
        for (InternalNode node : nodes) {
            rows.addRow(
                    node.getNodeIdentifier(),
                    node.getInternalUri().toString(),
                    node.getNodeVersion().toString(),
                    nodeManager.getCoordinators().contains(node),
                    stateLabel);
        }
    }
}
| apache-2.0 |
liupugong/drools | drools-compiler/src/test/java/org/drools/compiler/integrationtests/StreamsTest.java | 34679 | /*
* Copyright 2007 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Created on Dec 14, 2007
*/
package org.drools.compiler.integrationtests;
import org.drools.compiler.CommonTestMethodBase;
import org.drools.compiler.StockTick;
import org.drools.compiler.StockTickInterface;
import org.drools.compiler.compiler.DroolsParserException;
import org.drools.core.ClockType;
import org.drools.core.SessionConfiguration;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.impl.KnowledgeBaseImpl;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.rule.EntryPointId;
import org.drools.core.spi.ObjectType;
import org.drools.core.time.impl.PseudoClockScheduler;
import org.junit.Assert;
import org.junit.Test;
import org.kie.api.KieBaseConfiguration;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.definition.type.FactType;
import org.kie.api.event.rule.AfterMatchFiredEvent;
import org.kie.api.event.rule.AgendaEventListener;
import org.kie.api.event.rule.MatchCreatedEvent;
import org.kie.api.event.rule.RuleRuntimeEventListener;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSessionConfiguration;
import org.kie.api.runtime.conf.ClockTypeOption;
import org.kie.api.runtime.rule.EntryPoint;
import org.kie.api.time.SessionClock;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import org.mockito.ArgumentCaptor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.CoreMatchers.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
/**
* Tests related to the stream support features
*/
public class StreamsTest extends CommonTestMethodBase {
private KnowledgeBase loadKnowledgeBase( final String fileName ) throws IOException,
DroolsParserException,
Exception {
return loadKnowledgeBase( fileName,
KnowledgeBaseFactory.newKnowledgeBaseConfiguration() );
}
private KnowledgeBase loadKnowledgeBase( final String fileName,
KieBaseConfiguration kconf ) throws IOException,
DroolsParserException,
Exception {
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add(ResourceFactory.newClassPathResource(fileName,
getClass()),
ResourceType.DRL);
if (kbuilder.hasErrors()) {
System.out.println( kbuilder.getErrors() );
return null;
}
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(kconf);
kbase.addKnowledgePackages(kbuilder.getKnowledgePackages());
return SerializationHelper.serializeObject(kbase);
//return kbase;
}
    /**
     * Verifies that events inserted into the default working memory do not
     * trigger the entry-point rule, while events inserted through the
     * "StockStream" entry point do.
     */
    @Test(timeout=10000)
    public void testEventAssertion() throws Exception {
        // read in the source
        KnowledgeBase kbase = loadKnowledgeBase("test_EntryPoint.drl");
        // Use a pseudo clock so event time is fully controlled by the test.
        // NOTE(review): the cast assumes KieSessionConfiguration is backed by
        // SessionConfiguration here — confirmed by the constructor above.
        KieSessionConfiguration conf = new SessionConfiguration();
        ( (SessionConfiguration) conf ).setClockType( ClockType.PSEUDO_CLOCK );
        StatefulKnowledgeSession session = kbase.newStatefulKnowledgeSession( conf,
                                                                              null );
        final List results = new ArrayList();
        session.setGlobal("results",
                          results);
        // First batch: inserted into the DEFAULT entry point — the rule reads
        // from "StockStream", so none of these may fire it.
        StockTickInterface tick1 = new StockTick(1,
                                                 "DROO",
                                                 50,
                                                 System.currentTimeMillis());
        StockTickInterface tick2 = new StockTick(2,
                                                 "ACME",
                                                 10,
                                                 System.currentTimeMillis());
        StockTickInterface tick3 = new StockTick(3,
                                                 "ACME",
                                                 10,
                                                 System.currentTimeMillis());
        StockTickInterface tick4 = new StockTick(4,
                                                 "DROO",
                                                 50,
                                                 System.currentTimeMillis());
        InternalFactHandle handle1 = (InternalFactHandle) session.insert(tick1);
        InternalFactHandle handle2 = (InternalFactHandle) session.insert(tick2);
        InternalFactHandle handle3 = (InternalFactHandle) session.insert(tick3);
        InternalFactHandle handle4 = (InternalFactHandle) session.insert(tick4);
        assertNotNull(handle1);
        assertNotNull(handle2);
        assertNotNull(handle3);
        assertNotNull(handle4);
        // StockTick is declared as an event, so every handle must be an event handle.
        assertTrue(handle1.isEvent());
        assertTrue(handle2.isEvent());
        assertTrue(handle3.isEvent());
        assertTrue(handle4.isEvent());
        session.fireAllRules();
        // Nothing fired: the rule only listens on the "StockStream" entry point.
        assertEquals(0,
                     results.size());
        // Second batch: inserted through the named entry point.
        StockTickInterface tick5 = new StockTick(5,
                                                 "DROO",
                                                 50,
                                                 System.currentTimeMillis());
        StockTickInterface tick6 = new StockTick(6,
                                                 "ACME",
                                                 10,
                                                 System.currentTimeMillis());
        StockTickInterface tick7 = new StockTick(7,
                                                 "ACME",
                                                 15,
                                                 System.currentTimeMillis());
        StockTickInterface tick8 = new StockTick(8,
                                                 "DROO",
                                                 50,
                                                 System.currentTimeMillis());
        EntryPoint entry = session.getEntryPoint("StockStream");
        InternalFactHandle handle5 = (InternalFactHandle) entry.insert(tick5);
        InternalFactHandle handle6 = (InternalFactHandle) entry.insert(tick6);
        InternalFactHandle handle7 = (InternalFactHandle) entry.insert(tick7);
        InternalFactHandle handle8 = (InternalFactHandle) entry.insert(tick8);
        assertNotNull(handle5);
        assertNotNull(handle6);
        assertNotNull(handle7);
        assertNotNull(handle8);
        assertTrue(handle5.isEvent());
        assertTrue(handle6.isEvent());
        assertTrue(handle7.isEvent());
        assertTrue(handle8.isEvent());
        session.fireAllRules();
        // Exactly one tick (tick7) matched the rule's constraints.
        assertEquals(1,
                     results.size());
        assertSame(tick7,
                   results.get(0));
    }
@Test//(timeout=10000)
public void testEntryPointReference() throws Exception {
// read in the source
KnowledgeBase kbase = loadKnowledgeBase("test_EntryPointReference.drl");
StatefulKnowledgeSession session = kbase.newStatefulKnowledgeSession();
final List<StockTick> results = new ArrayList<StockTick>();
session.setGlobal("results",
results);
StockTickInterface tick5 = new StockTick(5,
"DROO",
50,
System.currentTimeMillis());
StockTickInterface tick6 = new StockTick(6,
"ACME",
10,
System.currentTimeMillis());
StockTickInterface tick7 = new StockTick(7,
"ACME",
30,
System.currentTimeMillis());
StockTickInterface tick8 = new StockTick(8,
"DROO",
50,
System.currentTimeMillis());
EntryPoint entry = session.getEntryPoint("stream1");
InternalFactHandle handle5 = (InternalFactHandle) entry.insert(tick5);
InternalFactHandle handle6 = (InternalFactHandle) entry.insert(tick6);
InternalFactHandle handle7 = (InternalFactHandle) entry.insert(tick7);
InternalFactHandle handle8 = (InternalFactHandle) entry.insert(tick8);
assertNotNull(handle5);
assertNotNull(handle6);
assertNotNull(handle7);
assertNotNull(handle8);
assertTrue(handle5.isEvent());
assertTrue(handle6.isEvent());
assertTrue(handle7.isEvent());
assertTrue(handle8.isEvent());
session.fireAllRules();
assertEquals(1,
results.size());
assertSame(tick7,
results.get(0));
}
    /**
     * Verifies that facts inserted through an entry point can be modified and
     * retracted by rules: after firing, the accumulated results are produced,
     * unmatched facts remain in the entry point, and the default working
     * memory stays empty.
     */
    @Test(timeout=10000)
    public void testModifyRetracOnEntryPointFacts() throws Exception {
        // read in the source
        KnowledgeBase kbase = loadKnowledgeBase("test_modifyRetractEntryPoint.drl");
        StatefulKnowledgeSession session = kbase.newStatefulKnowledgeSession();
        final List<? extends Number> results = new ArrayList<Number>();
        session.setGlobal( "results",
                           results );
        StockTickInterface tick5 = new StockTick( 5,
                                                  "DROO",
                                                  50,
                                                  System.currentTimeMillis() );
        StockTickInterface tick6 = new StockTick( 6,
                                                  "ACME",
                                                  10,
                                                  System.currentTimeMillis() );
        StockTickInterface tick7 = new StockTick( 7,
                                                  "ACME",
                                                  30,
                                                  System.currentTimeMillis() );
        StockTickInterface tick8 = new StockTick( 8,
                                                  "DROO",
                                                  50,
                                                  System.currentTimeMillis() );
        // All four events are inserted through the "stream1" entry point.
        EntryPoint entry = session.getEntryPoint( "stream1" );
        InternalFactHandle handle5 = (InternalFactHandle) entry.insert( tick5 );
        InternalFactHandle handle6 = (InternalFactHandle) entry.insert( tick6 );
        InternalFactHandle handle7 = (InternalFactHandle) entry.insert( tick7 );
        InternalFactHandle handle8 = (InternalFactHandle) entry.insert( tick8 );
        assertNotNull( handle5 );
        assertNotNull( handle6 );
        assertNotNull( handle7 );
        assertNotNull( handle8 );
        assertTrue( handle5.isEvent() );
        assertTrue( handle6.isEvent() );
        assertTrue( handle7.isEvent() );
        assertTrue( handle8.isEvent() );
        session.fireAllRules();
        System.out.println(results);
        // The rules produced two numeric results: 30 then 110.
        assertEquals( 2,
                      results.size() );
        assertEquals( 30,
                      ( (Number) results.get( 0 ) ).intValue() );
        assertEquals( 110,
                      ( (Number) results.get( 1 ) ).intValue() );
        // the 3 non-matched facts continue to exist in the entry point
        assertEquals(3,
                     entry.getObjects().size());
        // but no fact was inserted into the main session
        assertEquals(0,
                     session.getObjects().size());
    }
    /**
     * Verifies that facts inserted into three different entry points can each
     * be modified by their respective rule, and that the rules fire in
     * salience order (R1, R2, R3), observed through a mocked agenda listener.
     */
    @Test //(timeout=10000)
    public void testModifyOnEntryPointFacts() throws Exception {
        // Three rules, one per entry point, each raising a matching RHT tick's
        // price from 10 to 50; salience forces the firing order R1 > R2 > R3.
        String str = "package org.drools.compiler\n" +
                     "declare StockTick\n" +
                     "        @role ( event )\n" +
                     "end\n" +
                     "rule R1 salience 100\n" +
                     "    when\n" +
                     "        $s1 : StockTick( company == 'RHT', price == 10 ) from entry-point ep1\n" +
                     "    then\n" +
                     "        StockTick s = $s1;\n" +
                     "        modify( s ) { setPrice( 50 ) };\n" +
                     "end\n" +
                     "rule R2 salience 90\n" +
                     "    when\n" +
                     "        $s1 : StockTick( company == 'RHT', price == 10 ) from entry-point ep2\n" +
                     "    then\n" +
                     "        StockTick s = $s1;\n" +
                     "        modify( s ) { setPrice( 50 ) };\n" +
                     "end\n" +
                     "rule R3 salience 80\n" +
                     "    when\n" +
                     "        $s1 : StockTick( company == 'RHT', price == 10 ) from entry-point ep3\n" +
                     "    then\n" +
                     "        StockTick s = $s1;\n" +
                     "        modify( s ) { setPrice( 50 ) };\n" +
                     "end\n";
        // read in the source
        KnowledgeBase kbase = loadKnowledgeBaseFromString( (KieBaseConfiguration)null, str );
        StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
        // Mocked listener records each rule firing for later verification.
        org.kie.api.event.rule.AgendaEventListener ael = mock(org.kie.api.event.rule.AgendaEventListener.class);
        ksession.addEventListener(ael);
        EntryPoint ep1 = ksession.getEntryPoint("ep1");
        EntryPoint ep2 = ksession.getEntryPoint("ep2");
        EntryPoint ep3 = ksession.getEntryPoint("ep3");
        // One identical matching tick per entry point.
        ep1.insert(new StockTick(1,
                                 "RHT",
                                 10,
                                 1000));
        ep2.insert(new StockTick(1,
                                 "RHT",
                                 10,
                                 1000));
        ep3.insert(new StockTick(1,
                                 "RHT",
                                 10,
                                 1000));
        int rulesFired = ksession.fireAllRules();
        assertEquals(3,
                     rulesFired);
        // Capture all firings and check they happened in salience order.
        ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
        verify(ael,
               times(3)).afterMatchFired(captor.capture());
        List<org.kie.api.event.rule.AfterMatchFiredEvent> aafe = captor.getAllValues();
        Assert.assertThat(aafe.get(0).getMatch().getRule().getName(),
                          is("R1"));
        Assert.assertThat(aafe.get(1).getMatch().getRule().getName(),
                          is("R2"));
        Assert.assertThat(aafe.get(2).getMatch().getRule().getName(),
                          is("R3"));
    }
@Test(timeout=10000)
public void testEntryPointWithAccumulateAndMVEL() throws Exception {
    // Regression check: an MVEL-dialect rule that accumulates (count) over an
    // entry-point stream must compile and fire exactly once for one insert.
    String str = "package org.drools.compiler\n" +
            "rule R1 dialect 'mvel'\n" +
            " when\n" +
            " $n : Number() from accumulate( \n" +
            " StockTick() from entry-point ep1,\n" +
            " count(1))" +
            // NOTE(review): no trailing "\n" on the previous fragment, so
            // "count(1))" and " then" land on the same DRL line. The parser
            // evidently accepts this; confirm before "fixing" the formatting.
            " then\n" +
            "end\n";
    // read in the source
    KnowledgeBase kbase = loadKnowledgeBaseFromString( (KieBaseConfiguration)null, str );
    StatefulKnowledgeSession ksession = createKnowledgeSession(kbase);
    org.kie.api.event.rule.AgendaEventListener ael = mock(org.kie.api.event.rule.AgendaEventListener.class);
    ksession.addEventListener(ael);
    EntryPoint ep1 = ksession.getEntryPoint("ep1");
    ep1.insert(new StockTick(1, "RHT", 10, 1000));
    int rulesFired = ksession.fireAllRules();
    assertEquals(1, rulesFired);
    // Verify the single firing came from R1.
    ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
    verify(ael, times(1)).afterMatchFired(captor.capture());
    List<org.kie.api.event.rule.AfterMatchFiredEvent> aafe = captor.getAllValues();
    Assert.assertThat(aafe.get(0).getMatch().getRule().getName(), is("R1"));
}
@Test(timeout=10000)
public void testGetEntryPointList() throws Exception {
    // getEntryPoints() must enumerate the default entry point plus the three
    // streams declared in test_EntryPointReference.drl.
    KnowledgeBase kbase = loadKnowledgeBase("test_EntryPointReference.drl");
    StatefulKnowledgeSession session = kbase.newStatefulKnowledgeSession();
    EntryPoint defaultEntryPoint = session.getEntryPoint(EntryPointId.DEFAULT.getEntryPointId());
    EntryPoint stream1 = session.getEntryPoint("stream1");
    EntryPoint stream2 = session.getEntryPoint("stream2");
    EntryPoint stream3 = session.getEntryPoint("stream3");
    Collection<? extends EntryPoint> entryPoints = session.getEntryPoints();
    assertEquals(4, entryPoints.size());
    assertTrue(entryPoints.contains(defaultEntryPoint));
    assertTrue(entryPoints.contains(stream1));
    assertTrue(entryPoints.contains(stream2));
    assertTrue(entryPoints.contains(stream3));
}
@Test(timeout=10000)
public void testEventDoesNotExpireIfNotInPattern() throws Exception {
    // STREAM mode with a pseudo clock so event expiration can be driven
    // deterministically from the test.
    KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kconf.setOption(EventProcessingOption.STREAM);
    KnowledgeBase kbase = loadKnowledgeBase("test_EventExpiration.drl", kconf);
    KieSessionConfiguration ksessionConfig = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
    ksessionConfig.setOption(ClockTypeOption.get("pseudo"));
    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession(ksessionConfig, null);
    RuleRuntimeEventListener wml = mock(RuleRuntimeEventListener.class);
    ksession.addEventListener(wml);
    PseudoClockScheduler clock = (PseudoClockScheduler) ksession.<SessionClock> getSessionClock();
    final StockTickInterface st1 = new StockTick(1, "RHT", 100, 1000);
    final StockTickInterface st2 = new StockTick(2, "RHT", 100, 1000);
    ksession.insert(st1);
    ksession.insert(st2);
    verify(wml, times(2)).objectInserted(any(org.kie.api.event.rule.ObjectInsertedEvent.class));
    // Both events are present before the clock moves.
    assertThat(ksession.getObjects().size(), equalTo(2));
    assertThat((Collection<Object>) ksession.getObjects(), hasItems((Object) st1, st2));
    ksession.fireAllRules();
    // Advance past the expiration window defined in test_EventExpiration.drl.
    clock.advanceTime(3, TimeUnit.SECONDS);
    ksession.fireAllRules();
    // NOTE(review): despite the method name, the assertion expects both
    // events to be expired and removed after 3s; the exact semantics depend
    // on the DRL file (not visible here) — confirm against it.
    assertThat(ksession.getObjects().size(), equalTo(0));
}
@Test(timeout=10000)
public void testEventExpirationSetToZero() throws Exception {
    // Per the test name, the DRL sets the event expiration to zero
    // (see test_EventExpirationSetToZero.drl — confirm there): events must
    // still activate rules, and then disappear once the clock advances.
    KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kconf.setOption(EventProcessingOption.STREAM);
    KnowledgeBase kbase = loadKnowledgeBase("test_EventExpirationSetToZero.drl", kconf);
    KieSessionConfiguration ksessionConfig = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
    ksessionConfig.setOption(ClockTypeOption.get("pseudo"));
    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession(ksessionConfig, null);
    RuleRuntimeEventListener wml = mock(RuleRuntimeEventListener.class);
    ksession.addEventListener(wml);
    AgendaEventListener ael = mock(AgendaEventListener.class);
    ksession.addEventListener(ael);
    PseudoClockScheduler clock = (PseudoClockScheduler) ksession.<SessionClock> getSessionClock();
    final StockTickInterface st1 = new StockTick(1, "RHT", 100, 1000);
    final StockTickInterface st2 = new StockTick(2, "RHT", 100, 1000);
    ksession.insert(st1);
    ksession.insert(st2);
    // Both events must fire a rule and both inserts must be observed.
    assertThat(ksession.fireAllRules(), equalTo(2));
    verify(wml, times(2)).objectInserted(any(org.kie.api.event.rule.ObjectInsertedEvent.class));
    verify(ael, times(2)).matchCreated(any(MatchCreatedEvent.class));
    assertThat(ksession.getObjects().size(), equalTo(2));
    assertThat((Collection<Object>) ksession.getObjects(), hasItems((Object) st1, st2));
    // After the clock moves, the zero-expiration events are removed.
    clock.advanceTime(3, TimeUnit.SECONDS);
    ksession.fireAllRules();
    assertThat(ksession.getObjects().size(), equalTo(0));
}
@Test(timeout=10000)
public void testEventExpirationValue() throws Exception {
    // Two packages both declare StockTick as an event with no explicit
    // @expires. Building a single kbase from both must leave the
    // ObjectTypeNode's expiration offset at -1 (presumably "never expires" —
    // confirm against ObjectTypeNode's documentation).
    String drl1 = "package org.drools.pkg1\n" +
            "import org.drools.compiler.StockTick\n" +
            "declare StockTick\n" +
            " @role(event)\n" +
            "end\n" +
            "rule X\n" +
            "when\n" +
            " StockTick()\n" +
            "then\n" +
            "end\n";
    String drl2 = "package org.drools.pkg2\n" +
            "import org.drools.compiler.StockTick\n" +
            "declare StockTick\n" +
            " @role(event)\n" +
            "end\n" +
            "rule X\n" +
            "when\n" +
            " StockTick()\n" +
            "then\n" +
            "end\n";
    KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
    kbuilder.add(ResourceFactory.newByteArrayResource(drl1.getBytes()), ResourceType.DRL);
    kbuilder.add(ResourceFactory.newByteArrayResource(drl2.getBytes()), ResourceType.DRL);
    assertFalse(kbuilder.getErrors().toString(), kbuilder.hasErrors());
    KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kconf.setOption( EventProcessingOption.STREAM );
    KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase( kconf );
    kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
    // Inspect the Rete network directly: every ObjectTypeNode whose type
    // covers StockTick must report the default expiration offset.
    List<ObjectTypeNode> otns = ( (KnowledgeBaseImpl) kbase ).getRete().getObjectTypeNodes();
    ObjectType stot = new ClassObjectType( StockTick.class );
    for (ObjectTypeNode otn : otns) {
        if (otn.getObjectType().isAssignableFrom( stot )) {
            assertEquals( -1, otn.getExpirationOffset() );
        }
    }
}
@Test(timeout=10000)
public void testDeclaredEntryPoint() {
    // An explicitly declared entry point must exist on the session even if no
    // rule references it, exactly like the entry point a rule actually uses.
    final String drl = "package org.jboss.qa.brms.declaredep\n" +
            "declare entry-point UnusedEntryPoint\n" +
            "end\n" +
            "rule HelloWorld\n" +
            " when\n" +
            " String( ) from entry-point UsedEntryPoint\n" +
            " then\n" +
            " // consequences\n" +
            "end\n";
    KnowledgeBase knowledgeBase = loadKnowledgeBaseFromString((KieBaseConfiguration) null, drl);
    StatefulKnowledgeSession session = knowledgeBase.newStatefulKnowledgeSession();
    assertNotNull(session.getEntryPoint("UsedEntryPoint"));
    assertNotNull(session.getEntryPoint("UnusedEntryPoint"));
    session.dispose();
}
// Added the missing @Test annotation: without it JUnit 4 never runs this
// method, so the window-declaration coverage was silently skipped. The
// timeout matches every other test in this class. (If the test was disabled
// on purpose, prefer @Ignore with a reason over dropping the annotation.)
@Test(timeout=10000)
public void testWindowDeclaration() throws Exception {
    // A declared window over an entry point, consumed via "from window" inside
    // an accumulate: only RHT ticks on the "ticks" entry point, within the
    // last 5 events, with price > 20 may contribute to the sum/count.
    String drl = "package org.drools.compiler\n" +
            "declare StockTick\n" +
            " @role(event)\n" +
            "end\n" +
            "declare window RedHatTicks\n" +
            " StockTick( company == 'RHT' )\n" +
            " over window:length(5)\n" +
            " from entry-point ticks\n" +
            "end\n" +
            "rule X\n" +
            "when\n" +
            " accumulate( $s : StockTick( price > 20 ) from window RedHatTicks,\n" +
            " $sum : sum( $s.getPrice() ),\n" +
            " $cnt : count( $s ) )\n" +
            "then\n" +
            "end\n";
    KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kconf.setOption(EventProcessingOption.STREAM);
    KnowledgeBase kbase = loadKnowledgeBaseFromString(kconf, drl);
    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
    AgendaEventListener ael = mock(AgendaEventListener.class);
    ksession.addEventListener(ael);
    EntryPoint ep = ksession.getEntryPoint("ticks");
    ep.insert(new StockTick(1, "ACME", 20, 1000)); // not in the window
    ep.insert(new StockTick(2, "RHT", 20, 1000)); // not > 20
    ep.insert(new StockTick(3, "RHT", 30, 1000));
    ep.insert(new StockTick(4, "ACME", 30, 1000)); // not in the window
    ep.insert(new StockTick(5, "RHT", 25, 1000));
    ep.insert(new StockTick(6, "ACME", 10, 1000)); // not in the window
    ep.insert(new StockTick(7, "RHT", 10, 1000)); // not > 20
    ep.insert(new StockTick(8, "RHT", 40, 1000));
    ksession.fireAllRules();
    // Matching ticks: 30 + 25 + 40 = 95 across 3 events.
    ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
    verify(ael, times(1)).afterMatchFired(captor.capture());
    AfterMatchFiredEvent aafe = captor.getValue();
    Assert.assertThat(((Number) aafe.getMatch().getDeclarationValue("$sum")).intValue(), is(95));
    Assert.assertThat(((Number) aafe.getMatch().getDeclarationValue("$cnt")).intValue(), is(3));
}
@Test(timeout=10000)
public void testWindowDeclaration2() throws Exception {
    // Same idea as testWindowDeclaration but with a boxed-primitive event
    // type (Double) and an accumulate sum over a declared window.
    String drl = "package org.drools.compiler\n" +
            "declare Double\n" +
            " @role(event)\n" +
            "end\n" +
            "declare window Streem\n" +
            " Double() over window:length( 10 ) from entry-point data\n" +
            "end\n" +
            "rule \"See\"\n" +
            "when\n" +
            " $sum : Double() from accumulate (\n" +
            " $d: Double()\n" +
            " from window Streem,\n" +
            " sum( $d )\n" +
            " )\n" +
            "then\n" +
            "end";
    KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kconf.setOption(EventProcessingOption.STREAM);
    KnowledgeBase kbase = loadKnowledgeBaseFromString(kconf, drl);
    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
    AgendaEventListener ael = mock(AgendaEventListener.class);
    ksession.addEventListener(ael);
    EntryPoint ep = ksession.getEntryPoint("data");
    ep.insert(Double.valueOf( 10 ));
    ep.insert(Double.valueOf( 11 ));
    ep.insert(Double.valueOf( 12 ));
    ksession.fireAllRules();
    // 10 + 11 + 12 = 33, bound to $sum in the single expected firing.
    ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
    verify(ael, times(1)).afterMatchFired(captor.capture());
    AfterMatchFiredEvent aafe = captor.getValue();
    Assert.assertThat(((Number) aafe.getMatch().getDeclarationValue("$sum")).intValue(), is(33));
}
@Test (timeout=10000)
public void testMultipleWindows() throws Exception {
    // Two independent length-1 sliding windows in one rule: the rule must
    // join the most recent RHT tick with the most recent JBW tick.
    String drl = "package org.drools.compiler\n" +
            "declare StockTick\n" +
            " @role(event)\n" +
            "end\n" +
            "rule FaultsCoincide\n" +
            "when\n" +
            " f1 : StockTick( company == \"RHT\" ) over window:length( 1 )\n" +
            " f2 : StockTick( company == \"JBW\" ) over window:length( 1 )\n" +
            "then\n" +
            "end";
    KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kconf.setOption(EventProcessingOption.STREAM);
    KnowledgeBase kbase = loadKnowledgeBaseFromString(kconf, drl);
    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
    AgendaEventListener ael = mock(AgendaEventListener.class);
    ksession.addEventListener(ael);
    StockTick st1 = new StockTick(1, "RHT", 10, 1000);
    ksession.insert( st1 );
    StockTick st2 = new StockTick(2, "JBW", 10, 1000);
    ksession.insert( st2 );
    ksession.fireAllRules();
    // Exactly one firing, with each window binding its own tick.
    ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
    verify(ael, times(1)).afterMatchFired(captor.capture());
    AfterMatchFiredEvent aafe = captor.getValue();
    Assert.assertThat( (StockTick) aafe.getMatch().getDeclarationValue("f1"), is(st1));
    Assert.assertThat( (StockTick) aafe.getMatch().getDeclarationValue("f2"), is(st2));
}
@Test(timeout=10000)
public void testWindowWithEntryPointCompilationError() {
    // Declaring a time window over a type that is NOT an event (Cheese has no
    // @role(event)) must be rejected at build time.
    // NOTE(review): str.getBytes() uses the platform default charset; the DRL
    // here is pure ASCII so it is harmless, but StandardCharsets.UTF_8 would
    // be safer if the file's imports allow it.
    String drl = "import org.drools.compiler.Cheese;\n" +
            "declare window X\n" +
            " Cheese( type == \"gorgonzola\" ) over window:time(1m) from entry-point Z\n" +
            "end\n" +
            "rule R when\n" +
            " $c : Cheese( price < 100 ) from window X\n" +
            "then\n" +
            " System.out.println($c);\n" +
            "end\n";
    KnowledgeBuilder builder = KnowledgeBuilderFactory.newKnowledgeBuilder();
    builder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL);
    assertTrue("Should have raised a compilation error as Cheese is not declared as an event.",
            builder.hasErrors());
}
@Test(timeout=10000)
public void testAtomicActivationFiring() throws Exception {
    // JBRULES-3383: a timer rule retracting both the Monitor and its wrapped
    // Event must fire atomically; the "halt" rule (lowest salience) stops
    // fireUntilHalt once no Event remains, so the test terminates only if
    // the retraction actually happens.
    String str = "package org.drools.compiler.test\n" +
            "declare Event\n" +
            " @role(event)\n" +
            " name : String\n" +
            "end\n" +
            "declare Monitor\n" +
            " @role(event)\n" +
            " event : Event\n" +
            " name : String\n" +
            "end\n" +
            "\n" +
            "rule \"start monitoring\"\n" +
            "when\n" +
            " $e : Event( $in : name )\n" +
            " not Monitor( name == $in )\n" +
            "then\n" +
            " Monitor m = new Monitor( $e, $in );\n" +
            " insert( m );\n" +
            "end\n" +
            "\n" +
            "rule \"stop monitoring\"\n" +
            "timer( int: 1s )\n" +
            "when\n" +
            " $m : Monitor( $in : name )\n" +
            " $e : Event( name == $in )\n" +
            "then\n" +
            " retract( $m );\n" +
            " retract( $m.getEvent() );\n" +
            "end\n" +
            "rule \"halt\"\n" +
            "salience -1\n" +
            "when\n" +
            " not Event( )\n" +
            "then\n" +
            " drools.halt();\n" +
            "end\n";
    KieBaseConfiguration kBaseConfig = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
    kBaseConfig.setOption(EventProcessingOption.STREAM);
    KnowledgeBase kbase = loadKnowledgeBaseFromString(kBaseConfig, str);
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase );
    ksession.addEventListener(new org.kie.api.event.rule.DebugAgendaEventListener());
    // Events are declared types, so instances are created reflectively.
    FactType eventType = kbase.getFactType("org.drools.compiler.test", "Event");
    Object event = eventType.newInstance();
    eventType.set(event, "name", "myName");
    ksession.insert( event );
    // Blocks until the "halt" rule fires; the 10s @Test timeout is the guard
    // against a regression that keeps the Event alive forever.
    ksession.fireUntilHalt();
}
}
| apache-2.0 |
rpgoncalves/sw-comprehension | documents/assignment1/weka-3-7-13/weka-src/src/test/java/weka/classifiers/pmml/consumer/AbstractPMMLClassifierTest.java | 3112 | package weka.classifiers.pmml.consumer;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import junit.framework.TestCase;
import weka.classifiers.evaluation.EvaluationUtils;
import weka.classifiers.evaluation.Prediction;
import weka.core.Attribute;
import weka.core.Instances;
import weka.core.pmml.PMMLFactory;
import weka.core.pmml.PMMLModel;
/**
 * Base class for PMML-consumer regression tests. Subclasses populate
 * {@link #m_modelNames} and {@link #m_dataSetNames} (index-aligned) and the
 * inherited {@link #testRegression()} compares the classifier's predictions
 * against the stored regression reference.
 */
public abstract class AbstractPMMLClassifierTest extends TestCase {

  // Resource names (under weka/classifiers/pmml/data/) of PMML models to test.
  protected ArrayList<String> m_modelNames = new ArrayList<String>();
  // Resource names of the matching test data sets, aligned with m_modelNames.
  protected ArrayList<String> m_dataSetNames = new ArrayList<String>();

  public AbstractPMMLClassifierTest(String name) {
    super(name);
  }

  /**
   * Loads a test data set from the classpath.
   *
   * @param name resource file name under weka/classifiers/pmml/data/
   * @return the loaded instances, or null if loading failed (the error is
   *         printed; the caller's regression run will then fail)
   */
  public Instances getData(String name) {
    Instances data = null;
    try {
      // NOTE(review): InputStreamReader uses the platform default charset
      // here; fine for the ASCII test resources, but worth confirming.
      data = new Instances(new BufferedReader(new InputStreamReader(
          ClassLoader.getSystemResourceAsStream("weka/classifiers/pmml/data/"
              + name))));
    } catch (Exception ex) {
      ex.printStackTrace();
    }
    return data;
  }

  /**
   * Loads a PMML model from the classpath and casts it to a classifier.
   *
   * @param name resource file name under weka/classifiers/pmml/data/
   * @return the classifier, or null if loading failed
   */
  public PMMLClassifier getClassifier(String name) {
    PMMLClassifier classifier = null;
    try {
      PMMLModel model = PMMLFactory.getPMMLModel(new BufferedInputStream(
          ClassLoader.getSystemResourceAsStream("weka/classifiers/pmml/data/"
              + name)));
      classifier = (PMMLClassifier) model;
    } catch (Exception ex) {
      ex.printStackTrace();
    }
    return classifier;
  }

  /**
   * Runs every configured model against its data set, records the predictions
   * through weka's regression harness, and fails on any diff against the
   * stored reference output.
   */
  public void testRegression() throws Exception {
    PMMLClassifier classifier = null;
    Instances testData = null;
    EvaluationUtils evalUtils = null;
    weka.test.Regression reg = new weka.test.Regression(this.getClass());
    ArrayList<Prediction> predictions = null;
    boolean success = false;
    for (int i = 0; i < m_modelNames.size(); i++) {
      classifier = getClassifier(m_modelNames.get(i));
      testData = getData(m_dataSetNames.get(i));
      evalUtils = new EvaluationUtils();
      try {
        // The mining schema defines which attribute is the class.
        String className = classifier.getMiningSchema().getFieldsAsInstances()
            .classAttribute().name();
        Attribute classAtt = testData.attribute(className);
        testData.setClass(classAtt);
        predictions = evalUtils.getTestPredictions(classifier, testData);
        success = true;
        String predsString = weka.classifiers.AbstractClassifierTest
            .predictionsToString(predictions);
        reg.println(predsString);
      } catch (Exception ex) {
        ex.printStackTrace();
        // FIX: getMessage() may be null (e.g. for a NullPointerException);
        // the old unguarded toLowerCase() threw an NPE that masked the real
        // failure instead of letting the test report it.
        String msg = (ex.getMessage() == null) ? "" : ex.getMessage().toLowerCase();
        if (msg.indexOf("not in classpath") > -1) {
          // Optional dependency missing — skip the whole test silently.
          return;
        }
      }
    }
    if (!success) {
      fail("Problem during regression testing: no successful predictions generated");
    }
    try {
      String diff = reg.diff();
      if (diff == null) {
        System.err.println("Warning: No reference available, creating.");
      } else if (!diff.equals("")) {
        fail("Regression test failed. Difference:\n" + diff);
      }
    } catch (java.io.IOException ex) {
      fail("Problem during regression testing.\n" + ex);
    }
  }
}
| apache-2.0 |
da-baumann/dev | resources/lib/core/test/src/com/google/zxing/oned/EAN13BlackBox1TestCase.java | 1064 | /*
* Copyright 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.oned;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.common.AbstractBlackBoxTestCase;
/**
 * Black-box decoding test over the EAN-13 sample images in
 * test/data/blackbox/ean13-1, at 0 and 180 degrees of rotation.
 *
 * @author Sean Owen
 */
public final class EAN13BlackBox1TestCase extends AbstractBlackBoxTestCase {
  public EAN13BlackBox1TestCase() {
    super("test/data/blackbox/ean13-1", new MultiFormatReader(), BarcodeFormat.EAN_13);
    // Per addTest(...) call: expected pass counts (plain / try-harder) for a
    // given rotation. NOTE(review): parameter semantics inferred from
    // AbstractBlackBoxTestCase usage — confirm against that class.
    addTest(29, 32, 0.0f);
    addTest(28, 32, 180.0f);
  }
}
nvoron23/presto | presto-base-jdbc/src/main/java/com/facebook/presto/plugin/jdbc/JdbcModule.java | 1554 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.Scopes;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Guice module wiring every service of the base JDBC connector as a
 * singleton, keyed by the connector id supplied at construction time.
 */
public class JdbcModule
        implements Module
{
    private final String connectorId;

    public JdbcModule(String connectorId)
    {
        this.connectorId = checkNotNull(connectorId, "connector id is null");
    }

    @Override
    public void configure(Binder binder)
    {
        // The connector id is a pre-built value, not a managed singleton.
        binder.bind(JdbcConnectorId.class).toInstance(new JdbcConnectorId(connectorId));
        bindSingleton(binder, JdbcMetadata.class);
        bindSingleton(binder, JdbcSplitManager.class);
        bindSingleton(binder, JdbcRecordSetProvider.class);
        bindSingleton(binder, JdbcHandleResolver.class);
        bindSingleton(binder, JdbcRecordSinkProvider.class);
        bindSingleton(binder, JdbcConnector.class);
    }

    // Small helper so each service binding reads as one line.
    private static void bindSingleton(Binder binder, Class<?> type)
    {
        binder.bind(type).in(Scopes.SINGLETON);
    }
}
| apache-2.0 |
smmribeiro/intellij-community | platform/platform-util-netty/src/org/jetbrains/io/MessageDecoder.java | 4424 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.io;
import com.intellij.util.text.CharSequenceBackedByChars;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
public abstract class MessageDecoder extends Decoder {
protected int contentLength;
protected final StringBuilder builder = new StringBuilder(64);
private CharBuffer chunkedContent;
private int consumedContentByteCount = 0;
protected final int parseContentLength() {
return parseInt(builder, 0, false, 10);
}
protected final @Nullable CharSequence readChars(@NotNull ByteBuf input) {
int readableBytes = input.readableBytes();
if (readableBytes == 0) {
input.release();
return null;
}
int required = contentLength - consumedContentByteCount;
if (readableBytes < required) {
if (chunkedContent == null) {
chunkedContent = CharBuffer.allocate(contentLength);
}
BufferToCharsKt.readIntoCharBuffer(input, readableBytes, chunkedContent);
consumedContentByteCount += readableBytes;
input.release();
return null;
}
else {
CharBuffer charBuffer = chunkedContent;
CharSequence result;
if (charBuffer != null) {
chunkedContent = null;
consumedContentByteCount = 0;
BufferToCharsKt.readIntoCharBuffer(input, required, charBuffer);
result = new CharSequenceBackedByChars(charBuffer);
}
else {
result = input.toString(input.readerIndex(), required, StandardCharsets.UTF_8);
}
input.readerIndex(input.readerIndex() + required);
return result;
}
}
@Override
public void channelInactive(ChannelHandlerContext context) throws Exception {
try {
chunkedContent = null;
}
finally {
super.channelInactive(context);
}
}
public static boolean readUntil(char what, @NotNull ByteBuf buffer, @NotNull StringBuilder builder) {
int i = buffer.readerIndex();
for (int n = buffer.writerIndex(); i < n; i++) {
char c = (char)buffer.getByte(i);
if (c == what) {
buffer.readerIndex(i + 1);
return true;
}
else {
builder.append(c);
}
}
buffer.readerIndex(i);
return false;
}
public static void skipWhitespace(@NotNull ByteBuf buffer) {
int i = buffer.readerIndex();
int n = buffer.writerIndex();
for (; i < n; i++) {
char c = (char)buffer.getByte(i);
if (c != ' ') {
buffer.readerIndex(i);
return;
}
}
buffer.readerIndex(n);
}
/**
* Javolution - Java(TM) Solution for Real-Time and Embedded Systems
* Copyright (C) 2006 - Javolution (http://javolution.org/)
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software is
* freely granted, provided that this notice is preserved.
*/
public static int parseInt(@NotNull CharSequence value, int start, boolean isNegative, int radix) {
final int end = value.length();
int result = 0; // Accumulates negatively (avoid MIN_VALUE overflow).
int i = start;
for (; i < end; i++) {
char c = value.charAt(i);
int digit = (c <= '9') ? c - '0'
: ((c <= 'Z') && (c >= 'A')) ? c - 'A' + 10
: ((c <= 'z') && (c >= 'a')) ? c - 'a' + 10 : -1;
if ((digit >= 0) && (digit < radix)) {
int newResult = result * radix - digit;
if (newResult > result) {
throw new NumberFormatException("Overflow parsing " + value.subSequence(start, end));
}
result = newResult;
}
else {
break;
}
}
// Requires one valid digit character and checks for opposite overflow.
if ((result == 0) && ((end == 0) || (value.charAt(i - 1) != '0'))) {
throw new NumberFormatException("Invalid integer representation for " + value.subSequence(start, end));
}
if ((result == Integer.MIN_VALUE) && !isNegative) {
throw new NumberFormatException("Overflow parsing " + value.subSequence(start, end));
}
return isNegative ? result : -result;
}
} | apache-2.0 |
eugene-chow/keycloak | services/src/main/java/org/keycloak/authentication/authenticators/resetcred/ResetOTP.java | 1533 | package org.keycloak.authentication.authenticators.resetcred;
import org.keycloak.authentication.AuthenticationFlowContext;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserCredentialModel;
import org.keycloak.models.UserModel;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
/**
 * Credential-reset authenticator that queues the Configure OTP required
 * action during a reset-credentials flow.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class ResetOTP extends AbstractSetRequiredActionAuthenticator {

    public static final String PROVIDER_ID = "reset-otp";

    @Override
    public void authenticate(AuthenticationFlowContext context) {
        if (shouldSetRequiredAction(context)) {
            context.getClientSession().addRequiredAction(UserModel.RequiredAction.CONFIGURE_TOTP);
        }
        context.success();
    }

    // REQUIRED executions always queue the action; OPTIONAL ones only when the
    // user already has an OTP credential configured. configuredFor() is only
    // evaluated in the OPTIONAL case, preserving the original short-circuit.
    private boolean shouldSetRequiredAction(AuthenticationFlowContext context) {
        if (context.getExecution().isRequired()) {
            return true;
        }
        return context.getExecution().isOptional() && configuredFor(context);
    }

    // True when the user already has a credential of the realm's OTP type.
    protected boolean configuredFor(AuthenticationFlowContext context) {
        return context.getSession().users().configuredForCredentialType(context.getRealm().getOTPPolicy().getType(), context.getRealm(), context.getUser());
    }

    @Override
    public String getDisplayType() {
        return "Reset OTP";
    }

    @Override
    public String getHelpText() {
        return "Sets the Configure OTP required action if execution is REQUIRED. Will also set it if execution is OPTIONAL and the OTP is currently configured for it.";
    }

    @Override
    public String getId() {
        return PROVIDER_ID;
    }
}
| apache-2.0 |
consulo/consulo-properties | src/main/java/com/intellij/lang/properties/editor/PropertiesGroupingStructureViewModel.java | 879 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.properties.editor;
import com.intellij.ide.structureView.StructureViewModel;
/**
 * Structure view model for properties files that groups property keys into
 * nested nodes by a configurable separator (e.g. "." in "a.b.c").
 *
 * @author max
 */
public interface PropertiesGroupingStructureViewModel extends StructureViewModel {
  /** Sets the key separator used to group properties into nested nodes. */
  void setSeparator(String separator);

  /** Returns the key separator currently used for grouping. */
  String getSeparator();
}
| apache-2.0 |
Darsstar/framework | compatibility-client/src/main/java/com/vaadin/v7/client/widget/grid/events/ColumnResizeHandler.java | 1298 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.v7.client.widget.grid.events;
import com.google.gwt.event.shared.EventHandler;
/**
 * Handler for a Grid column resize event, called when the Grid's columns has
 * been resized.
 *
 * @param <T>
 *            The row type of the grid. The row type is the POJO type from where
 *            the data is retrieved into the column cells.
 * @since 7.6
 * @author Vaadin Ltd
 */
public interface ColumnResizeHandler<T> extends EventHandler {

    /**
     * A column resize event, fired by Grid when the columns of the Grid have
     * been resized.
     *
     * @param event
     *            column resize event carrying the resized column and grid
     */
    public void onColumnResize(ColumnResizeEvent<T> event);
}
| apache-2.0 |
prepilef/DC-UFSCar-ES2-201701-Grupo595136 | src/main/java/org/jabref/logic/importer/fetcher/MedlineFetcher.java | 9015 | package org.jabref.logic.importer.fetcher;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.jabref.logic.formatter.bibtexfields.ClearFormatter;
import org.jabref.logic.formatter.bibtexfields.NormalizeMonthFormatter;
import org.jabref.logic.help.HelpFile;
import org.jabref.logic.importer.FetcherException;
import org.jabref.logic.importer.IdBasedParserFetcher;
import org.jabref.logic.importer.Parser;
import org.jabref.logic.importer.ParserResult;
import org.jabref.logic.importer.SearchBasedFetcher;
import org.jabref.logic.importer.fileformat.MedlineImporter;
import org.jabref.logic.l10n.Localization;
import org.jabref.model.cleanup.FieldFormatterCleanup;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.FieldName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.client.utils.URIBuilder;
/**
* Fetch or search from PubMed <a href="http://www.ncbi.nlm.nih.gov/sites/entrez/">www.ncbi.nlm.nih.gov</a>
* The MedlineFetcher fetches the entries from the PubMed database.
* See <a href="http://help.jabref.org/en/MedlineRIS">help.jabref.org</a> for a detailed documentation of the available fields.
*/
public class MedlineFetcher implements IdBasedParserFetcher, SearchBasedFetcher {
private static final Log LOGGER = LogFactory.getLog(MedlineFetcher.class);
private static final int NUMBER_TO_FETCH = 50;
private static final String ID_URL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi";
private static final String SEARCH_URL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi";
private int numberOfResultsFound;
/**
 * Replaces all commas in a given string with " AND ", turning a
 * comma-separated query into a PubMed boolean AND query.
 *
 * @param query input to remove commas
 * @return input without commas
 */
// Package-private (was private) so the pure string transformation is unit
// testable. String.replace performs the same literal substitution as the
// previous replaceAll without compiling a regex; ", " is handled first so
// "a, b" becomes "a AND b" rather than "a AND  b".
static String replaceCommaWithAND(String query) {
    return query.replace(", ", " AND ").replace(",", " AND ");
}
/**
 * When using 'esearch.fcgi?db=&lt;database&gt;&amp;term=&lt;query&gt;' we will get a list of IDs matching the query.
 * Input: Any text query (&amp;term)
 * Output: List of UIDs matching the query
 *
 * <p>Also records the total hit count reported by the server in
 * {@link #numberOfResultsFound} (the first &lt;Count&gt; element).</p>
 *
 * @see <a href="https://www.ncbi.nlm.nih.gov/books/NBK25500/">www.ncbi.nlm.nih.gov/books/NBK25500/</a>
 */
private List<String> getPubMedIdsFromQuery(String query) throws FetcherException {
    boolean fetchIDs = false;
    boolean firstOccurrenceOfCount = false;
    List<String> idList = new ArrayList<>();
    XMLStreamReader streamReader = null;
    try {
        URL ncbi = createSearchUrl(query);
        XMLInputFactory inputFactory = XMLInputFactory.newFactory();
        streamReader = inputFactory.createXMLStreamReader(ncbi.openStream());
        fetchLoop: while (streamReader.hasNext()) {
            int event = streamReader.getEventType();
            switch (event) {
            case XMLStreamConstants.START_ELEMENT:
                if (streamReader.getName().toString().equals("Count")) {
                    firstOccurrenceOfCount = true;
                }
                if (streamReader.getName().toString().equals("IdList")) {
                    fetchIDs = true;
                }
                break;
            case XMLStreamConstants.CHARACTERS:
                // The first <Count> is the total number of hits; later
                // <Count> elements (per-translation counts) are ignored.
                if (firstOccurrenceOfCount) {
                    numberOfResultsFound = Integer.parseInt(streamReader.getText());
                    firstOccurrenceOfCount = false;
                }
                if (fetchIDs) {
                    idList.add(streamReader.getText());
                }
                break;
            case XMLStreamConstants.END_ELEMENT:
                //Everything relevant is listed before the IdList. So we break the loop right after the IdList tag closes.
                if (streamReader.getName().toString().equals("IdList")) {
                    break fetchLoop;
                }
            }
            streamReader.next();
        }
        return idList;
    } catch (IOException | URISyntaxException e) {
        throw new FetcherException("Unable to get PubMed IDs", Localization.lang("Unable to get PubMed IDs"), e);
    } catch (XMLStreamException e) {
        throw new FetcherException("Error while parsing ID list", Localization.lang("Error while parsing ID list"),
                e);
    } finally {
        // FIX: the reader was previously closed only on the success path and
        // leaked whenever parsing or the HTTP read failed.
        if (streamReader != null) {
            try {
                streamReader.close();
            } catch (XMLStreamException ignored) {
                // Best-effort cleanup; the primary exception (if any) wins.
            }
        }
    }
}
@Override
public String getName() {
return "Medline/PubMed";
}
@Override
public HelpFile getHelpPage() {
return HelpFile.FETCHER_MEDLINE;
}
@Override
public URL getURLForID(String identifier) throws URISyntaxException, MalformedURLException, FetcherException {
URIBuilder uriBuilder = new URIBuilder(ID_URL);
uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("retmode", "xml");
uriBuilder.addParameter("id", identifier);
return uriBuilder.build().toURL();
}
@Override
public Parser getParser() {
return new MedlineImporter();
}
@Override
public void doPostCleanup(BibEntry entry) {
new FieldFormatterCleanup("journal-abbreviation", new ClearFormatter()).cleanup(entry);
new FieldFormatterCleanup("status", new ClearFormatter()).cleanup(entry);
new FieldFormatterCleanup("copyright", new ClearFormatter()).cleanup(entry);
new FieldFormatterCleanup(FieldName.MONTH, new NormalizeMonthFormatter()).cleanup(entry);
}
@Override
public List<BibEntry> performSearch(String query) throws FetcherException {
List<BibEntry> entryList = new LinkedList<>();
if (query.isEmpty()) {
return Collections.emptyList();
} else {
String searchTerm = replaceCommaWithAND(query);
//searching for pubmed ids matching the query
List<String> idList = getPubMedIdsFromQuery(searchTerm);
if (idList.isEmpty()) {
LOGGER.info("No results found.");
return Collections.emptyList();
}
if (numberOfResultsFound > NUMBER_TO_FETCH) {
LOGGER.info(
numberOfResultsFound + " results found. Only 50 relevant results will be fetched by default.");
}
//pass the list of ids to fetchMedline to download them. like a id fetcher for mutliple ids
entryList = fetchMedline(idList);
return entryList;
}
}
private URL createSearchUrl(String term) throws URISyntaxException, MalformedURLException {
term = replaceCommaWithAND(term);
URIBuilder uriBuilder = new URIBuilder(SEARCH_URL);
uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("sort", "relevance");
uriBuilder.addParameter("retmax", String.valueOf(NUMBER_TO_FETCH));
uriBuilder.addParameter("term", term);
return uriBuilder.build().toURL();
}
/**
* Fetch and parse an medline item from eutils.ncbi.nlm.nih.gov.
* The E-utilities generate a huge XML file containing all entries for the ids
*
* @param ids A list of IDs to search for.
* @return Will return an empty list on error.
*/
private List<BibEntry> fetchMedline(List<String> ids) throws FetcherException {
try {
//Separate the IDs with a comma to search multiple entries
URL fetchURL = getURLForID(String.join(",", ids));
URLConnection data = fetchURL.openConnection();
ParserResult result = new MedlineImporter().importDatabase(
new BufferedReader(new InputStreamReader(data.getInputStream(), StandardCharsets.UTF_8)));
if (result.hasWarnings()) {
LOGGER.warn(result.getErrorMessage());
}
List<BibEntry> resultList = result.getDatabase().getEntries();
resultList.forEach(this::doPostCleanup);
return resultList;
} catch (URISyntaxException | MalformedURLException e) {
throw new FetcherException("Error while generating fetch URL",
Localization.lang("Error while generating fetch URL"), e);
} catch (IOException e) {
throw new FetcherException("Error while fetching from Medline",
Localization.lang("Error while fetching from %0", "Medline"), e);
}
}
}
| mit |
TeamDDG/AvicusScrimmage | src/main/java/in/twizmwaz/cardinal/module/modules/matchTimer/MatchTimer.java | 1793 | package in.twizmwaz.cardinal.module.modules.matchTimer;
import in.twizmwaz.cardinal.GameHandler;
import in.twizmwaz.cardinal.event.MatchEndEvent;
import in.twizmwaz.cardinal.event.MatchStartEvent;
import in.twizmwaz.cardinal.match.Match;
import in.twizmwaz.cardinal.match.MatchState;
import in.twizmwaz.cardinal.module.Module;
import org.bukkit.event.EventHandler;
import org.bukkit.event.HandlerList;
public class MatchTimer implements Module {

    // Wall-clock time (ms) at which the current match started; set on MatchStartEvent.
    private long startTime;
    // Final match duration in seconds; 0 until the match has ended.
    private double endTime;

    protected MatchTimer() {
        this.endTime = 0;
    }

    /**
     * Returns the match time in seconds: the elapsed time while the match is
     * running, the frozen final duration once it has ended (or is cycling),
     * and 0 otherwise.
     */
    public static double getTimeInSeconds() {
        Match match = GameHandler.getGameHandler().getMatch();
        // Resolve the timer module once instead of re-walking the global
        // GameHandler chain in each branch.
        MatchTimer timer = match.getModules().getModule(MatchTimer.class);
        if (match.isRunning()) {
            return ((double) System.currentTimeMillis() - timer.getTime()) / 1000.0;
        }
        if (match.getState().equals(MatchState.ENDED) || match.getState().equals(MatchState.CYCLING)) {
            return timer.getEndTime();
        }
        return 0;
    }

    @Override
    public void unload() {
        HandlerList.unregisterAll(this);
    }

    @EventHandler
    public void onMatchStart(MatchStartEvent event) {
        this.startTime = System.currentTimeMillis();
    }

    @EventHandler
    public void onMatchEnd(MatchEndEvent event) {
        // Use this instance's own start time directly instead of looking the
        // module up through the match again (the lookup resolves to this object).
        this.endTime = ((double) System.currentTimeMillis() - startTime) / 1000.0;
    }

    /**
     * @return The current time stored in the module.
     */
    public long getTime() {
        return startTime;
    }

    public double getEndTime() {
        return endTime;
    }
}
| mit |
JoeHsiao/bioformats | components/forks/poi/src/loci/poi/poifs/filesystem/BATManaged.java | 2400 | /*
* #%L
* Fork of Apache Jakarta POI.
* %%
* Copyright (C) 2008 - 2015 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package loci.poi.poifs.filesystem;
/**
* This interface defines behaviors for objects managed by the Block
* Allocation Table (BAT).
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
public interface BATManaged
{

    /**
     * Reports how many BigBlock instances this object occupies.
     *
     * @return the BigBlock count
     */
    int countBlocks();

    /**
     * Informs this object of the index of its first BigBlock within the
     * array of BigBlock instances making up the filesystem.
     *
     * @param index position of the starting block
     */
    void setStartBlock(int index);
}   // end public interface BATManaged
| gpl-2.0 |
robertoandrade/cyclos | src/nl/strohalm/cyclos/webservices/webshop/WebShopWebServiceImpl.java | 3468 | /*
This file is part of Cyclos (www.cyclos.org).
A project of the Social Trade Organisation (www.socialtrade.org).
Cyclos is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Cyclos is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Cyclos; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package nl.strohalm.cyclos.webservices.webshop;
import javax.jws.WebService;
import javax.servlet.http.HttpServletRequest;
import nl.strohalm.cyclos.entities.accounts.transactions.WebShopTicket;
import nl.strohalm.cyclos.entities.members.Member;
import nl.strohalm.cyclos.services.transactions.TicketServiceLocal;
import nl.strohalm.cyclos.webservices.WebServiceContext;
import nl.strohalm.cyclos.webservices.model.WebShopTicketVO;
import nl.strohalm.cyclos.webservices.utils.TicketHelper;
import nl.strohalm.cyclos.webservices.utils.WebServiceHelper;
/**
* Webshop web service implementation
* @author luis
*/
@WebService(name = "webshop", serviceName = "webshop")
public class WebShopWebServiceImpl implements WebShopWebService {

    private TicketServiceLocal ticketServiceLocal;
    private TicketHelper ticketHelper;
    private WebServiceHelper webServiceHelper;

    /**
     * Generates a new webshop ticket from the given parameters, recording the
     * requesting client's remote address on the ticket.
     *
     * @return the generated ticket string, or {@code null} on failure (the
     *         error is reported through the web-service helper)
     */
    @Override
    public String generate(final GenerateWebShopTicketParams params) {
        try {
            final HttpServletRequest request = WebServiceContext.getRequest();
            // toTicket() also enforces the member restriction
            final WebShopTicket newTicket = ticketHelper.toTicket(params);
            newTicket.setMemberAddress(request.getRemoteAddr());
            return ticketServiceLocal.generate(newTicket).getTicket();
        } catch (final Exception e) {
            webServiceHelper.error(e);
            return null;
        }
    }

    /**
     * Loads a webshop ticket by its string and converts it to a value object,
     * after checking that the ticket's target member matches the channel's
     * restricted member (when one is configured).
     *
     * @return the ticket VO, or {@code null} on failure (the error is reported
     *         through the web-service helper)
     */
    @Override
    public WebShopTicketVO get(final String ticket) {
        try {
            final WebShopTicket loaded = (WebShopTicket) ticketServiceLocal.load(ticket);
            // Enforce the member restriction, if any
            final Member restricted = WebServiceContext.getMember();
            if (restricted != null && !restricted.equals(loaded.getTo())) {
                throw new IllegalArgumentException("Error getting webshop ticket: the target member ('to') of the webshop ticket is not the restricted one");
            }
            return ticketHelper.toVO(loaded, WebServiceContext.getChannel().getPrincipalCustomFields());
        } catch (final Exception e) {
            webServiceHelper.error(e);
            return null;
        }
    }

    public void setTicketHelper(final TicketHelper ticketHelper) {
        this.ticketHelper = ticketHelper;
    }

    public void setTicketServiceLocal(final TicketServiceLocal ticketService) {
        ticketServiceLocal = ticketService;
    }

    public void setWebServiceHelper(final WebServiceHelper webServiceHelper) {
        this.webServiceHelper = webServiceHelper;
    }
}
| gpl-2.0 |
robertoandrade/cyclos | src/nl/strohalm/cyclos/controls/members/messages/SendMessageAction.java | 17781 | /*
This file is part of Cyclos (www.cyclos.org).
A project of the Social Trade Organisation (www.socialtrade.org).
Cyclos is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Cyclos is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Cyclos; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package nl.strohalm.cyclos.controls.members.messages;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import nl.strohalm.cyclos.access.AdminMemberPermission;
import nl.strohalm.cyclos.access.BrokerPermission;
import nl.strohalm.cyclos.access.MemberPermission;
import nl.strohalm.cyclos.access.OperatorPermission;
import nl.strohalm.cyclos.annotations.Inject;
import nl.strohalm.cyclos.controls.ActionContext;
import nl.strohalm.cyclos.controls.BaseFormAction;
import nl.strohalm.cyclos.entities.groups.Group;
import nl.strohalm.cyclos.entities.groups.GroupQuery;
import nl.strohalm.cyclos.entities.groups.MemberGroup;
import nl.strohalm.cyclos.entities.members.Element;
import nl.strohalm.cyclos.entities.members.Member;
import nl.strohalm.cyclos.entities.members.messages.Message;
import nl.strohalm.cyclos.entities.members.messages.Message.Type;
import nl.strohalm.cyclos.entities.members.messages.MessageCategory;
import nl.strohalm.cyclos.entities.members.messages.MessageCategoryQuery;
import nl.strohalm.cyclos.entities.members.preferences.NotificationPreference;
import nl.strohalm.cyclos.entities.settings.LocalSettings;
import nl.strohalm.cyclos.exceptions.PermissionDeniedException;
import nl.strohalm.cyclos.services.elements.MessageCategoryService;
import nl.strohalm.cyclos.services.elements.MessageService;
import nl.strohalm.cyclos.services.elements.SendDirectMessageToMemberDTO;
import nl.strohalm.cyclos.services.elements.SendMessageDTO;
import nl.strohalm.cyclos.services.elements.SendMessageFromBrokerToMembersDTO;
import nl.strohalm.cyclos.services.elements.SendMessageToAdminDTO;
import nl.strohalm.cyclos.services.elements.SendMessageToGroupDTO;
import nl.strohalm.cyclos.services.elements.exceptions.MemberWontReceiveNotificationException;
import nl.strohalm.cyclos.services.preferences.PreferenceService;
import nl.strohalm.cyclos.utils.ActionHelper;
import nl.strohalm.cyclos.utils.TextFormat;
import nl.strohalm.cyclos.utils.binding.BeanBinder;
import nl.strohalm.cyclos.utils.binding.DataBinder;
import nl.strohalm.cyclos.utils.binding.PropertyBinder;
import nl.strohalm.cyclos.utils.binding.SimpleCollectionBinder;
import nl.strohalm.cyclos.utils.conversion.CoercionHelper;
import nl.strohalm.cyclos.utils.conversion.HtmlConverter;
import nl.strohalm.cyclos.utils.conversion.StringTrimmerConverter;
import nl.strohalm.cyclos.utils.transaction.CurrentTransactionData;
import nl.strohalm.cyclos.utils.validation.ValidationException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.WordUtils;
import org.apache.struts.action.ActionForward;
/**
* Action used to send a message
* @author luis
*/
public class SendMessageAction extends BaseFormAction {

    /**
     * An enum indicating where to send a message
     * @author luis
     */
    public static enum SendTo {
        MEMBER, ADMIN, GROUP, BROKERED_MEMBERS
    }

    // Column width used when quoting the original plain-text body in a reply.
    private static final int WRAP_SIZE = 50;

    private MessageService messageService;
    private MessageCategoryService messageCategoryService;
    private PreferenceService preferenceService;
    // Lazily built by getDataBinderFor(): one binder per concrete DTO class.
    private Map<Class<? extends SendMessageDTO>, DataBinder<? extends SendMessageDTO>> dataBindersByType;

    @Inject
    public void setMessageCategoryService(final MessageCategoryService messageCategoryService) {
        this.messageCategoryService = messageCategoryService;
    }

    @Inject
    public void setMessageService(final MessageService messageService) {
        this.messageService = messageService;
    }

    @Inject
    public void setPreferenceService(final PreferenceService preferenceService) {
        this.preferenceService = preferenceService;
    }

    /**
     * Sends the message described by the form, then forwards back to the member
     * profile (when the message was started from a profile and is not a reply)
     * or to the message list otherwise.
     */
    @Override
    protected ActionForward handleSubmit(final ActionContext context) throws Exception {
        final SendMessageForm form = context.getForm();
        final long toMemberId = form.getToMemberId();

        // Send the message
        final SendMessageDTO dto = resolveDTO(context);

        // Call the correct service method
        try {
            String key = "message.sent";
            messageService.send(dto);
            if (dto instanceof SendDirectMessageToMemberDTO) {
                final SendDirectMessageToMemberDTO sendDirectMessageToMemberDTO = (SendDirectMessageToMemberDTO) dto;
                Type type = null;
                if (context.isAdmin()) {
                    type = Message.Type.FROM_ADMIN_TO_MEMBER;
                } else {
                    type = Message.Type.FROM_MEMBER;
                }
                if (CurrentTransactionData.hasMailError()) {
                    // E-mail delivery failed: warn when the member still receives an
                    // internal message, fail when e-mail was the only delivery channel.
                    final Member member = sendDirectMessageToMemberDTO.getToMember();
                    final NotificationPreference np = preferenceService.load(member, type);
                    if (np.isMessage()) {
                        key = "message.warning.messageNotReceivedByEmail";
                    } else {
                        return context.sendError("message.error.emailNotSent");
                    }
                }
            }
            context.sendMessage(key);
        } catch (final MemberWontReceiveNotificationException e) {
            return context.sendError("message.error.memberWontReceiveNotification");
        }

        // Go back to the correct location
        if (dto.getInReplyTo() == null && toMemberId > 0L) {
            return ActionHelper.redirectWithParam(context.getRequest(), context.findForward("backToProfile"), "memberId", toMemberId);
        }
        return context.findForward("backToList");
    }

    /**
     * Prepares the form: resolves the optional target member and replied-to
     * message, computes which destinations (member / admin / group / brokered
     * members) the logged user is allowed to send to, and pre-fills the
     * subject, quoted body, format and category when replying.
     */
    @Override
    protected void prepareForm(final ActionContext context) throws Exception {
        final SendMessageForm form = context.getForm();
        final HttpServletRequest request = context.getRequest();
        final Member toMember = resolveToMember(context);
        final Message inReplyTo = resolveInReplyTo(context);
        if (toMember == null) {
            // No specific target member: collect the possible destinations,
            // gated by the logged user's kind and permissions.
            final List<SendTo> sendTo = new ArrayList<SendTo>();
            if (context.isAdmin()) {
                // An admin may send to a group, so, we must get the groups
                if (inReplyTo == null) {
                    final GroupQuery gq = new GroupQuery();
                    gq.setNatures(Group.Nature.MEMBER, Group.Nature.BROKER);
                    gq.setStatus(Group.Status.NORMAL);
                    request.setAttribute("groups", groupService.search(gq));
                    if (permissionService.hasPermission(AdminMemberPermission.MESSAGES_SEND_TO_MEMBER)) {
                        sendTo.add(SendTo.MEMBER);
                    }
                    if (permissionService.hasPermission(AdminMemberPermission.MESSAGES_SEND_TO_GROUP)) {
                        sendTo.add(SendTo.GROUP);
                    }
                }
            } else {
                if (form.isToBrokeredMembers()) {
                    if (context.isBroker() && permissionService.hasPermission(BrokerPermission.MESSAGES_SEND_TO_MEMBERS)) {
                        sendTo.add(SendTo.BROKERED_MEMBERS);
                        request.setAttribute("toBrokeredMembers", SendTo.BROKERED_MEMBERS);
                    }
                } else if (inReplyTo == null) {
                    if (context.isMember() && permissionService.hasPermission(MemberPermission.MESSAGES_SEND_TO_MEMBER) || context.isOperator() && permissionService.hasPermission(OperatorPermission.MESSAGES_SEND_TO_MEMBER)) {
                        sendTo.add(SendTo.MEMBER);
                    }
                    if (context.isBroker() && permissionService.hasPermission(BrokerPermission.MESSAGES_SEND_TO_MEMBERS)) {
                        sendTo.add(SendTo.BROKERED_MEMBERS);
                    }
                    // A member may send to admin, so we must get the categories
                    final MessageCategoryQuery query = new MessageCategoryQuery();
                    query.setFromElement((Member) context.getAccountOwner());
                    final List<MessageCategory> categories = messageCategoryService.search(query);
                    request.setAttribute("categories", categories);
                    if (CollectionUtils.isNotEmpty(categories) && (context.isMember() && permissionService.hasPermission(MemberPermission.MESSAGES_SEND_TO_ADMINISTRATION) || context.isOperator() && permissionService.hasPermission(OperatorPermission.MESSAGES_SEND_TO_ADMINISTRATION))) {
                        sendTo.add(SendTo.ADMIN);
                    }
                }
            }
            // A new message with no allowed destination at all is a permission error;
            // replies are exempt from this check.
            if (inReplyTo == null && CollectionUtils.isEmpty(sendTo)) {
                throw new PermissionDeniedException();
            }
            request.setAttribute("sendTo", sendTo);
        } else {
            // Direct message to a known member: only the categories valid between
            // the two elements are offered.
            final MessageCategoryQuery query = new MessageCategoryQuery();
            query.setFromElement((Element) (context.isOperator() ? context.getAccountOwner() : context.getElement()));
            query.setToElement(toMember);
            request.setAttribute("categories", messageCategoryService.search(query));
        }

        // Message reply
        final LocalSettings localSettings = settingsService.getLocalSettings();
        TextFormat messageFormat = localSettings.getMessageFormat();
        if (inReplyTo != null) {
            form.setMessage("subject", context.message("message.reply.subject", inReplyTo.getSubject()));
            String body;
            if (inReplyTo.isHtml()) {
                // Quote the original HTML body in an indented block
                body = "<br><br><div style='padding-left:40px;border-left:1px solid black'>" + inReplyTo.getBody() + "</div>";
                messageFormat = TextFormat.RICH;
            } else {
                // Quote the original plain-text body, e-mail style ("> " prefix),
                // wrapped at WRAP_SIZE columns
                body = " \n\n> " + StringUtils.replace(WordUtils.wrap(inReplyTo.getBody(), WRAP_SIZE), "\n", "\n> ");
                messageFormat = TextFormat.PLAIN;
            }
            request.setAttribute("body", body);
            form.setMessage("html", inReplyTo.isHtml());
            if (inReplyTo.getCategory() != null) {
                form.setMessage("category", inReplyTo.getCategory().getId());
                if (inReplyTo.getToMember() != null) {
                    // Reply to a member
                    request.setAttribute("categoryName", inReplyTo.getCategory().getName());
                    request.setAttribute("categoryEditable", false);
                } else {
                    // Reply to administration
                    final MessageCategoryQuery query = new MessageCategoryQuery();
                    query.setFromElement((Element) (context.isOperator() ? context.getAccountOwner() : context.getElement()));
                    request.setAttribute("categories", messageCategoryService.search(query));
                    request.setAttribute("categoryId", inReplyTo.getCategory().getId());
                }
            }
        }
        form.setMessage("html", messageFormat == TextFormat.RICH);
        request.setAttribute("inReplyTo", inReplyTo);
        request.setAttribute("toMember", toMember);
        request.setAttribute("messageFormat", messageFormat);
    }

    /**
     * Validates the DTO built from the form before it is actually sent.
     */
    @Override
    protected void validateForm(final ActionContext context) {
        final SendMessageDTO dto = resolveDTO(context);
        messageService.validate(dto);
    }

    /**
     * Builds a binder for the properties shared by every send-message DTO type.
     */
    private <T extends SendMessageDTO> BeanBinder<T> basicDataBinderFor(final Class<T> type) {
        final BeanBinder<T> binder = BeanBinder.instance(type);
        // The body is not read here, as it can be either plain text or html
        binder.registerBinder("category", PropertyBinder.instance(MessageCategory.class, "category"));
        binder.registerBinder("subject", PropertyBinder.instance(String.class, "subject"));
        binder.registerBinder("inReplyTo", PropertyBinder.instance(Message.class, "inReplyTo"));
        binder.registerBinder("html", PropertyBinder.instance(Boolean.TYPE, "html"));
        return binder;
    }

    /**
     * Returns the data binder for the given DTO type, building the whole
     * binder map lazily on first use.
     */
    @SuppressWarnings("unchecked")
    private <T extends SendMessageDTO> DataBinder<T> getDataBinderFor(final Class<T> type) {
        if (dataBindersByType == null) {
            dataBindersByType = new HashMap<Class<? extends SendMessageDTO>, DataBinder<? extends SendMessageDTO>>();

            final BeanBinder<SendDirectMessageToMemberDTO> toMemberBinder = basicDataBinderFor(SendDirectMessageToMemberDTO.class);
            toMemberBinder.registerBinder("toMember", PropertyBinder.instance(Member.class, "toMember"));
            dataBindersByType.put(SendDirectMessageToMemberDTO.class, toMemberBinder);

            final BeanBinder<SendMessageToAdminDTO> toAdminBinder = basicDataBinderFor(SendMessageToAdminDTO.class);
            dataBindersByType.put(SendMessageToAdminDTO.class, toAdminBinder);

            final BeanBinder<SendMessageFromBrokerToMembersDTO> toBrokeredBinder = basicDataBinderFor(SendMessageFromBrokerToMembersDTO.class);
            dataBindersByType.put(SendMessageFromBrokerToMembersDTO.class, toBrokeredBinder);

            final BeanBinder<SendMessageToGroupDTO> toGroupBinder = basicDataBinderFor(SendMessageToGroupDTO.class);
            toGroupBinder.registerBinder("toGroups", SimpleCollectionBinder.instance(MemberGroup.class, "toGroups"));
            dataBindersByType.put(SendMessageToGroupDTO.class, toGroupBinder);
        }
        return (DataBinder<T>) dataBindersByType.get(type);
    }

    /**
     * Resolve a send message dto
     */
    private SendMessageDTO resolveDTO(final ActionContext context) {
        final SendMessageForm form = context.getForm();
        Class<? extends SendMessageDTO> dtoClass = null;
        final SendTo sendTo = CoercionHelper.coerce(SendTo.class, form.getSendTo());
        if (sendTo == null) {
            throw new ValidationException();
        }
        // Test and validate who to send the message
        switch (sendTo) {
            case MEMBER:
                dtoClass = SendDirectMessageToMemberDTO.class;
                break;
            case GROUP:
                if (!context.isAdmin()) {
                    throw new ValidationException();
                }
                dtoClass = SendMessageToGroupDTO.class;
                break;
            case BROKERED_MEMBERS:
                if (!context.isBroker()) {
                    throw new ValidationException();
                }
                dtoClass = SendMessageFromBrokerToMembersDTO.class;
                break;
            case ADMIN:
                if (!(context.isMember() || context.isOperator())) {
                    throw new ValidationException();
                }
                dtoClass = SendMessageToAdminDTO.class;
                break;
            default:
                throw new ValidationException();
        }
        final SendMessageDTO dto = getDataBinderFor(dtoClass).readFromString(form.getMessage());
        // The body is bound by hand because it may be either HTML or plain text
        // (see basicDataBinderFor, which deliberately skips it)
        if (dto.isHtml()) {
            dto.setBody(HtmlConverter.instance().valueOf("" + form.getMessage("body")));
        } else {
            dto.setBody(StringTrimmerConverter.instance().valueOf("" + form.getMessage("body")));
        }
        return dto;
    }

    /**
     * Loads the message being replied to (when the form carries an id),
     * ensuring the logged user is actually the one the message was sent to.
     */
    private Message resolveInReplyTo(final ActionContext context) {
        final SendMessageForm form = context.getForm();
        final long inReplyToId = form.getInReplyTo();
        if (inReplyToId <= 0L) {
            return null;
        }
        final Message inReplyTo = messageService.load(inReplyToId, Message.Relationships.TO_MEMBER);
        if ((context.isAdmin() && inReplyTo.getToMember() != null) || (context.isMember() && !context.getAccountOwner().equals(inReplyTo.getToMember()))) {
            throw new PermissionDeniedException();
        }
        return inReplyTo;
    }

    /**
     * Resolve the member to send to, if any
     */
    private Member resolveToMember(final ActionContext context) {
        final SendMessageForm form = context.getForm();
        final long toMemberId = form.getToMemberId();
        Member toMember = null;
        // Load the member to send to, if any
        if (toMemberId > 0L) {
            final Element loggedElement = (Element) (context.isOperator() ? context.getAccountOwner() : context.getElement());
            // Cannot send to self
            if (toMemberId == loggedElement.getId()) {
                throw new ValidationException();
            }
            // Ensure a member
            final Element element = elementService.load(toMemberId, Element.Relationships.USER);
            if (!(element instanceof Member)) {
                throw new ValidationException();
            }
            toMember = (Member) element;
        }
        return toMember;
    }
}
| gpl-2.0 |
aosm/gccfast | libjava/java/awt/image/renderable/ContextualRenderedImageFactory.java | 2372 | /* ContextualRenderedImageFactory.java --
Copyright (C) 2002 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package java.awt.image.renderable;
import java.awt.geom.Rectangle2D;
import java.awt.image.RenderedImage;
/**
* STUBBED
*/
public interface ContextualRenderedImageFactory extends RenderedImageFactory
{
  // NOTE(review): this local copy is stubbed (see the class comment above);
  // the one-line summaries below follow the J2SE API for
  // java.awt.image.renderable.ContextualRenderedImageFactory — confirm
  // against the JDK documentation before relying on them.

  /** Maps the operation's output RenderContext into a RenderContext for the i'th source. */
  RenderContext mapRenderContext(int i, RenderContext context,
                                 ParameterBlock block, RenderableImage image);

  /** Creates a rendering given a render context and the operation's parameters. */
  RenderedImage create(RenderContext context, ParameterBlock block);

  /** Returns the bounding box for the output of the operation. */
  Rectangle2D getBounds2D(ParameterBlock block);

  /** Returns the appropriate instance of the named property. */
  Object getProperty(ParameterBlock block, String name);

  /** Returns the names of properties recognized by getProperty. */
  String[] getPropertyNames();

  /** Returns true if successive renderings with identical arguments may produce different results. */
  boolean isDynamic();
} // interface ContextualRenderedImageFactory
| gpl-2.0 |
YouDiSN/OpenJDK-Research | jdk9/jaxp/src/java.xml/share/classes/com/sun/org/apache/xerces/internal/util/DOMEntityResolverWrapper.java | 6483 | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.util;
import com.sun.org.apache.xerces.internal.xni.XNIException;
import com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier;
import com.sun.org.apache.xerces.internal.xni.grammars.XMLGrammarDescription;
import com.sun.org.apache.xerces.internal.xni.parser.XMLEntityResolver;
import com.sun.org.apache.xerces.internal.xni.parser.XMLInputSource;
import org.w3c.dom.ls.LSResourceResolver;
import org.w3c.dom.ls.LSInput;
import java.io.InputStream;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
/**
* This class wraps DOM entity resolver to XNI entity resolver.
*
* @see LSResourceResolver
*
* @author Gopal Sharma, SUN MicroSystems Inc.
* @author Elena Litani, IBM
* @author Ramesh Mandava, Sun Microsystems
*/
public class DOMEntityResolverWrapper
    implements XMLEntityResolver {

    //
    // Data
    //

    /** XML 1.0 type constant according to DOM L3 LS CR spec "http://www.w3.org/TR/2003/CR-DOM-Level-3-LS-20031107" */
    private static final String XML_TYPE = "http://www.w3.org/TR/REC-xml";

    /** XML Schema constant according to DOM L3 LS CR spec "http://www.w3.org/TR/2003/CR-DOM-Level-3-LS-20031107" */
    private static final String XSD_TYPE = "http://www.w3.org/2001/XMLSchema";

    /** The DOM entity resolver. */
    protected LSResourceResolver fEntityResolver;

    //
    // Constructors
    //

    /** Default constructor. */
    public DOMEntityResolverWrapper() {}

    /** Wraps the specified DOM entity resolver. */
    public DOMEntityResolverWrapper(LSResourceResolver entityResolver) {
        setEntityResolver(entityResolver);
    } // LSResourceResolver

    //
    // Public methods
    //

    /** Sets the DOM entity resolver. */
    public void setEntityResolver(LSResourceResolver entityResolver) {
        fEntityResolver = entityResolver;
    } // setEntityResolver(LSResourceResolver)

    /** Returns the DOM entity resolver. */
    public LSResourceResolver getEntityResolver() {
        return fEntityResolver;
    } // getEntityResolver():LSResourceResolver

    //
    // XMLEntityResolver methods
    //

    /**
     * Resolves an external parsed entity. If the entity cannot be
     * resolved, this method should return null.
     *
     * @param resourceIdentifier description of the resource to be resolved
     * @throws XNIException Thrown on general error.
     * @throws IOException Thrown if resolved entity stream cannot be
     *                     opened or some other i/o error occurs.
     */
    public XMLInputSource resolveEntity(XMLResourceIdentifier resourceIdentifier)
        throws XNIException, IOException {
        if (fEntityResolver == null) {
            // no wrapped resolver -> unable to resolve entity
            return null;
        }

        // For entity resolution the type of the resource would be XML TYPE.
        // The DOM L3 LS spec mentions only the XML 1.0 recommendation right now.
        // (Guard-clause form replaces the original deeply nested conditional
        // expression; the argument values passed are unchanged.)
        final LSInput inputSource;
        if (resourceIdentifier == null) {
            inputSource = fEntityResolver.resolveResource(null, null, null, null, null);
        }
        else {
            inputSource = fEntityResolver.resolveResource(
                    getType(resourceIdentifier),
                    resourceIdentifier.getNamespace(),
                    resourceIdentifier.getPublicId(),
                    resourceIdentifier.getLiteralSystemId(),
                    resourceIdentifier.getBaseSystemId());
        }
        if (inputSource == null) {
            // unable to resolve entity
            return null;
        }

        String publicId = inputSource.getPublicId();
        String systemId = inputSource.getSystemId();
        String baseSystemId = inputSource.getBaseURI();
        InputStream byteStream = inputSource.getByteStream();
        Reader charStream = inputSource.getCharacterStream();
        String encoding = inputSource.getEncoding();
        String data = inputSource.getStringData();

        /**
         * An LSParser looks at inputs specified in LSInput in
         * the following order: characterStream, byteStream,
         * stringData, systemId, publicId.
         */
        XMLInputSource xmlInputSource =
            new XMLInputSource(publicId, systemId, baseSystemId, true);

        if (charStream != null) {
            xmlInputSource.setCharacterStream(charStream);
        }
        else if (byteStream != null) {
            // byteStream is declared as InputStream already; the cast the
            // original code applied here was redundant.
            xmlInputSource.setByteStream(byteStream);
        }
        else if (data != null && data.length() != 0) {
            xmlInputSource.setCharacterStream(new StringReader(data));
        }
        xmlInputSource.setEncoding(encoding);
        return xmlInputSource;
    } // resolveEntity(String,String,String):XMLInputSource

    /** Determines the type of resource being resolved **/
    private String getType(XMLResourceIdentifier resourceIdentifier) {
        if (resourceIdentifier instanceof XMLGrammarDescription) {
            XMLGrammarDescription desc = (XMLGrammarDescription) resourceIdentifier;
            if (XMLGrammarDescription.XML_SCHEMA.equals(desc.getGrammarType())) {
                return XSD_TYPE;
            }
        }
        return XML_TYPE;
    } // getType(XMLResourceIdentifier):String

} // DOMEntityResolverWrapper
| gpl-2.0 |
wisdom-garden/dotcms | src/com/dotmarketing/quartz/job/CalendarReminderThread.java | 2172 | package com.dotmarketing.quartz.job;
import java.util.Date;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import com.dotmarketing.business.APILocator;
import com.dotmarketing.db.DbConnectionFactory;
import com.dotmarketing.db.HibernateUtil;
import com.dotmarketing.exception.DotHibernateException;
import com.dotmarketing.portlets.calendar.business.CalendarReminderAPI;
import com.dotmarketing.util.Logger;
/**
 * Job implementation to run calendar reminder process
 *
 * @author Salvador Di Nardo
 */
public class CalendarReminderThread implements Job {

    /** No-arg constructor, required so Quartz can instantiate the job. */
    public CalendarReminderThread() {
    }

    /**
     * Thread main method to start the calendar reminder process.
     * Opens a Hibernate transaction, dispatches every reminder due as of
     * "now", and commits in the finally block. Failures are logged as
     * warnings; they do not propagate.
     */
    @SuppressWarnings("unchecked")
    public void run() {
        Logger.debug(this, "Running Calendar Reminder Job");
        try {
            HibernateUtil.startTransaction();
            CalendarReminderAPI CRAI = APILocator.getCalendarReminderAPI();
            Date now = new Date();
            // Sends all calendar reminders scheduled up to the current time.
            CRAI.sendCalendarRemainder(now);
            Logger.debug(this,"The Calendar Reminder Job End successfully");
        } catch (Exception e) {
            Logger.warn(this, e.toString());
        }
        finally {
            try {
                // NOTE(review): the transaction is committed even when the try
                // block above failed — presumably intentional best-effort
                // behavior, but confirm a rollback is not expected on error.
                HibernateUtil.commitTransaction();
            } catch (Exception e) {
                Logger.warn(this, e.toString());
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Thread#destroy()
     */
    public void destroy() {
    }

    /**
     * Job main method to start Calendar Reminder process, this method call run().
     * The Hibernate session and DB connection are always released when done.
     *
     * @param context JobExecutionContext.
     * @exception JobExecutionException.
     */
    public void execute(JobExecutionContext context) throws JobExecutionException {
        Logger.debug(this, "Running CalendarReminderThread - " + new Date());
        try {
            run();
        } catch (Exception e) {
            Logger.warn(this, e.toString());
        }
        finally {
            try {
                HibernateUtil.closeSession();
            } catch (DotHibernateException e) {
                Logger.warn(this, e.getMessage(), e);
            }
            finally {
                DbConnectionFactory.closeConnection();
            }
        }
    }
}
| gpl-3.0 |
dentmaged/Bukkit | src/main/java/org/bukkit/event/player/PlayerInteractAtEntityEvent.java | 902 | package org.bukkit.event.player;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.event.HandlerList;
import org.bukkit.util.Vector;
/**
 * Represents an event that is called when a player right clicks an entity
 * with the position on the entity that was clicked.
 */
public class PlayerInteractAtEntityEvent extends PlayerInteractEntityEvent {
    private static final HandlerList handlers = new HandlerList();
    // Position on the entity that was clicked, relative to the entity.
    private final Vector position;

    public PlayerInteractAtEntityEvent(Player who, Entity clickedEntity, Vector position) {
        super(who, clickedEntity);
        this.position = position;
    }

    /**
     * Gets the position that was clicked. A defensive clone is returned so
     * callers cannot mutate the event's stored vector.
     */
    public Vector getClickedPosition() {
        return position.clone();
    }

    @Override
    public HandlerList getHandlers() {
        return handlers;
    }

    public static HandlerList getHandlerList() {
        return handlers;
    }
}
| gpl-3.0 |
Jenyay/tasks | src/main/java/org/tasks/sync/SyncExecutor.java | 1064 | package org.tasks.sync;
import com.todoroo.astrid.sync.SyncResultCallback;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import static java.util.concurrent.ThreadPoolExecutor.DiscardPolicy;
public class SyncExecutor {

    // Cached-thread-pool-style executor: core size 0, unbounded maximum,
    // 60s idle keep-alive, SynchronousQueue hand-off. DiscardPolicy means
    // tasks submitted after shutdown are dropped silently.
    private final ExecutorService executor = new ThreadPoolExecutor(
            0, Integer.MAX_VALUE,
            60L, TimeUnit.SECONDS,
            new SynchronousQueue<Runnable>(),
            new DiscardPolicy());

    @Inject
    public SyncExecutor() {
    }

    /**
     * Runs {@code command} asynchronously on the internal executor.
     *
     * NOTE(review): if the command throws, the whole executor is shut down —
    * after that, later execute() calls are silently discarded (DiscardPolicy).
     * {@code callback.finished()} is only invoked on the failure path;
     * presumably the command itself signals completion on success — confirm
     * with callers.
     */
    public void execute(final SyncResultCallback callback, final Runnable command) {
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    command.run();
                } catch (Exception e) {
                    executor.shutdownNow();
                    callback.finished();
                }
            }
        });
    }
}
| gpl-3.0 |
roskens/opennms-pre-github | opennms-config/src/main/java/org/opennms/netmgt/config/ActiondConfigFactory.java | 4826 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2002-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.config;
import java.io.File;
import java.io.IOException;
import org.opennms.core.utils.ConfigFileConstants;
import org.opennms.core.xml.JaxbUtils;
import org.opennms.netmgt.config.actiond.ActiondConfiguration;
import org.springframework.core.io.FileSystemResource;
/**
 * This is the singleton class used to load the configuration for the OpenNMS
 * Actiond from the actiond-configuration xml file.
 *
 * <strong>Note: </strong>Users of this class should make sure the
 * <em>init()</em> is called before calling any other method to ensure the
 * config is loaded before accessing other convenience methods.
 *
 * @author <a href="mailto:sowmya@opennms.org">Sowmya Nataraj </a>
 * @author <a href="http://www.opennms.org/">OpenNMS </a>
 */
public final class ActiondConfigFactory {
    /**
     * The singleton instance of this factory
     */
    private static ActiondConfigFactory m_singleton = null;

    /**
     * The config class loaded from the config file
     */
    private ActiondConfiguration m_config;

    /**
     * This member is set to true if the configuration file has been loaded.
     */
    private static boolean m_loaded = false;

    /**
     * Private constructor. Unmarshals the actiond configuration from the
     * given file path.
     *
     * @param configFile path of the configuration file to read
     * @exception java.io.IOException
     *                Thrown if the specified config file cannot be read
     */
    private ActiondConfigFactory(final String configFile) throws IOException {
        m_config = JaxbUtils.unmarshal(ActiondConfiguration.class, new FileSystemResource(configFile));
    }

    /**
     * Load the config from the default config file and create the singleton
     * instance of this factory.
     *
     * @exception java.io.IOException
     *                Thrown if the specified config file cannot be read
     * @throws java.io.IOException if any.
     */
    public static synchronized void init() throws IOException {
        if (m_loaded) {
            // init already called - return
            // to reload, reload() will need to be called
            return;
        }

        final File cfgFile = ConfigFileConstants.getFile(ConfigFileConstants.ACTIOND_CONFIG_FILE_NAME);
        m_singleton = new ActiondConfigFactory(cfgFile.getPath());
        m_loaded = true;
    }

    /**
     * Reload the config from the default config file.
     *
     * @exception java.io.IOException
     *                Thrown if the specified config file cannot be read/loaded
     * @throws java.io.IOException if any.
     */
    public static synchronized void reload() throws IOException {
        // Drop the current instance and force init() to re-read the file.
        m_singleton = null;
        m_loaded = false;

        init();
    }

    /**
     * Return the singleton instance of this factory.
     *
     * @return The current factory instance.
     * @throws java.lang.IllegalStateException
     *             Thrown if the factory has not yet been initialized.
     */
    public static synchronized ActiondConfigFactory getInstance() {
        // Braces added: single-statement if without braces is error-prone.
        if (!m_loaded) {
            throw new IllegalStateException("The factory has not been initialized");
        }

        return m_singleton;
    }

    /**
     * Return the maximum time that can be taken by a process.
     *
     * @return the maximum time that can be taken by a process
     */
    public synchronized long getMaxProcessTime() {
        return m_config.getMaxProcessTime();
    }

    /**
     * Return the maximum number of processes that run simultaneously.
     *
     * @return the maximum number of processes that run simultaneously
     */
    public synchronized int getMaxOutstandingActions() {
        return m_config.getMaxOutstandingActions();
    }
}
| agpl-3.0 |
bhutchinson/kfs | kfs-core/src/main/java/org/kuali/kfs/gl/batch/CollectorXmlInputFileType.java | 8941 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.gl.batch;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.kuali.kfs.coa.businessobject.Account;
import org.kuali.kfs.coa.service.AccountService;
import org.kuali.kfs.gl.batch.service.CollectorHelperService;
import org.kuali.kfs.gl.batch.service.impl.OriginEntryTotals;
import org.kuali.kfs.gl.businessobject.CollectorDetail;
import org.kuali.kfs.gl.businessobject.OriginEntryFull;
import org.kuali.kfs.sys.KFSConstants;
import org.kuali.kfs.sys.KFSKeyConstants;
import org.kuali.kfs.sys.batch.XmlBatchInputFileTypeBase;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.kfs.sys.exception.ParseException;
import org.kuali.rice.core.api.datetime.DateTimeService;
import org.kuali.rice.krad.util.GlobalVariables;
/**
 * Batch input type for the collector job. Parses collector XML uploads,
 * derives missing chart codes from accounts where possible, and validates
 * batches via the collector helper service.
 */
public class CollectorXmlInputFileType extends XmlBatchInputFileTypeBase {
    private static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(CollectorXmlInputFileType.class);

    // Used to timestamp the generated upload file name.
    protected DateTimeService dateTimeService;
    // Performs per-batch validation and trailer-total checks.
    private CollectorHelperService collectorHelperService;

    /**
     * Returns the identifier of the Collector's file type
     *
     * @return the Collector's file type identifier
     * @see org.kuali.kfs.sys.batch.BatchInputFileType#getFileTypeIdentifer()
     */
    public String getFileTypeIdentifer() {
        return KFSConstants.COLLECTOR_XML_FILE_TYPE_INDENTIFIER;
    }

    /**
     * Builds the file name using the following construction: All collector files start with gl_idbilltrans_ append the chartorg
     * from the batch header append the username of the user who is uploading the file then the user supplied indentifier finally
     * the timestamp
     *
     * @param user who uploaded the file
     * @param parsedFileContents represents collector batch object
     * @param userIdentifier user identifier for user who uploaded file
     * @return String returns file name using the convention mentioned in the description
     *
     * @see org.kuali.kfs.sys.batch.BatchInputFileType#getFileName(org.kuali.rice.kim.api.identity.Person, java.lang.Object,
     *      java.lang.String)
     */
    public String getFileName(String principalName, Object parsedFileContents, String userIdentifier) {
        // this implementation assumes that there is only one batch in the XML file
        CollectorBatch collectorBatch = ((List<CollectorBatch>) parsedFileContents).get(0);

        String fileName = "gl_collector_" + collectorBatch.getChartOfAccountsCode() + collectorBatch.getOrganizationCode();
        fileName += "_" + principalName;
        if (StringUtils.isNotBlank(userIdentifier)) {
            fileName += "_" + userIdentifier;
        }
        fileName += "_" + dateTimeService.toDateTimeStringForFilename(dateTimeService.getCurrentDate());

        // remove spaces in filename
        fileName = StringUtils.remove(fileName, " ");

        return fileName;
    }

    /**
     * Validates the parsed collector batches. Because the XSD cannot require a
     * chart code (accounts may be configured so they cannot cross charts), a
     * missing chart code is derived from the account when possible; otherwise
     * an error is recorded in the global message map. Each batch is then run
     * through the collector helper validation and trailer-total checks.
     *
     * @param parsedFileContents List of CollectorBatch objects to validate
     * @return true only if every batch passed validation
     */
    public boolean validate(Object parsedFileContents) {
        List<CollectorBatch> parsedBatches = (List<CollectorBatch>) parsedFileContents;
        boolean allBatchesValid = true;

        // add validation for chartCode-accountNumber, as chartCode is not required in xsd due to accounts-cant-cross-charts option
        AccountService acctserv = SpringContext.getBean(AccountService.class);

        for (CollectorBatch batch : parsedBatches) {
            boolean isValid = true;

            if (batch.getOriginEntries() != null) {
                for (OriginEntryFull originEntry : batch.getOriginEntries()) {
                    // if chart code is empty while accounts cannot cross charts, then derive chart code from account number
                    if (StringUtils.isEmpty(originEntry.getChartOfAccountsCode())) {
                        if (acctserv.accountsCanCrossCharts()) {
                            GlobalVariables.getMessageMap().putError(KFSConstants.GLOBAL_ERRORS, KFSKeyConstants.ERROR_BATCH_UPLOAD_FILE_EMPTY_CHART, originEntry.getAccountNumber());
                            isValid = false;
                        }
                        else {
                            // accountNumber shall not be empty, otherwise won't pass schema validation
                            Account account = acctserv.getUniqueAccountForAccountNumber(originEntry.getAccountNumber());
                            if (account != null) {
                                originEntry.setChartOfAccountsCode(account.getChartOfAccountsCode());
                            }
                            else {
                                GlobalVariables.getMessageMap().putError(KFSConstants.GLOBAL_ERRORS, KFSKeyConstants.ERROR_BATCH_UPLOAD_FILE_INVALID_ACCOUNT, originEntry.getAccountNumber());
                                isValid = false;
                            }
                        }
                    }
                }
            }

            if (batch.getCollectorDetails() != null) {
                for (CollectorDetail collectorDetail : batch.getCollectorDetails()) {
                    // if chart code is empty while accounts cannot cross charts, then derive chart code from account number
                    if (StringUtils.isEmpty(collectorDetail.getChartOfAccountsCode())) {
                        if (acctserv.accountsCanCrossCharts()) {
                            // report error
                            GlobalVariables.getMessageMap().putError(KFSConstants.GLOBAL_ERRORS, KFSKeyConstants.ERROR_BATCH_UPLOAD_FILE_EMPTY_CHART, collectorDetail.getAccountNumber());
                            isValid = false;
                        }
                        else {
                            // accountNumber shall not be empty, otherwise won't pass schema validation
                            Account account = acctserv.getUniqueAccountForAccountNumber(collectorDetail.getAccountNumber());
                            if (account != null) {
                                collectorDetail.setChartOfAccountsCode(account.getChartOfAccountsCode());
                            }
                            else {
                                GlobalVariables.getMessageMap().putError(KFSConstants.GLOBAL_ERRORS, KFSKeyConstants.ERROR_BATCH_UPLOAD_FILE_INVALID_ACCOUNT, collectorDetail.getAccountNumber());
                                isValid = false;
                            }
                        }
                    }
                }
            }

            isValid &= collectorHelperService.performValidation(batch);
            if (isValid) {
                // trailer totals are only checked when the batch is otherwise valid
                isValid = collectorHelperService.checkTrailerTotals(batch, null);
            }

            allBatchesValid &= isValid;
        }

        return allBatchesValid;
    }

    /**
     * Returns the Collector's title key
     *
     * @return the title key for the Collector
     * @see org.kuali.kfs.sys.batch.BatchInputFileType#getTitleKey()
     */
    public String getTitleKey() {
        return KFSKeyConstants.MESSAGE_BATCH_UPLOAD_TITLE_COLLECTOR;
    }

    /**
     * Extracts the uploading user's principal name from an underscore-delimited
     * file name (see {@link #getFileName}); returns null if the name does not
     * have enough segments.
     */
    public String getAuthorPrincipalName(File file) {
        String[] fileNameParts = StringUtils.split(file.getName(), "_");
        if (fileNameParts.length > 4) {
            return fileNameParts[3];
        }
        return null;
    }

    /**
     * Parses the XML bytes into a single CollectorBatch, computes its origin
     * entry totals, and returns it wrapped in a one-element list.
     */
    @Override
    public Object parse(byte[] fileByteContent) throws ParseException {
        CollectorBatch batch = (CollectorBatch) super.parse(fileByteContent);

        OriginEntryTotals totals = new OriginEntryTotals();
        totals.addToTotals(batch.getOriginEntries().iterator());
        batch.setOriginEntryTotals(totals);
        return Arrays.asList(batch);
    }

    /** Setter for Spring injection of the date/time service. */
    public void setDateTimeService(DateTimeService dateTimeService) {
        this.dateTimeService = dateTimeService;
    }

    /** Setter for Spring injection of the collector helper service. */
    public void setCollectorHelperService(CollectorHelperService collectorHelperService) {
        this.collectorHelperService = collectorHelperService;
    }
}
| agpl-3.0 |
gytis/narayana | qa/tests/src/org/jboss/jbossts/qa/CurrentTests01/Test36.java | 3574 | /*
* JBoss, Home of Professional Open Source
* Copyright 2007, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2005-2006,
* @author JBoss Inc.
*/
//
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
//
// Arjuna Technologies Ltd.,
// Newcastle upon Tyne,
// Tyne and Wear,
// UK.
//
package org.jboss.jbossts.qa.CurrentTests01;
/*
* Copyright (C) 1999-2001 by HP Bluestone Software, Inc. All rights Reserved.
*
* HP Arjuna Labs,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: Test36.java,v 1.2 2003/06/26 11:43:53 rbegg Exp $
*/
/*
* Try to get around the differences between Ansi CPP and
* K&R cpp with concatenation.
*/
/*
* Copyright (C) 1999-2001 by HP Bluestone Software, Inc. All rights Reserved.
*
* HP Arjuna Labs,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: Test36.java,v 1.2 2003/06/26 11:43:53 rbegg Exp $
*/
import org.jboss.jbossts.qa.Utils.OAInterface;
import org.jboss.jbossts.qa.Utils.ORBInterface;
import org.jboss.jbossts.qa.Utils.OTS;
import org.omg.CosTransactions.Current;
import org.omg.CosTransactions.NoTransaction;
/**
 * Test to see if stop start of orb causes any problems.
 * Runs the same check twice — commit with no active transaction must raise
 * NoTransaction — with a full ORB shutdown and restart in between.
 */
public class Test36
{
    public static void main(String[] args)
    {
        boolean correct = true;

        // First pass: start the ORB/OA and verify commit without a
        // transaction raises NoTransaction.
        try
        {
            ORBInterface.initORB(args, null);
            OAInterface.initOA();

            Current current = OTS.get_current();

            try
            {
                current.commit(true);
                correct = false;
            }
            catch (NoTransaction noTransaction)
            {
                // expected: no transaction is associated with this thread
            }

            if (!correct)
            {
                System.out.println("Failed");
                return;
            }
        }
        catch (Exception exception)
        {
            System.out.println("Failed");
            System.err.println("Test036.main: " + exception);
            exception.printStackTrace(System.err);
            return;
        }

        try
        {
            OAInterface.shutdownOA();
            ORBInterface.shutdownORB();
        }
        catch (Exception exception)
        {
            System.err.println("Test036.main: " + exception);
            exception.printStackTrace(System.err);
        }

        // Second pass: restart the ORB after the shutdown and check the same
        // behaviour still holds.
        try
        {
            ORBInterface.initORB(args, null);
            OAInterface.initOA();

            Current current = OTS.get_current();

            try
            {
                current.commit(true);
                correct = false;
            }
            catch (NoTransaction noTransaction)
            {
                // expected
            }

            if (correct)
            {
                System.out.println("Passed");
            }
            else
            {
                System.out.println("Failed");
            }
        }
        catch (Exception exception)
        {
            System.out.println("Failed");
            System.err.println("Test036.main: " + exception);
            exception.printStackTrace(System.err);
        }

        try
        {
            OAInterface.shutdownOA();
            ORBInterface.shutdownORB();
        }
        catch (Exception exception)
        {
            // Fixed copy-paste error: this message previously said
            // "Test01.main", inconsistent with every other message here.
            System.err.println("Test036.main: " + exception);
            exception.printStackTrace(System.err);
        }
    }
}
| lgpl-2.1 |
MaOrKsSi/HZS.Durian | 增强/org.hzs.mongodb/src/org/bson/util/annotations/NotThreadSafe.java | 1113 | /*
* Copyright (c) 2005 Brian Goetz and Tim Peierls
* Released under the Creative Commons Attribution License
* (http://creativecommons.org/licenses/by/2.5)
* Official home: http://www.jcip.net
*
* Any republication or derived work distributed in source code form
* must include this copyright and license notice.
*/
package org.bson.util.annotations;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * The class to which this annotation is applied is not thread-safe.
 * This annotation primarily exists for clarifying the non-thread-safety of a class
 * that might otherwise be assumed to be thread-safe, despite the fact that it is a bad
 * idea to assume a class is thread-safe without good reason.
 *
 * Retained at runtime and documented so tools and readers of the Javadoc
 * can see the marker; applicable to types only.
 *
 * @see ThreadSafe
 *
 * @deprecated This class is NOT a part of public API and will be dropped in 3.x versions.
 */
@Documented
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Deprecated
public @interface NotThreadSafe {
}
| lgpl-3.0 |
TribeMedia/aura | aura/src/main/java/org/auraframework/def/ActionDef.java | 1943 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.def;
import java.util.List;
/**
 * Interface for actions, with params, types, and return type.
 */
public interface ActionDef extends Definition {
    @Override
    DefDescriptor<ActionDef> getDescriptor();

    /**
     * Actions can be server side or client side. If they are client side the
     * server just sends the code down and remains oblivious as to how they are
     * run
     */
    public enum ActionType {
        CLIENT, SERVER;
    }

    /**
     * @return type of this action (client-side or server-side)
     */
    ActionType getActionType();

    /**
     * The name of this action is the unique identifier the component can use to
     * call this action
     *
     * @return the name of this action
     */
    @Override
    String getName();

    /**
     * Basic type system stuff
     *
     * @return the type this action returns
     */
    DefDescriptor<TypeDef> getReturnType();

    /**
     * Get all the parameters for this action. They are returned in the order
     * they are defined but because the names are provided some contexts can
     * provide args in any order and match up the params
     *
     * @return ordered list of parameters
     */
    List<ValueDef> getParameters();

    /**
     * Get the names of the parameters that may safely be logged.
     */
    List<String> getLoggableParams();
}
| apache-2.0 |
pdxrunner/geode | geode-dunit/src/main/java/org/apache/geode/security/generator/XmlAuthzCredentialGenerator.java | 8766 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.security.generator;
import java.security.Principal;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import org.apache.geode.cache.operations.OperationContext.OperationCode;
import org.apache.geode.security.templates.UsernamePrincipal;
import org.apache.geode.security.templates.XmlAuthorization;
import org.apache.geode.util.test.TestUtil;
public class XmlAuthzCredentialGenerator extends AuthzCredentialGenerator {
private static final String dummyXml = "authz-dummy.xml";
private static final String ldapXml = "authz-ldap.xml";
private static final String pkcsXml = "authz-pkcs.xml";
private static final String sslXml = "authz-ssl.xml";
private static final String[] QUERY_REGIONS = {"/Portfolios", "/Positions", "/AuthRegion"};
public static OperationCode[] READER_OPS =
{OperationCode.GET, OperationCode.REGISTER_INTEREST, OperationCode.UNREGISTER_INTEREST,
OperationCode.KEY_SET, OperationCode.CONTAINS_KEY, OperationCode.EXECUTE_FUNCTION};
public static OperationCode[] WRITER_OPS = {OperationCode.PUT, OperationCode.DESTROY,
OperationCode.INVALIDATE, OperationCode.REGION_CLEAR};
public static OperationCode[] QUERY_OPS = {OperationCode.QUERY, OperationCode.EXECUTE_CQ,
OperationCode.STOP_CQ, OperationCode.CLOSE_CQ};
private static final byte READER_ROLE = 1;
private static final byte WRITER_ROLE = 2;
private static final byte QUERY_ROLE = 3;
private static final byte ADMIN_ROLE = 4;
private static Set readerOpsSet;
private static Set writerOpsSet;
private static Set queryOpsSet;
private static Set queryRegionSet;
static {
readerOpsSet = new HashSet();
for (int index = 0; index < READER_OPS.length; index++) {
readerOpsSet.add(READER_OPS[index]);
}
writerOpsSet = new HashSet();
for (int index = 0; index < WRITER_OPS.length; index++) {
writerOpsSet.add(WRITER_OPS[index]);
}
queryOpsSet = new HashSet();
for (int index = 0; index < QUERY_OPS.length; index++) {
queryOpsSet.add(QUERY_OPS[index]);
}
queryRegionSet = new HashSet();
for (int index = 0; index < QUERY_REGIONS.length; index++) {
queryRegionSet.add(QUERY_REGIONS[index]);
}
}
@Override
protected Properties init() throws IllegalArgumentException {
final Properties sysProps = new Properties();
final String dirName = "/org/apache/geode/security/generator/";
if (this.generator.classCode().isDummy()) {
final String xmlFilename =
TestUtil.getResourcePath(XmlAuthzCredentialGenerator.class, dirName + dummyXml);
sysProps.setProperty(XmlAuthorization.DOC_URI_PROP_NAME, xmlFilename);
} else if (this.generator.classCode().isLDAP()) {
final String xmlFilename =
TestUtil.getResourcePath(XmlAuthzCredentialGenerator.class, dirName + ldapXml);
sysProps.setProperty(XmlAuthorization.DOC_URI_PROP_NAME, xmlFilename);
// } else if (this.generator.classCode().isPKCS()) {
// sysProps.setProperty(XmlAuthorization.DOC_URI_PROP_NAME, dirName + pkcsXml);
// }
// } else if (this.generator.classCode().isSSL()) {
// sysProps.setProperty(XmlAuthorization.DOC_URI_PROP_NAME, dirName + sslXml);
// }
} else {
throw new IllegalArgumentException("No XML defined for XmlAuthorization module to work with "
+ this.generator.getAuthenticator());
}
return sysProps;
}
@Override
public ClassCode classCode() {
return ClassCode.XML;
}
@Override
public String getAuthorizationCallback() {
return XmlAuthorization.class.getName() + ".create";
}
private Principal getDummyPrincipal(final byte roleType, final int index) {
final String[] admins = new String[] {"root", "admin", "administrator"};
final int numReaders = 3;
final int numWriters = 3;
switch (roleType) {
case READER_ROLE:
return new UsernamePrincipal("reader" + (index % numReaders));
case WRITER_ROLE:
return new UsernamePrincipal("writer" + (index % numWriters));
case QUERY_ROLE:
return new UsernamePrincipal("reader" + ((index % 2) + 3));
default:
return new UsernamePrincipal(admins[index % admins.length]);
}
}
@Override
protected Principal getAllowedPrincipal(final OperationCode[] opCodes, final String[] regionNames,
final int index) {
if (this.generator.classCode().isDummy()) {
final byte roleType = getRequiredRole(opCodes, regionNames);
return getDummyPrincipal(roleType, index);
} else if (this.generator.classCode().isLDAP()) {
final byte roleType = getRequiredRole(opCodes, regionNames);
return getLdapPrincipal(roleType, index);
}
return null;
}
@Override
protected Principal getDisallowedPrincipal(final OperationCode[] opCodes,
final String[] regionNames, final int index) {
final byte roleType = getRequiredRole(opCodes, regionNames);
byte disallowedRoleType = READER_ROLE;
switch (roleType) {
case READER_ROLE:
disallowedRoleType = WRITER_ROLE;
break;
case WRITER_ROLE:
disallowedRoleType = READER_ROLE;
break;
case QUERY_ROLE:
disallowedRoleType = READER_ROLE;
break;
case ADMIN_ROLE:
disallowedRoleType = READER_ROLE;
break;
}
if (this.generator.classCode().isDummy()) {
return getDummyPrincipal(disallowedRoleType, index);
} else if (this.generator.classCode().isLDAP()) {
return getLdapPrincipal(disallowedRoleType, index);
}
return null;
}
@Override
protected int getNumPrincipalTries(final OperationCode[] opCodes, final String[] regionNames) {
return 5;
}
private Principal getLdapPrincipal(final byte roleType, final int index) {
final String userPrefix = "gemfire";
final int[] readerIndices = {3, 4, 5};
final int[] writerIndices = {6, 7, 8};
final int[] queryIndices = {9, 10};
final int[] adminIndices = {1, 2};
switch (roleType) {
case READER_ROLE:
int readerIndex = readerIndices[index % readerIndices.length];
return new UsernamePrincipal(userPrefix + readerIndex);
case WRITER_ROLE:
int writerIndex = writerIndices[index % writerIndices.length];
return new UsernamePrincipal(userPrefix + writerIndex);
case QUERY_ROLE:
int queryIndex = queryIndices[index % queryIndices.length];
return new UsernamePrincipal(userPrefix + queryIndex);
default:
int adminIndex = adminIndices[index % adminIndices.length];
return new UsernamePrincipal(userPrefix + adminIndex);
}
}
private byte getRequiredRole(final OperationCode[] opCodes, final String[] regionNames) {
byte roleType = ADMIN_ROLE;
boolean requiresReader = true;
boolean requiresWriter = true;
boolean requiresQuery = true;
for (int opNum = 0; opNum < opCodes.length; opNum++) {
final OperationCode opCode = opCodes[opNum];
if (requiresReader && !readerOpsSet.contains(opCode)) {
requiresReader = false;
}
if (requiresWriter && !writerOpsSet.contains(opCode)) {
requiresWriter = false;
}
if (requiresQuery && !queryOpsSet.contains(opCode)) {
requiresQuery = false;
}
}
if (requiresReader) {
roleType = READER_ROLE;
} else if (requiresWriter) {
roleType = WRITER_ROLE;
} else if (requiresQuery) {
if (regionNames != null && regionNames.length > 0) {
for (int index = 0; index < regionNames.length; index++) {
final String regionName = XmlAuthorization.normalizeRegionName(regionNames[index]);
if (requiresQuery && !queryRegionSet.contains(regionName)) {
requiresQuery = false;
break;
}
}
if (requiresQuery) {
roleType = QUERY_ROLE;
}
}
}
return roleType;
}
}
| apache-2.0 |
roberth/pitest | pitest/src/main/java/org/pitest/help/PitHelpError.java | 833 | /*
* Copyright 2011 Henry Coles
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.pitest.help;
/**
 * Unchecked exception whose message is produced by formatting a {@link Help}
 * template with the supplied parameters. Used to surface user-actionable
 * error guidance rather than a raw stack trace.
 */
public class PitHelpError extends RuntimeException {

  private static final long serialVersionUID = 1L;

  /**
   * Creates the error from a help template.
   *
   * @param message the help entry providing the message template
   * @param params  values substituted into the template by {@code Help.format}
   */
  public PitHelpError(final Help message, final Object... params) {
    super(message.format(params));
  }

}
| apache-2.0 |
winklerm/drools | kie-api/src/main/java/org/kie/api/internal/runtime/KieRuntimes.java | 823 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.api.internal.runtime;
import java.util.Map;
import org.kie.api.internal.utils.KieService;
/**
 * Registry service exposing the pluggable KIE runtime implementations
 * discovered on the classpath, keyed by runtime identifier.
 */
public interface KieRuntimes extends KieService {

    /**
     * Returns the discovered runtime services, keyed by runtime name.
     *
     * @return map from runtime identifier to its service implementation
     */
    Map<String, KieRuntimeService> getRuntimes();

    /**
     * Returns the service interface this registry is published under.
     *
     * <p>Changed from the raw type {@code Class} to {@code Class<?>}: existing
     * implementations returning raw {@code Class} remain valid overrides.
     *
     * @return the published service interface
     */
    Class<?> getServiceInterface();
}
| apache-2.0 |
alina-ipatina/pentaho-kettle | engine/src/org/pentaho/di/www/StartTransServlet.java | 9111 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.www;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URLEncoder;
import java.util.UUID;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.owasp.encoder.Encode;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.logging.SimpleLoggingObject;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
/**
 * Carte servlet that starts a transformation previously registered on this
 * server. Responds either with XML ({@code xml=Y}) for programmatic clients
 * or with a small HTML page that redirects to the status page.
 */
public class StartTransServlet extends BaseHttpServlet implements CartePluginInterface {
  private static Class<?> PKG = StartTransServlet.class;
  private static final long serialVersionUID = -5879200987669847357L;
  public static final String CONTEXT_PATH = "/kettle/startTrans";
  public StartTransServlet() {
  }
  public StartTransServlet( TransformationMap transformationMap ) {
    super( transformationMap );
  }
  /**
 <div id="mindtouch">
    <h1>/kettle/startTrans</h1>
    <a name="GET"></a>
    <h2>GET</h2>
    <p>Executes transformation previously uploaded to Carte server.</p>
    <p><b>Example Request:</b><br />
    <pre function="syntax.xml">
    GET /kettle/startTrans/?name=dummy-trans&xml=Y
    </pre>
    </p>
    <h3>Parameters</h3>
    <table class="pentaho-table">
    <tbody>
    <tr>
      <th>name</th>
      <th>description</th>
      <th>type</th>
    </tr>
    <tr>
    <td>name</td>
    <td>Name of the transformation to be executed.</td>
    <td>query</td>
    </tr>
    <tr>
    <td>xml</td>
    <td>Boolean flag which sets the output format required. Use <code>Y</code> to receive XML response.</td>
    <td>boolean, optional</td>
    </tr>
    <tr>
    <td>id</td>
    <td>Carte transformation ID of the transformation to be executed. This parameter is optional when xml=Y is used.</td>
    <td>query, optional</td>
    </tr>
    </tbody>
    </table>
  <h3>Response Body</h3>
  <table class="pentaho-table">
    <tbody>
      <tr>
        <td align="right">text:</td>
        <td>HTML</td>
      </tr>
      <tr>
        <td align="right">media types:</td>
        <td>text/xml, text/html</td>
      </tr>
    </tbody>
  </table>
    <p>Response XML or HTML containing operation result. When using xml=Y <code>result</code> field indicates whether
  operation was successful (<code>OK</code>) or not (<code>ERROR</code>).</p>
    <p><b>Example Response:</b></p>
    <pre function="syntax.xml">
    <?xml version="1.0" encoding="UTF-8"?>
    <webresult>
      <result>OK</result>
      <message>Transformation [dummy-trans] was started.</message>
      <id/>
    </webresult>
    </pre>
    <h3>Status Codes</h3>
    <table class="pentaho-table">
  <tbody>
    <tr>
      <th>code</th>
      <th>description</th>
    </tr>
    <tr>
      <td>200</td>
      <td>Request was processed.</td>
    </tr>
    <tr>
      <td>500</td>
      <td>Internal server error occurs during request processing.</td>
    </tr>
  </tbody>
</table>
</div>
  */
  public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException,
    IOException {
    // Only handle requests addressed to this servlet's context path.
    if ( isJettyMode() && !request.getContextPath().startsWith( CONTEXT_PATH ) ) {
      return;
    }
    if ( log.isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "StartTransServlet.Log.StartTransRequested" ) );
    }
    String transName = request.getParameter( "name" );
    String id = request.getParameter( "id" );
    if ( StringUtils.isEmpty( transName ) ) {
      transName = "";
    }
    boolean useXML = "Y".equalsIgnoreCase( request.getParameter( "xml" ) );
    response.setStatus( HttpServletResponse.SC_OK );
    PrintWriter out = response.getWriter();
    if ( useXML ) {
      // XML response for programmatic callers.
      response.setContentType( "text/xml" );
      response.setCharacterEncoding( Const.XML_ENCODING );
      out.print( XMLHandler.getXMLHeader( Const.XML_ENCODING ) );
    } else {
      // HTML response: auto-refreshes to the transformation status page.
      response.setContentType( "text/html;charset=UTF-8" );
      out.println( "<HTML>" );
      out.println( "<HEAD>" );
      out.println( "<TITLE>" + BaseMessages.getString( PKG, "StartTransServlet.Log.StartOfTrans" ) + "</TITLE>" );
      out.println( "<META http-equiv=\"Refresh\" content=\"2;url="
        + convertContextPath( GetTransStatusServlet.CONTEXT_PATH ) + "?name="
        + URLEncoder.encode( transName, "UTF-8" ) + "\">" );
      out.println( "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" );
      out.println( "</HEAD>" );
      out.println( "<BODY>" );
    }
    try {
      // ID is optional...
      //
      Trans trans;
      CarteObjectEntry entry;
      if ( Utils.isEmpty( id ) ) {
        // get the first transformation that matches...
        //
        entry = getTransformationMap().getFirstCarteObjectEntry( transName );
        if ( entry == null ) {
          trans = null;
        } else {
          id = entry.getId();
          trans = getTransformationMap().getTransformation( entry );
        }
      } else {
        // Take the ID into account!
        //
        entry = new CarteObjectEntry( transName, id );
        trans = getTransformationMap().getTransformation( entry );
      }
      if ( trans != null ) {
        // Discard old log lines from old transformation runs
        //
        KettleLogStore.discardLines( trans.getLogChannelId(), true );
        String carteObjectId = UUID.randomUUID().toString();
        SimpleLoggingObject servletLoggingObject =
          new SimpleLoggingObject( CONTEXT_PATH, LoggingObjectType.CARTE, null );
        servletLoggingObject.setContainerObjectId( carteObjectId );
        servletLoggingObject.setLogLevel( trans.getLogLevel() );
        trans.setParent( servletLoggingObject );
        executeTrans( trans );
        String message = BaseMessages.getString( PKG, "StartTransServlet.Log.TransStarted", transName );
        if ( useXML ) {
          out.println( new WebResult( WebResult.STRING_OK, message ).getXML() );
        } else {
          // User-supplied values are HTML-encoded to prevent XSS.
          out.println( "<H1>" + Encode.forHtml( message ) + "</H1>" );
          out.println( "<a href=\""
            + convertContextPath( GetTransStatusServlet.CONTEXT_PATH ) + "?name="
            + URLEncoder.encode( transName, "UTF-8" ) + "&id=" + URLEncoder.encode( id, "UTF-8" ) + "\">"
            + BaseMessages.getString( PKG, "TransStatusServlet.BackToStatusPage" ) + "</a><p>" );
        }
      } else {
        String message = BaseMessages.getString( PKG, "TransStatusServlet.Log.CoundNotFindSpecTrans", transName );
        if ( useXML ) {
          // NOTE(review): unlike the success branch above, this prints the WebResult
          // object directly instead of calling getXML() -- confirm WebResult.toString()
          // actually emits XML, otherwise this response body is malformed.
          out.println( new WebResult( WebResult.STRING_ERROR, message, id ) );
        } else {
          out.println( "<H1>" + Encode.forHtml( message ) + "</H1>" );
          out.println( "<a href=\""
            + convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">"
            + BaseMessages.getString( PKG, "TransStatusServlet.BackToStatusPage" ) + "</a><p>" );
        }
      }
    } catch ( Exception ex ) {
      // Report any unexpected failure to the caller in the requested format.
      if ( useXML ) {
        out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
          PKG, "StartTransServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker( ex ) ) ) );
      } else {
        out.println( "<p>" );
        out.println( "<pre>" );
        out.println( Encode.forHtml( Const.getStackTracker( ex ) ) );
        out.println( "</pre>" );
      }
    }
    if ( !useXML ) {
      out.println( "<p>" );
      out.println( "</BODY>" );
      out.println( "</HTML>" );
    }
  }
  public String toString() {
    return "Start transformation";
  }
  public String getService() {
    return CONTEXT_PATH + " (" + toString() + ")";
  }
  // Overridable so tests/subclasses can customize how the transformation is launched.
  protected void executeTrans( Trans trans ) throws KettleException {
    trans.execute( null );
  }
  public String getContextPath() {
    return CONTEXT_PATH;
  }
}
| apache-2.0 |
mahak/hbase | hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java | 1432 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Client-side call timeout
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
public class CallTimeoutException extends HBaseIOException {
public CallTimeoutException(final String msg) {
super(msg);
}
/**
* CallTimeoutException with cause
*
* @param message the message for this exception
* @param cause the cause for this exception
*/
public CallTimeoutException(final String message, final Throwable cause) {
super(message, cause);
}
}
| apache-2.0 |
jushanghui/jsh | src/main/server/com/baidu/hsb/mysql/bio/executor/MultiNodeExecutor.java | 26246 | /**
* Baidu.com,Inc.
* Copyright (c) 2000-2013 All Rights Reserved.
*/
package com.baidu.hsb.mysql.bio.executor;
import static com.baidu.hsb.route.RouteResultsetNode.DEFAULT_REPLICA_INDEX;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.log4j.Logger;
import com.baidu.hsb.HeisenbergConfig;
import com.baidu.hsb.HeisenbergServer;
import com.baidu.hsb.config.ErrorCode;
import com.baidu.hsb.config.util.LoggerUtil;
import com.baidu.hsb.exception.UnknownDataNodeException;
import com.baidu.hsb.mysql.MySQLDataNode;
import com.baidu.hsb.mysql.PacketUtil;
import com.baidu.hsb.mysql.bio.Channel;
import com.baidu.hsb.mysql.bio.MySQLChannel;
import com.baidu.hsb.net.mysql.BinaryPacket;
import com.baidu.hsb.net.mysql.EOFPacket;
import com.baidu.hsb.net.mysql.ErrorPacket;
import com.baidu.hsb.net.mysql.FieldPacket;
import com.baidu.hsb.net.mysql.MySQLPacket;
import com.baidu.hsb.net.mysql.OkPacket;
import com.baidu.hsb.route.RouteResultset;
import com.baidu.hsb.route.RouteResultsetNode;
import com.baidu.hsb.server.ServerConnection;
import com.baidu.hsb.server.session.BlockingSession;
import com.baidu.hsb.server.session.MultiExecutorTask;
import com.baidu.hsb.util.StringUtil;
/**
* 多数据节点执行器
*
* @author xiongzhao@baidu.com
*/
/**
 * Executor that runs a routed statement across multiple data nodes, merges the
 * MySQL result packets and streams a single combined response back to the
 * client connection. Shared counters/buffers are guarded by {@link #lock}.
 *
 * @author xiongzhao@baidu.com
 */
public final class MultiNodeExecutor extends NodeExecutor {
    private static final Logger LOGGER = Logger
        .getLogger(MultiNodeExecutor.class);
    // Flush row data to the client once this many bytes have been buffered.
    private static final int RECEIVE_CHUNK_SIZE = 16 * 1024;
    // Set once any node fails; later results are discarded.
    private AtomicBoolean isFail = new AtomicBoolean(false);
    // Number of per-node SQL statements still pending; guarded by "lock".
    private int unfinishedNodeCount;
    // First error seen (errno/message), reported to the client on failure.
    private int errno;
    private String errMessage;
    // True after the field-list EOF packet has been forwarded once.
    private AtomicBoolean fieldEOF = new AtomicBoolean(false);
    // Sequence id of the next MySQL packet written to the client.
    private byte packetId;
    // Aggregated affected-row count across all nodes.
    private long affectedRows;
    // Smallest non-zero last-insert-id reported by any node.
    private long insertId;
    // Output buffer for the client connection.
    private ByteBuffer buffer;
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition taskFinished = lock.newCondition();
    // Commits back-end transactions when the front-end is in autocommit mode.
    private final DefaultCommitExecutor icExecutor = new DefaultCommitExecutor() {
        @Override
        protected String getErrorMessage() {
            return "Internal commit";
        }
        @Override
        protected Logger getLogger() {
            return MultiNodeExecutor.LOGGER;
        }
    };
    private long nodeCount = 0;
    /**
     * Blocks until all outstanding node tasks have finished, then terminates
     * the internal commit executor.
     */
    @Override
    public void terminate() throws InterruptedException {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            while (unfinishedNodeCount > 0) {
                taskFinished.await();
            }
        } finally {
            lock.unlock();
        }
        icExecutor.terminate();
    }
    // Forces the pending counter to zero and wakes any waiter in terminate().
    private void decrementCountToZero() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            unfinishedNodeCount = 0;
            taskFinished.signalAll();
        } finally {
            lock.unlock();
        }
    }
    // Subtracts c from the pending counter; returns true when it reaches zero.
    private boolean decrementCountAndIsZero(int c) {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            unfinishedNodeCount = unfinishedNodeCount - c;
            int ufc = unfinishedNodeCount;
            taskFinished.signalAll();
            return ufc <= 0;
        } finally {
            lock.unlock();
        }
    }
    /**
     * Executes the statement on multiple data nodes.
     *
     * @param nodes
     *            never null
     */
    public void execute(RouteResultsetNode[] nodes, final boolean autocommit,
                        final BlockingSession ss, final int flag, final String sql) {
        // Initialize shared state for this run.
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            this.isFail.set(false);
            this.unfinishedNodeCount = 0;
            this.nodeCount = 0;
            for (RouteResultsetNode rrn : nodes) {
                unfinishedNodeCount += rrn.getSqlCount();
                this.nodeCount++;
            }
            this.errno = 0;
            this.errMessage = null;
            this.fieldEOF.set(false);
            this.packetId = 0;
            this.affectedRows = 0L;
            this.insertId = 0L;
            this.buffer = ss.getSource().allocate();
        } finally {
            lock.unlock();
        }
        if (ss.getSource().isClosed()) {
            decrementCountToZero();
            ss.getSource().recycle(this.buffer);
            return;
        }
        // Dispatch to each node: reuse bound channels, otherwise acquire new ones.
        ConcurrentMap<RouteResultsetNode, Channel> target = ss.getTarget();
        for (RouteResultsetNode rrn : nodes) {
            Channel c = target.get(rrn);
            if (c != null) {
                c.setRunning(true);
            }
        }
        final AtomicLong exeTime = new AtomicLong(0);
        ThreadPoolExecutor exec = ss.getSource().getProcessor().getExecutor();
        for (final RouteResultsetNode rrn : nodes) {
            final Channel c = target.get(rrn);
            if (c != null) {
                exec.execute(new Runnable() {
                    @Override
                    public void run() {
                        execute0(rrn, c, autocommit, ss, flag, sql, exeTime);
                    }
                });
            } else {
                newExecute(rrn, autocommit, ss, flag, sql, exeTime);
            }
        }
    }
    /**
     * Execution on a newly acquired channel (no channel was bound to the node yet).
     */
    private void newExecute(final RouteResultsetNode rrn, final boolean autocommit,
                            final BlockingSession ss, final int flag, final String sql,
                            final AtomicLong exeTime) {
        final ServerConnection sc = ss.getSource();
        // Verify that the data node exists.
        HeisenbergConfig conf = HeisenbergServer.getInstance().getConfig();
        final MySQLDataNode dn = conf.getDataNodes().get(rrn.getName());
        if (dn == null) {
            handleFailure(ss, rrn, new SimpleErrInfo(new UnknownDataNodeException(
                "Unknown dataNode '" + rrn.getName() + "'"), ErrorCode.ER_BAD_DB_ERROR, sc, rrn),
                rrn.getSqlCount(), exeTime, sql);
            return;
        }
        // Submit the execution task.
        sc.getProcessor().getExecutor().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    // Allow up to 5 seconds for the read, otherwise time out.
                    MultiExecutorTask.runTask(new Callable<Boolean>() {
                        @Override
                        public Boolean call() throws Exception {
                            runTask(rrn, dn, ss, sc, autocommit, flag, sql, exeTime);
                            return true;
                        }
                    }, 5);
                } catch (InterruptedException e) {
                    killServerTask(rrn, ss);
                    handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_NET_READ_INTERRUPTED,
                        sc, rrn), rrn.getSqlCount(), exeTime, sql);
                } catch (ExecutionException e) {
                    killServerTask(rrn, ss);
                    handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_MULTI_EXEC_ERROR, sc,
                        rrn), rrn.getSqlCount(), exeTime, sql);
                } catch (TimeoutException e) {
                    killServerTask(rrn, ss);
                    handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_MULTI_QUERY_TIMEOUT,
                        sc, rrn), rrn.getSqlCount(), exeTime, sql);
                }
            }
        });
    }
    // Kills the server-side channel bound to the node (used after timeouts/errors).
    private void killServerTask(RouteResultsetNode rrn, BlockingSession ss) {
        ConcurrentMap<RouteResultsetNode, Channel> target = ss.getTarget();
        Channel c = target.get(rrn);
        if (c != null) {
            c.kill();
        }
    }
    // Acquires a channel from the data node, binds it to the session, then executes.
    private void runTask(final RouteResultsetNode rrn, final MySQLDataNode dn,
                         final BlockingSession ss, final ServerConnection sc,
                         final boolean autocommit, final int flag, final String sql,
                         final AtomicLong exeTime) {
        // Acquire a data channel for the requested replica.
        int i = rrn.getReplicaIndex();
        Channel c = null;
        try {
            c = (i == DEFAULT_REPLICA_INDEX) ? dn.getChannel() : dn.getChannel(i);
        } catch (final Exception e) {
            handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_BAD_DB_ERROR, sc, rrn),
                rrn.getSqlCount(), exeTime, sql);
            return;
        }
        c.setRunning(true);
        Channel old = ss.getTarget().put(rrn, c);
        if (old != null && c != old) {
            old.close();
        }
        // Execute.
        execute0(rrn, c, autocommit, ss, flag, sql, exeTime);
    }
    /**
     * Executes every statement routed to this node on channel {@code c} and
     * merges the result packets into the shared client response.
     */
    private void execute0(RouteResultsetNode rrn, Channel c, boolean autocommit,
                          BlockingSession ss, int flag, final String sql, final AtomicLong exeTime) {
        ServerConnection sc = ss.getSource();
        if (isFail.get() || sc.isClosed()) {
            c.setRunning(false);
            handleFailure(ss, rrn, null, rrn.getSqlCount(), exeTime, sql);
            return;
        }
        long s = System.currentTimeMillis();
        extSql: for (final String stmt : rrn.getStatement()) {
            try {
                // Execute and wait for the response.
                BinaryPacket bin = ((MySQLChannel) c).execute(stmt, rrn, sc, autocommit);
                //System.out.println(rrn.getName() + ",sql[" + stmt + "]");
                //LOGGER.info("node[" + rrn.getName()+"],sql["+stmt+"],recv=>"+ByteUtil.formatByte(bin.data)+"<=");
                // Receive and process the result.
                final ReentrantLock lock = MultiNodeExecutor.this.lock;
                lock.lock();
                try {
                    switch (bin.data[0]) {
                        case ErrorPacket.FIELD_COUNT:
                            c.setRunning(false);
                            handleFailure(ss, rrn,
                                new BinaryErrInfo((MySQLChannel) c, bin, sc, rrn), 1, exeTime, sql);
                            break;
                        case OkPacket.FIELD_COUNT:
                            OkPacket ok = new OkPacket();
                            ok.read(bin);
                            affectedRows += ok.affectedRows;
                            // set lastInsertId
                            if (ok.insertId > 0) {
                                insertId = (insertId == 0) ? ok.insertId : Math.min(insertId,
                                    ok.insertId);
                            }
                            c.setRunning(false);
                            handleSuccessOK(ss, rrn, autocommit, ok);
                            break;
                        default: // HEADER|FIELDS|FIELD_EOF|ROWS|LAST_EOF
                            final MySQLChannel mc = (MySQLChannel) c;
                            if (fieldEOF.get()) {
                                // Field metadata already forwarded by another node:
                                // skip this node's fields and forward only its rows.
                                for (;;) {
                                    bin = mc.receive();
                                    // LOGGER.info("FIELD_EOF:"
                                    // + com.baidu.hsb.route.util.ByteUtil
                                    // .formatByte(bin.data));
                                    switch (bin.data[0]) {
                                        case ErrorPacket.FIELD_COUNT:
                                            c.setRunning(false);
                                            handleFailure(ss, rrn, new BinaryErrInfo(mc, bin, sc,
                                                rrn), 1, exeTime, sql);
                                            continue extSql;
                                        case EOFPacket.FIELD_COUNT:
                                            handleRowData(rrn, c, ss, exeTime, sql);
                                            continue extSql;
                                        default:
                                            continue;
                                    }
                                }
                            } else {
                                bin.packetId = ++packetId;// HEADER
                                List<MySQLPacket> headerList = new LinkedList<MySQLPacket>();
                                headerList.add(bin);
                                for (;;) {
                                    bin = mc.receive();
                                    //LOGGER.info("NO_FIELD_EOF:" + com.baidu.hsb.route.util.ByteUtil.formatByte(bin.data));
                                    switch (bin.data[0]) {
                                        case ErrorPacket.FIELD_COUNT:
                                            c.setRunning(false);
                                            handleFailure(ss, rrn, new BinaryErrInfo(mc, bin, sc,
                                                rrn), 1, exeTime, sql);
                                            continue extSql;
                                        case EOFPacket.FIELD_COUNT:
                                            bin.packetId = ++packetId;// FIELD_EOF
                                            for (MySQLPacket packet : headerList) {
                                                buffer = packet.write(buffer, sc);
                                            }
                                            headerList = null;
                                            buffer = bin.write(buffer, sc);
                                            fieldEOF.set(true);
                                            handleRowData(rrn, c, ss, exeTime, sql);
                                            continue extSql;
                                        default:
                                            bin.packetId = ++packetId;// FIELDS
                                            switch (flag) {
                                                case RouteResultset.REWRITE_FIELD:
                                                    // Rewrite SHOW TABLES field name to the
                                                    // logical schema of this connection.
                                                    StringBuilder fieldName = new StringBuilder();
                                                    fieldName.append("Tables_in_").append(
                                                        ss.getSource().getSchema());
                                                    FieldPacket field = PacketUtil.getField(bin,
                                                        fieldName.toString());
                                                    headerList.add(field);
                                                    break;
                                                default:
                                                    headerList.add(bin);
                                            }
                                    }
                                }
                            }
                    }
                } finally {
                    lock.unlock();
                    // System.out.println("sql[" + stmt + "]suc pkId:" + bin.packetId);
                }
            } catch (final IOException e) {
                c.close();
                handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_YES, sc, rrn), 1, exeTime,
                    sql);
            } catch (final RuntimeException e) {
                c.close();
                handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_YES, sc, rrn), 1, exeTime,
                    sql);
            } finally {
                long e = System.currentTimeMillis() - s;
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("[" + rrn.getName() + "][" + stmt + "]" + "exetime:" + e
                        + "ms pre:" + exeTime.get());
                }
                exeTime.getAndAdd(e);
            }
        }
    }
    /**
     * Processes row data packets from the node until EOF or an error packet,
     * flushing to the client in chunks of {@link #RECEIVE_CHUNK_SIZE}.
     */
    private void handleRowData(final RouteResultsetNode rrn, Channel c, BlockingSession ss,
                               final AtomicLong exeTime, final String sql) throws IOException {
        final ServerConnection source = ss.getSource();
        BinaryPacket bin = null;
        int size = 0;
        for (;;) {
            bin = ((MySQLChannel) c).receive();
            //System.out.println(rrn.getName() + "rowData-->");
            switch (bin.data[0]) {
                case ErrorPacket.FIELD_COUNT:
                    c.setRunning(false);
                    handleFailure(ss, rrn, new BinaryErrInfo(((MySQLChannel) c), bin, source, rrn),
                        1, exeTime, sql);
                    return;
                case EOFPacket.FIELD_COUNT:
                    c.setRunning(false);
                    if (source.isAutocommit()) {
                        // Autocommit: the channel can be released (or closed on failure).
                        c = ss.getTarget().remove(rrn);
                        if (c != null) {
                            if (isFail.get() || source.isClosed()) {
                                c.close();
                            } else {
                                c.release();
                            }
                        }
                    }
                    handleSuccessEOF(ss, rrn, bin, exeTime, sql);
                    return;
                default:
                    bin.packetId = ++packetId;// ROWS
                    buffer = bin.write(buffer, source);
                    size += bin.packetLength;
                    if (size > RECEIVE_CHUNK_SIZE) {
                        // LOGGER.info(rrn.getName() + "hasNext-->");
                        handleNext(rrn, c, ss, exeTime, sql);
                        return;
                    }
            }
        }
    }
    /**
     * Handles the next chunk of row data (continues reading under the lock).
     */
    private void handleNext(final RouteResultsetNode rrn, final Channel c,
                            final BlockingSession ss, final AtomicLong exeTime, final String sql) {
        final ServerConnection sc = ss.getSource();
        // sc.getProcessor().getExecutor().execute(new Runnable() {
        // @Override
        // public void run() {
        final ReentrantLock lock = MultiNodeExecutor.this.lock;
        lock.lock();
        try {
            handleRowData(rrn, c, ss, exeTime, sql);
        } catch (final IOException e) {
            c.close();
            handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_YES, sc, rrn), 1, exeTime, sql);
        } catch (final RuntimeException e) {
            c.close();
            handleFailure(ss, rrn, new SimpleErrInfo(e, ErrorCode.ER_YES, sc, rrn), 1, exeTime, sql);
        } finally {
            lock.unlock();
        }
        // }
        // });
    }
    /**
     * Called when a node's result set reaches EOF; when this was the last
     * pending node, writes the final EOF packet to the client.
     *
     * @throws nothing
     *             never throws any exception
     */
    private void handleSuccessEOF(BlockingSession ss, final RouteResultsetNode rrn,
                                  BinaryPacket bin, final AtomicLong exeTime, final String sql) {
        if (decrementCountAndIsZero(1)) {
            try {
                if (isFail.get()) {
                    notifyFailure(ss);
                    return;
                }
                try {
                    ServerConnection source = ss.getSource();
                    if (source.isAutocommit()) {
                        ss.release();
                    }
                    bin.packetId = ++packetId;// LAST_EOF
                    source.write(bin.write(buffer, source));
                } catch (Exception e) {
                    LOGGER.warn("exception happens in success notification: " + ss.getSource(), e);
                }
            } finally {
                LoggerUtil.printDigest(LOGGER, exeTime.get() / nodeCount, sql);
            }
        }
    }
    /**
     * Called when a node returns an OK packet; when this was the last pending
     * node, writes the aggregated OK packet to the client.
     *
     * @throws nothing
     *             never throws any exception
     */
    private void handleSuccessOK(BlockingSession ss, RouteResultsetNode rrn, boolean autocommit,
                                 OkPacket ok) {
        if (decrementCountAndIsZero(1)) {
            if (isFail.get()) {
                notifyFailure(ss);
                return;
            }
            try {
                ServerConnection source = ss.getSource();
                ok.packetId = ++packetId;// OK_PACKET
                ok.affectedRows = affectedRows;
                if (insertId > 0) {
                    ok.insertId = insertId;
                    source.setLastInsertId(insertId);
                }
                if (source.isAutocommit()) {
                    if (!autocommit) { // Front-end is in autocommit mode but the back-end is
                                       // transactional, so commit the back-end transaction
                                       // automatically.
                        icExecutor.commit(ok, ss, ss.getTarget().size());
                    } else {
                        ss.release();
                        ok.write(source);
                    }
                } else {
                    ok.write(source);
                }
                source.recycle(buffer);
            } catch (Exception e) {
                LOGGER.warn("exception happens in success notification: " + ss.getSource(), e);
            }
        }
    }
    // Records the first failure and, once all nodes are accounted for, notifies the client.
    private void handleFailure(BlockingSession ss, RouteResultsetNode rrn, ErrInfo errInfo, int c,
                               final AtomicLong exeTime, final String sql) {
        try {
            // Mark the execution as failed and record only the first error.
            if (!isFail.getAndSet(true) && errInfo != null) {
                errno = errInfo.getErrNo();
                errMessage = errInfo.getErrMsg();
                errInfo.logErr();
            }
        } catch (Exception e) {
            LOGGER.warn("handleFailure failed in " + getClass().getSimpleName() + ", source = "
                + ss.getSource(), e);
        } finally {
            LoggerUtil.printDigest(LOGGER, exeTime.get(), sql);
        }
        if (decrementCountAndIsZero(c)) {
            notifyFailure(ss);
        }
    }
    /**
     * Notifies the client of an execution failure.
     *
     * @throws nothing
     *             never throws any exception
     */
    private void notifyFailure(BlockingSession ss) {
        try {
            // Clean up the session and interrupt the front-end transaction.
            ss.clear();
            ServerConnection sc = ss.getSource();
            sc.setTxInterrupt();
            // Notify the client with an error packet.
            ErrorPacket err = new ErrorPacket();
            err.packetId = ++packetId;// ERROR_PACKET
            err.errno = errno;
            err.message = StringUtil.encode(errMessage, sc.getCharset());
            sc.write(err.write(buffer, sc));
        } catch (Exception e) {
            LOGGER.warn("exception happens in failure notification: " + ss.getSource(), e);
        }
    }
    // Abstraction over the two error sources: binary error packets and Java exceptions.
    protected static interface ErrInfo {
        int getErrNo();
        String getErrMsg();
        void logErr();
    }
    // Error info decoded from a MySQL error packet returned by a back-end node.
    protected static class BinaryErrInfo implements ErrInfo {
        private String errMsg;
        private int errNo;
        private ServerConnection source;
        private RouteResultsetNode rrn;
        private MySQLChannel mc;
        public BinaryErrInfo(MySQLChannel mc, BinaryPacket bin, ServerConnection sc,
                             RouteResultsetNode rrn) {
            this.mc = mc;
            this.source = sc;
            this.rrn = rrn;
            ErrorPacket err = new ErrorPacket();
            err.read(bin);
            this.errMsg = (err.message == null) ? null : StringUtil.decode(err.message,
                mc.getCharset());
            this.errNo = err.errno;
        }
        @Override
        public int getErrNo() {
            return errNo;
        }
        @Override
        public String getErrMsg() {
            return errMsg;
        }
        @Override
        public void logErr() {
            try {
                LOGGER.warn(mc.getErrLog(rrn.getLogger(), errMsg, source));
            } catch (Exception e) {
            }
        }
    }
    // Error info wrapping a Java exception raised locally (I/O, timeout, routing).
    protected static class SimpleErrInfo implements ErrInfo {
        private Exception e;
        private int errNo;
        private ServerConnection source;
        private RouteResultsetNode rrn;
        public SimpleErrInfo(Exception e, int errNo, ServerConnection sc, RouteResultsetNode rrn) {
            this.e = e;
            this.errNo = errNo;
            this.source = sc;
            this.rrn = rrn;
        }
        @Override
        public int getErrNo() {
            return errNo;
        }
        @Override
        public String getErrMsg() {
            String msg = e.getMessage();
            return msg == null ? e.getClass().getSimpleName() : msg;
        }
        @Override
        public void logErr() {
            try {
                LOGGER.warn(new StringBuilder().append(source).append(rrn).toString(), e);
            } catch (Exception e) {
            }
        }
    }
}
| apache-2.0 |
tweise/incubator-apex-malhar | library/src/main/java/org/apache/apex/malhar/lib/testbench/CountAndLastTupleTestSink.java | 1339 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.apex.malhar.lib.testbench;
/**
* A sink implementation to collect expected test results.
* <p>
* @displayName Count And Last Tuple Test Sink
* @category Test Bench
* @tags count, sink
* @since 0.3.2
*/
/**
 * A sink implementation to collect expected test results.
 * <p>
 * In addition to the tuple count maintained by the superclass, this sink
 * remembers the most recently received tuple so tests can assert on it.
 * @displayName Count And Last Tuple Test Sink
 * @category Test Bench
 * @tags count, sink
 * @since 0.3.2
 */
public class CountAndLastTupleTestSink<T> extends CountTestSink<T>
{
  // Most recently received tuple; null until the first put() or after clear().
  public Object tuple = null;

  /**
   * clears data
   */
  @Override
  public void clear()
  {
    this.tuple = null;
    super.clear();
  }

  @Override
  public void put(T tuple)
  {
    // Remember the latest tuple and delegate counting to the superclass field.
    this.tuple = tuple;
    count++;
  }
}
| apache-2.0 |
daedric/buck | test/com/facebook/buck/eden/cli/Sha1Command.java | 1847 | /*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.eden.cli;
import com.facebook.buck.eden.EdenClient;
import com.facebook.buck.eden.EdenMount;
import com.facebook.buck.util.sha1.Sha1HashCode;
import com.facebook.eden.thrift.EdenError;
import com.facebook.thrift.TException;
import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.Option;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
 * CLI command that prints the SHA-1 content hash of each given file in an
 * Eden mount, as reported by the Eden daemon.
 */
public class Sha1Command implements Command {

  /** Absolute path of the Eden mount point. */
  @Option(name = "mount", aliases = {"-m"})
  private String mountPoint;

  /** Absolute paths (under the mount) of the files to hash. */
  @Argument
  private List<String> paths = new ArrayList<>();

  /**
   * Connects to Eden and prints "{relative-path} {sha1}" for each requested file.
   *
   * @return 0 on success, 1 if no connection to Eden could be established
   */
  @Override
  public int run() throws EdenError, IOException, TException {
    Optional<EdenClient> client = EdenClient.newInstance();
    if (!client.isPresent()) {
      System.err.println("Could not connect to Eden");
      return 1;
    }
    // Local renamed from "mountPoint" so it no longer shadows the field above.
    Path mountPath = Paths.get(this.mountPoint);
    EdenMount mount = client.get().getMountFor(mountPath);
    for (String path : paths) {
      // Eden addresses entries relative to the mount root.
      Path entry = mountPath.relativize(Paths.get(path));
      Sha1HashCode sha1 = mount.getSha1(entry);
      System.out.printf("%s %s\n", entry, sha1);
    }
    return 0;
  }
}
| apache-2.0 |
tyler-travis/openstorefront | server/openstorefront/openstorefront-web/src/main/java/edu/usu/sdl/openstorefront/web/rest/GeneralExceptionHandler.java | 2067 | /*
* Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.usu.sdl.openstorefront.web.rest;
import edu.usu.sdl.openstorefront.core.entity.ErrorTypeCode;
import edu.usu.sdl.openstorefront.core.model.ErrorInfo;
import edu.usu.sdl.openstorefront.core.view.SystemErrorModel;
import edu.usu.sdl.openstorefront.service.ServiceProxy;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
/**
 * JAX-RS provider that converts any uncaught {@link Throwable} escaping a REST
 * resource into an HTTP response. {@link WebApplicationException}s already
 * carry a response and are passed through unchanged; everything else is
 * recorded as a REST_API error ticket and returned as a 500 with the ticket
 * model as a JSON body.
 *
 * @author dshurtleff
 */
@Provider
public class GeneralExceptionHandler
        implements ExceptionMapper<Throwable>
{
    @Context
    HttpServletRequest httpServletRequest;

    @Override
    public Response toResponse(Throwable exception)
    {
        // WebApplicationExceptions define their own response; honour it as-is.
        if (exception instanceof WebApplicationException) {
            return ((WebApplicationException) exception).getResponse();
        }
        // Anything else is unexpected: file an error ticket and surface its id.
        ErrorInfo errorInfo = new ErrorInfo(exception, httpServletRequest);
        errorInfo.setErrorTypeCode(ErrorTypeCode.REST_API);
        SystemErrorModel errorModel = new ServiceProxy().getSystemService().generateErrorTicket(errorInfo);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .entity(errorModel)
                .type(MediaType.APPLICATION_JSON)
                .build();
    }
}
| apache-2.0 |
julianhyde/calcite | core/src/main/java/org/apache/calcite/adapter/enumerable/WinAggResetContext.java | 1412 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.enumerable;
/**
 * Information for a call to
 * {@link AggImplementor#implementReset(AggContext, AggResetContext)}.
 *
 * <p>The {@link AggResetContext} provides access to the accumulator variables
 * that should be reset.
 *
 * <p>Note: the very first reset of windowed aggregates is performed with null
 * knowledge of indices and row count in the partition.
 * In other words, the implementation should treat indices and partition row
 * count as a hint to pre-size the collections.
 */
public interface WinAggResetContext
    extends AggResetContext, WinAggFrameContext {
  // Marker interface: declares no members of its own. It exists solely to
  // hand implementors both the reset context and the window-frame context
  // through a single parameter type.
}
| apache-2.0 |
Xylus/pinpoint | rpc/src/main/java/com/navercorp/pinpoint/rpc/server/ChannelProperties.java | 1042 | /*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.rpc.server;
import java.util.List;
/**
 * Read-only view of the properties associated with a server-side channel,
 * i.e. the metadata describing the connected agent.
 *
 * @author Woonduk Kang(emeroad)
 */
public interface ChannelProperties {
    /** @return id of the connected agent */
    String getAgentId();
    /** @return name of the application the agent belongs to */
    String getApplicationName();
    /** @return IP address of the agent host */
    String getHostIp();
    /** @return process id of the agent */
    int getPid();
    /** @return numeric service-type code of the agent */
    int getServiceType();
    /** @return host name of the agent machine */
    String getHostName();
    /** @return agent start time (presumably epoch millis — confirm against implementations) */
    long getStartTime();
    /** @return version string of the agent */
    String getAgentVersion();
    /** @return identifier of the underlying socket */
    int getSocketId();
    /** @return command codes the agent supports */
    List<Integer> getSupportCommand();
    /** @return raw property value for {@code key}, or null if absent (semantics are implementation-defined) */
    Object get(Object key);
}
| apache-2.0 |
nvoron23/Kylin | common/src/main/java/org/apache/kylin/common/mr/KylinReducer.java | 1271 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.common.mr;
import org.apache.kylin.common.util.HadoopUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Reducer;
/**
 * Base class for Kylin MapReduce reducers.
 *
 * <p>Created by Hongbin Ma(Binmahone) on 1/19/15.
 */
public class KylinReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
    /**
     * Hands the task's Hadoop {@link Configuration} to
     * {@link HadoopUtil#setCurrentConfiguration} — presumably making it
     * available to other Kylin code in this task process; confirm in HadoopUtil.
     *
     * @param conf configuration of the currently running reducer task
     */
    protected void publishConfiguration(Configuration conf) {
        HadoopUtil.setCurrentConfiguration(conf);
    }
}
| apache-2.0 |
dlnufox/ignite | modules/core/src/main/java/org/apache/ignite/configuration/ConnectorConfiguration.java | 17549 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.configuration;
import java.net.Socket;
import javax.cache.configuration.Factory;
import javax.net.ssl.SSLContext;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.internal.client.ssl.GridSslContextFactory;
import org.apache.ignite.ssl.SslContextFactory;
import org.jetbrains.annotations.Nullable;
/**
* REST access configuration.
*/
public class ConnectorConfiguration {
/** Default TCP server port. */
public static final int DFLT_TCP_PORT = 11211;
/** Default TCP_NODELAY flag. */
public static final boolean DFLT_TCP_NODELAY = true;
/** Default TCP direct buffer flag. */
public static final boolean DFLT_TCP_DIRECT_BUF = false;
/** Default REST idle timeout. */
public static final int DFLT_IDLE_TIMEOUT = 7000;
/** Default rest port range. */
public static final int DFLT_PORT_RANGE = 100;
/** Default size of REST thread pool. */
public static final int DFLT_REST_CORE_THREAD_CNT = IgniteConfiguration.DFLT_PUBLIC_THREAD_CNT;
/** Default max size of REST thread pool. */
public static final int DFLT_REST_MAX_THREAD_CNT = IgniteConfiguration.DFLT_PUBLIC_THREAD_CNT;
/** Default keep alive time for REST thread pool. */
public static final long DFLT_KEEP_ALIVE_TIME = 0;
/** Default max queue capacity of REST thread pool. */
public static final int DFLT_THREADPOOL_QUEUE_CAP = Integer.MAX_VALUE;
/** Default socket send and receive buffer size. */
public static final int DFLT_SOCK_BUF_SIZE = 32 * 1024;
/** Jetty XML configuration path. */
private String jettyPath;
/** REST secret key. */
private String secretKey;
/** TCP host. */
private String host;
/** TCP port. */
private int port = DFLT_TCP_PORT;
/** TCP no delay flag. */
private boolean noDelay = DFLT_TCP_NODELAY;
/** REST TCP direct buffer flag. */
private boolean directBuf = DFLT_TCP_DIRECT_BUF;
/** REST TCP send buffer size. */
private int sndBufSize = DFLT_SOCK_BUF_SIZE;
/** REST TCP receive buffer size. */
private int rcvBufSize = DFLT_SOCK_BUF_SIZE;
/** REST TCP send queue limit. */
private int sndQueueLimit;
/** REST TCP selector count. */
private int selectorCnt = Math.min(4, Runtime.getRuntime().availableProcessors());
/** Idle timeout. */
private long idleTimeout = DFLT_IDLE_TIMEOUT;
/** SSL enable flag, default is disabled. */
private boolean sslEnabled;
/** SSL need client auth flag. */
private boolean sslClientAuth;
/** SSL context factory for rest binary server. */
private GridSslContextFactory sslCtxFactory;
/** SSL context factory for rest binary server. */
private Factory<SSLContext> sslFactory;
/** Port range */
private int portRange = DFLT_PORT_RANGE;
/** REST requests thread pool size. */
private int threadPoolSize = DFLT_REST_CORE_THREAD_CNT;
/** Client message interceptor. */
private ConnectorMessageInterceptor msgInterceptor;
/**
* Creates client connection configuration with all default values.
*/
public ConnectorConfiguration() {
// No-op.
}
/**
* Creates client connection configuration by copying all properties from
* given configuration.
*
* @param cfg Client configuration.
*/
public ConnectorConfiguration(ConnectorConfiguration cfg) {
assert cfg != null;
msgInterceptor = cfg.getMessageInterceptor();
threadPoolSize = cfg.getThreadPoolSize();
idleTimeout = cfg.getIdleTimeout();
jettyPath = cfg.getJettyPath();
portRange = cfg.getPortRange();
secretKey = cfg.getSecretKey();
directBuf = cfg.isDirectBuffer();
host = cfg.getHost();
noDelay = cfg.isNoDelay();
port = cfg.getPort();
rcvBufSize = cfg.getReceiveBufferSize();
selectorCnt = cfg.getSelectorCount();
sndBufSize = cfg.getSendBufferSize();
sndQueueLimit = cfg.getSendQueueLimit();
sslClientAuth = cfg.isSslClientAuth();
sslCtxFactory = cfg.getSslContextFactory();
sslEnabled = cfg.isSslEnabled();
}
/**
* Sets path, either absolute or relative to {@code IGNITE_HOME}, to {@code JETTY}
* XML configuration file. {@code JETTY} is used to support REST over HTTP protocol for
* accessing Ignite APIs remotely.
*
* @param jettyPath Path to {@code JETTY} XML configuration file.
*/
public void setJettyPath(String jettyPath) {
this.jettyPath = jettyPath;
}
/**
* Gets path, either absolute or relative to {@code IGNITE_HOME}, to {@code Jetty}
* XML configuration file. {@code Jetty} is used to support REST over HTTP protocol for
* accessing Ignite APIs remotely.
* <p>
* If not provided, Jetty instance with default configuration will be started picking
* {@link IgniteSystemProperties#IGNITE_JETTY_HOST} and {@link IgniteSystemProperties#IGNITE_JETTY_PORT}
* as host and port respectively.
*
* @return Path to {@code JETTY} XML configuration file.
* @see IgniteSystemProperties#IGNITE_JETTY_HOST
* @see IgniteSystemProperties#IGNITE_JETTY_PORT
*/
public String getJettyPath() {
return jettyPath;
}
/**
* Sets secret key to authenticate REST requests. If key is {@code null} or empty authentication is disabled.
*
* @param secretKey REST secret key.
*/
public void setSecretKey(@Nullable String secretKey) {
this.secretKey = secretKey;
}
/**
* Gets secret key to authenticate REST requests. If key is {@code null} or empty authentication is disabled.
*
* @return Secret key.
* @see IgniteSystemProperties#IGNITE_JETTY_HOST
* @see IgniteSystemProperties#IGNITE_JETTY_PORT
*/
@Nullable public String getSecretKey() {
return secretKey;
}
/**
* Gets host for TCP binary protocol server. This can be either an
* IP address or a domain name.
* <p>
* If not defined, system-wide local address will be used
* (see {@link IgniteConfiguration#getLocalHost()}.
* <p>
* You can also use {@code 0.0.0.0} value to bind to all
* locally-available IP addresses.
*
* @return TCP host.
*/
public String getHost() {
return host;
}
/**
* Sets host for TCP binary protocol server.
*
* @param host TCP host.
*/
public void setHost(String host) {
this.host = host;
}
/**
* Gets port for TCP binary protocol server.
* <p>
* Default is {@link #DFLT_TCP_PORT}.
*
* @return TCP port.
*/
public int getPort() {
return port;
}
/**
* Sets port for TCP binary protocol server.
*
* @param port TCP port.
*/
public void setPort(int port) {
this.port = port;
}
/**
* Gets flag indicating whether {@code TCP_NODELAY} option should be set for accepted client connections.
* Setting this option reduces network latency and should be set to {@code true} in majority of cases.
* For more information, see {@link Socket#setTcpNoDelay(boolean)}
* <p/>
* If not specified, default value is {@link #DFLT_TCP_NODELAY}.
*
* @return Whether {@code TCP_NODELAY} option should be enabled.
*/
public boolean isNoDelay() {
return noDelay;
}
/**
* Sets whether {@code TCP_NODELAY} option should be set for all accepted client connections.
*
* @param noDelay {@code True} if option should be enabled.
* @see #isNoDelay()
*/
public void setNoDelay(boolean noDelay) {
this.noDelay = noDelay;
}
/**
* Gets flag indicating whether REST TCP server should use direct buffers. A direct buffer is a buffer
* that is allocated and accessed using native system calls, without using JVM heap. Enabling direct
* buffer <em>may</em> improve performance and avoid memory issues (long GC pauses due to huge buffer
* size).
*
* @return Whether direct buffer should be used.
*/
public boolean isDirectBuffer() {
return directBuf;
}
/**
* Sets whether to use direct buffer for REST TCP server.
*
* @param directBuf {@code True} if option should be enabled.
* @see #isDirectBuffer()
*/
public void setDirectBuffer(boolean directBuf) {
this.directBuf = directBuf;
}
/**
* Gets REST TCP server send buffer size.
*
* @return REST TCP server send buffer size (0 for default).
*/
public int getSendBufferSize() {
return sndBufSize;
}
/**
* Sets REST TCP server send buffer size.
*
* @param sndBufSize Send buffer size.
* @see #getSendBufferSize()
*/
public void setSendBufferSize(int sndBufSize) {
this.sndBufSize = sndBufSize;
}
/**
* Gets REST TCP server receive buffer size.
*
* @return REST TCP server receive buffer size (0 for default).
*/
public int getReceiveBufferSize() {
return rcvBufSize;
}
/**
* Sets REST TCP server receive buffer size.
*
* @param rcvBufSize Receive buffer size.
* @see #getReceiveBufferSize()
*/
public void setReceiveBufferSize(int rcvBufSize) {
this.rcvBufSize = rcvBufSize;
}
/**
* Gets REST TCP server send queue limit. If the limit exceeds, all successive writes will
* block until the queue has enough capacity.
*
* @return REST TCP server send queue limit (0 for unlimited).
*/
public int getSendQueueLimit() {
return sndQueueLimit;
}
/**
* Sets REST TCP server send queue limit.
*
* @param sndQueueLimit REST TCP server send queue limit (0 for unlimited).
* @see #getSendQueueLimit()
*/
public void setSendQueueLimit(int sndQueueLimit) {
this.sndQueueLimit = sndQueueLimit;
}
/**
* Gets number of selector threads in REST TCP server. Higher value for this parameter
* may increase throughput, but also increases context switching.
*
* @return Number of selector threads for REST TCP server.
*/
public int getSelectorCount() {
return selectorCnt;
}
/**
* Sets number of selector threads for REST TCP server.
*
* @param selectorCnt Number of selector threads for REST TCP server.
* @see #getSelectorCount()
*/
public void setSelectorCount(int selectorCnt) {
this.selectorCnt = selectorCnt;
}
/**
* Gets idle timeout for REST server.
* <p>
* This setting is used to reject half-opened sockets. If no packets
* come within idle timeout, the connection is closed.
*
* @return Idle timeout in milliseconds.
*/
public long getIdleTimeout() {
return idleTimeout;
}
/**
* Sets idle timeout for REST server.
*
* @param idleTimeout Idle timeout in milliseconds.
* @see #getIdleTimeout()
*/
public void setIdleTimeout(long idleTimeout) {
this.idleTimeout = idleTimeout;
}
/**
* Whether secure socket layer should be enabled on binary rest server.
* <p>
* Note that if this flag is set to {@code true}, an instance of {@link GridSslContextFactory}
* should be provided, otherwise binary rest protocol will fail to start.
*
* @return {@code True} if SSL should be enabled.
*/
public boolean isSslEnabled() {
return sslEnabled;
}
/**
* Sets whether Secure Socket Layer should be enabled for REST TCP binary protocol.
* <p/>
* Note that if this flag is set to {@code true}, then a valid instance of {@link GridSslContextFactory}
* should be provided in {@link IgniteConfiguration}. Otherwise, TCP binary protocol will fail to start.
*
* @param sslEnabled {@code True} if SSL should be enabled.
*/
public void setSslEnabled(boolean sslEnabled) {
this.sslEnabled = sslEnabled;
}
/**
* Gets a flag indicating whether or not remote clients will be required to have a valid SSL certificate which
* validity will be verified with trust manager.
*
* @return Whether or not client authentication is required.
*/
public boolean isSslClientAuth() {
return sslClientAuth;
}
/**
* Sets flag indicating whether or not SSL client authentication is required.
*
* @param sslClientAuth Whether or not client authentication is required.
*/
public void setSslClientAuth(boolean sslClientAuth) {
this.sslClientAuth = sslClientAuth;
}
/**
* Gets context factory that will be used for creating a secure socket layer of rest binary server.
*
* @return SslContextFactory instance.
* @see GridSslContextFactory
* @deprecated Use {@link #getSslFactory()} instead.
*/
@Deprecated
public GridSslContextFactory getSslContextFactory() {
return sslCtxFactory;
}
/**
* Sets instance of {@link GridSslContextFactory} that will be used to create an instance of {@code SSLContext}
* for Secure Socket Layer on TCP binary protocol. This factory will only be used if
* {@link #setSslEnabled(boolean)} is set to {@code true}.
*
* @param sslCtxFactory Instance of {@link GridSslContextFactory}
* @deprecated Use {@link #setSslFactory(Factory)} instead.
*/
@Deprecated
public void setSslContextFactory(GridSslContextFactory sslCtxFactory) {
this.sslCtxFactory = sslCtxFactory;
}
/**
* Gets context factory that will be used for creating a secure socket layer of rest binary server.
*
* @return SSL context factory instance.
* @see SslContextFactory
*/
public Factory<SSLContext> getSslFactory() {
return sslFactory;
}
/**
* Sets instance of {@link Factory} that will be used to create an instance of {@code SSLContext}
* for Secure Socket Layer on TCP binary protocol. This factory will only be used if
* {@link #setSslEnabled(boolean)} is set to {@code true}.
*
* @param sslFactory Instance of {@link Factory}
*/
public void setSslFactory(Factory<SSLContext> sslFactory) {
this.sslFactory = sslFactory;
}
/**
* Gets number of ports to try if configured port is already in use.
*
* @return Number of ports to try.
*/
public int getPortRange() {
return portRange;
}
/**
* Sets number of ports to try if configured one is in use.
*
* @param portRange Port range.
*/
public void setPortRange(int portRange) {
this.portRange = portRange;
}
/**
* Should return a thread pool size to be used for
* processing of client messages (REST requests).
*
* @return Thread pool size to be used for processing of client
* messages.
*/
public int getThreadPoolSize() {
return threadPoolSize;
}
/**
* Sets thread pool size to use for processing of client messages (REST requests).
*
* @param threadPoolSize Thread pool size to use for processing of client messages.
* @see #getThreadPoolSize()
*/
public void setThreadPoolSize(int threadPoolSize) {
this.threadPoolSize = threadPoolSize;
}
/**
* Gets interceptor for objects, moving to and from remote clients.
* If this method returns {@code null} then no interception will be applied.
* <p>
* Setting interceptor allows to transform all objects exchanged via REST protocol.
* For example if you use custom serialisation on client you can write interceptor
* to transform binary representations received from client to Java objects and later
* access them from java code directly.
* <p>
* Default value is {@code null}.
*
* @see ConnectorMessageInterceptor
* @return Interceptor.
*/
@Nullable public ConnectorMessageInterceptor getMessageInterceptor() {
return msgInterceptor;
}
/**
* Sets client message interceptor.
* <p>
* Setting interceptor allows to transform all objects exchanged via REST protocol.
* For example if you use custom serialisation on client you can write interceptor
* to transform binary representations received from client to Java objects and later
* access them from java code directly.
*
* @param interceptor Interceptor.
*/
public void setMessageInterceptor(ConnectorMessageInterceptor interceptor) {
msgInterceptor = interceptor;
}
} | apache-2.0 |
christoph-frick/groovy-core | subprojects/groovy-xml/src/main/java/groovy/util/XmlNodePrinter.java | 14050 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.util;
import groovy.xml.QName;
import org.codehaus.groovy.runtime.InvokerHelper;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Prints a <code>groovy.util.Node</code> (as used with <code>XmlParser</code>) including all children in XML format.
* Typical usage:
* <pre>
* def xml = '<html><head><title>Title</title></head><body><h1>Header</h1></body></html>'
* def root = new XmlParser().parseText(xml)
* new XmlNodePrinter(preserveWhitespace:true).print(root.body[0])
* </pre>
* which when run produces this on stdout (or use your own <code>PrintWriter</code> to direct elsewhere):
* <pre>
* <body>
* <h1>Header</h1>
* </body>
* </pre>
*
* @author Christian Stein
* @see groovy.util.NodePrinter
* @see groovy.xml.XmlUtil#serialize(Node)
*/
public class XmlNodePrinter {
protected final IndentPrinter out;
private String quote;
private boolean namespaceAware = true;
private boolean preserveWhitespace = false;
private boolean expandEmptyElements = false;
public XmlNodePrinter(PrintWriter out) {
this(out, " ");
}
public XmlNodePrinter(PrintWriter out, String indent) {
this(out, indent, "\"");
}
public XmlNodePrinter(PrintWriter out, String indent, String quote) {
this(new IndentPrinter(out, indent), quote);
}
public XmlNodePrinter(IndentPrinter out) {
this(out, "\"");
}
public XmlNodePrinter(IndentPrinter out, String quote) {
if (out == null) {
throw new IllegalArgumentException("Argument 'IndentPrinter out' must not be null!");
}
this.out = out;
this.quote = quote;
}
public XmlNodePrinter() {
this(new PrintWriter(new OutputStreamWriter(System.out)));
}
public void print(Node node) {
print(node, new NamespaceContext());
}
/**
* Check if namespace handling is enabled.
* Defaults to <code>true</code>.
*
* @return true if namespace handling is enabled
*/
public boolean isNamespaceAware() {
return namespaceAware;
}
/**
* Enable and/or disable namespace handling.
*
* @param namespaceAware the new desired value
*/
public void setNamespaceAware(boolean namespaceAware) {
this.namespaceAware = namespaceAware;
}
/**
* Check if whitespace preservation is enabled.
* Defaults to <code>false</code>.
*
* @return true if whitespaces are honoured when printing simple text nodes
*/
public boolean isPreserveWhitespace() {
return preserveWhitespace;
}
/**
* Enable and/or disable preservation of whitespace.
*
* @param preserveWhitespace the new desired value
*/
public void setPreserveWhitespace(boolean preserveWhitespace) {
this.preserveWhitespace = preserveWhitespace;
}
/**
* Get Quote to use when printing attributes.
*
* @return the quote character
*/
public String getQuote() {
return quote;
}
/**
* Set Quote to use when printing attributes.
*
* @param quote the quote character
*/
public void setQuote(String quote) {
this.quote = quote;
}
/**
* Whether empty elements are expanded from <tagName/> to <tagName></tagName>.
*
* @return <code>true</code>, if empty elements will be represented by an opening tag
* followed immediately by a closing tag.
*/
public boolean isExpandEmptyElements() {
return expandEmptyElements;
}
/**
* Whether empty elements are expanded from <tagName/> to <tagName></tagName>.
*
* @param expandEmptyElements if <code>true</code>, empty
* elements will be represented by an opening tag
* followed immediately by a closing tag.
* Defaults to <code>false</code>.
*/
public void setExpandEmptyElements(boolean expandEmptyElements) {
this.expandEmptyElements = expandEmptyElements;
}
protected void print(Node node, NamespaceContext ctx) {
/*
* Handle empty elements like '<br/>', '<img/> or '<hr noshade="noshade"/>.
*/
if (isEmptyElement(node)) {
printLineBegin();
out.print("<");
out.print(getName(node));
if (ctx != null) {
printNamespace(node, ctx);
}
printNameAttributes(node.attributes(), ctx);
if (expandEmptyElements) {
out.print("></");
out.print(getName(node));
out.print(">");
} else {
out.print("/>");
}
printLineEnd();
out.flush();
return;
}
/*
* Hook for extra processing, e.g. GSP tag element!
*/
if (printSpecialNode(node)) {
out.flush();
return;
}
/*
* Handle normal element like <html> ... </html>.
*/
Object value = node.value();
if (value instanceof List) {
printName(node, ctx, true, isListOfSimple((List) value));
printList((List) value, ctx);
printName(node, ctx, false, isListOfSimple((List) value));
out.flush();
return;
}
// treat as simple type - probably a String
printName(node, ctx, true, preserveWhitespace);
printSimpleItemWithIndent(value);
printName(node, ctx, false, preserveWhitespace);
out.flush();
}
private boolean isListOfSimple(List value) {
for (Object p : value) {
if (p instanceof Node) return false;
}
return preserveWhitespace;
}
protected void printLineBegin() {
out.printIndent();
}
protected void printLineEnd() {
printLineEnd(null);
}
protected void printLineEnd(String comment) {
if (comment != null) {
out.print(" <!-- ");
out.print(comment);
out.print(" -->");
}
out.println();
out.flush();
}
protected void printList(List list, NamespaceContext ctx) {
out.incrementIndent();
for (Object value : list) {
NamespaceContext context = new NamespaceContext(ctx);
/*
* If the current value is a node, recurse into that node.
*/
if (value instanceof Node) {
print((Node) value, context);
continue;
}
printSimpleItem(value);
}
out.decrementIndent();
}
protected void printSimpleItem(Object value) {
if (!preserveWhitespace) printLineBegin();
printEscaped(InvokerHelper.toString(value), false);
if (!preserveWhitespace) printLineEnd();
}
protected void printName(Node node, NamespaceContext ctx, boolean begin, boolean preserve) {
if (node == null) {
throw new NullPointerException("Node must not be null.");
}
Object name = node.name();
if (name == null) {
throw new NullPointerException("Name must not be null.");
}
if (!preserve || begin) printLineBegin();
out.print("<");
if (!begin) {
out.print("/");
}
out.print(getName(node));
if (ctx != null) {
printNamespace(node, ctx);
}
if (begin) {
printNameAttributes(node.attributes(), ctx);
}
out.print(">");
if (!preserve || !begin) printLineEnd();
}
protected boolean printSpecialNode(Node node) {
return false;
}
protected void printNamespace(Object object, NamespaceContext ctx) {
if (namespaceAware) {
if (object instanceof Node) {
printNamespace(((Node) object).name(), ctx);
} else if (object instanceof QName) {
QName qname = (QName) object;
String namespaceUri = qname.getNamespaceURI();
if (namespaceUri != null) {
String prefix = qname.getPrefix();
if (!ctx.isPrefixRegistered(prefix, namespaceUri)) {
ctx.registerNamespacePrefix(prefix, namespaceUri);
out.print(" ");
out.print("xmlns");
if (prefix.length() > 0) {
out.print(":");
out.print(prefix);
}
out.print("=" + quote);
out.print(namespaceUri);
out.print(quote);
}
}
}
}
}
protected void printNameAttributes(Map attributes, NamespaceContext ctx) {
if (attributes == null || attributes.isEmpty()) {
return;
}
for (Object p : attributes.entrySet()) {
Map.Entry entry = (Map.Entry) p;
out.print(" ");
out.print(getName(entry.getKey()));
out.print("=");
Object value = entry.getValue();
out.print(quote);
if (value instanceof String) {
printEscaped((String) value, true);
} else {
printEscaped(InvokerHelper.toString(value), true);
}
out.print(quote);
printNamespace(entry.getKey(), ctx);
}
}
private boolean isEmptyElement(Node node) {
if (node == null) {
throw new IllegalArgumentException("Node must not be null!");
}
if (!node.children().isEmpty()) {
return false;
}
return node.text().length() == 0;
}
private String getName(Object object) {
if (object instanceof String) {
return (String) object;
} else if (object instanceof QName) {
QName qname = (QName) object;
if (!namespaceAware) {
return qname.getLocalPart();
}
return qname.getQualifiedName();
} else if (object instanceof Node) {
Object name = ((Node) object).name();
return getName(name);
}
return object.toString();
}
private void printSimpleItemWithIndent(Object value) {
if (!preserveWhitespace) out.incrementIndent();
printSimpleItem(value);
if (!preserveWhitespace) out.decrementIndent();
}
// For ' and " we only escape if needed. As far as XML is concerned,
// we could always escape if we wanted to.
private void printEscaped(String s, boolean isAttributeValue) {
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
switch (c) {
case '<':
out.print("<");
break;
case '>':
out.print(">");
break;
case '&':
out.print("&");
break;
case '\'':
if (isAttributeValue && quote.equals("'"))
out.print("'");
else
out.print(c);
break;
case '"':
if (isAttributeValue && quote.equals("\""))
out.print(""");
else
out.print(c);
break;
case '\n':
if (isAttributeValue)
out.print(" ");
else
out.print(c);
break;
case '\r':
if (isAttributeValue)
out.print(" ");
else
out.print(c);
break;
default:
out.print(c);
}
}
}
protected class NamespaceContext {
private final Map<String, String> namespaceMap;
public NamespaceContext() {
namespaceMap = new HashMap<String, String>();
}
public NamespaceContext(NamespaceContext context) {
this();
namespaceMap.putAll(context.namespaceMap);
}
public boolean isPrefixRegistered(String prefix, String uri) {
return namespaceMap.containsKey(prefix) && namespaceMap.get(prefix).equals(uri);
}
public void registerNamespacePrefix(String prefix, String uri) {
if (!isPrefixRegistered(prefix, uri)) {
namespaceMap.put(prefix, uri);
}
}
public String getNamespace(String prefix) {
Object uri = namespaceMap.get(prefix);
return (uri == null) ? null : uri.toString();
}
}
}
| apache-2.0 |
fengshao0907/incubator-geode | gemfire-core/src/main/java/com/gemstone/gemfire/cache/lucene/internal/LuceneServiceImpl.java | 2557 | package com.gemstone.gemfire.cache.lucene.internal;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.lucene.LuceneIndex;
import com.gemstone.gemfire.cache.lucene.LuceneQueryFactory;
import com.gemstone.gemfire.cache.lucene.LuceneService;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
/**
 * Implementation of LuceneService to create lucene index and query.
 *
 * @author Xiaojian Zhou
 *
 * @since 8.5
 */
public class LuceneServiceImpl implements LuceneService {
  // Cache this service was created against. NOTE(review): the singleton below
  // is keyed on nothing — later getInstance() calls with a *different* cache
  // still return the first instance; confirm that is intended.
  private final Cache cache;
  private static LuceneServiceImpl instance;
  // Maps unique index name ("indexName#region_path") to its index.
  private final HashMap<String, LuceneIndex> indexMap;
  private LuceneServiceImpl(final Cache cache) {
    if (cache == null) {
      throw new IllegalStateException(LocalizedStrings.CqService_CACHE_IS_NULL.toLocalizedString());
    }
    GemFireCacheImpl gfc = (GemFireCacheImpl) cache;
    // Fail fast if the cache is shutting down.
    gfc.getCancelCriterion().checkCancelInProgress(null);
    this.cache = gfc;
    // Initialize the Map which maintains indexes
    this.indexMap = new HashMap<String, LuceneIndex>();
  }
  /** Lazily creates the per-process singleton (synchronized for safe publication). */
  public static synchronized LuceneServiceImpl getInstance(final Cache cache) {
    if (instance == null) {
      instance = new LuceneServiceImpl(cache);
    }
    return instance;
  }
  /**
   * Builds the map key for an index: "indexName#regionName" with '/' in the
   * region path replaced by '_'.
   */
  public String getUniqueIndexName(String indexName, String regionName) {
    String name = indexName+"#"+regionName.replace('/', '_');
    return name;
  }
  // NOTE(review): stub — always returns null; the index is never created or
  // registered in indexMap.
  @Override
  public LuceneIndex createIndex(String indexName, String regionName, String... fields) {
    // TODO Auto-generated method stub
    return null;
  }
  /** @return the registered index for this name/region, or null if none. */
  @Override
  public LuceneIndex getIndex(String indexName, String regionName) {
    return indexMap.get(getUniqueIndexName(indexName, regionName));
  }
  /** @return live view of all registered indexes. */
  @Override
  public Collection<LuceneIndex> getAllIndexes() {
    return indexMap.values();
  }
  // NOTE(review): stub — always returns null (see varargs overload above).
  @Override
  public LuceneIndex createIndex(String indexName, String regionName,
      Map<String, Analyzer> analyzerPerField) {
    // TODO Auto-generated method stub
    return null;
  }
  /** Unregisters the index and closes its underlying resources. */
  @Override
  public void destroyIndex(LuceneIndex index) {
    LuceneIndexImpl indexImpl = (LuceneIndexImpl)index;
    indexMap.remove(getUniqueIndexName(index.getName(), index.getRegionName()));
    indexImpl.close();
  }
  @Override
  public LuceneQueryFactory createLuceneQueryFactory() {
    return new LuceneQueryFactoryImpl();
  }
}
| apache-2.0 |
scheib/chromium | weblayer/browser/android/javatests/src/org/chromium/weblayer/test/ErrorPageCallbackTest.java | 6146 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.weblayer.test;
import static org.chromium.content_public.browser.test.util.TestThreadUtils.runOnUiThreadBlocking;
import android.net.Uri;
import androidx.test.filters.SmallTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.net.test.EmbeddedTestServer;
import org.chromium.net.test.ServerCertificate;
import org.chromium.net.test.util.TestWebServer;
import org.chromium.weblayer.ErrorPage;
import org.chromium.weblayer.ErrorPageCallback;
import org.chromium.weblayer.Navigation;
import org.chromium.weblayer.NavigationController;
import org.chromium.weblayer.Tab;
import org.chromium.weblayer.shell.InstrumentationActivity;
/**
* Tests that ErrorPageCallback works as expected for handling error page interactions.
*/
@RunWith(WebLayerJUnit4ClassRunner.class)
public class ErrorPageCallbackTest {
@Rule
public InstrumentationActivityTestRule mActivityTestRule =
new InstrumentationActivityTestRule();
private InstrumentationActivity mActivity;
// Only one EmbeddedTestServer may be used at a time.
private TestWebServer mGoodServer;
private EmbeddedTestServer mBadSslServer;
private String mGoodUrl;
private String mBadUrl;
private Callback mCallback;
private static class Callback extends ErrorPageCallback {
public boolean mSignaled;
public String mSafetyPage;
public ErrorPage mErrorPage;
public Tab mTab;
public Callback(Tab tab) {
mTab = tab;
}
@Override
public boolean onBackToSafety() {
mSignaled = true;
if (mSafetyPage == null) {
return false;
}
mTab.getNavigationController().navigate(Uri.parse(mSafetyPage));
return true;
}
@Override
public ErrorPage getErrorPage(Navigation navigation) {
return mErrorPage;
}
}
@Before
public void setUp() throws Throwable {
mActivity = mActivityTestRule.launchShellWithUrl(null);
Assert.assertNotNull(mActivity);
mGoodServer = TestWebServer.start();
mGoodUrl = mGoodServer.setResponse("/ok.html", "<html>ok</html>", null);
mBadSslServer = EmbeddedTestServer.createAndStartHTTPSServer(
mActivity, ServerCertificate.CERT_MISMATCHED_NAME);
mBadUrl = mBadSslServer.getURL("/weblayer/test/data/simple_page.html");
mCallback = new Callback(mActivity.getTab());
mActivityTestRule.navigateAndWait(mGoodUrl);
mActivityTestRule.navigateAndWaitForFailure(mBadUrl);
}
@After
public void tearDown() {
mBadSslServer.stopAndDestroyServer();
}
/**
* Verifies that if there's no ErrorPageCallback, when the user clicks "back to safety",
* WebLayer provides default behavior (navigating back).
*/
@Test
@SmallTest
public void testBackToSafetyDefaultBehavior() throws Throwable {
NavigationWaiter navigationWaiter = new NavigationWaiter(
mGoodUrl, mActivity.getTab(), false /* expectFailure */, true /* waitForPaint */);
mActivityTestRule.executeScriptSync(
"window.certificateErrorPageController.dontProceed();", false);
navigationWaiter.waitForNavigation();
Assert.assertFalse(mCallback.mSignaled);
}
/**
* Verifies that if there's an ErrorPageCallback and onBackToSafety returns true, WebLayer does
* *not* provide default behavior.
*/
@Test
@SmallTest
public void testBackToSafetyOverride() throws Throwable {
TestThreadUtils.runOnUiThreadBlocking(
() -> { mActivity.getTab().setErrorPageCallback(mCallback); });
mCallback.mSafetyPage = mGoodServer.setResponse("/safe.html", "<html>safe</html>", null);
NavigationWaiter navigationWaiter = new NavigationWaiter(mCallback.mSafetyPage,
mActivity.getTab(), false /* expectFailure */, true /* waitForPaint */);
mActivityTestRule.executeScriptSync(
"window.certificateErrorPageController.dontProceed();", false);
navigationWaiter.waitForNavigation();
Assert.assertTrue(mCallback.mSignaled);
}
/**
* Verifies that if there's an ErrorPageCallback and onBackToSafety returns false, WebLayer
* *does* provide default behavior.
*/
@Test
@SmallTest
public void testBackToSafetyDontOverride() throws Throwable {
TestThreadUtils.runOnUiThreadBlocking(
() -> { mActivity.getTab().setErrorPageCallback(mCallback); });
NavigationWaiter navigationWaiter = new NavigationWaiter(
mGoodUrl, mActivity.getTab(), false /* expectFailure */, true /* waitForPaint */);
mActivityTestRule.executeScriptSync(
"window.certificateErrorPageController.dontProceed();", false);
navigationWaiter.waitForNavigation();
Assert.assertTrue(mCallback.mSignaled);
}
@Test
@SmallTest
public void testOverrideErrorPage() throws Throwable {
mCallback.mErrorPage = new ErrorPage("<html><head><title>test error</title>");
TestThreadUtils.runOnUiThreadBlocking(
() -> { mActivity.getTab().setErrorPageCallback(mCallback); });
String errorPageUrl = "http://localhost:7/non_existent";
mActivityTestRule.navigateAndWaitForFailure(errorPageUrl);
runOnUiThreadBlocking(() -> {
NavigationController navigationController =
mActivity.getTab().getNavigationController();
Assert.assertEquals("test error",
navigationController.getNavigationEntryTitle(
navigationController.getNavigationListCurrentIndex()));
});
}
}
| bsd-3-clause |
ric2b/Vivaldi-browser | chromium/android_webview/javatests/src/org/chromium/android_webview/test/WebViewModalDialogOverrideTest.java | 9869 | // Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* Test suite for displaying and functioning of modal dialogs.
*/
package org.chromium.android_webview.test;
import androidx.test.filters.MediumTest;
import androidx.test.filters.SmallTest;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.android_webview.AwContents;
import org.chromium.android_webview.JsPromptResultReceiver;
import org.chromium.android_webview.JsResultReceiver;
import org.chromium.android_webview.test.util.AwTestTouchUtils;
import org.chromium.base.test.util.CallbackHelper;
import org.chromium.base.test.util.Feature;
import org.chromium.content_public.browser.GestureListenerManager;
import org.chromium.content_public.browser.GestureStateListener;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Tests for callbacks implementing JS alerts and prompts.
*/
@RunWith(AwJUnit4ClassRunner.class)
@RunWith(AwJUnit4ClassRunner.class)
public class WebViewModalDialogOverrideTest {
    @Rule
    public AwActivityTestRule mActivityTestRule = new AwActivityTestRule();

    private static final String EMPTY_PAGE =
            "<!doctype html>"
            + "<title>Modal Dialog Test</title><p>Testcase.</p>";
    // Page whose onbeforeunload handler triggers a confirmation dialog.
    private static final String BEFORE_UNLOAD_URL =
            "<!doctype html>"
            + "<head><script>window.onbeforeunload=function() {"
            + "return 'Are you sure?';"
            + "};</script></head></body>";

    /*
     * Verify that when the AwContentsClient calls handleJsAlert.
     */
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testOverrideAlertHandling() throws Throwable {
        final String alertText = "Hello World!";
        final AtomicBoolean callbackCalled = new AtomicBoolean(false);
        // Returning true from the callback should not show a dialog.
        TestAwContentsClient client = new TestAwContentsClient() {
            @Override
            public void handleJsAlert(String url, String message, JsResultReceiver res) {
                callbackCalled.set(true);
                res.confirm();
                Assert.assertEquals(alertText, message);
            }
        };
        AwTestContainerView view = mActivityTestRule.createAwTestContainerViewOnMainSync(client);
        final AwContents awContents = view.getAwContents();
        AwActivityTestRule.enableJavaScriptOnUiThread(awContents);

        mActivityTestRule.loadDataSync(
                awContents, client.getOnPageFinishedHelper(), EMPTY_PAGE, "text/html", false);
        mActivityTestRule.executeJavaScriptAndWaitForResult(
                awContents, client, "alert('" + alertText + "')");
        Assert.assertTrue(callbackCalled.get());
    }

    /*
     * Verify that when the AwContentsClient calls handleJsPrompt.
     */
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testOverridePromptHandling() throws Throwable {
        final String promtText = "How do you like your eggs in the morning?";
        final String promptDefault = "Scrambled";
        final String promptResult = "I like mine with a kiss";
        final AtomicBoolean called = new AtomicBoolean(false);
        // Returning true from the callback should not show a dialog.
        final TestAwContentsClient client = new TestAwContentsClient() {
            @Override
            public void handleJsPrompt(String url, String message, String defaultValue,
                    JsPromptResultReceiver res) {
                Assert.assertEquals(promtText, message);
                Assert.assertEquals(promptDefault, defaultValue);
                res.confirm(promptResult);
                called.set(true);
            }
        };
        AwTestContainerView view = mActivityTestRule.createAwTestContainerViewOnMainSync(client);
        final AwContents awContents = view.getAwContents();
        AwActivityTestRule.enableJavaScriptOnUiThread(awContents);

        mActivityTestRule.loadDataSync(
                awContents, client.getOnPageFinishedHelper(), EMPTY_PAGE, "text/html", false);
        String result = mActivityTestRule.executeJavaScriptAndWaitForResult(
                awContents, client, "prompt('" + promtText + "','" + promptDefault + "')");
        Assert.assertTrue(called.get());
        // The JS prompt() expression evaluates to the confirmed string.
        Assert.assertEquals("\"" + promptResult + "\"", result);
    }

    /*
     * Verify that when the AwContentsClient calls handleJsConfirm and the client confirms.
     */
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testOverrideConfirmHandlingConfirmed() throws Throwable {
        final String confirmText = "Would you like a cookie?";
        final AtomicBoolean called = new AtomicBoolean(false);
        // Returning true from the callback should not show a dialog.
        TestAwContentsClient client = new TestAwContentsClient() {
            @Override
            public void handleJsConfirm(String url, String message, JsResultReceiver res) {
                Assert.assertEquals(confirmText, message);
                res.confirm();
                called.set(true);
            }
        };
        AwTestContainerView view = mActivityTestRule.createAwTestContainerViewOnMainSync(client);
        final AwContents awContents = view.getAwContents();
        AwActivityTestRule.enableJavaScriptOnUiThread(awContents);
        mActivityTestRule.loadDataSync(
                awContents, client.getOnPageFinishedHelper(), EMPTY_PAGE, "text/html", false);

        String result = mActivityTestRule.executeJavaScriptAndWaitForResult(
                awContents, client, "confirm('" + confirmText + "')");
        Assert.assertTrue(called.get());
        Assert.assertEquals("true", result);
    }

    /*
     * Verify that when the AwContentsClient calls handleJsConfirm and the client cancels.
     */
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testOverrideConfirmHandlingCancelled() throws Throwable {
        final String confirmText = "Would you like a cookie?";
        final AtomicBoolean called = new AtomicBoolean(false);
        // Returning true from the callback should not show a dialog.
        TestAwContentsClient client = new TestAwContentsClient() {
            @Override
            public void handleJsConfirm(String url, String message, JsResultReceiver res) {
                Assert.assertEquals(confirmText, message);
                res.cancel();
                called.set(true);
            }
        };
        AwTestContainerView view = mActivityTestRule.createAwTestContainerViewOnMainSync(client);
        final AwContents awContents = view.getAwContents();
        AwActivityTestRule.enableJavaScriptOnUiThread(awContents);
        mActivityTestRule.loadDataSync(
                awContents, client.getOnPageFinishedHelper(), EMPTY_PAGE, "text/html", false);

        String result = mActivityTestRule.executeJavaScriptAndWaitForResult(
                awContents, client, "confirm('" + confirmText + "')");
        Assert.assertTrue(called.get());
        Assert.assertEquals("false", result);
    }

    /** GestureStateListener that lets tests block until a single tap is observed. */
    private static class TapGestureStateListener implements GestureStateListener {
        private CallbackHelper mCallbackHelper = new CallbackHelper();

        public int getCallCount() {
            return mCallbackHelper.getCallCount();
        }

        public void waitForTap(int currentCallCount) throws Throwable {
            mCallbackHelper.waitForCallback(currentCallCount);
        }

        @Override
        public void onSingleTap(boolean consumed) {
            mCallbackHelper.notifyCalled();
        }
    }

    /**
     * Taps on a view and waits for a callback.
     */
    private void tapViewAndWait(AwTestContainerView view) throws Throwable {
        final TapGestureStateListener tapGestureStateListener = new TapGestureStateListener();
        int callCount = tapGestureStateListener.getCallCount();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            GestureListenerManager.fromWebContents(view.getWebContents())
                    .addListener(tapGestureStateListener);
        });
        AwTestTouchUtils.simulateTouchCenterOfView(view);
        tapGestureStateListener.waitForTap(callCount);
    }

    /*
     * Verify that when the AwContentsClient calls handleJsBeforeUnload
     */
    @Test
    @MediumTest
    @Feature({"AndroidWebView"})
    public void testOverrideBeforeUnloadHandling() throws Throwable {
        final CallbackHelper jsBeforeUnloadHelper = new CallbackHelper();
        TestAwContentsClient client = new TestAwContentsClient() {
            @Override
            public void handleJsBeforeUnload(String url, String message, JsResultReceiver res) {
                res.cancel();
                jsBeforeUnloadHelper.notifyCalled();
            }
        };
        AwTestContainerView view = mActivityTestRule.createAwTestContainerViewOnMainSync(client);
        final AwContents awContents = view.getAwContents();
        AwActivityTestRule.enableJavaScriptOnUiThread(awContents);
        mActivityTestRule.loadDataSync(awContents, client.getOnPageFinishedHelper(),
                BEFORE_UNLOAD_URL, "text/html", false);
        // (A redundant second enableJavaScriptOnUiThread call was removed here.)

        // JavaScript onbeforeunload dialogs require a user gesture.
        tapViewAndWait(view);

        // Don't wait synchronously because we don't leave the page.
        int currentCallCount = jsBeforeUnloadHelper.getCallCount();
        mActivityTestRule.loadDataAsync(awContents, EMPTY_PAGE, "text/html", false);
        jsBeforeUnloadHelper.waitForCallback(currentCallCount);
    }
}
| bsd-3-clause |
garimakhulbe/autorest | src/generator/AutoRest.Java.Azure.Fluent.Tests/src/main/java/fixtures/azureparametergrouping/implementation/AutoRestParameterGroupingTestServiceImpl.java | 5765 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package fixtures.azureparametergrouping.implementation;
import com.microsoft.azure.AzureClient;
import com.microsoft.azure.AzureServiceClient;
import com.microsoft.azure.RestClient;
import com.microsoft.rest.credentials.ServiceClientCredentials;
/**
* Initializes a new instance of the AutoRestParameterGroupingTestServiceImpl class.
*/
// NOTE: generated by AutoRest (see file header) - edits here will be lost on
// regeneration; keep changes to the code generator instead.
public final class AutoRestParameterGroupingTestServiceImpl extends AzureServiceClient {
    /** the {@link AzureClient} used for long running operations. */
    private AzureClient azureClient;

    /**
     * Gets the {@link AzureClient} used for long running operations.
     * @return the azure client;
     */
    public AzureClient getAzureClient() {
        return this.azureClient;
    }

    /** Gets or sets the preferred language for the response. */
    private String acceptLanguage;

    /**
     * Gets Gets or sets the preferred language for the response.
     *
     * @return the acceptLanguage value.
     */
    public String acceptLanguage() {
        return this.acceptLanguage;
    }

    /**
     * Sets Gets or sets the preferred language for the response.
     *
     * @param acceptLanguage the acceptLanguage value.
     * @return the service client itself
     */
    public AutoRestParameterGroupingTestServiceImpl withAcceptLanguage(String acceptLanguage) {
        this.acceptLanguage = acceptLanguage;
        return this;
    }

    /** Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30. */
    private int longRunningOperationRetryTimeout;

    /**
     * Gets Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @return the longRunningOperationRetryTimeout value.
     */
    public int longRunningOperationRetryTimeout() {
        return this.longRunningOperationRetryTimeout;
    }

    /**
     * Sets Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @param longRunningOperationRetryTimeout the longRunningOperationRetryTimeout value.
     * @return the service client itself
     */
    public AutoRestParameterGroupingTestServiceImpl withLongRunningOperationRetryTimeout(int longRunningOperationRetryTimeout) {
        this.longRunningOperationRetryTimeout = longRunningOperationRetryTimeout;
        return this;
    }

    /** When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. */
    private boolean generateClientRequestId;

    /**
     * Gets When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true.
     *
     * @return the generateClientRequestId value.
     */
    public boolean generateClientRequestId() {
        return this.generateClientRequestId;
    }

    /**
     * Sets When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true.
     *
     * @param generateClientRequestId the generateClientRequestId value.
     * @return the service client itself
     */
    public AutoRestParameterGroupingTestServiceImpl withGenerateClientRequestId(boolean generateClientRequestId) {
        this.generateClientRequestId = generateClientRequestId;
        return this;
    }

    /**
     * The ParameterGroupingsInner object to access its operations.
     */
    private ParameterGroupingsInner parameterGroupings;

    /**
     * Gets the ParameterGroupingsInner object to access its operations.
     * @return the ParameterGroupingsInner object.
     */
    public ParameterGroupingsInner parameterGroupings() {
        return this.parameterGroupings;
    }

    /**
     * Initializes an instance of AutoRestParameterGroupingTestService client.
     *
     * @param credentials the management credentials for Azure
     */
    public AutoRestParameterGroupingTestServiceImpl(ServiceClientCredentials credentials) {
        // Delegates to the URL-taking constructor with the default endpoint.
        this("https://localhost", credentials);
    }

    /**
     * Initializes an instance of AutoRestParameterGroupingTestService client.
     *
     * @param baseUrl the base URL of the host
     * @param credentials the management credentials for Azure
     */
    public AutoRestParameterGroupingTestServiceImpl(String baseUrl, ServiceClientCredentials credentials) {
        this(new RestClient.Builder()
                .withBaseUrl(baseUrl)
                .withCredentials(credentials)
                .build());
    }

    /**
     * Initializes an instance of AutoRestParameterGroupingTestService client.
     *
     * @param restClient the REST client to connect to Azure.
     */
    public AutoRestParameterGroupingTestServiceImpl(RestClient restClient) {
        super(restClient);
        initialize();
    }

    protected void initialize() {
        // Client-level defaults for every request issued through this client.
        this.acceptLanguage = "en-US";
        this.longRunningOperationRetryTimeout = 30;
        this.generateClientRequestId = true;
        // Wire up the operation group and the long-running-operation client.
        this.parameterGroupings = new ParameterGroupingsInner(restClient().retrofit(), this);
        this.azureClient = new AzureClient(this);
    }

    /**
     * Gets the User-Agent header for the client.
     *
     * @return the user agent string.
     */
    @Override
    public String userAgent() {
        return String.format("Azure-SDK-For-Java/%s (%s)",
                getClass().getPackage().getImplementationVersion(),
                "AutoRestParameterGroupingTestService, 1.0.0");
    }
}
| mit |
shitikanth/jabref | src/test/java/org/jabref/logic/importer/ImportDataTest.java | 1902 | package org.jabref.logic.importer;
import java.io.File;
import java.nio.file.Paths;
import org.junit.Assert;
import org.junit.Test;
/**
 * @author Nosh&amp;Dan
 * @version 09.11.2008 | 19:41:40
 */
public class ImportDataTest {

    public static final File FILE_IN_DATABASE = Paths
            .get("src/test/resources/org/jabref/logic/importer/unlinkedFilesTestFolder/pdfInDatabase.pdf").toFile();
    public static final File FILE_NOT_IN_DATABASE = Paths
            .get("src/test/resources/org/jabref/logic/importer/unlinkedFilesTestFolder/pdfNotInDatabase.pdf")
            .toFile();
    public static final File EXISTING_FOLDER = Paths
            .get("src/test/resources/org/jabref/logic/importer/unlinkedFilesTestFolder").toFile();
    public static final File NOT_EXISTING_FOLDER = Paths.get("notexistingfolder").toFile();
    public static final File NOT_EXISTING_PDF = Paths
            .get("src/test/resources/org/jabref/logic/importer/unlinkedFilesTestFolder/null.pdf").toFile();
    public static final File UNLINKED_FILES_TEST_BIB = Paths
            .get("src/test/resources/org/jabref/util/unlinkedFilesTestBib.bib").toFile();

    /**
     * Sanity-checks the fixture layout on disk before the real import tests run.
     */
    @Test
    public void testTestingEnvironment() {
        assertExistingDirectory(EXISTING_FOLDER);
        assertExistingFile(FILE_IN_DATABASE);
        assertExistingFile(FILE_NOT_IN_DATABASE);
    }

    @Test
    public void testOpenNotExistingDirectory() {
        Assert.assertFalse(NOT_EXISTING_FOLDER.exists());
        Assert.assertFalse(NOT_EXISTING_PDF.exists());
    }

    /** Asserts that the given path exists and is a directory. */
    private static void assertExistingDirectory(File folder) {
        Assert.assertTrue(folder.exists());
        Assert.assertTrue(folder.isDirectory());
    }

    /** Asserts that the given path exists and is a regular file. */
    private static void assertExistingFile(File file) {
        Assert.assertTrue(file.exists());
        Assert.assertTrue(file.isFile());
    }
}
| mit |
alondene/android-chess | app/src/main/java/jwtc/android/timeseal/streams/b.java | 604 | package jwtc.android.timeseal.streams;
import java.io.IOException;
import java.io.OutputStream;
public class b extends OutputStream
{
private final c a;
public void write(int paramInt)
throws IOException
{
this.a.a(paramInt);
}
public void write(byte[] paramArrayOfByte, int paramInt1, int paramInt2)
throws IOException
{
this.a.a(paramArrayOfByte, paramInt1, paramInt2);
}
public void close()
throws IOException
{
//this.a.jdMethod_try();
this.a.jdField_try();
}
public b(c paramc)
{
this.a = paramc;
}
} | mit |
taimur97/tilt-game-android | app/src/main/java/com/mediamonks/googleflip/pages/game/physics/constants/Physics.java | 280 | package com.mediamonks.googleflip.pages.game.physics.constants;
/**
* Physics constants
*/
/**
 * Physics constants for the tilt-game ball simulation.
 *
 * <p>Pure constant holder; not instantiable.
 */
public class Physics {
    // Density of the ball body (units per the physics engine's conventions -
    // TODO confirm against the Box2D fixture setup that consumes it).
    public static final float BALL_DENSITY = 0.11f;
    // Multiplier applied to device-tilt gravity input.
    public static final float GRAVITY_FACTOR = 392.0f;
    // Restitution ("bounciness") of the level walls.
    public static final float WALL_ELASTICITY = .38f;

    private Physics() {
        // Utility class: prevent instantiation.
    }
}
| mit |
stachon/XChange | xchange-dragonex/src/main/java/org/knowm/xchange/dragonex/dto/DragonexException.java | 612 | package org.knowm.xchange.dragonex.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import si.mazi.rescu.HttpStatusExceptionSupport;
@SuppressWarnings("serial")
public class DragonexException extends HttpStatusExceptionSupport {
@JsonProperty("error")
private String error;
public DragonexException() {}
public DragonexException(String error) {
this.error = error;
setHttpStatusCode(200);
}
public String getError() {
return error;
}
public void setError(String error) {
this.error = error;
}
@Override
public String getMessage() {
return error;
}
}
| mit |
Snickermicker/smarthome | bundles/storage/org.eclipse.smarthome.storage.mapdb/src/main/java/org/eclipse/smarthome/storage/mapdb/internal/PropertiesTypeAdapterFactory.java | 1340 | /**
* Copyright (c) 2014,2019 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.smarthome.storage.mapdb.internal;
import java.lang.reflect.Type;
import com.google.gson.Gson;
import com.google.gson.TypeAdapter;
import com.google.gson.TypeAdapterFactory;
import com.google.gson.reflect.TypeToken;
/**
* TypeAdapterFactory responsible for returning a new instance of {@link PropertiesTypeAdapter} if the given type
* matches Map<String, Object>
* or null otherwise.
*
* @author Ivan Iliev
*
*/
public class PropertiesTypeAdapterFactory implements TypeAdapterFactory {

    /**
     * Returns a {@link PropertiesTypeAdapter} when the requested type matches
     * {@code Map<String, Object>} (i.e. {@link PropertiesTypeAdapter#TOKEN}),
     * or {@code null} to let Gson fall through to the next factory.
     */
    // The cast is safe because TOKEN equality guarantees T is Map<String, Object>.
    @SuppressWarnings("unchecked")
    @Override
    public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> typeToken) {
        // Removed dead locals (type/rawType) and the "unused" suppression that
        // only existed to silence them.
        if (!PropertiesTypeAdapter.TOKEN.equals(typeToken)) {
            return null;
        }
        return (TypeAdapter<T>) new PropertiesTypeAdapter(gson);
    }
}
| epl-1.0 |
pravinw/openhab | bundles/binding/org.openhab.binding.zwave/src/main/java/org/openhab/binding/zwave/internal/protocol/AssociationGroup.java | 1068 | /**
* Copyright (c) 2010-2014, openHAB.org and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.zwave.internal.protocol;
import java.util.ArrayList;
import java.util.List;
import com.thoughtworks.xstream.annotations.XStreamAlias;
/**
* This class provides a storage class for zwave association groups
* within the node class. This is then serialised to XML.
*
* @author Chris Jackson
* @since 1.4.0
*
*/
/**
 * Storage class for a Z-Wave association group within the node class; the
 * instance is serialised to XML via XStream.
 *
 * @author Chris Jackson
 * @since 1.4.0
 */
@XStreamAlias("associationGroup")
public class AssociationGroup {
    // Field names are part of the persisted XStream XML format - do not rename.
    int Index;
    List<Integer> Nodes = new ArrayList<Integer>();

    public AssociationGroup(int index) {
        Index = index;
    }

    /** @return this group's index */
    public int getIndex() {
        return Index;
    }

    public void setIndex(int newIndex) {
        Index = newIndex;
    }

    /** Appends a node id to this association group. */
    public void addNode(int node) {
        Nodes.add(node);
    }

    /** @return the node ids associated with this group */
    public List<Integer> getNodes() {
        return Nodes;
    }
}
| epl-1.0 |
google/error-prone-javac | src/jdk.jshell/share/classes/jdk/jshell/MethodSnippet.java | 3239 | /*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.jshell;
import java.util.Collection;
import jdk.jshell.Key.MethodKey;
/**
* Snippet for a method definition.
* The Kind is {@link jdk.jshell.Snippet.Kind#METHOD}.
* <p>
* <code>MethodSnippet</code> is immutable: an access to
* any of its methods will always return the same result.
* and thus is thread-safe.
*
* @since 9
* @jls 8.4: MethodDeclaration.
*/
public class MethodSnippet extends DeclarationSnippet {

    final String signature;
    // Fully-qualified parameter types; set lazily via the package-private setter.
    // (The accessor names below keep their historical spelling for compatibility.)
    private String qualifiedParameterTypeNames;

    MethodSnippet(MethodKey key, String userSource, Wrap guts,
            String name, String signature, Wrap corralled,
            Collection<String> declareReferences, Collection<String> bodyReferences,
            DiagList syntheticDiags) {
        super(key, userSource, guts, name, SubKind.METHOD_SUBKIND, corralled,
                declareReferences, bodyReferences, syntheticDiags);
        this.signature = signature;
    }

    /**
     * A String representation of the parameter types of the method.
     * @return a comma separated list of user entered parameter types for the
     * method.
     */
    public String parameterTypes() {
        return key().parameterTypes();
    }

    /**
     * The full type signature of the method, including return type.
     * @return A String representation of the parameter and return types
     */
    public String signature() {
        return signature;
    }

    @Override
    public String toString() {
        return "MethodSnippet:" + name() + '/' + signature() + '-' + source();
    }

    /**** internal access ****/

    @Override
    MethodKey key() {
        return (MethodKey) super.key();
    }

    String qualifiedParameterTypes() {
        return qualifiedParameterTypeNames;
    }

    void setQualifiedParamaterTypes(String sig) {
        qualifiedParameterTypeNames = sig;
    }
}
| gpl-2.0 |
shengge/oschina-android-app | src/com/barcode/executor/AsyncTaskExecInterface.java | 764 | /*
* Copyright (C) 2012 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.barcode.executor;
import android.os.AsyncTask;
/**
 * Abstraction over {@link AsyncTask} execution, letting callers pick an
 * execution strategy (e.g. serial vs. parallel) per Android API level.
 */
public interface AsyncTaskExecInterface {
  /**
   * Executes {@code task} with the supplied arguments.
   *
   * @param task the task to run
   * @param args arguments forwarded to the task's execute method
   */
  <T> void execute(AsyncTask<T,?,?> task, T... args);
}
| gpl-2.0 |
jtux270/translate | ovirt/backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/config/IConfigUtilsInterface.java | 2221 | package org.ovirt.engine.core.common.config;
import java.util.Date;
import java.util.Map;
import org.ovirt.engine.core.compat.TimeSpan;
import org.ovirt.engine.core.compat.Version;
/**
* Config Utils Interface
*/
public interface IConfigUtilsInterface {
    /**
     * Gets the bool value stored under {@code name}.
     *
     * @param name
     *            The config key to look up.
     * @param defaultValue
     *            Textual default, presumably parsed when the key is absent -
     *            confirm exact fallback semantics with the implementation.
     * @return the resolved boolean value
     */
    boolean getBoolValue(String name, String defaultValue);

    /**
     * Get map value.
     *
     * @param name the name of of the config value
     * @param defaultValue default value
     * @return the resolved map of config entries
     */
    Map<String, String> getMapValue(String name, String defaultValue);

    /**
     * Gets the int value.
     *
     * @param name
     *            The name.
     * @param defaultValue
     *            The default value.
     * @return the resolved integer value
     */
    int getIntValue(String name, String defaultValue);

    /**
     * Gets the date time value.
     *
     * @param name
     *            The name.
     * @param defaultValue
     *            The default value.
     * @return the resolved date/time value
     */
    Date getDateTimeValue(String name, String defaultValue);

    /**
     * Gets the time span value.
     *
     * @param name
     *            The name.
     * @param defaultValue
     *            The default value.
     * @return the resolved time span value
     */
    TimeSpan getTimeSpanValue(String name, String defaultValue);

    /**
     * Gets the version value.
     *
     * @param name
     *            The name.
     * @param defaultValue
     *            The default value.
     * @return the resolved version value
     */
    Version getVersionValue(String name, String defaultValue);

    /**
     * Gets the path value.
     *
     * @param name
     *            The name.
     * @param defaultValue
     *            The default value.
     * @return the resolved path value
     */
    String getPathValue(String name, String defaultValue);

    /**
     * Sets the string value.
     *
     * @param name
     *            The name.
     * @param value
     *            The value.
     */
    void setStringValue(String name, String value);

    /**
     * Generic typed lookup for a config value at a specific version.
     *
     * @param configValue the config key enum
     * @param version the compatibility version to resolve against
     * @return the value cast to the caller's expected type
     */
    <T> T getValue(ConfigValues configValue, String version);
}
| gpl-3.0 |
tempbottle/h-store | tests/frontend/org/voltdb/network/VoltNetworkTest.java | 8487 | /* This file is part of VoltDB.
* Copyright (C) 2008-2010 VoltDB L.L.C.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.network;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.spi.SelectorProvider;
import java.util.Set;
import java.util.HashSet;
import junit.framework.*;
/**
 * Unit tests for {@code VoltNetwork}'s interest installation and callback
 * invocation, driven through hand-rolled mocks of {@link Selector},
 * {@link SelectionKey}, {@code VoltPort} and {@code InputHandler} so that no
 * real sockets are needed. The tests rely on the exact call ordering
 * (addToChangeList -> installInterests -> invokeCallbacks).
 */
public class VoltNetworkTest extends TestCase {
    /** Minimal VoltPort whose call() does nothing but clear m_running. */
    private static class MockVoltPort extends VoltPort {
        MockVoltPort(VoltNetwork vn, InputHandler handler) {
            super (vn, handler, handler.getExpectedOutgoingMessageSize(), "");
        }
        @Override
        public VoltPort call() {
            // Pretend the port ran and finished; no actual network work.
            m_running = false;
            return null;
        }
    }
    /** InputHandler stub: every callback is a no-op; sizes are fixed. */
    private static class MockInputHandler implements InputHandler {
        @Override
        public int getMaxRead() {
            // No-op for tests.
            return 0;
        }
        @Override
        public void handleMessage(ByteBuffer message, Connection c) {
            // No-op for tests.
        }
        @Override
        public ByteBuffer retrieveNextMessage(Connection c) {
            // No-op for tests.
            return null;
        }
        @Override
        public void started(Connection c) {
            // No-op for tests.
        }
        @Override
        public void starting(Connection c) {
            // No-op for tests.
        }
        @Override
        public void stopped(Connection c) {
            // No-op for tests.
        }
        @Override
        public void stopping(Connection c) {
            // No-op for tests.
        }
        @Override
        public int getExpectedOutgoingMessageSize() {
            // Arbitrary fixed size; also used as the VoltPort buffer size above.
            return 2048;
        }
        @Override
        public Runnable offBackPressure() {
            return new Runnable() {
                @Override
                public void run() {}
            };
        }
        @Override
        public Runnable onBackPressure() {
            return new Runnable() {
                @Override
                public void run() {}
            };
        }
        @Override
        public QueueMonitor writestreamMonitor() {
            return null;
        }
        @Override
        public long connectionId() {
            return 0;
        }
    }
    /**
     * SelectionKey stand-in that just records interest/ready ops in plain
     * fields so tests can read them back; always reports itself valid.
     */
    public static class MockSelectionKey extends SelectionKey {
        @Override
        public
        void cancel() {
            // No-op for tests.
        }
        @Override
        public SelectableChannel channel() {
            return null;
        }
        @Override
        public int interestOps() {
            return m_interestOps;
        }
        @Override
        public SelectionKey interestOps(int interestOps) {
            m_interestOps = interestOps;
            return this;
        }
        // Test hook: readyOps is normally read-only on a real key.
        public SelectionKey readyOps(int readyOps) {
            m_readyOps = readyOps;
            return this;
        }
        @Override
        public boolean isValid() {
            return true;
        }
        @Override
        public int readyOps() {
            return m_readyOps;
        }
        @Override
        public Selector selector() {
            return null;
        }
        public int m_interestOps;
        public int m_readyOps;
        public Object m_fakeAttachment;
    }
    /**
     * Selector stand-in: selectedKeys() always returns the single fake key
     * installed via setFakeKey(); every other operation is a stub.
     */
    public static class MockSelector extends Selector {
        public SelectionKey m_fakeKey = null;
        MockSelector() {
        }
        void setFakeKey(SelectionKey fakeKey) {
            m_fakeKey = fakeKey;
        }
        @Override
        public void close() throws IOException {
            // No-op for tests.
        }
        @Override
        public boolean isOpen() {
            // No-op for tests.
            return false;
        }
        @Override
        public Set<SelectionKey> keys() {
            // No-op for tests.
            return null;
        }
        @Override
        public SelectorProvider provider() {
            // No-op for tests.
            return null;
        }
        @Override
        public int select() throws IOException {
            // No-op for tests.
            return 0;
        }
        @Override
        public int select(long timeout) throws IOException {
            // No-op for tests.
            return 0;
        }
        @Override
        public int selectNow() throws IOException {
            // No-op for tests.
            return 0;
        }
        @Override
        public Set<SelectionKey> selectedKeys() {
            // Fresh set each call containing only the injected fake key.
            Set<SelectionKey> aset = new HashSet<SelectionKey>();
            aset.add(m_fakeKey);
            return aset;
        }
        @Override
        public Selector wakeup() {
            // No-op for tests.
            return null;
        }
    }
    /**
     * Verifies that installInterests() copies a port's desired ops onto its
     * selection key, and keeps doing so across repeated change-list cycles.
     */
    public void testInstallInterests() throws InterruptedException {
        new MockSelector();
        VoltNetwork vn = new VoltNetwork();
        MockVoltPort vp = new MockVoltPort(vn, new MockInputHandler());
        MockSelectionKey selectionKey = new MockSelectionKey();
        vp.m_selectionKey = selectionKey;
        // add the port to the changelist set and run install interests.
        // the ports desired ops should be set to the selection key.
        vn.addToChangeList(vp);
        vn.installInterests();
        assertEquals(selectionKey.interestOps(), vp.interestOps());
        // should be able to wash, rinse and repeat this a few times.
        // interesting as voltnetwork recycles some lists underneath
        // the covers.
        vp.setInterests(SelectionKey.OP_WRITE, 0);
        vn.addToChangeList(vp);
        vn.installInterests();
        assertEquals(selectionKey.interestOps(), SelectionKey.OP_WRITE);
        vp.setInterests(SelectionKey.OP_WRITE | SelectionKey.OP_READ, 0);
        vn.addToChangeList(vp);
        vn.installInterests();
        assertEquals(selectionKey.interestOps(), vp.interestOps());
    }
    /**
     * Verifies that invokeCallbacks() propagates a key's ready ops to the
     * attached port, both on the first pass and after interests change.
     */
    public void testInvokeCallbacks() throws InterruptedException{
        MockSelector selector = new MockSelector();
        VoltNetwork vn = new VoltNetwork(selector); // network with fake selector
        MockVoltPort vp = new MockVoltPort(vn, new MockInputHandler()); // implement abstract run()
        MockSelectionKey selectionKey = new MockSelectionKey(); // fake selection key
        // glue the key, the selector and the port together.
        selectionKey.interestOps(SelectionKey.OP_WRITE);
        selector.setFakeKey(selectionKey);
        vp.m_selectionKey = selectionKey;
        selectionKey.attach(vp);
        selectionKey.readyOps(SelectionKey.OP_WRITE);
        // invoke call backs and see that the volt port has the expected
        // selected operations.
        vn.invokeCallbacks();
        assertEquals(SelectionKey.OP_WRITE, vp.readyOps());
        // and another time through, should have the new interests selected
        vp.setInterests(SelectionKey.OP_ACCEPT, 0);
        selectionKey.readyOps(SelectionKey.OP_ACCEPT);
        vn.installInterests();
        vn.invokeCallbacks();
        vn.shutdown();
        assertEquals(SelectionKey.OP_ACCEPT, vp.readyOps());
    }
}
| gpl-3.0 |
apavlo/h-store | src/frontend/org/voltdb/plannodes/AbstractPlanNode.java | 27149 | /* This file is part of VoltDB.
* Copyright (C) 2008-2010 VoltDB L.L.C.
*
* VoltDB is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* VoltDB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
package org.voltdb.plannodes;
import java.util.*;
import java.util.Map.Entry;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.JSONString;
import org.json.JSONStringer;
import org.json.JSONException;
import org.voltdb.catalog.Cluster;
import org.voltdb.catalog.Database;
import org.voltdb.compiler.DatabaseEstimates;
import org.voltdb.compiler.ScalarValueHints;
import org.voltdb.planner.PlanAssembler;
import org.voltdb.planner.PlanColumn;
import org.voltdb.planner.PlanStatistics;
import org.voltdb.planner.PlannerContext;
import org.voltdb.planner.StatsField;
import org.voltdb.types.*;
import edu.brown.plannodes.PlanNodeUtil;
import edu.brown.utils.ClassUtil;
/**
 * Base class for every node in a VoltDB query plan graph. A node carries
 * parent/child links (the plan is a DAG), an ordered list of output-column
 * GUIDs resolved through the shared {@link PlannerContext}, and optionally a
 * set of "inline" nodes that piggy-back on this node's execution. Supports
 * cloning, JSON (de)serialization, DOT export, dominator computation and
 * cost-estimation traversal.
 */
public abstract class AbstractPlanNode implements JSONString, Cloneable, Comparable<AbstractPlanNode> {
    /** Member names used for JSON serialization (toJSONString/fromJSONObject). */
    public enum Members {
        ID,
        PLAN_NODE_TYPE,
        INLINE_NODES,
        CHILDREN_IDS,
        PARENT_IDS,
        OUTPUT_COLUMNS,
        IS_INLINE,
    }
    // Unique plan-node id; -1 until set by the constructor, overrideId() or JSON load.
    private int m_id = -1;
    protected List<AbstractPlanNode> m_children = new ArrayList<AbstractPlanNode>();
    protected List<AbstractPlanNode> m_parents = new ArrayList<AbstractPlanNode>();
    // Filled in by calculateDominators(); nodes that precede this one on every root-to-here path.
    protected HashSet<AbstractPlanNode> m_dominators = new HashSet<AbstractPlanNode>();
    // PAVLO: We need this figure out how to reconstruct the tree
    protected List<Integer> m_childrenIds = new ArrayList<Integer>();
    protected List<Integer> m_parentIds = new ArrayList<Integer>();
    // TODO: planner accesses this data directly. Should be protected.
    protected ArrayList<Integer> m_outputColumns = new ArrayList<Integer>();
    protected List<ScalarValueHints> m_outputColumnHints = new ArrayList<ScalarValueHints>();
    protected long m_estimatedOutputTupleCount = 0;
    /**
     * Some PlanNodes can take advantage of inline PlanNodes to perform
     * certain additional tasks while performing their main operation, rather than
     * having to re-read tuples from intermediate results
     */
    protected Map<PlanNodeType, AbstractPlanNode> m_inlineNodes = new HashMap<PlanNodeType, AbstractPlanNode>();
    protected boolean m_isInline = false;
    // Shared registry mapping column GUIDs to PlanColumn instances.
    protected final PlannerContext m_context;
    /**
     * Instantiates a new plan node.
     *
     * @param id the id
     */
    protected AbstractPlanNode(PlannerContext context, int id) {
        assert(context != null);
        assert(id != 0);
        m_context = context;
        m_id = id;
    }
    // NOTE(review): equals() is overridden without a matching hashCode(),
    // so equal nodes may land in different hash buckets — TODO confirm nodes
    // are never used as HashMap/HashSet keys with value-equality expectations.
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof AbstractPlanNode) {
            AbstractPlanNode other = (AbstractPlanNode)obj;
            return (m_id == other.m_id &&
                    m_isInline == other.m_isInline &&
                    this.getPlanNodeType() == other.getPlanNodeType() &&
                    this.getOutputColumnGUIDs().equals(other.getOutputColumnGUIDs()));
        }
        return (false);
    }
    protected final int getId() {
        return (m_id);
    }
    /** Deep clone including both children and inline nodes. */
    @Override
    public final Object clone() throws CloneNotSupportedException {
        return (this.clone(true, true));
    }
    /**
     * Clone with control over recursion: the clone always gets a fresh id and
     * fresh (copied) column/hint lists; children and inline nodes are cloned
     * only when requested.
     * NOTE(review): the recursive calls pass (clone_inline, clone_children) —
     * i.e. the flags appear swapped relative to this method's parameter order;
     * harmless when both flags are equal, TODO confirm intent otherwise.
     *
     * @param clone_children whether to recursively clone child nodes
     * @param clone_inline   whether to recursively clone inline nodes
     */
    public Object clone(boolean clone_children, boolean clone_inline) throws CloneNotSupportedException {
        AbstractPlanNode clone = (AbstractPlanNode)super.clone();
        clone.overrideId(PlanAssembler.getNextPlanNodeId());
        clone.m_children = new ArrayList<AbstractPlanNode>();
        clone.m_parents = new ArrayList<AbstractPlanNode>();
        clone.m_dominators = new HashSet<AbstractPlanNode>(m_dominators);
        clone.m_childrenIds = new ArrayList<Integer>();
        clone.m_outputColumns = new ArrayList<Integer>(m_outputColumns);
        clone.m_outputColumnHints = new ArrayList<ScalarValueHints>(m_outputColumnHints);
        clone.m_inlineNodes = new HashMap<PlanNodeType, AbstractPlanNode>();
        // Clone Children
        if (clone_children) {
            // clone.m_children.clear();
            // clone.m_childrenIds.clear();
            for (AbstractPlanNode child_node : this.m_children) {
                AbstractPlanNode child_clone = (AbstractPlanNode)child_node.clone(clone_inline, clone_children);
                child_clone.m_parents.clear();
                child_clone.m_parentIds.clear();
                child_clone.m_parents.add(clone);
                child_clone.m_parentIds.add(clone.m_id);
                clone.m_children.add(child_clone);
                clone.m_childrenIds.add(child_clone.m_id);
            } // FOR
        }
        // Clone Inlines
        if (clone_inline) {
            // clone.m_inlineNodes.clear();
            for (Entry<PlanNodeType, AbstractPlanNode> e : this.m_inlineNodes.entrySet()) {
                AbstractPlanNode inline_clone = (AbstractPlanNode)e.getValue().clone(clone_inline, clone_children);
                clone.m_inlineNodes.put(e.getKey(), inline_clone);
            } // FOR
        }
        return (clone);
    }
    // Forcibly replaces this node's id (used by clone and plan stitching).
    public void overrideId(int newId) {
        m_id = newId;
    }
    /**
     * Create a PlanNode that clones the configuration information but
     * is not inserted in the plan graph and has a unique plan node id.
     */
    protected void produceCopyForTransformation(AbstractPlanNode copy) {
        for (Integer colGuid : m_outputColumns) {
            copy.m_outputColumns.add(colGuid);
        }
        copy.m_outputColumnHints.addAll(m_outputColumnHints);
        copy.m_estimatedOutputTupleCount = m_estimatedOutputTupleCount;
        // clone is not yet implemented for every node.
        assert(m_inlineNodes.size() == 0);
        assert(m_isInline == false);
        // the api requires the copy is not (yet) connected
        assert (copy.m_parents.size() == 0);
        assert (copy.m_children.size() == 0);
    }
    /** @return this node's concrete plan-node type (one per subclass). */
    public abstract PlanNodeType getPlanNodeType();
    /** Replaces this node's output column GUID list with the given GUIDs. */
    public void setOutputColumns(Collection<Integer> col_guids) {
        this.m_outputColumns.clear();
        this.m_outputColumns.addAll(col_guids);
    }
    /**
     * Recursively rebuilds output columns bottom-up: children first, then this
     * node (via createOutputColumns), then each inline node except inline
     * index scans. Columns no longer referenced are freed in the context.
     *
     * @return always true (failures surface via asserts)
     */
    public boolean updateOutputColumns(Database db) {
        //System.out.println("updateOutputColumns Node type: " + this.getPlanNodeType() + " # of inline nodes: " + this.getInlinePlanNodes().size());
        ArrayList<Integer> childCols = new ArrayList<Integer>();
        for (AbstractPlanNode child : m_children) {
            boolean result = child.updateOutputColumns(db);
            assert(result);
            // print child inline columns
            // for (Integer out : child.m_outputColumns)
            // {
            // System.out.println(m_context.get(out).displayName());
            // }
            childCols.addAll(child.m_outputColumns);
        }
        ArrayList<Integer> new_output_cols = new ArrayList<Integer>();
        new_output_cols = createOutputColumns(db, childCols);
        for (AbstractPlanNode child : m_inlineNodes.values()) {
            if (child instanceof IndexScanPlanNode)
                continue;
            new_output_cols = child.createOutputColumns(db, new_output_cols);
        }
        // Before we wipe out the old column list, free any PlanColumns that
        // aren't getting reused
        for (Integer col : m_outputColumns)
        {
            if (!new_output_cols.contains(col))
            {
                m_context.freeColumn(col);
            }
        }
        m_outputColumns = new_output_cols;
        return true;
    }
    /** By default, a plan node does not alter its input schema */
    @SuppressWarnings("unchecked")
    protected ArrayList<Integer> createOutputColumns(Database db, ArrayList<Integer> input) {
        return (ArrayList<Integer>)input.clone();
    }
    /**
     * Get number of output columns for this node
     * @return the size of the output-column GUID list
     */
    public int getOutputColumnGUIDCount() {
        return (this.m_outputColumns.size());
    }
    /**
     * Return the PlanColumn GUID at the given offset
     * @param idx
     * @return the GUID at position idx
     */
    public int getOutputColumnGUID(int idx) {
        return (this.m_outputColumns.get(idx));
    }
    /**
     * Get the list of the Output PlanColumn GUIDs
     * @return the live (mutable) internal list — callers must not corrupt it
     */
    public List<Integer> getOutputColumnGUIDs() {
        return (this.m_outputColumns);
    }
    /**
     * Finds the output PlanColumn originating from the given table whose
     * origin column matches either the given name or alias.
     *
     * @throws RuntimeException if no output column matches
     */
    public PlanColumn findMatchingOutputColumn(String tableName,
                                              String columnName,
                                              String columnAlias)
    {
        boolean found = false;
        PlanColumn retval = null;
        for (Integer colguid : m_outputColumns) {
            PlanColumn plancol = m_context.get(colguid);
            if ((plancol.originTableName().equals(tableName)) &&
                ((plancol.originColumnName().equals(columnName)) ||
                 (plancol.originColumnName().equals(columnAlias))))
            {
                found = true;
                retval = plancol;
                break;
            }
        }
        if (!found) {
            assert(found) : "Found no candidate output column.";
            throw new RuntimeException("Found no candidate output column.");
        }
        return retval;
    }
    /**
     * Recursively checks graph invariants: children must list this node as a
     * parent, and inline nodes must be detached (no parents/children/nesting)
     * and flagged inline.
     *
     * @throws Exception describing the first violated invariant
     */
    public void validate() throws Exception {
        //
        // Make sure our children have us listed as their parents
        //
        for (AbstractPlanNode child : m_children) {
            if (!child.m_parents.contains(this)) {
                throw new Exception("ERROR: The child PlanNode '" + child.toString() + "' does not " +
                                    "have its parent PlanNode '" + toString() + "' in its parents list");
            }
            child.validate();
        }
        //
        // Inline PlanNodes
        //
        if (!m_inlineNodes.isEmpty()) {
            for (AbstractPlanNode node : m_inlineNodes.values()) {
                //
                // Make sure that we're not attached to some kind of tree somewhere...
                //
                if (!node.m_children.isEmpty()) {
                    throw new Exception("ERROR: The inline PlanNode '" + node + "' has children inside of PlanNode '" + this + "'");
                } else if (!node.m_parents.isEmpty()) {
                    throw new Exception("ERROR: The inline PlanNode '" + node + "' has parents inside of PlanNode '" + this + "'");
                } else if (!node.isInline()) {
                    throw new Exception("ERROR: The inline PlanNode '" + node + "' was not marked as inline for PlanNode '" + this + "'");
                } else if (!node.getInlinePlanNodes().isEmpty()) {
                    throw new Exception("ERROR: The inline PlanNode '" + node + "' has its own inline PlanNodes inside of PlanNode '" + this + "'");
                }
                node.validate();
            }
        }
    }
    /** e.g. "SEQSCAN[#03]" — node type plus zero-padded id. */
    @Override
    public final String toString() {
        return String.format("%s[#%02d]", getPlanNodeType().toString(), m_id);
    }
    /**
     * Recursively accumulates child estimates (hints and tuple counts) into
     * this node and records TUPLES_READ statistics.
     *
     * @return always true (failures surface via asserts)
     */
    public boolean computeEstimatesRecursively(PlanStatistics stats, Cluster cluster, Database db, DatabaseEstimates estimates, ScalarValueHints[] paramHints) {
        assert(estimates != null);
        m_outputColumnHints.clear();
        m_estimatedOutputTupleCount = 0;
        // recursively compute and collect stats from children
        for (AbstractPlanNode child : m_children) {
            boolean result = child.computeEstimatesRecursively(stats, cluster, db, estimates, paramHints);
            assert(result);
            m_outputColumnHints.addAll(child.m_outputColumnHints);
            m_estimatedOutputTupleCount += child.m_estimatedOutputTupleCount;
            stats.incrementStatistic(0, StatsField.TUPLES_READ, m_estimatedOutputTupleCount);
        }
        return true;
    }
    /**
     * Gets the id.
     *
     * @return the id
     */
    public Integer getPlanNodeId() {
        return m_id;
    }
    /**
     * Add a plan node as a child of this node and link this node as it's parent.
     * @param child The node to add.
     */
    public void addAndLinkChild(AbstractPlanNode child) {
        m_children.add(child);
        child.m_parents.add(this);
    }
    /** Remove child from this node.
     * @param child to remove.
     */
    public void unlinkChild(AbstractPlanNode child) {
        m_children.remove(child);
        child.m_parents.remove(this);
    }
    /**
     * Gets the children.
     * @return the number of children
     */
    public int getChildPlanNodeCount() {
        return m_children.size();
    }
    /**
     * @param index
     * @return The child node of this node at a given index or null if none exists.
     */
    public AbstractPlanNode getChild(int index) {
        return m_children.get(index);
    }
    /**
     * Gets all of the children of this node
     * @return an unmodifiable view of the child list
     */
    public List<AbstractPlanNode> getChildren() {
        return (Collections.unmodifiableList(m_children));
    }
    /** Drops all child links and cached child ids (one-directional: children keep their parent links). */
    public void clearChildren() {
        m_children.clear();
        m_childrenIds.clear();
    }
    public boolean hasChild(AbstractPlanNode receive) {
        return m_children.contains(receive);
    }
    /**
     * Gets the number of parents.
     * @return the parents
     */
    public int getParentPlanNodeCount() {
        return m_parents.size();
    }
    public AbstractPlanNode getParent(int index) {
        return m_parents.get(index);
    }
    /**
     * Gets all of the parents of this node
     * @return an unmodifiable view of the parent list
     */
    public List<AbstractPlanNode> getParents() {
        return (Collections.unmodifiableList(m_parents));
    }
    /** Drops all parent links and cached parent ids (one-directional: parents keep their child links). */
    public void clearParents() {
        m_parents.clear();
        m_parentIds.clear();
    }
    /** Fully detaches this node: unlinks it from every parent and child, both directions. */
    public void removeFromGraph() {
        for (AbstractPlanNode parent : m_parents)
            parent.m_children.remove(this);
        for (AbstractPlanNode child : m_children)
            child.m_parents.remove(this);
        m_parents.clear();
        m_children.clear();
    }
    /** Interject the provided node between this node and this node's current children */
    public void addIntermediary(AbstractPlanNode node) {
        // transfer this node's children to node
        Iterator<AbstractPlanNode> it = m_children.iterator();
        while (it.hasNext()) {
            AbstractPlanNode child = it.next();
            it.remove(); // remove this.child from m_children
            assert(child.getParentPlanNodeCount() == 1) :
                String.format("Expected %s to have only one parent but it has %s", child, child.getParents());
            child.clearParents(); // and reset child's parents list
            node.addAndLinkChild(child); // set node.child and child.parent
        }
        // and add node to this node's children
        assert(m_children.size() == 0);
        addAndLinkChild(node);
    }
    /**
     * @return The map of inlined nodes.
     */
    public Map<PlanNodeType, AbstractPlanNode> getInlinePlanNodes() {
        return m_inlineNodes;
    }
    public int getInlinePlanNodeCount() {
        return (m_inlineNodes.size());
    }
    /**
     * Registers the node as inline under its type, marking it inline and
     * detaching it from any parents/children (required by validate()).
     * @param node
     */
    public void addInlinePlanNode(AbstractPlanNode node) {
        node.m_isInline = true;
        m_inlineNodes.put(node.getPlanNodeType(), node);
        node.m_children.clear();
        node.m_parents.clear();
    }
    /**
     * Removes the inline node of the given type, if present.
     * @param type
     */
    public void removeInlinePlanNode(PlanNodeType type) {
        if (m_inlineNodes.containsKey(type)) {
            m_inlineNodes.remove(type);
        }
    }
    /**
     * Looks up the inline node of the given type; the unchecked cast is the
     * caller's responsibility to match T to the type key.
     * @param type
     * @return An inlined node of the given type or null if none.
     */
    @SuppressWarnings("unchecked")
    public <T extends AbstractPlanNode> T getInlinePlanNode(PlanNodeType type) {
        return (T)m_inlineNodes.get(type);
    }
    /**
     * Return all of the inline AbstractPlanNodes with the same class
     * @param clazz
     * @return inline nodes whose class hierarchy includes clazz
     */
    @SuppressWarnings("unchecked")
    public <T extends AbstractPlanNode> Collection<T> getInlinePlanNodes(Class<T> clazz) {
        Set<T> ret = new HashSet<T>();
        for (AbstractPlanNode inline : this.m_inlineNodes.values()) {
            if (ClassUtil.getSuperClasses(inline.getClass()).contains(clazz)) {
                ret.add((T)inline);
            }
        } // FOR
        return (ret);
    }
    /**
     *
     * @return Is this node inlined in another node.
     */
    public Boolean isInline() {
        return m_isInline;
    }
    /**
     * @return the dominator list for a node
     */
    public HashSet<AbstractPlanNode> getDominators() {
        return m_dominators;
    }
    /**
     * Initialize a hashset for each node containing that node's dominators
     * (the set of predecessors that *always* precede this node in a traversal
     * of the plan-graph in reverse-execution order (from root to leaves)).
     */
    public void calculateDominators() {
        HashSet<AbstractPlanNode> visited = new HashSet<AbstractPlanNode>();
        calculateDominators_recurse(visited);
    }
    // A node dominates this one iff it appears in EVERY parent's dominator set;
    // counted via the union map below. Asserts the graph is acyclic.
    private void calculateDominators_recurse(HashSet<AbstractPlanNode> visited) {
        if (visited.contains(this)) {
            assert(false): "do not expect loops in plangraph.";
            return;
        }
        visited.add(this);
        m_dominators.clear();
        m_dominators.add(this);
        // find nodes that are in every parent's dominator set.
        HashMap<AbstractPlanNode, Integer> union = new HashMap<AbstractPlanNode, Integer>();
        for (AbstractPlanNode n : m_parents) {
            for (AbstractPlanNode d : n.getDominators()) {
                if (union.containsKey(d))
                    union.put(d, union.get(d) + 1);
                else
                    union.put(d, 1);
            }
        }
        for (AbstractPlanNode pd : union.keySet() ) {
            if (union.get(pd) == m_parents.size())
                m_dominators.add(pd);
        }
        for (AbstractPlanNode n : m_children)
            n.calculateDominators_recurse(visited);
    }
    /**
     * @param type plan node type to search for
     * @return a list of nodes that are eventual successors of this node of the desired type
     */
    public List<AbstractPlanNode> findAllNodesOfType(PlanNodeType type) {
        HashSet<AbstractPlanNode> visited = new HashSet<AbstractPlanNode>();
        ArrayList<AbstractPlanNode> collected = new ArrayList<AbstractPlanNode>();
        findAllNodesOfType_recurse(type, collected, visited);
        return collected;
    }
    // Depth-first collection helper for findAllNodesOfType; asserts acyclicity.
    public void findAllNodesOfType_recurse(PlanNodeType type,ArrayList<AbstractPlanNode> collected,
                                           HashSet<AbstractPlanNode> visited)
    {
        if (visited.contains(this)) {
            assert(false): "do not expect loops in plangraph.";
            return;
        }
        visited.add(this);
        if (getPlanNodeType() == type)
            collected.add(this);
        for (AbstractPlanNode n : m_children)
            n.findAllNodesOfType_recurse(type, collected, visited);
    }
    /**
     * Frees every PlanColumn GUID reachable from this node except those in skip.
     * @param skip GUIDs that must stay allocated
     */
    public void freeColumns(Set<Integer> skip) {
        Collection<Integer> guids = PlanNodeUtil.getAllPlanColumnGuids(this);
        guids.removeAll(skip);
        for (Integer guid : guids) {
            m_context.freeColumn(guid);
        } // FOR
    }
    /**
     * Orders nodes by recursively comparing children matched by id, then
     * inline nodes matched by id, then by id difference.
     * NOTE(review): the final "m_id - other.m_id" can overflow for extreme
     * ids (Integer.compare would be safe) — presumably ids stay small here.
     */
    @Override
    public int compareTo(AbstractPlanNode other) {
        int diff = 0;
        // compare child nodes
        HashMap<Integer, AbstractPlanNode> nodesById = new HashMap<Integer, AbstractPlanNode>();
        for (AbstractPlanNode node : m_children)
            nodesById.put(node.getPlanNodeId(), node);
        for (AbstractPlanNode node : other.m_children) {
            AbstractPlanNode myNode = nodesById.get(node.getPlanNodeId());
            diff = myNode.compareTo(node);
            if (diff != 0) return diff;
        }
        // compare inline nodes
        HashMap<Integer, Entry<PlanNodeType, AbstractPlanNode>> inlineNodesById =
            new HashMap<Integer, Entry<PlanNodeType, AbstractPlanNode>>();
        for (Entry<PlanNodeType, AbstractPlanNode> e : m_inlineNodes.entrySet())
            inlineNodesById.put(e.getValue().getPlanNodeId(), e);
        for (Entry<PlanNodeType, AbstractPlanNode> e : other.m_inlineNodes.entrySet()) {
            Entry<PlanNodeType, AbstractPlanNode> myE = inlineNodesById.get(e.getValue().getPlanNodeId());
            if (myE.getKey() != e.getKey()) return -1;
            diff = myE.getValue().compareTo(e.getValue());
            if (diff != 0) return diff;
        }
        diff = m_id - other.m_id;
        return diff;
    }
    // produce a file that can imported into graphviz for easier visualization
    public String toDOTString() {
        StringBuilder sb = new StringBuilder();
        // id [label=id: value-type <value-type-attributes>];
        // id -> child_id;
        // id -> child_id;
        sb.append(m_id).append(" [label=\"").append(m_id).append(": ").append(getPlanNodeType()).append("\" ");
        sb.append(getValueTypeDotString(this));
        sb.append("];\n");
        for (AbstractPlanNode node : m_inlineNodes.values()) {
            sb.append(m_id).append(" -> ").append(node.getPlanNodeId().intValue()).append(";\n");
            sb.append(node.toDOTString());
        }
        for (AbstractPlanNode node : m_children) {
            sb.append(m_id).append(" -> ").append(node.getPlanNodeId().intValue()).append(";\n");
        }
        return sb.toString();
    }
    // maybe not worth polluting
    // DOT styling: inline nodes are red, SEND/RECEIVE are black, others default.
    private String getValueTypeDotString(AbstractPlanNode pn) {
        PlanNodeType pnt = pn.getPlanNodeType();
        if (pn.isInline()) {
            return "fontcolor=\"white\" style=\"filled\" fillcolor=\"red\"";
        }
        if (pnt == PlanNodeType.SEND || pnt == PlanNodeType.RECEIVE) {
            return "fontcolor=\"white\" style=\"filled\" fillcolor=\"black\"";
        }
        return "";
    }
    /** Serializes this node (and inline nodes) to a JSON object string. */
    @Override
    public String toJSONString() {
        JSONStringer stringer = new JSONStringer();
        try
        {
            stringer.object();
            toJSONString(stringer);
            stringer.endObject();
        }
        catch (JSONException e)
        {
            throw new RuntimeException("Failed to serialize " + this, e);
            // System.exit(-1);
        }
        return stringer.toString();
    }
    /**
     * Writes this node's members into an already-open JSON object. Inline
     * nodes are emitted sorted by type so output is deterministic.
     */
    public void toJSONString(JSONStringer stringer) throws JSONException {
        stringer.key(Members.ID.name()).value(m_id);
        stringer.key(Members.PLAN_NODE_TYPE.name()).value(getPlanNodeType().toString());
        stringer.key(Members.IS_INLINE.name()).value(m_isInline);
        stringer.key(Members.INLINE_NODES.name()).array();
        PlanNodeType types[] = new PlanNodeType[m_inlineNodes.size()];
        int i = 0;
        for (PlanNodeType type : m_inlineNodes.keySet()) {
            types[i++] = type;
        }
        Arrays.sort(types);
        for (PlanNodeType type : types) {
            AbstractPlanNode node = m_inlineNodes.get(type);
            assert(node != null);
            assert(node instanceof JSONString);
            stringer.value(node);
        }
        /*for (Map.Entry<PlanNodeType, AbstractPlanNode> entry : m_inlineNodes.entrySet()) {
            assert (entry.getValue() instanceof JSONString);
            stringer.value(entry.getValue());
        }*/
        stringer.endArray();
        stringer.key(Members.CHILDREN_IDS.name()).array();
        for (AbstractPlanNode node : m_children) {
            stringer.value(node.getPlanNodeId().intValue());
        }
        stringer.endArray().key(Members.PARENT_IDS.name()).array();
        for (AbstractPlanNode node : m_parents) {
            stringer.value(node.getPlanNodeId().intValue());
        }
        stringer.endArray(); //end inlineNodes
        stringer.key(Members.OUTPUT_COLUMNS.name());
        stringer.array();
        for (int col = 0; col < m_outputColumns.size(); col++) {
            PlanColumn column = m_context.get(m_outputColumns.get(col));
            column.toJSONString(stringer);
        }
        stringer.endArray();
    }
    /** Subclass hook: restore subclass-specific members from JSON. */
    abstract protected void loadFromJSONObject(JSONObject obj, Database db) throws JSONException;
    /**
     * Reconstructs a node (and its inline nodes) from JSON. Children/parents
     * are restored only as id lists (m_childrenIds / m_parentIds); actual
     * links must be stitched by the caller. Returns null on reflection
     * failure (after printing the stack trace).
     */
    public static AbstractPlanNode fromJSONObject(JSONObject obj, Database db) throws JSONException {
        PlanNodeType pnt = PlanNodeType.valueOf(obj.getString(Members.PLAN_NODE_TYPE.name()));
        AbstractPlanNode node = null;
        try {
            node = (AbstractPlanNode)ClassUtil.newInstance(pnt.getPlanNodeClass(),
                                                           new Object[]{ PlannerContext.singleton(), 1 },
                                                           new Class[]{ PlannerContext.class, Integer.class });
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
        node.m_id = obj.getInt(Members.ID.name());
        node.m_isInline = obj.getBoolean(Members.IS_INLINE.name());
        JSONArray inlineNodes = obj.getJSONArray(Members.INLINE_NODES.name());
        for (int ii = 0; ii < inlineNodes.length(); ii++) {
            JSONObject inobj = inlineNodes.getJSONObject(ii);
            AbstractPlanNode inlineNode = AbstractPlanNode.fromJSONObject(inobj, db);
            node.m_inlineNodes.put(inlineNode.getPlanNodeType(), inlineNode);
        }
        JSONArray childrenIds = obj.getJSONArray(Members.CHILDREN_IDS.name());
        for (int ii = 0; ii < childrenIds.length(); ii++) {
            node.m_childrenIds.add(childrenIds.getInt(ii));
        }
        JSONArray parentIds = obj.getJSONArray(Members.PARENT_IDS.name());
        for (int ii = 0; ii < parentIds.length(); ii++) {
            node.m_parentIds.add(parentIds.getInt(ii));
        }
        JSONArray outputColumns = obj.getJSONArray(Members.OUTPUT_COLUMNS.name());
        for (int ii = 0; ii < outputColumns.length(); ii++) {
            JSONObject jsonObject = outputColumns.getJSONObject(ii);
            PlanColumn column = PlanColumn.fromJSONObject(jsonObject, db);
            assert(column != null);
            node.m_outputColumns.add(column.guid());
            // System.err.println(String.format("[%02d] %s => %s", ii, node, column));
        }
        node.loadFromJSONObject(obj, db);
        return node;
    }
}
| gpl-3.0 |
sanyaade-g2g-repos/droidar | droidar/src/gui/RadarView.java | 9228 | package gui;
import gl.GLCamera;
import gl.HasColor;
import gl.HasPosition;
import gl.scenegraph.Shape;
import util.EfficientList;
import util.Vec;
import worldData.Obj;
import worldData.RenderableEntity;
import worldData.UpdateTimer;
import worldData.Updateable;
import worldData.World;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.MotionEvent;
public class RadarView extends SimpleCustomView implements Updateable {
private static final int DEFAULT_VIEW_SIZE = 100;
private static final int MARGIN = 4;
private static final float DEFAULT_UPDATE_SPEED = 0.1f;
private static final int DEFAULT_RADAR_MAX_DISTANCE = 200;
private static final int MIN_DISP_RADIUS = 20;
private Paint paint;
private Paint linePaint;
private int minimumSize = DEFAULT_VIEW_SIZE;
private int mySize;
private int myHalfSize;
private Vec myRotVec;
private int myDisplRadius = DEFAULT_RADAR_MAX_DISTANCE;
private boolean displayOutOfRadarArea = true;
private boolean rotateNeedle = false;
private EfficientList<RenderableEntity> items;
private GLCamera myCamera;
private Bitmap background;
private double myRotation;
private UpdateTimer myTimer;
private float myUpdateSpeed = DEFAULT_UPDATE_SPEED;
private double myTouchScaleFactor = 5;
private String debug;
public RadarView(Context context, GLCamera camera,
int minimumRadarViewSize, int displRadiusInMeters,
float updateSpeed, boolean rotateNeedle,
boolean displayOutOfRadarArea, EfficientList<RenderableEntity> items) {
super(context);
init(minimumRadarViewSize);
myCamera = camera;
setRotateNeedle(rotateNeedle);
setRadarDisplRadius(displRadiusInMeters);
setDisplayOutOfRadarArea(displayOutOfRadarArea);
setItems(items);
setUpdateSpeed(updateSpeed);
}
public void setUpdateSpeed(float myUpdateSpeed) {
this.myUpdateSpeed = myUpdateSpeed;
}
public void setItems(EfficientList<RenderableEntity> items) {
this.items = items;
}
public void setRotateNeedle(boolean rotateNeedle) {
this.rotateNeedle = rotateNeedle;
setRotation(myRotation);
}
public void setDisplayOutOfRadarArea(boolean displayOutOfRadarArea) {
this.displayOutOfRadarArea = displayOutOfRadarArea;
}
public void setRadarDisplRadius(int displRadiusInMeters) {
this.myDisplRadius = displRadiusInMeters;
}
@Deprecated
public RadarView(Context context, AttributeSet attrs) {
super(context, attrs);
init(DEFAULT_VIEW_SIZE);
}
/**
* @param myTargetActivity
* @param radarViewSize
* size of the radar view in pixels (e.g. pass
* {@link Setup#getScreenWidth()/3}
* @param camera
* @param items
* e.g. the complete virtual {@link World} (then use
* {@link World#getAllItems()})
*/
public RadarView(Activity myTargetActivity, int radarViewSize,
GLCamera camera, EfficientList<RenderableEntity> items) {
this(myTargetActivity, camera, radarViewSize,
DEFAULT_RADAR_MAX_DISTANCE, DEFAULT_UPDATE_SPEED, false, true,
items);
}
private void init(int minimumViewSize) {
myTimer = new UpdateTimer(myUpdateSpeed, null);
paint = new Paint();
paint.setAntiAlias(true);
paint.setColor(Color.WHITE);
linePaint = new Paint();
linePaint.setStyle(Paint.Style.STROKE);
linePaint.setStrokeWidth(2);
this.minimumSize = minimumViewSize;
setSize(minimumViewSize);
if (isInEditMode())
loadDemoValues();
}
/**
 * Resizes the (square) radar, clamped to the minimum size, and regenerates the
 * cached background bitmap for the new dimensions.
 *
 * @param viewSize requested edge length in pixels
 */
public void setSize(int viewSize) {
if (viewSize < minimumSize)
viewSize = minimumSize;
mySize = viewSize;
myHalfSize = viewSize / 2;
// Needle vector length depends on myHalfSize, so recompute it:
setRotation(myRotation);
// Invalidate and eagerly rebuild the cached background bitmap:
background = null;
getBackGround();
}
/**
 * This method will only be called when the view is displayed in the eclipse
 * xml layout editor. It fills the radar with a fake camera position and a few
 * random-colored objects so the preview is not empty.
 */
private void loadDemoValues() {
setRotateNeedle(true);
setRotation(45);
setDisplayedAreaSize(200);
setElementsOutOfRadarAreaVisible(true);
myCamera = new GLCamera();
myCamera.setPosition(new Vec(40, 40, 0));
items = new EfficientList<RenderableEntity>();
// One item beyond the 200m radius to demo rim-clamping, three inside:
items.add(newObj(40, 500));
items.add(newObj(10, 10));
items.add(newObj(200, 200));
items.add(newObj(200, -200));
}
/**
 * Builds a demo entity at the given world position with a random color
 * (used only by {@link #loadDemoValues()}).
 *
 * @param x world x coordinate in meters
 * @param y world y coordinate in meters
 * @return the demo entity
 */
private RenderableEntity newObj(int x, int y) {
Obj o = new Obj();
Shape s = new Shape(gl.Color.getRandomRGBColor());
s.setPosition(new Vec(x, y, 0));
o.setComp(s);
return o;
}
/**
 * Shows or hides items that lie outside the displayed radar radius. Delegates
 * to {@link #setDisplayOutOfRadarArea(boolean)} so both setters always write
 * the flag the same way.
 *
 * @param b true to clamp far-away items onto the radar rim instead of hiding them
 */
public void setElementsOutOfRadarAreaVisible(boolean b) {
    setDisplayOutOfRadarArea(b);
}
/**
 * Sets the radius of the displayed world area. Delegates to
 * {@link #setRadarDisplRadius(int)} so both setters always write the field the
 * same way.
 *
 * @param areaRadiusInMeters radius in meters
 */
public void setDisplayedAreaSize(int areaRadiusInMeters) {
    setRadarDisplRadius(areaRadiusInMeters);
}
/**
 * Sets the radar rotation in degrees, recomputes the needle vector and asks
 * for a redraw. Safe to call from a non-UI thread (uses postInvalidate).
 *
 * @param rotation rotation in degrees
 */
public void setRotation(double rotation) {
myRotation = rotation;
// Needle is ~40% of the half size; -90 aligns 0 degrees with "up":
myRotVec = new Vec(myHalfSize / 2.5f, 0, 0);
myRotVec.rotateAroundZAxis(rotation - 90);
this.postInvalidate();
}
/**
 * Paints the radar: cached background, items, center dot, outer ring, compass
 * needle and (optionally) a red debug string.
 */
@Override
protected void onDraw(Canvas canvas) {
/*
* TODO store in bitmap object and only redraw if something changes to
* increase performance!
*/
drawBackGround(canvas);
if (items != null)
drawItems(canvas);
// Small black dot marking the camera position at the center:
paint.setColor(Color.BLACK);
drawCircle(canvas, myHalfSize, myHalfSize, myHalfSize / 30, paint);
// Outer boundary ring:
linePaint.setColor(Color.BLACK);
drawCircle(canvas, myHalfSize, myHalfSize, myHalfSize - MARGIN,
linePaint);
drawCompassNeedle(canvas);
if (debug != null) {
paint.setColor(Color.RED);
canvas.drawText(debug, 0, myHalfSize, paint);
}
}
/**
 * Forwards touch events with coordinates translated so the radar center is the
 * origin (positive y pointing down, as usual for canvas coordinates).
 */
@Override
public boolean onTouchEvent(MotionEvent event) {
return onTouch(event.getX() - myHalfSize, event.getY() - myHalfSize);
}
/**
 * Turns the touch distance from the radar center into a new display radius,
 * so touching farther out zooms the radar out. The result is clamped to
 * MIN_DISP_RADIUS.
 *
 * @param x touch x relative to the center, in pixels
 * @param y touch y relative to the center, in pixels
 * @return always true (event consumed)
 */
private boolean onTouch(float x, float y) {
double distFromCenter = Math.sqrt(x * x + y * y);
// TODO use the myHalfSize to calculate percent value. important to stay
// size independent!
distFromCenter *= myTouchScaleFactor;
myDisplRadius = (int) distFromCenter;
if (myDisplRadius < MIN_DISP_RADIUS)
myDisplRadius = MIN_DISP_RADIUS;
return true;
}
/**
 * Draws the red compass needle: either rotated by the current camera angle
 * (rotateNeedle mode) or fixed pointing straight up (items rotate instead).
 */
private void drawCompassNeedle(Canvas canvas) {
linePaint.setColor(Color.RED);
if (rotateNeedle) {
canvas.drawLine(myHalfSize, myHalfSize, myHalfSize + myRotVec.x,
myHalfSize + myRotVec.y, linePaint);
} else {
canvas.drawLine(myHalfSize, myHalfSize, myHalfSize, myHalfSize
- myHalfSize / 2.5f, linePaint);
}
}
/**
 * Draws every item that has a position onto the radar. Item positions are
 * taken relative to the camera, optionally clamped onto the radar rim when out
 * of range, rotated when the radar runs in "fixed needle" mode, scaled from
 * meters to pixels and finally converted to canvas coordinates.
 *
 * @param canvas the canvas to draw on
 */
private void drawItems(Canvas canvas) {
    for (int i = 0; i < items.myLength; i++) {
        // Fetch the element once instead of three separate get(i) lookups:
        RenderableEntity element = items.get(i);
        if (!(element instanceof HasPosition)) {
            continue;
        }
        // Vector from the camera to the item, in world meters:
        Vec pos = ((HasPosition) element).getPosition().copy()
                .sub(myCamera.getPosition());
        float length = pos.getLength();
        if (length > myDisplRadius) {
            if (!displayOutOfRadarArea) {
                continue;
            }
            // Clamp far-away items onto the radar rim:
            pos.setLength(myDisplRadius);
            length = myDisplRadius;
        }
        if (!rotateNeedle) {
            // Needle is fixed, so rotate the world around the camera instead:
            pos.rotateAroundZAxis(myRotation);
        }
        /*
         * now convert the distance in meters into a distance in pixels:
         */
        pos.setLength(length / myDisplRadius * (myHalfSize - MARGIN));
        /*
         * the canvas coords are not like the opengl coords! 10,10 means
         * down on the screen
         */
        float northPos = myHalfSize - pos.y;
        float eastPos = myHalfSize + pos.x;
        drawElement(element, canvas, northPos, eastPos);
    }
}
/**
 * Draws a single item as a small filled circle, colored by the item's own
 * color when it provides one, white otherwise.
 *
 * @param element the item being drawn
 * @param canvas the canvas to draw on
 * @param northPos canvas y coordinate of the item
 * @param eastPos canvas x coordinate of the item
 */
private void drawElement(RenderableEntity element, Canvas canvas,
float northPos, float eastPos) {
paint.setColor(Color.WHITE);
if (element instanceof HasColor) {
gl.Color c = ((HasColor) element).getColor();
if (c != null)
paint.setColor(c.toIntARGB());
}
drawCircle(canvas, eastPos, northPos, 6, paint);
}
/**
 * Paints the cached (lazily created) background bitmap at the view origin.
 */
private void drawBackGround(Canvas canvas) {
canvas.drawBitmap(getBackGround(), 0, 0, paint);
}
/**
 * Returns the cached background bitmap, creating it on first use (or after
 * {@link #setSize(int)} cleared it).
 */
private Bitmap getBackGround() {
if (background == null)
background = createBackground(mySize, myHalfSize);
return background;
}
/**
 * This method is used to create a static background bitmap for better
 * performance when drawing the radar: a translucent white disc with a soft
 * black drop shadow and a black outline ring.
 *
 * @param size edge length of the bitmap in pixels
 * @param halfSize half of {@code size}, the circle center
 * @return the rendered background bitmap
 */
private Bitmap createBackground(int size, int halfSize) {
Bitmap b = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
Paint p = new Paint();
p.setAntiAlias(true);
// Translucent white disc:
p.setColor(Color.WHITE);
p.setAlpha(150);
drawCircle(c, halfSize, halfSize, halfSize - MARGIN, p);
// shadow
p.setColor(Color.BLACK);
p.setAlpha(100);
p.setStyle(Paint.Style.STROKE);
p.setStrokeWidth(4);
int shadowOffset = 2;
drawCircle(c, halfSize + shadowOffset, halfSize + shadowOffset,
halfSize - MARGIN, p);
// Crisp outline ring on top:
p.setColor(Color.BLACK);
p.setStrokeWidth(2);
drawCircle(c, halfSize, halfSize, halfSize - MARGIN, p);
return b;
}
/**
 * Periodic world-update hook: when the rate-limiting timer fires, aligns the
 * radar rotation with the camera's current heading.
 *
 * @return always true (keep receiving updates)
 */
@Override
public boolean update(float timeDelta, Updateable parent) {
if (myTimer.update(timeDelta, parent)) {
// Index 0 of getCameraAnglesInDegree() is used as the heading here:
setRotation(myCamera.getCameraAnglesInDegree()[0]);
}
/*
* TODO if view was removed from parent it can return false here!
*/
return true;
}
/**
 * Keeps the radar square by sizing it to the smaller of the recommended
 * dimensions, then reports the measured size to the layout system.
 */
@Override
public void onResizeEvent(int recommendedHeight, int recommendedWidth) {
int min = Math.min(recommendedHeight, recommendedWidth);
setSize(min);
this.setMeasuredDimension(mySize, mySize);
}
}
| gpl-3.0 |
UIKit0/TarsosDSP | src/core/be/tarsos/dsp/wavelet/lift/package-info.java | 994 | /*
* _______ _____ _____ _____
* |__ __| | __ \ / ____| __ \
* | | __ _ _ __ ___ ___ ___| | | | (___ | |__) |
* | |/ _` | '__/ __|/ _ \/ __| | | |\___ \| ___/
* | | (_| | | \__ \ (_) \__ \ |__| |____) | |
* |_|\__,_|_| |___/\___/|___/_____/|_____/|_|
*
* -------------------------------------------------------------
*
* TarsosDSP is developed by Joren Six at IPEM, University Ghent
*
* -------------------------------------------------------------
*
* Info: http://0110.be/tag/TarsosDSP
* Github: https://github.com/JorenSix/TarsosDSP
* Releases: http://0110.be/releases/TarsosDSP/
*
* TarsosDSP includes modified source code by various authors,
* for credits and info, see README.
*
*/
/**
* Wavelet transforms using the lifting scheme algorithm. Implementation by Ian Kaplan
*/
package be.tarsos.dsp.wavelet.lift;
| gpl-3.0 |
ua-eas/ua-kfs-5.3 | test/unit/src/org/kuali/kfs/module/cam/document/dataaccess/impl/MockDepreciationBatchDao.java | 5782 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.cam.document.dataaccess.impl;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.kuali.kfs.module.cam.batch.AssetPaymentInfo;
import org.kuali.kfs.module.cam.document.dataaccess.DepreciationBatchDao;
import org.kuali.kfs.sys.businessobject.GeneralLedgerPendingEntry;
import org.kuali.rice.core.api.util.type.KualiDecimal;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class MockDepreciationBatchDao implements DepreciationBatchDao {
// Real DAO that every call is delegated to.
private DepreciationBatchDao impl;
// Records one "fiscalMonth-assetNumber-paymentSequence-amount" string per payment
// passed to updateAssetPayments(), so tests can assert which payments were depreciated.
private List<String> assetPaymentsStr = new ArrayList<String>();
/**
 * Delegates to the real DAO and additionally records a textual fingerprint of
 * every updated payment for later verification by tests.
 */
@Override
public void updateAssetPayments(List<AssetPaymentInfo> assetPayments, Integer fiscalMonth) {
impl.updateAssetPayments(assetPayments, fiscalMonth);
for (AssetPaymentInfo assetPaymentInfo : assetPayments) {
String t = fiscalMonth + "-" + assetPaymentInfo.getCapitalAssetNumber() + "-" + assetPaymentInfo.getPaymentSequenceNumber() + "-" + assetPaymentInfo.getTransactionAmount().bigDecimalValue();
this.assetPaymentsStr.add(t);
}
}
// Pure pass-through delegation below; only updateAssetPayments adds mock behavior.
@Override
public Integer getFullyDepreciatedAssetCount() {
return impl.getFullyDepreciatedAssetCount();
}
@Override
public Collection<AssetPaymentInfo> getListOfDepreciableAssetPaymentInfo(Integer fiscalYear, Integer fiscalMonth, Calendar depreciationDate) {
return impl.getListOfDepreciableAssetPaymentInfo(fiscalYear, fiscalMonth, depreciationDate);
}
@Override
public void resetPeriodValuesWhenFirstFiscalPeriod(Integer fiscalPeriod) throws Exception {
impl.resetPeriodValuesWhenFirstFiscalPeriod(fiscalPeriod);
}
@Override
public void savePendingGLEntries(List<GeneralLedgerPendingEntry> glPendingEntries) {
impl.savePendingGLEntries(glPendingEntries);
}
@Override
public void updateAssetsCreatedInLastFiscalPeriod(Integer fiscalMonth, Integer fiscalYear) {
impl.updateAssetsCreatedInLastFiscalPeriod(fiscalMonth, fiscalYear);
}
@Override
public Map<Long, KualiDecimal> getPrimaryDepreciationBaseAmountForSV() {
return impl.getPrimaryDepreciationBaseAmountForSV();
}
/**
 * Gets the assetPaymentsStr attribute.
 *
 * @return Returns the assetPaymentsStr.
 */
public List<String> getAssetPaymentsStr() {
return assetPaymentsStr;
}
/**
 * Sets the assetPaymentsStr attribute value.
 *
 * @param assetPaymentsStr The assetPaymentsStr to set.
 */
public void setAssetPaymentsStr(List<String> assetPaymentsStr) {
this.assetPaymentsStr = assetPaymentsStr;
}
/**
 * Gets the impl attribute.
 *
 * @return Returns the impl.
 */
public DepreciationBatchDao getImpl() {
return impl;
}
/**
 * Sets the impl attribute value.
 *
 * @param impl The impl to set.
 */
public void setImpl(DepreciationBatchDao impl) {
this.impl = impl;
}
@Override
public Object[] getAssetAndPaymentCount(Integer fiscalYear, Integer fiscalMonth, Calendar depreciationDate, boolean includePending) {
return impl.getAssetAndPaymentCount(fiscalYear, fiscalMonth, depreciationDate, includePending);
}
@Override
public Object[] getFederallyOwnedAssetAndPaymentCount(Integer fiscalYear, Integer fiscalMonth, Calendar depreciationDate) {
return impl.getFederallyOwnedAssetAndPaymentCount(fiscalYear, fiscalMonth, depreciationDate);
}
@Override
public Integer getRetireDocLockedAssetCount() {
return impl.getRetireDocLockedAssetCount();
}
@Override
public Integer getTransferDocLockedAssetCount() {
return impl.getTransferDocLockedAssetCount();
}
@Override
public Set<Long> getLockedAssets() {
return impl.getLockedAssets();
}
@Override
public Collection<AssetPaymentInfo> getListOfDepreciableAssetPaymentInfoYearEnd(Integer fiscalYear, Integer fiscalMonth, Calendar depreciationDate, boolean includeRetired) {
return impl.getListOfDepreciableAssetPaymentInfoYearEnd(fiscalYear, fiscalMonth, depreciationDate, includeRetired);
}
// Intentionally unimplemented stub: returns null rather than delegating.
// NOTE(review): callers must tolerate null — confirm no test depends on a real result here.
@Override
public List<Map<String, Object>> getAssetsByDepreciationConvention(Date lastFiscalYearDate, List<String> movableEquipmentObjectSubTypes, String depreciationConventionCd) {
// TODO Auto-generated method stub
return null;
}
// Intentionally unimplemented stub: no-op, does not delegate.
@Override
public void updateAssetInServiceAndDepreciationDate(List<String> selectedAssets, Date inServiceDate, Date depreciationDate) {
// TODO Auto-generated method stub
}
}
| agpl-3.0 |
vineetgarg02/hive | itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribNegativeCliDriver.java | 1934 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.cli;
import java.io.File;
import java.util.List;
import org.apache.hadoop.hive.cli.control.CliAdapter;
import org.apache.hadoop.hive.cli.control.CliConfigs;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
 * Parameterized driver that runs every Hive "contrib negative" CLI query file
 * as its own JUnit test case.
 */
@RunWith(Parameterized.class)
public class TestContribNegativeCliDriver {

    // Shared adapter that discovers the qfiles and executes them; final because it
    // is created once and never reassigned.
    static final CliAdapter adapter = new CliConfigs.ContribNegativeCliConfig().getCliAdapter();

    /**
     * @return one {test name, qfile} pair per discovered query file; consumed by
     *         the Parameterized runner
     */
    @Parameters(name = "{0}")
    public static List<Object[]> getParameters() throws Exception {
        return adapter.getParameters();
    }

    // Class-level setup/teardown shared by all parameterized runs.
    @ClassRule
    public static TestRule cliClassRule = adapter.buildClassRule();

    // Per-test setup/teardown.
    @Rule
    public TestRule cliTestRule = adapter.buildTestRule();

    // Immutable per-instance parameters supplied by the runner.
    private final String name;
    private final File qfile;

    public TestContribNegativeCliDriver(String name, File qfile) {
        this.name = name;
        this.qfile = qfile;
    }

    /** Executes a single contrib negative query file through the CLI adapter. */
    @Test
    public void testCliDriver() throws Exception {
        adapter.runTest(name, qfile);
    }
}
| apache-2.0 |
asedunov/intellij-community | platform/projectModel-impl/src/com/intellij/openapi/components/ComponentSerializationUtil.java | 1889 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.components;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.xmlb.XmlSerializer;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
/**
* @author nik
*/
/**
 * Static helpers for resolving and loading the state class of a
 * {@link PersistentStateComponent}.
 *
 * @author nik
 */
public class ComponentSerializationUtil {
  private ComponentSerializationUtil() {
    // static utility class, never instantiated
  }

  /**
   * Resolves the concrete state type {@code S} of the given component class by
   * walking its hierarchy to find what is bound to the interface's single type
   * variable.
   *
   * @param aClass the component implementation class
   * @return the raw state class bound to {@code PersistentStateComponent<S>}
   */
  @NotNull
  public static <S> Class<S> getStateClass(@NotNull Class<? extends PersistentStateComponent> aClass) {
    TypeVariable<Class<PersistentStateComponent>> variable = PersistentStateComponent.class.getTypeParameters()[0];
    Type type = ReflectionUtil.resolveVariableInHierarchy(variable, aClass);
    assert type != null : aClass;
    @SuppressWarnings("unchecked") Class<S> result = (Class<S>)ReflectionUtil.getRawType(type);
    return result;
  }

  /**
   * Deserializes {@code element} into the component's state class and passes it
   * to {@link PersistentStateComponent#loadState}. A null element is a no-op; a
   * state class of {@link Element} is handed through without XML deserialization.
   *
   * @param configuration the component to load state into
   * @param element the serialized state, may be null
   */
  public static <S> void loadComponentState(@NotNull PersistentStateComponent<S> configuration, @Nullable Element element) {
    if (element != null) {
      Class<S> stateClass = getStateClass(configuration.getClass());
      @SuppressWarnings("unchecked") S state = stateClass.equals(Element.class) ? (S)element : XmlSerializer.deserialize(element, stateClass);
      if (state != null) {
        configuration.loadState(state);
      }
    }
  }
}
| apache-2.0 |
mztaylor/rice-git | rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/util/RouteToCompletionUtil.java | 2620 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.util;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.krad.bo.AdHocRouteRecipient;
import org.kuali.rice.krad.document.Document;
import java.util.List;
import java.util.ListIterator;
/**
 * Static helper for checking whether a document carries at least one ad-hoc
 * "complete" routing request.
 */
public class RouteToCompletionUtil {

    private RouteToCompletionUtil() {
        // static utility class, never instantiated
    }

    /**
     * Checks if there is at least one ad-hoc completion request for the document,
     * looking at both the ad-hoc workgroups and the ad-hoc persons.
     *
     * @param document the document whose ad-hoc recipients are inspected
     * @return true when any recipient requests the COMPLETE action
     */
    public static boolean checkIfAtleastOneAdHocCompleteRequestExist(Document document) {
        return loopAndCheckValue(document.getAdHocRouteWorkgroups())
                || loopAndCheckValue(document.getAdHocRoutePersons());
    }

    /**
     * Returns true if any recipient in the list has COMPLETE as its requested
     * action. A null list yields false. (Typed wildcard list replaces the raw
     * {@code List} of the original; a null/empty action can never equal the
     * non-empty COMPLETE constant, so the emptiness pre-check was redundant.)
     *
     * @param adhoc ad-hoc recipients to inspect, may be null
     */
    private static boolean loopAndCheckValue(List<? extends AdHocRouteRecipient> adhoc) {
        if (adhoc == null) {
            return false;
        }
        for (AdHocRouteRecipient recipient : adhoc) {
            if (KewApiConstants.ACTION_REQUEST_COMPLETE_REQ.equals(recipient.getActionRequested())) {
                return true;
            }
        }
        return false;
    }
}
| apache-2.0 |
tuoshao/floodlight-ratelimiter | src/test/java/net/floodlightcontroller/core/util/SingletonTaskTest.java | 11038 | /**
* Copyright 2011, Big Switch Networks, Inc.
* Originally created by David Erickson, Stanford University
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package net.floodlightcontroller.core.util;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import net.floodlightcontroller.test.FloodlightTestCase;
/**
 * Timing-based tests for SingletonTask: a task scheduled several times while
 * pending or running must execute at most once more, after the requested delay.
 */
public class SingletonTaskTest extends FloodlightTestCase {
// Number of times the task body started.
public int ran = 0;
// Number of times the task body completed.
public int finished = 0;
// Nanosecond timestamp recorded by the task when it (last) ran.
public long time = 0;
@Before
public void setUp() throws Exception {
super.setUp();
ran = 0;
finished = 0;
time = 0;
}
// A task scheduled with no delay runs exactly once.
@Test
public void testBasic() throws InterruptedException {
ScheduledExecutorService ses =
Executors.newSingleThreadScheduledExecutor();
SingletonTask st1 = new SingletonTask(ses, new Runnable() {
@Override
public void run() {
ran += 1;
}
});
st1.reschedule(0, null);
ses.shutdown();
ses.awaitTermination(5, TimeUnit.SECONDS);
assertEquals("Check that task ran", 1, ran);
}
// A delayed task does not run before its delay has elapsed.
@Test
public void testDelay() throws InterruptedException {
ScheduledExecutorService ses =
Executors.newSingleThreadScheduledExecutor();
SingletonTask st1 = new SingletonTask(ses, new Runnable() {
@Override
public void run() {
ran += 1;
time = System.nanoTime();
}
});
long start = System.nanoTime();
st1.reschedule(10, TimeUnit.MILLISECONDS);
assertFalse("Check that task hasn't run yet", ran > 0);
ses.shutdown();
ses.awaitTermination(5, TimeUnit.SECONDS);
assertEquals("Check that task ran", 1, ran);
assertTrue("Check that time passed appropriately",
(time - start) >= TimeUnit.NANOSECONDS.convert(10, TimeUnit.MILLISECONDS));
}
// Repeated rescheduling while still pending pushes the run back and the task
// still executes only once.
@Test
public void testReschedule() throws InterruptedException {
ScheduledExecutorService ses =
Executors.newSingleThreadScheduledExecutor();
final Object tc = this;
SingletonTask st1 = new SingletonTask(ses, new Runnable() {
@Override
public void run() {
synchronized (tc) {
ran += 1;
}
time = System.nanoTime();
}
});
long start = System.nanoTime();
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
st1.reschedule(20, TimeUnit.MILLISECONDS);
Thread.sleep(5);
assertFalse("Check that task hasn't run yet", ran > 0);
ses.shutdown();
ses.awaitTermination(5, TimeUnit.SECONDS);
assertEquals("Check that task ran only once", 1, ran);
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) >= TimeUnit.NANOSECONDS.convert(55, TimeUnit.MILLISECONDS));
}
// Rescheduling (with a long delay) while the task is running queues exactly one
// follow-up run after the current one finishes.
@Test
public void testConcurrentAddDelay() throws InterruptedException {
ScheduledExecutorService ses =
Executors.newSingleThreadScheduledExecutor();
final Object tc = this;
SingletonTask st1 = new SingletonTask(ses, new Runnable() {
@Override
public void run() {
synchronized (tc) {
ran += 1;
}
try {
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
synchronized (tc) {
finished += 1;
time = System.nanoTime();
}
}
});
long start = System.nanoTime();
st1.reschedule(5, TimeUnit.MILLISECONDS);
Thread.sleep(20);
assertEquals("Check that task started", 1, ran);
assertEquals("Check that task not finished", 0, finished);
st1.reschedule(75, TimeUnit.MILLISECONDS);
assertTrue("Check task running state true", st1.context.taskRunning);
assertTrue("Check task should run state true", st1.context.taskShouldRun);
assertEquals("Check that task started", 1, ran);
assertEquals("Check that task not finished", 0, finished);
Thread.sleep(150);
assertTrue("Check task running state false", !st1.context.taskRunning);
assertTrue("Check task should run state false", !st1.context.taskShouldRun);
assertEquals("Check that task ran exactly twice", 2, ran);
assertEquals("Check that task finished exactly twice", 2, finished);
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) >= TimeUnit.NANOSECONDS.convert(130, TimeUnit.MILLISECONDS));
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) <= TimeUnit.NANOSECONDS.convert(160, TimeUnit.MILLISECONDS));
ses.shutdown();
ses.awaitTermination(5, TimeUnit.SECONDS);
}
// Same as above but the requested delay (25ms) expires while the first run is
// still sleeping; the follow-up run starts right after the first one finishes.
@Test
public void testConcurrentAddDelay2() throws InterruptedException {
ScheduledExecutorService ses =
Executors.newSingleThreadScheduledExecutor();
final Object tc = this;
SingletonTask st1 = new SingletonTask(ses, new Runnable() {
@Override
public void run() {
synchronized (tc) {
ran += 1;
}
try {
Thread.sleep(50);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
synchronized (tc) {
finished += 1;
time = System.nanoTime();
}
}
});
long start = System.nanoTime();
st1.reschedule(5, TimeUnit.MILLISECONDS);
Thread.sleep(20);
assertEquals("Check that task started", 1, ran);
assertEquals("Check that task not finished", 0, finished);
st1.reschedule(25, TimeUnit.MILLISECONDS);
assertTrue("Check task running state true", st1.context.taskRunning);
assertTrue("Check task should run state true", st1.context.taskShouldRun);
assertEquals("Check that task started", 1, ran);
assertEquals("Check that task not finished", 0, finished);
Thread.sleep(150);
assertTrue("Check task running state false", !st1.context.taskRunning);
assertTrue("Check task should run state false", !st1.context.taskShouldRun);
assertEquals("Check that task ran exactly twice", 2, ran);
assertEquals("Check that task finished exactly twice", 2, finished);
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) >= TimeUnit.NANOSECONDS.convert(100, TimeUnit.MILLISECONDS));
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) <= TimeUnit.NANOSECONDS.convert(125, TimeUnit.MILLISECONDS));
ses.shutdown();
ses.awaitTermination(5, TimeUnit.SECONDS);
}
// Rescheduling with no delay while running queues one immediate follow-up run.
@Test
public void testConcurrentAddNoDelay() throws InterruptedException {
ScheduledExecutorService ses =
Executors.newSingleThreadScheduledExecutor();
final Object tc = this;
SingletonTask st1 = new SingletonTask(ses, new Runnable() {
@Override
public void run() {
synchronized (tc) {
ran += 1;
}
try {
Thread.sleep(50);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
synchronized (tc) {
finished += 1;
time = System.nanoTime();
}
}
});
long start = System.nanoTime();
st1.reschedule(0, null);
Thread.sleep(20);
assertEquals("Check that task started", 1, ran);
assertEquals("Check that task not finished", 0, finished);
st1.reschedule(0, null);
assertTrue("Check task running state true", st1.context.taskRunning);
assertTrue("Check task should run state true", st1.context.taskShouldRun);
assertEquals("Check that task started", 1, ran);
assertEquals("Check that task not finished", 0, finished);
Thread.sleep(150);
assertTrue("Check task running state false", !st1.context.taskRunning);
assertTrue("Check task should run state false", !st1.context.taskShouldRun);
assertEquals("Check that task ran exactly twice", 2, ran);
assertEquals("Check that task finished exactly twice", 2, finished);
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) >= TimeUnit.NANOSECONDS.convert(90, TimeUnit.MILLISECONDS));
assertTrue("Check that time passed appropriately: " + (time - start),
(time - start) <= TimeUnit.NANOSECONDS.convert(130, TimeUnit.MILLISECONDS));
ses.shutdown();
ses.awaitTermination(5, TimeUnit.SECONDS);
}
}
| apache-2.0 |
miniway/presto | presto-main/src/test/java/io/prestosql/cost/EstimateAssertion.java | 1265 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.cost;
import io.prestosql.util.MoreMath;
import static java.lang.Double.isNaN;
import static java.lang.String.format;
public final class EstimateAssertion
{
    private static final double TOLERANCE = 0.0000001;

    private EstimateAssertion() {}

    /**
     * Asserts that two estimates agree within {@link #TOLERANCE}. Two NaN values
     * are treated as equal (both "unknown"). On mismatch an {@link AssertionError}
     * is thrown whose message combines the caller-supplied format with the
     * expected and actual values.
     */
    public static void assertEstimateEquals(double actual, double expected, String messageFormat, Object... messageObjects)
    {
        boolean bothUnknown = isNaN(actual) && isNaN(expected);
        if (bothUnknown) {
            return;
        }
        if (MoreMath.nearlyEqual(actual, expected, TOLERANCE)) {
            return;
        }
        String message = format(messageFormat, messageObjects)
                + format(", expected [%f], but got [%f]", expected, actual);
        throw new AssertionError(message);
    }
}
| apache-2.0 |
tiarebalbi/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/packagestest/two/SecondConfiguration.java | 1191 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.packagestest.two;
import org.springframework.boot.autoconfigure.AutoConfigurationPackagesTests;
import org.springframework.boot.autoconfigure.AutoConfigurationPackagesTests.TestRegistrar;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
/**
 * Sample configuration used in {@link AutoConfigurationPackagesTests}: an empty
 * configuration class whose only job is to register {@link TestRegistrar} via
 * {@code @Import} from this package.
 *
 * @author Oliver Gierke
 */
@Configuration(proxyBeanMethods = false)
@Import(TestRegistrar.class)
public class SecondConfiguration {
}
| apache-2.0 |
robin13/elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/ValueFetcher.java | 1604 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.List;
/**
 * A helper class for fetching field values during the {@link FetchFieldsPhase}. Each {@link MappedFieldType}
 * is in charge of defining a value fetcher through {@link MappedFieldType#valueFetcher}.
 */
public interface ValueFetcher {
/**
* Given access to a document's _source, return this field's values.
*
* In addition to pulling out the values, they will be parsed into a standard form.
* For example numeric field mappers make sure to parse the source value into a number
* of the right type.
*
* Note that for array values, the order in which values are returned is undefined and
* should not be relied on.
*
* @param lookup a lookup structure over the document's source.
* @return a list of standardized field values.
*/
List<Object> fetchValues(SourceLookup lookup) throws IOException;
/**
* Update the leaf reader used to fetch values. The default implementation is a
* no-op for fetchers that do not read per-segment state.
*/
default void setNextReader(LeafReaderContext context) {}
}
| apache-2.0 |
XiaominZhang/presto | presto-raptor/src/test/java/com/facebook/presto/raptor/RaptorQueryRunner.java | 3327 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.raptor;
import com.facebook.presto.Session;
import com.facebook.presto.testing.QueryRunner;
import com.facebook.presto.tests.DistributedQueryRunner;
import com.facebook.presto.tpch.TpchPlugin;
import com.facebook.presto.tpch.testing.SampledTpchPlugin;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.tpch.TpchTable;
import java.io.File;
import java.util.Map;
import static com.facebook.presto.testing.TestingSession.testSessionBuilder;
import static com.facebook.presto.tests.QueryAssertions.copyTpchTables;
import static com.facebook.presto.tpch.TpchMetadata.TINY_SCHEMA_NAME;
/**
 * Builds a two-node distributed query runner with the tpch, tpch_sampled and
 * raptor catalogs installed, and loads the requested TPC-H tables into raptor.
 */
public final class RaptorQueryRunner
{
private RaptorQueryRunner() {}
public static QueryRunner createRaptorQueryRunner(TpchTable<?>... tables)
throws Exception
{
return createRaptorQueryRunner(ImmutableList.copyOf(tables));
}
public static QueryRunner createRaptorQueryRunner(Iterable<TpchTable<?>> tables)
throws Exception
{
DistributedQueryRunner queryRunner = new DistributedQueryRunner(createSession("tpch"), 2);
// Source catalogs the TPC-H data is copied from:
queryRunner.installPlugin(new TpchPlugin());
queryRunner.createCatalog("tpch", "tpch");
queryRunner.installPlugin(new SampledTpchPlugin());
queryRunner.createCatalog("tpch_sampled", "tpch_sampled");
queryRunner.installPlugin(new RaptorPlugin());
// Raptor stores metadata (H2), shard data and backups under the runner's temp dir:
File baseDir = queryRunner.getCoordinator().getBaseDataDir().toFile();
Map<String, String> raptorProperties = ImmutableMap.<String, String>builder()
.put("metadata.db.type", "h2")
.put("metadata.db.filename", new File(baseDir, "db").getAbsolutePath())
.put("storage.data-directory", new File(baseDir, "data").getAbsolutePath())
// Small shard size so tests exercise multi-shard code paths:
.put("storage.max-shard-rows", "2000")
.put("backup.provider", "file")
.put("backup.directory", new File(baseDir, "backup").getAbsolutePath())
.build();
queryRunner.createCatalog("raptor", "raptor", raptorProperties);
// Copy the requested tables from both source catalogs into raptor:
copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, createSession(), tables);
copyTpchTables(queryRunner, "tpch_sampled", TINY_SCHEMA_NAME, createSampledSession(), tables);
return queryRunner;
}
public static Session createSession()
{
return createSession("tpch");
}
public static Session createSampledSession()
{
return createSession("tpch_sampled");
}
// Session targeting the raptor catalog with the given schema name.
private static Session createSession(String schema)
{
return testSessionBuilder()
.setCatalog("raptor")
.setSchema(schema)
.build();
}
}
| apache-2.0 |
jmandawg/camel | components/camel-mybatis/src/main/java/org/apache/camel/component/mybatis/springboot/MyBatisComponentConfiguration.java | 1978 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mybatis.springboot;
import org.apache.ibatis.session.SqlSessionFactory;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* Performs a query poll insert update or delete in a relational database using
* MyBatis.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@ConfigurationProperties(prefix = "camel.component.mybatis")
public class MyBatisComponentConfiguration {
/**
* To use the SqlSessionFactory
*/
private SqlSessionFactory sqlSessionFactory;
/**
* Location of MyBatis xml configuration file. The default value is:
* SqlMapConfig.xml loaded from the classpath
*/
private String configurationUri;
public SqlSessionFactory getSqlSessionFactory() {
return sqlSessionFactory;
}
public void setSqlSessionFactory(SqlSessionFactory sqlSessionFactory) {
this.sqlSessionFactory = sqlSessionFactory;
}
public String getConfigurationUri() {
return configurationUri;
}
public void setConfigurationUri(String configurationUri) {
this.configurationUri = configurationUri;
}
} | apache-2.0 |
alanfgates/hive | ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ILongInExpr.java | 1053 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.vector.expressions;
/**
* Interface used for both filter and non-filter versions of IN to simplify
* VectorizationContext code.
*/
public interface ILongInExpr {

  /**
   * Supplies the constant long values of the IN list that the expression
   * will test column values against.
   *
   * @param inVals the IN-list values
   */
  void setInListValues(long[] inVals);
}
| apache-2.0 |
christophd/camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/BrowseComponentBuilderFactory.java | 5829 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.browse.BrowseComponent;
/**
* Inspect the messages received on endpoints supporting BrowsableEndpoint.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface BrowseComponentBuilderFactory {
/**
* Browse (camel-browse)
* Inspect the messages received on endpoints supporting BrowsableEndpoint.
*
* Category: core,monitoring
* Since: 1.3
* Maven coordinates: org.apache.camel:camel-browse
*
* @return the dsl builder
*/
static BrowseComponentBuilder browse() {
return new BrowseComponentBuilderImpl();
}
/**
* Builder for the Browse component.
*/
interface BrowseComponentBuilder
extends
ComponentBuilder<BrowseComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default BrowseComponentBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default BrowseComponentBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default BrowseComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
}
class BrowseComponentBuilderImpl
extends
AbstractComponentBuilder<BrowseComponent>
implements
BrowseComponentBuilder {
@Override
protected BrowseComponent buildConcreteComponent() {
return new BrowseComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((BrowseComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((BrowseComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((BrowseComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
} | apache-2.0 |
shyamalschandra/flex-sdk | modules/thirdparty/batik/sources/org/apache/flex/forks/batik/svggen/DefaultImageHandler.java | 6245 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flex.forks.batik.svggen;
import java.awt.Image;
import java.awt.image.RenderedImage;
import java.awt.image.renderable.RenderableImage;
import org.apache.flex.forks.batik.util.XMLConstants;
import org.w3c.dom.Element;
/**
* This class provides a default implementation of the ImageHandler
* interface simply puts a place holder in the xlink:href
* attribute and sets the width and height of the element.
*
* @author <a href="mailto:vincent.hardy@eng.sun.com">Vincent Hardy</a>
* @version $Id: DefaultImageHandler.java 501495 2007-01-30 18:00:36Z dvholten $
* @see org.apache.flex.forks.batik.svggen.SVGGraphics2D
*/
public class DefaultImageHandler
    implements ImageHandler, ErrorConstants, XMLConstants {
    /**
     * Build a <code>DefaultImageHandler</code>.
     */
    public DefaultImageHandler() {
    }

    /**
     * Sets the width and height attributes from the image, then delegates
     * the xlink:href handling to
     * {@link #handleHREF(Image, Element, SVGGeneratorContext)}.
     */
    public void handleImage(Image image, Element imageElement,
                            SVGGeneratorContext generatorContext) {
        //
        // First, set the image width and height
        //
        imageElement.setAttributeNS(null, SVG_WIDTH_ATTRIBUTE, String.valueOf( image.getWidth( null ) ) );
        imageElement.setAttributeNS(null, SVG_HEIGHT_ATTRIBUTE, String.valueOf( image.getHeight( null ) ) );
        //
        // Now, set the href
        //
        try {
            handleHREF(image, imageElement, generatorContext);
        } catch (SVGGraphics2DIOException e) {
            reportError(e, generatorContext);
        }
    }

    /**
     * Sets the width and height attributes from the image, then delegates
     * the xlink:href handling to
     * {@link #handleHREF(RenderedImage, Element, SVGGeneratorContext)}.
     */
    public void handleImage(RenderedImage image, Element imageElement,
                            SVGGeneratorContext generatorContext) {
        //
        // First, set the image width and height
        //
        imageElement.setAttributeNS(null, SVG_WIDTH_ATTRIBUTE, String.valueOf( image.getWidth() ) );
        imageElement.setAttributeNS(null, SVG_HEIGHT_ATTRIBUTE, String.valueOf( image.getHeight() ) );
        //
        // Now, set the href
        //
        try {
            handleHREF(image, imageElement, generatorContext);
        } catch (SVGGraphics2DIOException e) {
            reportError(e, generatorContext);
        }
    }

    /**
     * Sets the width and height attributes from the image, then delegates
     * the xlink:href handling to
     * {@link #handleHREF(RenderableImage, Element, SVGGeneratorContext)}.
     */
    public void handleImage(RenderableImage image, Element imageElement,
                            SVGGeneratorContext generatorContext) {
        //
        // First, set the image width and height
        //
        imageElement.setAttributeNS(null, SVG_WIDTH_ATTRIBUTE, String.valueOf( image.getWidth() ) );
        imageElement.setAttributeNS(null, SVG_HEIGHT_ATTRIBUTE, String.valueOf( image.getHeight() ) );
        //
        // Now, set the href
        //
        try {
            handleHREF(image, imageElement, generatorContext);
        } catch (SVGGraphics2DIOException e) {
            reportError(e, generatorContext);
        }
    }

    /**
     * Forwards an I/O error raised by handleHREF to the context's error
     * handler. If the error handler itself reports an I/O failure it is
     * rethrown as a runtime exception, because java.awt.Graphics2D methods
     * cannot throw checked exceptions.
     */
    private void reportError(SVGGraphics2DIOException e,
                             SVGGeneratorContext generatorContext) {
        try {
            generatorContext.errorHandler.handleError(e);
        } catch (SVGGraphics2DIOException io) {
            // we need a runtime exception because
            // java.awt.Graphics2D method doesn't throw exceptions..
            throw new SVGGraphics2DRuntimeException(io);
        }
    }

    /**
     * This template method should set the xlink:href attribute on the input
     * Element parameter. This default implementation writes a placeholder
     * (the image's toString()).
     */
    protected void handleHREF(Image image, Element imageElement,
                              SVGGeneratorContext generatorContext)
        throws SVGGraphics2DIOException {
        // Simply write a placeholder
        imageElement.setAttributeNS(XLINK_NAMESPACE_URI,
                                    XLINK_HREF_QNAME, image.toString());
    }

    /**
     * This template method should set the xlink:href attribute on the input
     * Element parameter. This default implementation writes a placeholder
     * (the image's toString()).
     */
    protected void handleHREF(RenderedImage image, Element imageElement,
                              SVGGeneratorContext generatorContext)
        throws SVGGraphics2DIOException {
        // Simply write a placeholder
        imageElement.setAttributeNS(XLINK_NAMESPACE_URI,
                                    XLINK_HREF_QNAME, image.toString());
    }

    /**
     * This template method should set the xlink:href attribute on the input
     * Element parameter. This default implementation writes a placeholder
     * (the image's toString()).
     */
    protected void handleHREF(RenderableImage image, Element imageElement,
                              SVGGeneratorContext generatorContext)
        throws SVGGraphics2DIOException {
        // Simply write a placeholder
        imageElement.setAttributeNS(XLINK_NAMESPACE_URI,
                                    XLINK_HREF_QNAME, image.toString());
    }
}
| apache-2.0 |
subhrajyotim/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/runtime/Execution.java | 1392 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.runtime;
/**
* Represent a 'path of execution' in a process instance.
*
* Note that a {@link ProcessInstance} also is an execution.
*
* @author Joram Barrez
*/
public interface Execution {

  /**
   * The unique identifier of the execution.
   */
  String getId();

  /**
   * Indicates if the execution is suspended.
   */
  boolean isSuspended();

  /**
   * Indicates if the execution is ended.
   */
  boolean isEnded();

  /** Id of the root of the execution tree representing the process instance.
   * It is the same as {@link #getId()} if this execution is the process instance. */
  String getProcessInstanceId();

  /**
   * The id of the tenant this execution belongs to. Can be <code>null</code>
   * if the execution belongs to no single tenant.
   */
  String getTenantId();
}
| apache-2.0 |
psoreide/bnd | biz.aQute.bndlib.tests/test/test/export/annotation/testCalculated/impl/package-info.java | 52 | package test.export.annotation.testCalculated.impl;
| apache-2.0 |
doom369/netty | transport-sctp/src/main/java/com/sun/nio/sctp/NotificationHandler.java | 705 | /*
* Copyright 2011 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.sun.nio.sctp;
// NOTE(review): intentionally empty. This appears to be a compile-time stub
// standing in for the JDK's com.sun.nio.sctp.NotificationHandler so the SCTP
// transport compiles where that API is unavailable — confirm against the
// build setup before adding members.
public interface NotificationHandler<T> {
}
| apache-2.0 |
jushanghui/jsh | src/main/parser/com/baidu/hsb/parser/ast/stmt/ddl/DDLRenameTableStatement.java | 1209 | /**
* Baidu.com,Inc.
* Copyright (c) 2000-2013 All Rights Reserved.
*/
package com.baidu.hsb.parser.ast.stmt.ddl;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import com.baidu.hsb.parser.ast.expression.primary.Identifier;
import com.baidu.hsb.parser.util.Pair;
import com.baidu.hsb.parser.visitor.SQLASTVisitor;
/**
* @author xiongzhao@baidu.com
*/
public class DDLRenameTableStatement implements DDLStatement {

    /** The (from, to) rename pairs, kept in insertion order. */
    private final List<Pair<Identifier, Identifier>> list;

    public DDLRenameTableStatement() {
        this.list = new LinkedList<Pair<Identifier, Identifier>>();
    }

    public DDLRenameTableStatement(List<Pair<Identifier, Identifier>> list) {
        this.list = list == null
                ? Collections.<Pair<Identifier, Identifier>> emptyList()
                : list;
    }

    /** Adds one rename pair and returns this statement for chaining. */
    public DDLRenameTableStatement addRenamePair(Identifier from, Identifier to) {
        this.list.add(new Pair<Identifier, Identifier>(from, to));
        return this;
    }

    public List<Pair<Identifier, Identifier>> getList() {
        return this.list;
    }

    @Override
    public void accept(SQLASTVisitor visitor) {
        visitor.visit(this);
    }
}
| apache-2.0 |
RyanMagnusson/cassandra | tools/stress/src/org/apache/cassandra/stress/operations/predefined/PredefinedOperation.java | 8299 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.stress.operations.predefined;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.cassandra.stress.Operation;
import org.apache.cassandra.stress.generate.*;
import org.apache.cassandra.stress.operations.PartitionOperation;
import org.apache.cassandra.stress.settings.Command;
import org.apache.cassandra.stress.settings.CqlVersion;
import org.apache.cassandra.stress.settings.StressSettings;
import org.apache.cassandra.stress.util.Timer;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
/**
 * Base class shared by the thrift and CQL implementations of the predefined
 * stress commands (READ, WRITE, COUNTER_READ, COUNTER_WRITE). Each operation
 * works on a single row of a single partition; concrete implementations are
 * constructed via {@link #operation}.
 */
public abstract class PredefinedOperation extends PartitionOperation
{
    public static final byte[] EMPTY_BYTE_ARRAY = {};

    // Which predefined command this operation executes.
    public final Command type;
    // Distribution drawn from to decide how many columns each operation touches.
    private final Distribution columnCount;
    // Opaque per-operation cache slot for the CQL variants (see getCqlCache/storeCqlCache).
    private Object cqlCache;

    public PredefinedOperation(Command type, Timer timer, PartitionGenerator generator, SeedManager seedManager, StressSettings settings)
    {
        super(timer, settings, spec(generator, seedManager, settings.insert.rowPopulationRatio.get()));
        this.type = type;
        this.columnCount = settings.columns.countDistribution.get();
    }

    // Fixed distribution of 1: predefined operations visit exactly one row per partition.
    private static DataSpec spec(PartitionGenerator generator, SeedManager seedManager, RatioDistribution rowPopulationCount)
    {
        return new DataSpec(generator, seedManager, new DistributionFixed(1), rowPopulationCount, 1);
    }

    /** True when the configured mode speaks CQL3. */
    public boolean isCql3()
    {
        return settings.mode.cqlVersion == CqlVersion.CQL3;
    }

    /** Returns the value previously stored via {@link #storeCqlCache}, or null. */
    public Object getCqlCache()
    {
        return cqlCache;
    }

    /** Stores an implementation-defined value for later retrieval via {@link #getCqlCache}. */
    public void storeCqlCache(Object val)
    {
        cqlCache = val;
    }

    /** The partition key of the single partition this operation targets. */
    protected ByteBuffer getKey()
    {
        return (ByteBuffer) partitions.get(0).getPartitionKey(0);
    }

    /**
     * A selection of columns: either an explicit set of indices
     * (when {@code indices != null}) or a contiguous half-open range [lb, ub).
     */
    final class ColumnSelection
    {
        final int[] indices;
        final int lb, ub;
        private ColumnSelection(int[] indices, int lb, int ub)
        {
            this.indices = indices;
            this.lb = lb;
            this.ub = ub;
        }

        /** Projects the selected positions out of the given list. */
        public <V> List<V> select(List<V> in)
        {
            List<V> out = new ArrayList<>();
            if (indices != null)
            {
                for (int i : indices)
                    out.add(in.get(i));
            }
            else
            {
                out.addAll(in.subList(lb, ub));
            }
            return out;
        }

        /** Number of columns selected. */
        int count()
        {
            return indices != null ? indices.length : ub - lb;
        }

        /** Builds the equivalent thrift SlicePredicate: a slice range for range selections, explicit names otherwise. */
        SlicePredicate predicate()
        {
            final SlicePredicate predicate = new SlicePredicate();
            if (indices == null)
            {
                predicate.setSlice_range(new SliceRange()
                                         .setStart(settings.columns.names.get(lb))
                                         .setFinish(EMPTY_BYTE_ARRAY)
                                         .setReversed(false)
                                         .setCount(count())
                );
            }
            else
                predicate.setColumn_names(select(settings.columns.names));
            return predicate;
        }
    }

    public String toString()
    {
        return type.toString();
    }

    /**
     * Randomly chooses which columns this operation will touch. In slice mode
     * the result is a contiguous range; otherwise it is either the full range
     * (when the drawn count covers every column) or a scattered set of
     * strictly increasing indices.
     */
    ColumnSelection select()
    {
        if (settings.columns.slice)
        {
            int count = (int) columnCount.next();
            int start;
            // When fewer than all columns are drawn, the start is 1-based:
            // the slice never begins at the first column.
            if (count == settings.columns.maxColumnsPerKey)
                start = 0;
            else
                start = 1 + ThreadLocalRandom.current().nextInt(settings.columns.maxColumnsPerKey - count);
            return new ColumnSelection(null, start, start + count);
        }

        int count = (int) columnCount.next();
        int totalCount = settings.columns.names.size();
        if (count == settings.columns.names.size())
            return new ColumnSelection(null, 0, count);
        ThreadLocalRandom rnd = ThreadLocalRandom.current();
        int[] indices = new int[count];
        int c = 0, o = 0;
        // Scatter the remaining picks across the leftover columns: 'o' is the
        // random offset accumulated so far, so indices stay strictly increasing.
        while (c < count && count + o < totalCount)
        {
            int leeway = totalCount - (count + o);
            int spreadover = count - c;
            o += Math.round(rnd.nextDouble() * (leeway / (double) spreadover));
            indices[c] = o + c;
            c++;
        }
        // No leeway left: the remaining indices are consecutive.
        while (c < count)
        {
            indices[c] = o + c;
            c++;
        }
        return new ColumnSelection(indices, 0, 0);
    }

    /** Values for all configured columns of the next row. */
    protected List<ByteBuffer> getColumnValues()
    {
        return getColumnValues(new ColumnSelection(null, 0, settings.columns.names.size()));
    }

    /** Values for the selected columns of the next row, in selection order. */
    protected List<ByteBuffer> getColumnValues(ColumnSelection columns)
    {
        Row row = partitions.get(0).next();
        ByteBuffer[] r = new ByteBuffer[columns.count()];
        int c = 0;
        if (columns.indices != null)
            for (int i : columns.indices)
                r[c++] = (ByteBuffer) row.get(i);
        else
            for (int i = columns.lb ; i < columns.ub ; i++)
                r[c++] = (ByteBuffer) row.get(i);
        return Arrays.asList(r);
    }

    /**
     * Factory: builds the concrete operation for the given command and the
     * configured mode (thrift vs CQL/CQL-prepared). The counteradd
     * distribution is only used by the COUNTER_WRITE variants.
     *
     * @throws UnsupportedOperationException for unsupported command/mode combinations
     */
    public static Operation operation(Command type, Timer timer, PartitionGenerator generator, SeedManager seedManager, StressSettings settings, DistributionFactory counteradd)
    {
        switch (type)
        {
            case READ:
                switch(settings.mode.style)
                {
                    case THRIFT:
                        return new ThriftReader(timer, generator, seedManager, settings);
                    case CQL:
                    case CQL_PREPARED:
                        return new CqlReader(timer, generator, seedManager, settings);
                    default:
                        throw new UnsupportedOperationException();
                }

            case COUNTER_READ:
                switch(settings.mode.style)
                {
                    case THRIFT:
                        return new ThriftCounterGetter(timer, generator, seedManager, settings);
                    case CQL:
                    case CQL_PREPARED:
                        return new CqlCounterGetter(timer, generator, seedManager, settings);
                    default:
                        throw new UnsupportedOperationException();
                }

            case WRITE:
                switch(settings.mode.style)
                {
                    case THRIFT:
                        return new ThriftInserter(timer, generator, seedManager, settings);
                    case CQL:
                    case CQL_PREPARED:
                        return new CqlInserter(timer, generator, seedManager, settings);
                    default:
                        throw new UnsupportedOperationException();
                }

            case COUNTER_WRITE:
                switch(settings.mode.style)
                {
                    case THRIFT:
                        return new ThriftCounterAdder(counteradd, timer, generator, seedManager, settings);
                    case CQL:
                    case CQL_PREPARED:
                        return new CqlCounterAdder(counteradd, timer, generator, seedManager, settings);
                    default:
                        throw new UnsupportedOperationException();
                }
        }
        throw new UnsupportedOperationException();
    }
}
| apache-2.0 |
sguilhen/wildfly-elytron | src/main/java/org/wildfly/security/ssl/ExportCipherSuitePredicate.java | 1353 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.ssl;
final class ExportCipherSuitePredicate extends CipherSuitePredicate {

    static final ExportCipherSuitePredicate TRUE = new ExportCipherSuitePredicate(true);
    static final ExportCipherSuitePredicate FALSE = new ExportCipherSuitePredicate(false);

    /** Whether this predicate matches export-grade (true) or non-export (false) suites. */
    private final boolean export;

    private ExportCipherSuitePredicate(final boolean export) {
        this.export = export;
    }

    void toString(final StringBuilder b) {
        b.append("export cipher suite is ");
        b.append(export);
    }

    public boolean test(final MechanismDatabase.Entry entry) {
        return entry.isExport() == export;
    }
}
| apache-2.0 |
winger007/zstack | header/src/main/java/org/zstack/header/vm/APICreateVmInstanceMsg.java | 9072 | package org.zstack.header.vm;
import org.springframework.http.HttpMethod;
import org.zstack.header.cluster.ClusterVO;
import org.zstack.header.configuration.DiskOfferingVO;
import org.zstack.header.configuration.InstanceOfferingVO;
import org.zstack.header.core.scheduler.SchedulerVO;
import org.zstack.header.host.HostVO;
import org.zstack.header.identity.Action;
import org.zstack.header.image.ImageVO;
import org.zstack.header.message.APICreateMessage;
import org.zstack.header.message.APIEvent;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.APIParam;
import org.zstack.header.network.l3.L3NetworkVO;
import org.zstack.header.notification.ApiNotification;
import org.zstack.header.rest.RestRequest;
import org.zstack.header.storage.primary.PrimaryStorageVO;
import org.zstack.header.tag.TagResourceType;
import org.zstack.header.zone.ZoneVO;
import java.util.List;
import static java.util.Arrays.asList;
/**
* @api create a new vm instance
* @cli
* @httpMsg {
* "org.zstack.header.vm.APICreateVmInstanceMsg": {
* "name": "TestVm",
* "instanceOfferingUuid": "1618154b462a48749ca9b114cf4a2979",
* "imageUuid": "99a5eea648954ef7be2b8ede8f34fe26",
* "l3NetworkUuids": [
* "c4f6a370f80443798cc460ee07d56ff1",
* "f5fbd96e0df745bdb7bc4f4c19febe65",
* "c60285dca24d43a4b9a2e536674ddca1"
* ],
* "type": "UserVm",
* "dataDiskOfferingUuids": [],
* "description": "Test",
* "session": {
* "uuid": "49c7e4c1fc18499a9477dd426436a8a4"
* }
* }
* }
* @msg {
* "org.zstack.header.vm.APICreateVmInstanceMsg": {
* "name": "TestVm",
* "instanceOfferingUuid": "1618154b462a48749ca9b114cf4a2979",
* "imageUuid": "99a5eea648954ef7be2b8ede8f34fe26",
* "l3NetworkUuids": [
* "c4f6a370f80443798cc460ee07d56ff1",
* "f5fbd96e0df745bdb7bc4f4c19febe65",
* "c60285dca24d43a4b9a2e536674ddca1"
* ],
* "type": "UserVm",
* "dataDiskOfferingUuids": [],
* "description": "Test",
* "session": {
* "uuid": "49c7e4c1fc18499a9477dd426436a8a4"
* },
* "timeout": 1800000,
* "id": "add5fb2198f14980adf26db572d035c5",
* "serviceId": "api.portal",
* "creatingTime": 1398912618016
* }
* }
* @result See :ref:`APICreateVmInstanceEvent`
* @since 0.1.0
*/
@TagResourceType(VmInstanceVO.class)
@Action(category = VmInstanceConstant.ACTION_CATEGORY)
@RestRequest(
path = "/vm-instances",
method = HttpMethod.POST,
responseClass = APICreateVmInstanceEvent.class,
parameterName = "params"
)
public class APICreateVmInstanceMsg extends APICreateMessage {
/**
* @desc max length of 255 characters
*/
@APIParam(maxLength = 255)
private String name;
/**
* @desc uuid of instance offering. See :ref:`InstanceOfferingInventory`
*/
@APIParam(resourceType = InstanceOfferingVO.class, checkAccount = true)
private String instanceOfferingUuid;
/**
* @desc uuid of image. See :ref:`ImageInventory`
*/
@APIParam(resourceType = ImageVO.class, checkAccount = true)
private String imageUuid;
/**
* @desc a list of L3Network uuid the vm will create nic on. See :ref:`L3NetworkInventory`
*/
@APIParam(resourceType = L3NetworkVO.class, nonempty = true, checkAccount = true)
private List<String> l3NetworkUuids;
/**
* @desc see type of :ref:`VmInstanceInventory`
* @choices - UserVm
* - ApplianceVm
*/
@APIParam(validValues = {"UserVm", "ApplianceVm"}, required = false)
private String type;
/**
* @desc disk offering uuid for root volume. Optional when vm is created from RootVolumeTemplate,
* mandatory when vm is created from ISO. See 'mediaType' of :ref:`ImageInventory`
* @optional
*/
@APIParam(required = false, resourceType = DiskOfferingVO.class, checkAccount = true)
private String rootDiskOfferingUuid;
/**
* @desc disk offering uuid for data volumes. See :ref:`DiskOfferingInventory`
*/
@APIParam(required = false, resourceType = DiskOfferingVO.class, checkAccount = true)
private List<String> dataDiskOfferingUuids;
/**
* @desc when not null, vm will be created in the zone this uuid specified
* @optional
*/
@APIParam(required = false, resourceType = ZoneVO.class)
private String zoneUuid;
/**
* @desc when not null, vm will be created in the cluster this uuid specified
* @optional
*/
@APIParam(required = false, resourceType = ClusterVO.class)
private String clusterUuid;
/**
* @desc when not null, vm will be created on the host this uuid specified
* @optional
*/
@APIParam(required = false, resourceType = HostVO.class)
private String hostUuid;
/**
* @desc when not null, vm will be created on the primary storage this uuid specified
* @optional
*/
@APIParam(required = false, resourceType = PrimaryStorageVO.class)
private String primaryStorageUuidForRootVolume;
/**
* @desc max length of 255 characters
* @optional
*/
@APIParam(required = false, maxLength = 2048)
private String description;
/**
* @desc user-defined root password
* @optional
*/
// @APIParam(required = false, maxLength = 32, checkAccount = true, validRegexValues = VmInstanceConstant.USER_VM_REGEX_PASSWORD)
// private String rootPassword;
private String defaultL3NetworkUuid;
@APIParam(required = false, validValues = {"InstantStart", "JustCreate"})
private String strategy = VmCreationStrategy.InstantStart.toString();
// Plain bean-style accessors. No validation happens here; all constraints
// (required, maxLength, validValues, resource types) are enforced through the
// @APIParam annotations on the fields when the API message is validated.
public String getStrategy() {
    return strategy;
}
public void setStrategy(String strategy) {
    this.strategy = strategy;
}
public String getDefaultL3NetworkUuid() {
    return defaultL3NetworkUuid;
}
public void setDefaultL3NetworkUuid(String defaultL3NetworkUuid) {
    this.defaultL3NetworkUuid = defaultL3NetworkUuid;
}
public String getName() {
    return name;
}
public void setName(String name) {
    this.name = name;
}
public String getInstanceOfferingUuid() {
    return instanceOfferingUuid;
}
public void setInstanceOfferingUuid(String instanceOfferingUuid) {
    this.instanceOfferingUuid = instanceOfferingUuid;
}
public String getImageUuid() {
    return imageUuid;
}
public void setImageUuid(String imageUuid) {
    this.imageUuid = imageUuid;
}
public String getType() {
    return type;
}
public void setType(String type) {
    this.type = type;
}
public String getZoneUuid() {
    return zoneUuid;
}
public void setZoneUuid(String zoneUuid) {
    this.zoneUuid = zoneUuid;
}
public String getClusterUuid() {
    return clusterUuid;
}
public void setClusterUuid(String clusterUuid) {
    this.clusterUuid = clusterUuid;
}
public String getHostUuid() {
    return hostUuid;
}
public void setHostUuid(String hostUuid) {
    this.hostUuid = hostUuid;
}
public String getDescription() {
    return description;
}
public void setDescription(String description) {
    this.description = description;
}
public List<String> getL3NetworkUuids() {
    return l3NetworkUuids;
}
public void setL3NetworkUuids(List<String> l3NetworkUuids) {
    this.l3NetworkUuids = l3NetworkUuids;
}
public List<String> getDataDiskOfferingUuids() {
    return dataDiskOfferingUuids;
}
public void setDataDiskOfferingUuids(List<String> dataDiskOfferingUuids) {
    this.dataDiskOfferingUuids = dataDiskOfferingUuids;
}
public String getRootDiskOfferingUuid() {
    return rootDiskOfferingUuid;
}
public void setRootDiskOfferingUuid(String rootDiskOfferingUuid) {
    this.rootDiskOfferingUuid = rootDiskOfferingUuid;
}
public String getPrimaryStorageUuidForRootVolume() {
    return primaryStorageUuidForRootVolume;
}
public void setPrimaryStorageUuidForRootVolume(String primaryStorageUuidForRootVolume) {
    this.primaryStorageUuidForRootVolume = primaryStorageUuidForRootVolume;
}
/**
 * Builds a sample message used by the API documentation generator.
 * Field values are illustrative only; uuid() produces fresh placeholder uuids.
 */
public static APICreateVmInstanceMsg __example__() {
    APICreateVmInstanceMsg example = new APICreateVmInstanceMsg();
    example.setName("vm1");
    example.setDescription("this is a vm");
    // uuid() calls are kept in the original order so placeholder generation is unchanged.
    example.setClusterUuid(uuid());
    example.setDataDiskOfferingUuids(asList(uuid(), uuid()));
    example.setImageUuid(uuid());
    example.setInstanceOfferingUuid(uuid());
    example.setL3NetworkUuids(asList(uuid()));
    return example;
}
/**
 * Emits a "Created" audit notification for the new VM after the API event
 * completes successfully. The notification is tied to this request message
 * and the resulting APICreateVmInstanceEvent.
 */
public ApiNotification __notification__() {
    APIMessage that = this;

    return new ApiNotification() {
        @Override
        public void after(APIEvent evt) {
            // Only successful creations produce a notification; failures are silent here.
            if (evt.isSuccess()) {
                ntfy("Created").resource(((APICreateVmInstanceEvent) evt).getInventory().getUuid(), VmInstanceVO.class.getSimpleName())
                        .messageAndEvent(that, evt).done();
            }
        }
    };
}
}
| apache-2.0 |
didiez/keycloak | federation/ldap/src/main/java/org/keycloak/storage/ldap/mappers/membership/role/RoleLDAPStorageMapper.java | 19167 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.storage.ldap.mappers.membership.role;
import org.jboss.logging.Logger;
import org.keycloak.component.ComponentModel;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ModelException;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleContainerModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.RoleUtils;
import org.keycloak.models.utils.UserModelDelegate;
import org.keycloak.storage.ldap.LDAPConfig;
import org.keycloak.storage.ldap.LDAPStorageProvider;
import org.keycloak.storage.ldap.LDAPUtils;
import org.keycloak.storage.ldap.idm.model.LDAPObject;
import org.keycloak.storage.ldap.idm.query.Condition;
import org.keycloak.storage.ldap.idm.query.internal.LDAPQuery;
import org.keycloak.storage.ldap.idm.query.internal.LDAPQueryConditionsBuilder;
import org.keycloak.storage.ldap.mappers.AbstractLDAPStorageMapper;
import org.keycloak.storage.ldap.mappers.membership.CommonLDAPGroupMapper;
import org.keycloak.storage.ldap.mappers.membership.CommonLDAPGroupMapperConfig;
import org.keycloak.storage.ldap.mappers.membership.LDAPGroupMapperMode;
import org.keycloak.storage.ldap.mappers.membership.UserRolesRetrieveStrategy;
import org.keycloak.storage.user.SynchronizationResult;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Map realm roles or roles of particular client to LDAP groups
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class RoleLDAPStorageMapper extends AbstractLDAPStorageMapper implements CommonLDAPGroupMapper {
private static final Logger logger = Logger.getLogger(RoleLDAPStorageMapper.class);
private final RoleMapperConfig config;
private final RoleLDAPStorageMapperFactory factory;
/**
 * @param mapperModel  component configuration for this mapper instance
 * @param ldapProvider LDAP storage provider this mapper operates through
 * @param factory      factory used to look up the configured roles-retrieve strategy
 */
public RoleLDAPStorageMapper(ComponentModel mapperModel, LDAPStorageProvider ldapProvider, RoleLDAPStorageMapperFactory factory) {
    super(mapperModel, ldapProvider);
    this.config = new RoleMapperConfig(mapperModel);
    this.factory = factory;
}

// Roles are treated as "groups" by the common group-mapper infrastructure;
// the group query is simply the role query.
@Override
public LDAPQuery createLDAPGroupQuery() {
    return createRoleQuery();
}

@Override
public CommonLDAPGroupMapperConfig getConfig() {
    return config;
}
/**
 * Imports the user's LDAP role mappings into the Keycloak DB. Only runs in
 * IMPORT mode and only when the user is first created; missing roles are
 * created in the target container on the fly.
 */
@Override
public void onImportUserFromLDAP(LDAPObject ldapUser, UserModel user, RealmModel realm, boolean isCreate) {
    LDAPGroupMapperMode mode = config.getMode();

    // For now, import LDAP role mappings just during create
    if (mode == LDAPGroupMapperMode.IMPORT && isCreate) {

        List<LDAPObject> ldapRoles = getLDAPRoleMappings(ldapUser);

        // Import role mappings from LDAP into Keycloak DB
        String roleNameAttr = config.getRoleNameLdapAttribute();
        for (LDAPObject ldapRole : ldapRoles) {
            String roleName = ldapRole.getAttributeAsString(roleNameAttr);

            RoleContainerModel roleContainer = getTargetRoleContainer(realm);
            RoleModel role = roleContainer.getRole(roleName);

            if (role == null) {
                // Role exists in LDAP but not yet in Keycloak -> create it locally.
                role = roleContainer.addRole(roleName);
            }

            logger.debugf("Granting role [%s] to user [%s] during import from LDAP", roleName, user.getUsername());
            user.grantRole(role);
        }
    }
}

// Intentionally a no-op: registering a new user in LDAP does not create any role mappings.
@Override
public void onRegisterUserToLDAP(LDAPObject ldapUser, UserModel localUser, RealmModel realm) {
}
// Sync roles from LDAP to Keycloak DB
/**
 * Loads all roles from LDAP and creates any that are missing in the target
 * role container (realm or client). Existing roles are counted as "updated"
 * but not modified.
 */
@Override
public SynchronizationResult syncDataFromFederationProviderToKeycloak(RealmModel realm) {
    SynchronizationResult syncResult = new SynchronizationResult() {
        @Override
        public String getStatus() {
            return String.format("%d imported roles, %d roles already exists in Keycloak", getAdded(), getUpdated());
        }
    };

    logger.debugf("Syncing roles from LDAP into Keycloak DB. Mapper is [%s], LDAP provider is [%s]", mapperModel.getName(), ldapProvider.getModel().getName());

    // Send LDAP query to load all roles
    LDAPQuery ldapRoleQuery = createRoleQuery();
    List<LDAPObject> ldapRoles = LDAPUtils.loadAllLDAPObjects(ldapRoleQuery, ldapProvider);

    RoleContainerModel roleContainer = getTargetRoleContainer(realm);
    String rolesRdnAttr = config.getRoleNameLdapAttribute();
    for (LDAPObject ldapRole : ldapRoles) {
        String roleName = ldapRole.getAttributeAsString(rolesRdnAttr);

        if (roleContainer.getRole(roleName) == null) {
            logger.debugf("Syncing role [%s] from LDAP to keycloak DB", roleName);
            roleContainer.addRole(roleName);
            syncResult.increaseAdded();
        } else {
            syncResult.increaseUpdated();
        }
    }

    return syncResult;
}
// Sync roles from Keycloak back to LDAP
/**
 * Pushes roles from the Keycloak DB into LDAP. Only allowed in LDAP_ONLY mode
 * (the only mode in which LDAP is writable for role data); otherwise the sync
 * is skipped with a warning. Roles already present in LDAP are counted as
 * "updated" but left untouched.
 */
@Override
public SynchronizationResult syncDataFromKeycloakToFederationProvider(RealmModel realm) {
    SynchronizationResult syncResult = new SynchronizationResult() {
        @Override
        public String getStatus() {
            return String.format("%d roles imported to LDAP, %d roles already existed in LDAP", getAdded(), getUpdated());
        }
    };

    if (config.getMode() != LDAPGroupMapperMode.LDAP_ONLY) {
        // Fixed grammar of the warning message ("it's" -> "its").
        logger.warnf("Ignored sync for federation mapper '%s' as its mode is '%s'", mapperModel.getName(), config.getMode().toString());
        return syncResult;
    }

    logger.debugf("Syncing roles from Keycloak into LDAP. Mapper is [%s], LDAP provider is [%s]", mapperModel.getName(), ldapProvider.getModel().getName());

    // Send LDAP query to see which roles exists there
    LDAPQuery ldapQuery = createRoleQuery();
    List<LDAPObject> ldapRoles = ldapQuery.getResultList();

    Set<String> ldapRoleNames = new HashSet<>();
    String rolesRdnAttr = config.getRoleNameLdapAttribute();
    for (LDAPObject ldapRole : ldapRoles) {
        String roleName = ldapRole.getAttributeAsString(rolesRdnAttr);
        ldapRoleNames.add(roleName);
    }

    RoleContainerModel roleContainer = getTargetRoleContainer(realm);
    Set<RoleModel> keycloakRoles = roleContainer.getRoles();

    for (RoleModel keycloakRole : keycloakRoles) {
        String roleName = keycloakRole.getName();
        if (ldapRoleNames.contains(roleName)) {
            syncResult.increaseUpdated();
        } else {
            logger.debugf("Syncing role [%s] from Keycloak to LDAP", roleName);
            createLDAPRole(roleName);
            syncResult.increaseAdded();
        }
    }

    return syncResult;
}
// TODO: Possible to merge with GroupMapper and move to common class
/**
 * Builds the LDAP query that finds role entries: scoped to the configured
 * roles DN and object classes, optionally narrowed by a custom LDAP filter,
 * returning the role-name and membership attributes.
 */
public LDAPQuery createRoleQuery() {
    LDAPQuery ldapQuery = new LDAPQuery(ldapProvider);

    // For now, use same search scope, which is configured "globally" and used for user's search.
    ldapQuery.setSearchScope(ldapProvider.getLdapIdentityStore().getConfig().getSearchScope());

    String rolesDn = config.getRolesDn();
    ldapQuery.setSearchDn(rolesDn);

    Collection<String> roleObjectClasses = config.getRoleObjectClasses(ldapProvider);
    ldapQuery.addObjectClasses(roleObjectClasses);

    String rolesRdnAttr = config.getRoleNameLdapAttribute();

    String customFilter = config.getCustomLdapFilter();
    if (customFilter != null && customFilter.trim().length() > 0) {
        Condition customFilterCondition = new LDAPQueryConditionsBuilder().addCustomLDAPFilter(customFilter);
        ldapQuery.addWhereCondition(customFilterCondition);
    }

    String membershipAttr = config.getMembershipLdapAttribute();
    ldapQuery.addReturningLdapAttribute(rolesRdnAttr);
    ldapQuery.addReturningLdapAttribute(membershipAttr);

    return ldapQuery;
}
/**
 * Resolves the container whose roles this mapper manages: the realm itself
 * when realm-roles mapping is configured, otherwise the client identified by
 * the "client.id" mapper parameter.
 *
 * @throws ModelException if client mapping is configured but "client.id" is
 *                        missing or refers to a non-existent client
 */
protected RoleContainerModel getTargetRoleContainer(RealmModel realm) {
    boolean realmRolesMapping = config.isRealmRolesMapping();
    if (realmRolesMapping) {
        return realm;
    } else {
        String clientId = config.getClientId();
        if (clientId == null) {
            throw new ModelException("Using client roles mapping is requested, but parameter client.id not found!");
        }
        ClientModel client = realm.getClientByClientId(clientId);
        if (client == null) {
            // Fixed broken grammar of the original message ("Can't found requested client").
            throw new ModelException("Cannot find requested client with clientId: " + clientId);
        }
        return client;
    }
}
/** Creates a new role entry in LDAP under the configured roles DN, with no members. */
public LDAPObject createLDAPRole(String roleName) {
    LDAPObject ldapRole = LDAPUtils.createLDAPGroup(ldapProvider, roleName, config.getRoleNameLdapAttribute(), config.getRoleObjectClasses(ldapProvider),
            config.getRolesDn(), Collections.<String, Set<String>>emptyMap());
    logger.debugf("Creating role [%s] to LDAP with DN [%s]", roleName, ldapRole.getDn().toString());
    return ldapRole;
}

/**
 * Adds the user as a member of the named LDAP role, creating the role entry
 * first if it does not exist yet.
 */
public void addRoleMappingInLDAP(String roleName, LDAPObject ldapUser) {
    LDAPObject ldapRole = loadLDAPRoleByName(roleName);
    if (ldapRole == null) {
        ldapRole = createLDAPRole(roleName);
    }

    String membershipUserAttrName = getMembershipUserLdapAttribute();

    LDAPUtils.addMember(ldapProvider, config.getMembershipTypeLdapAttribute(), config.getMembershipLdapAttribute(), membershipUserAttrName, ldapRole, ldapUser, true);
}

/** Removes the user from the given LDAP role's membership attribute. */
public void deleteRoleMappingInLDAP(LDAPObject ldapUser, LDAPObject ldapRole) {
    String membershipUserAttrName = getMembershipUserLdapAttribute();
    LDAPUtils.deleteMember(ldapProvider, config.getMembershipTypeLdapAttribute(), config.getMembershipLdapAttribute(), membershipUserAttrName, ldapRole, ldapUser);
}

/** Finds the LDAP role entry with the given role name, or null when absent. */
public LDAPObject loadLDAPRoleByName(String roleName) {
    LDAPQuery ldapQuery = createRoleQuery();
    Condition roleNameCondition = new LDAPQueryConditionsBuilder().equal(config.getRoleNameLdapAttribute(), roleName);
    ldapQuery.addWhereCondition(roleNameCondition);
    return ldapQuery.getFirstResult();
}

/**
 * Retrieves the user's LDAP role mappings via the configured retrieve
 * strategy (e.g. by membership attribute or user attribute).
 */
protected List<LDAPObject> getLDAPRoleMappings(LDAPObject ldapUser) {
    String strategyKey = config.getUserRolesRetrieveStrategy();
    UserRolesRetrieveStrategy strategy = factory.getUserRolesRetrieveStrategy(strategyKey);

    LDAPConfig ldapConfig = ldapProvider.getLdapIdentityStore().getConfig();
    return strategy.getLDAPRoleMappings(this, ldapUser, ldapConfig);
}
/**
 * Wraps the user so that role-mapping operations are routed to LDAP
 * (LDAP_ONLY / READ_ONLY modes). In IMPORT mode all data already lives in
 * the local DB, so the delegate is returned unchanged.
 */
@Override
public UserModel proxy(LDAPObject ldapUser, UserModel delegate, RealmModel realm) {
    final LDAPGroupMapperMode mode = config.getMode();

    // For IMPORT mode, all operations are performed against local DB
    if (mode == LDAPGroupMapperMode.IMPORT) {
        return delegate;
    } else {
        return new LDAPRoleMappingsUserDelegate(realm, delegate, ldapUser);
    }
}

// Lets the configured retrieve strategy tweak the user query (e.g. request extra attributes).
@Override
public void beforeLDAPQuery(LDAPQuery query) {
    String strategyKey = config.getUserRolesRetrieveStrategy();
    UserRolesRetrieveStrategy strategy = factory.getUserRolesRetrieveStrategy(strategyKey);
    strategy.beforeUserLDAPQuery(query);
}

// Name of the user attribute that membership values reference (e.g. dn or uid).
protected String getMembershipUserLdapAttribute() {
    LDAPConfig ldapConfig = ldapProvider.getLdapIdentityStore().getConfig();
    return config.getMembershipUserLdapAttribute(ldapConfig);
}
/**
 * UserModel delegate that overlays LDAP role mappings on top of (or instead
 * of) the local DB mappings, depending on the mapper mode:
 * - LDAP_ONLY: mappings of the target container come exclusively from LDAP
 *   and writes (grant/delete) go to LDAP;
 * - READ_ONLY: LDAP mappings are merged with DB mappings; LDAP is never written.
 * Roles outside the target container always fall through to the local DB.
 */
public class LDAPRoleMappingsUserDelegate extends UserModelDelegate {

    private final RealmModel realm;
    private final LDAPObject ldapUser;
    // Container (realm or client) whose role mappings this mapper manages.
    private final RoleContainerModel roleContainer;

    // Avoid loading role mappings from LDAP more times per-request
    private Set<RoleModel> cachedLDAPRoleMappings;

    public LDAPRoleMappingsUserDelegate(RealmModel realm, UserModel user, LDAPObject ldapUser) {
        super(user);
        this.realm = realm;
        this.ldapUser = ldapUser;
        this.roleContainer = getTargetRoleContainer(realm);
    }

    @Override
    public Set<RoleModel> getRealmRoleMappings() {
        // Only intercept when this mapper targets realm roles.
        if (roleContainer.equals(realm)) {
            Set<RoleModel> ldapRoleMappings = getLDAPRoleMappingsConverted();

            if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {
                // Use just role mappings from LDAP
                return ldapRoleMappings;
            } else {
                // Merge mappings from both DB and LDAP
                Set<RoleModel> modelRoleMappings = super.getRealmRoleMappings();
                ldapRoleMappings.addAll(modelRoleMappings);
                return ldapRoleMappings;
            }
        } else {
            return super.getRealmRoleMappings();
        }
    }

    @Override
    public Set<RoleModel> getClientRoleMappings(ClientModel client) {
        // Only intercept when this mapper targets exactly this client's roles.
        if (roleContainer.equals(client)) {
            Set<RoleModel> ldapRoleMappings = getLDAPRoleMappingsConverted();

            if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {
                // Use just role mappings from LDAP
                return ldapRoleMappings;
            } else {
                // Merge mappings from both DB and LDAP
                Set<RoleModel> modelRoleMappings = super.getClientRoleMappings(client);
                ldapRoleMappings.addAll(modelRoleMappings);
                return ldapRoleMappings;
            }
        } else {
            return super.getClientRoleMappings(client);
        }
    }

    @Override
    public boolean hasRole(RoleModel role) {
        // Checks direct/composite role mappings plus roles inherited via group membership.
        Set<RoleModel> roles = getRoleMappings();
        return RoleUtils.hasRole(roles, role)
                || RoleUtils.hasRoleFromGroup(getGroups(), role, true);
    }

    @Override
    public void grantRole(RoleModel role) {
        if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {

            if (role.getContainer().equals(roleContainer)) {

                // We need to create new role mappings in LDAP
                cachedLDAPRoleMappings = null;
                addRoleMappingInLDAP(role.getName(), ldapUser);
            } else {
                super.grantRole(role);
            }
        } else {
            super.grantRole(role);
        }
    }

    @Override
    public Set<RoleModel> getRoleMappings() {
        Set<RoleModel> modelRoleMappings = super.getRoleMappings();

        Set<RoleModel> ldapRoleMappings = getLDAPRoleMappingsConverted();

        if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {
            // For LDAP-only we want to retrieve role mappings of target container just from LDAP
            Set<RoleModel> modelRolesCopy = new HashSet<>(modelRoleMappings);
            for (RoleModel role : modelRolesCopy) {
                if (role.getContainer().equals(roleContainer)) {
                    modelRoleMappings.remove(role);
                }
            }
        }

        modelRoleMappings.addAll(ldapRoleMappings);
        return modelRoleMappings;
    }

    protected Set<RoleModel> getLDAPRoleMappingsConverted() {
        // Per-request cache; returns a defensive copy so callers can't corrupt it.
        if (cachedLDAPRoleMappings != null) {
            return new HashSet<>(cachedLDAPRoleMappings);
        }

        List<LDAPObject> ldapRoles = getLDAPRoleMappings(ldapUser);

        Set<RoleModel> roles = new HashSet<>();
        String roleNameLdapAttr = config.getRoleNameLdapAttribute();
        for (LDAPObject role : ldapRoles) {
            String roleName = role.getAttributeAsString(roleNameLdapAttr);
            RoleModel modelRole = roleContainer.getRole(roleName);
            if (modelRole == null) {
                // Add role to local DB
                modelRole = roleContainer.addRole(roleName);
            }
            roles.add(modelRole);
        }

        cachedLDAPRoleMappings = new HashSet<>(roles);

        return roles;
    }

    @Override
    public void deleteRoleMapping(RoleModel role) {
        if (role.getContainer().equals(roleContainer)) {

            // Look up the LDAP role entry restricted to entries this user is a member of.
            LDAPQuery ldapQuery = createRoleQuery();
            LDAPQueryConditionsBuilder conditionsBuilder = new LDAPQueryConditionsBuilder();
            Condition roleNameCondition = conditionsBuilder.equal(config.getRoleNameLdapAttribute(), role.getName());

            String membershipUserAttrName = getMembershipUserLdapAttribute();
            String membershipUserAttr = LDAPUtils.getMemberValueOfChildObject(ldapUser, config.getMembershipTypeLdapAttribute(), membershipUserAttrName);

            Condition membershipCondition = conditionsBuilder.equal(config.getMembershipLdapAttribute(), membershipUserAttr);
            ldapQuery.addWhereCondition(roleNameCondition).addWhereCondition(membershipCondition);
            LDAPObject ldapRole = ldapQuery.getFirstResult();

            if (ldapRole == null) {
                // Role mapping doesn't exist in LDAP. For LDAP_ONLY mode, we don't need to do anything. For READ_ONLY, delete it in local DB.
                if (config.getMode() == LDAPGroupMapperMode.READ_ONLY) {
                    super.deleteRoleMapping(role);
                }
            } else {
                // Role mappings exists in LDAP. For LDAP_ONLY mode, we can just delete it in LDAP. For READ_ONLY we can't delete it -> throw error
                if (config.getMode() == LDAPGroupMapperMode.READ_ONLY) {
                    throw new ModelException("Not possible to delete LDAP role mappings as mapper mode is READ_ONLY");
                } else {
                    // Delete ldap role mappings
                    cachedLDAPRoleMappings = null;
                    deleteRoleMappingInLDAP(ldapUser, ldapRole);
                }
            }
        } else {
            super.deleteRoleMapping(role);
        }
    }

}
}
| apache-2.0 |
AlanJinTS/zstack | plugin/vxlan/src/main/java/org/zstack/network/l2/vxlan/vtep/APIQueryVtepReply.java | 975 | package org.zstack.network.l2.vxlan.vtep;
import org.zstack.header.query.APIQueryReply;
import org.zstack.header.rest.RestResponse;
import java.util.Arrays;
import java.util.List;
/**
* Created by weiwang on 27/05/2017.
*/
@RestResponse(allTo = "inventories")
/**
 * Reply carrying the VTEP inventories matched by an APIQueryVtep request.
 */
public class APIQueryVtepReply extends APIQueryReply {
    // Matched VTEP records; serialized into the "inventories" field of the REST response.
    private List<VtepInventory> inventories;

    public List<VtepInventory> getInventories() {
        return inventories;
    }

    public void setInventories(List<VtepInventory> inventories) {
        this.inventories = inventories;
    }

    /** Sample reply used by the API documentation generator. */
    public static APIQueryVtepReply __example__() {
        VtepInventory vtep = new VtepInventory();
        vtep.setUuid(uuid());
        vtep.setHostUuid(uuid());
        vtep.setVtepIp("192.168.100.10");
        vtep.setPort(4789);
        vtep.setType("KVM_HOST_VXLAN");

        APIQueryVtepReply example = new APIQueryVtepReply();
        example.setInventories(Arrays.asList(vtep));
        return example;
    }
}
| apache-2.0 |
YolandaMDavis/nifi | nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-nar-loading-utils/src/test/java/org/apache/nifi/nar/TestNarLoader.java | 6145 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.nar;
import org.apache.commons.lang3.SystemUtils;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.reporting.ReportingTask;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Integration-style tests for NarLoader: verifies NAR bundles are loaded when
 * all dependencies are present, and are skipped (then retried successfully)
 * when dependent NARs arrive incrementally. Relies on fixture NARs under
 * src/test/resources/extensions.
 */
public class TestNarLoader extends AbstractTestNarLoader {

    static final String WORK_DIR = "./target/work";
    static final String NAR_AUTOLOAD_DIR = "./target/extensions";
    static final String PROPERTIES_FILE = "./src/test/resources/conf/nifi.properties";
    static final String EXTENSIONS_DIR = "./src/test/resources/extensions";

    @BeforeClass
    public static void setUpSuite() {
        // Path handling in these tests is POSIX-specific.
        Assume.assumeTrue("Test only runs on *nix", !SystemUtils.IS_OS_WINDOWS);
    }

    @Test
    public void testNarLoaderWhenAllAvailable() throws IOException {
        // Copy all NARs from src/test/resources/extensions to target/extensions
        final File extensionsDir = new File(EXTENSIONS_DIR);
        final Path narAutoLoadDir = Paths.get(NAR_AUTOLOAD_DIR);
        for (final File extensionFile : extensionsDir.listFiles()) {
            Files.copy(extensionFile.toPath(), narAutoLoadDir.resolve(extensionFile.getName()), StandardCopyOption.REPLACE_EXISTING);
        }

        final List<File> narFiles = Arrays.asList(narAutoLoadDir.toFile().listFiles());
        assertEquals(3, narFiles.size());

        // All three bundles should load in one pass since dependencies are satisfied.
        final NarLoadResult narLoadResult = narLoader.load(narFiles);
        assertNotNull(narLoadResult);
        assertEquals(3, narLoadResult.getLoadedBundles().size());
        assertEquals(0, narLoadResult.getSkippedBundles().size());

        assertEquals(5, narClassLoaders.getBundles().size());
        assertEquals(1, extensionManager.getExtensions(Processor.class).size());
        assertEquals(1, extensionManager.getExtensions(ControllerService.class).size());
        assertEquals(0, extensionManager.getExtensions(ReportingTask.class).size());
    }

    @Test
    public void testNarLoaderWhenDependentNarsAreMissing() throws IOException {
        final File extensionsDir = new File(EXTENSIONS_DIR);
        final Path narAutoLoadDir = Paths.get(NAR_AUTOLOAD_DIR);

        // Copy processors NAR first which depends on service API NAR
        final File processorsNar = new File(extensionsDir, "nifi-example-processors-nar-1.0.nar");
        final File targetProcessorNar = new File(narAutoLoadDir.toFile(), processorsNar.getName());
        Files.copy(processorsNar.toPath(), targetProcessorNar.toPath(), StandardCopyOption.REPLACE_EXISTING);

        // Attempt to load while only processor NAR is available
        final List<File> narFiles1 = Arrays.asList(targetProcessorNar);
        final NarLoadResult narLoadResult1 = narLoader.load(narFiles1);
        assertNotNull(narLoadResult1);
        assertEquals(0, narLoadResult1.getLoadedBundles().size());
        assertEquals(1, narLoadResult1.getSkippedBundles().size());

        // Copy the service impl which also depends on service API NAR
        final File serviceImplNar = new File(extensionsDir, "nifi-example-service-nar-1.1.nar");
        final File targetServiceImplNar = new File(narAutoLoadDir.toFile(), serviceImplNar.getName());
        Files.copy(serviceImplNar.toPath(), targetServiceImplNar.toPath(), StandardCopyOption.REPLACE_EXISTING);

        // Attempt to load while processor and service impl NARs available
        // Both remain skipped: the service API NAR they depend on is still absent.
        final List<File> narFiles2 = Arrays.asList(targetServiceImplNar);
        final NarLoadResult narLoadResult2 = narLoader.load(narFiles2);
        assertNotNull(narLoadResult2);
        assertEquals(0, narLoadResult2.getLoadedBundles().size());
        assertEquals(2, narLoadResult2.getSkippedBundles().size());

        // Copy service API NAR
        final File serviceApiNar = new File(extensionsDir, "nifi-example-service-api-nar-1.0.nar");
        final File targetServiceApiNar = new File(narAutoLoadDir.toFile(), serviceApiNar.getName());
        Files.copy(serviceApiNar.toPath(), targetServiceApiNar.toPath(), StandardCopyOption.REPLACE_EXISTING);

        // Attempt to load while all NARs available
        // Supplying the API NAR unblocks the two previously skipped bundles as well.
        final List<File> narFiles3 = Arrays.asList(targetServiceApiNar);
        final NarLoadResult narLoadResult3 = narLoader.load(narFiles3);
        assertNotNull(narLoadResult3);
        assertEquals(3, narLoadResult3.getLoadedBundles().size());
        assertEquals(0, narLoadResult3.getSkippedBundles().size());

        assertEquals(5, narClassLoaders.getBundles().size());
        assertEquals(1, extensionManager.getExtensions(Processor.class).size());
        assertEquals(1, extensionManager.getExtensions(ControllerService.class).size());
        assertEquals(0, extensionManager.getExtensions(ReportingTask.class).size());
    }

    @Override
    String getWorkDir() {
        return WORK_DIR;
    }

    @Override
    String getNarAutoloadDir() {
        return NAR_AUTOLOAD_DIR;
    }

    @Override
    String getPropertiesFile() {
        return PROPERTIES_FILE;
    }
}
| apache-2.0 |
q474818917/solr-5.2.0 | solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java | 2658 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.morphlines.solr;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrInputDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A mockup DocumentLoader implementation for unit tests; collects all documents into a main memory list.
*/
class CollectingDocumentLoader implements DocumentLoader {

    // Flush threshold: documents are moved from the pending batch into results
    // once the batch reaches this size, or on commit.
    private final int batchSize;
    private final List<SolrInputDocument> batch = new ArrayList<> ();
    // Accumulated "loaded" documents. NOTE(review): no accessor for this list is
    // visible in this class -- presumably tests read it some other way; verify.
    private List<SolrInputDocument> results = new ArrayList<> ();

    private static final Logger LOGGER = LoggerFactory.getLogger(CollectingDocumentLoader.class);

    public CollectingDocumentLoader(int batchSize) {
        if (batchSize <= 0) {
            throw new IllegalArgumentException("batchSize must be a positive number: " + batchSize);
        }
        this.batchSize = batchSize;
    }

    @Override
    public void beginTransaction() {
        LOGGER.trace("beginTransaction");
        // Any documents buffered but not committed from a prior transaction are discarded.
        batch.clear();
    }

    @Override
    public void load(SolrInputDocument doc) {
        LOGGER.trace("load doc: {}", doc);
        batch.add(doc);
        if (batch.size() >= batchSize) {
            loadBatch();
        }
    }

    @Override
    public void commitTransaction() {
        LOGGER.trace("commitTransaction");
        // Flush whatever remains in the partial batch.
        if (batch.size() > 0) {
            loadBatch();
        }
    }

    // Moves the pending batch into results; the batch is cleared even if addAll fails.
    private void loadBatch() {
        try {
            results.addAll(batch);
        } finally {
            batch.clear();
        }
    }

    @Override
    public UpdateResponse rollbackTransaction() {
        LOGGER.trace("rollback");
        // Mockup: returns an empty response; already-flushed results are NOT undone.
        return new UpdateResponse();
    }

    @Override
    public void shutdown() {
        LOGGER.trace("shutdown");
    }

    @Override
    public SolrPingResponse ping() {
        LOGGER.trace("ping");
        return new SolrPingResponse();
    }

}
| apache-2.0 |
bradparks/beaker-notebook | plugin/jvm/src/main/java/com/twosigma/beaker/jvm/serialization/BeakerCodeCellListDeserializer.java | 2190 | /*
* Copyright 2014 TWO SIGMA OPEN SOURCE, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twosigma.beaker.jvm.serialization;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.DeserializationContext;
import org.codehaus.jackson.map.JsonDeserializer;
import org.codehaus.jackson.map.ObjectMapper;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.twosigma.beaker.BeakerCodeCell;
/*
* This class is used to deserialize the above fake root object when reading the notebook code cells
*/
public class BeakerCodeCellListDeserializer extends JsonDeserializer<BeakerCodeCellList> {
// Converter provider is injected lazily so the (possibly expensive) converter
// is resolved per deserialization rather than at construction time.
private final Provider<BeakerObjectConverter> objectSerializerProvider;

@Inject
public BeakerCodeCellListDeserializer(Provider<BeakerObjectConverter> osp) {
    objectSerializerProvider = osp;
}

/**
 * Deserializes a JSON array of notebook cells into a BeakerCodeCellList.
 * Elements that do not convert to BeakerCodeCell are silently skipped;
 * non-array input yields an empty list.
 */
@Override
public BeakerCodeCellList deserialize(JsonParser jp, DeserializationContext ctxt)
        throws IOException, JsonProcessingException {
    ObjectMapper mapper = (ObjectMapper)jp.getCodec();
    JsonNode node = mapper.readTree(jp);

    List<BeakerCodeCell> l = new ArrayList<BeakerCodeCell>();
    if (node.isArray()) {
        for (JsonNode o : node) {
            // Delegate per-element conversion to the shared Beaker object converter.
            Object obj = objectSerializerProvider.get().deserialize(o, mapper);
            if (obj instanceof BeakerCodeCell)
                l.add((BeakerCodeCell) obj);
        }
    }
    BeakerCodeCellList r = new BeakerCodeCellList();
    r.theList = l;
    return r;
}
} | apache-2.0 |
asedunov/intellij-community | plugins/svn4idea/src/org/jetbrains/idea/svn/commandLine/Command.java | 6264 | package org.jetbrains.idea.svn.commandLine;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.api.Depth;
import org.jetbrains.idea.svn.api.ProgressTracker;
import org.jetbrains.idea.svn.properties.PropertyValue;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc2.SvnTarget;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
* @author Konstantin Kolosovsky.
*/
// TODO: Probably make command immutable and use CommandBuilder for updates.
public class Command {
// Parameters accumulated for the command line invocation.
@NotNull private final List<String> myParameters = ContainerUtil.newArrayList();
// Snapshot of parameters taken before authentication info is appended (see saveOriginalParameters).
@NotNull private final List<String> myOriginalParameters = ContainerUtil.newArrayList();
@NotNull private final SvnCommandName myName;

private File workingDirectory;
@Nullable private File myConfigDir;
@Nullable private LineCommandListener myResultBuilder;
@Nullable private volatile SVNURL myRepositoryUrl;
@NotNull private SvnTarget myTarget;
@Nullable private Collection<File> myTargets;

@Nullable private PropertyValue myPropertyValue;

@Nullable private ProgressTracker myCanceller;

public Command(@NotNull SvnCommandName name) {
    myName = name;
}

// Appends "--depth <value>" when depth is non-null.
public void put(@Nullable Depth depth) {
    CommandUtil.put(myParameters, depth, false);
}

public void put(@NotNull SvnTarget target) {
    CommandUtil.put(myParameters, target);
}

public void put(@Nullable SVNRevision revision) {
    CommandUtil.put(myParameters, revision);
}

// Appends the parameter only when the condition holds.
public void put(@NotNull String parameter, boolean condition) {
    CommandUtil.put(myParameters, condition, parameter);
}

public void put(@NonNls @NotNull String... parameters) {
    put(Arrays.asList(parameters));
}

public void put(@NotNull List<String> parameters) {
    myParameters.addAll(parameters);
}

// Appends the parameter unless an identical one is already present.
public void putIfNotPresent(@NotNull String parameter) {
    if (!myParameters.contains(parameter)) {
        myParameters.add(parameter);
    }
}

@Nullable
public ProgressTracker getCanceller() {
    return myCanceller;
}

public void setCanceller(@Nullable ProgressTracker canceller) {
    myCanceller = canceller;
}
// Simple accessors; nullability is expressed through the annotations.
@Nullable
public File getConfigDir() {
    return myConfigDir;
}

public File getWorkingDirectory() {
    return workingDirectory;
}

@Nullable
public LineCommandListener getResultBuilder() {
    return myResultBuilder;
}

@Nullable
public SVNURL getRepositoryUrl() {
    return myRepositoryUrl;
}

// Like getRepositoryUrl() but asserts the url has already been resolved.
@NotNull
public SVNURL requireRepositoryUrl() {
    SVNURL result = getRepositoryUrl();
    assert result != null;
    return result;
}

@NotNull
public SvnTarget getTarget() {
    return myTarget;
}

// Absolute paths of the extra file targets, or null when there are none.
@Nullable
public List<String> getTargetsPaths() {
    return ContainerUtil.isEmpty(myTargets) ? null : ContainerUtil.map(myTargets, file -> CommandUtil.format(file.getAbsolutePath(), null));
}

@Nullable
public PropertyValue getPropertyValue() {
    return myPropertyValue;
}

@NotNull
public SvnCommandName getName() {
    return myName;
}

public void setWorkingDirectory(File workingDirectory) {
    this.workingDirectory = workingDirectory;
}

public void setConfigDir(@Nullable File configDir) {
    this.myConfigDir = configDir;
}

public void setResultBuilder(@Nullable LineCommandListener resultBuilder) {
    myResultBuilder = resultBuilder;
}

public void setRepositoryUrl(@Nullable SVNURL repositoryUrl) {
    myRepositoryUrl = repositoryUrl;
}

public void setTarget(@NotNull SvnTarget target) {
    myTarget = target;
}

public void setTargets(@Nullable Collection<File> targets) {
    myTargets = targets;
}

public void setPropertyValue(@Nullable PropertyValue propertyValue) {
    myPropertyValue = propertyValue;
}
// TODO: used only to ensure authentication info is not logged to file. Remove when command execution model is refactored
// TODO: - so we could determine if parameter should be logged by the parameter itself.
public void saveOriginalParameters() {
myOriginalParameters.clear();
myOriginalParameters.addAll(myParameters);
}
@NotNull
public List<String> getParameters() {
return ContainerUtil.newArrayList(myParameters);
}
public String getText() {
List<String> data = new ArrayList<>();
if (myConfigDir != null) {
data.add("--config-dir");
data.add(myConfigDir.getPath());
}
data.add(myName.getName());
data.addAll(myOriginalParameters);
List<String> targetsPaths = getTargetsPaths();
if (!ContainerUtil.isEmpty(targetsPaths)) {
data.addAll(targetsPaths);
}
return StringUtil.join(data, " ");
}
public boolean isLocalInfo() {
return is(SvnCommandName.info) && hasLocalTarget() && !myParameters.contains("--revision");
}
public boolean isLocalStatus() {
return is(SvnCommandName.st) && hasLocalTarget() && !myParameters.contains("-u");
}
public boolean isLocalProperty() {
boolean isPropertyCommand =
is(SvnCommandName.proplist) || is(SvnCommandName.propget) || is(SvnCommandName.propset) || is(SvnCommandName.propdel);
return isPropertyCommand && hasLocalTarget() && isLocal(getRevision());
}
public boolean isLocalCat() {
return is(SvnCommandName.cat) && hasLocalTarget() && isLocal(getRevision());
}
@Nullable
private SVNRevision getRevision() {
int index = myParameters.indexOf("--revision");
return index >= 0 && index + 1 < myParameters.size() ? SVNRevision.parse(myParameters.get(index + 1)) : null;
}
public boolean is(@NotNull SvnCommandName name) {
return name.equals(myName);
}
private boolean hasLocalTarget() {
return myTarget.isFile() && isLocal(myTarget.getPegRevision());
}
private static boolean isLocal(@Nullable SVNRevision revision) {
return revision == null ||
SVNRevision.UNDEFINED.equals(revision) ||
SVNRevision.BASE.equals(revision) ||
SVNRevision.WORKING.equals(revision);
}
} | apache-2.0 |
android-ia/platform_tools_idea | java/compiler/impl/src/com/intellij/compiler/options/AnnotationProcessorsPanel.java | 14997 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.options;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ShortcutSet;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.InputValidatorEx;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.AnActionButton;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.ToolbarDecorator;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.JBList;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ui.EditableTreeModel;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.jps.model.java.compiler.ProcessorConfigProfile;
import org.jetbrains.jps.model.java.impl.compiler.ProcessorConfigProfileImpl;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.*;
import java.util.List;
/**
* @author Konstantin Bulenkov
*/
@SuppressWarnings({"unchecked", "UseOfObsoleteCollectionType"})
public class AnnotationProcessorsPanel extends JPanel {
// Settings applied to every module that is not claimed by a named profile.
private final ProcessorConfigProfile myDefaultProfile = new ProcessorConfigProfileImpl("");
// Working copies of the named (per-module) profiles; populated in initProfiles().
private final List<ProcessorConfigProfile> myModuleProfiles = new ArrayList<ProcessorConfigProfile>();
// All project modules keyed by name, used to resolve the module names stored in profiles.
private final Map<String, Module> myAllModulesMap = new HashMap<String, Module>();
private final Project myProject;
// Left pane: tree of profiles with their assigned modules as children.
private final Tree myTree;
// Right pane: editor for the settings of the profile selected in the tree.
private final ProcessorProfilePanel myProfilePanel;
// Profile whose settings are currently loaded into myProfilePanel (null before first selection).
private ProcessorConfigProfile mySelectedProfile = null;
// Builds the two-pane UI: profile/module tree on the left, profile editor on the right.
public AnnotationProcessorsPanel(Project project) {
super(new BorderLayout());
Splitter splitter = new Splitter(false, 0.3f);
add(splitter, BorderLayout.CENTER);
myProject = project;
for (Module module : ModuleManager.getInstance(project).getModules()) {
myAllModulesMap.put(module.getName(), module);
}
myTree = new Tree(new MyTreeModel());
myTree.setRootVisible(false);
// "Move to" toolbar action: reassigns the selected module node(s) to another
// profile chosen from a popup list.
final JPanel treePanel =
ToolbarDecorator.createDecorator(myTree).addExtraAction(new AnActionButton("Move to", AllIcons.Actions.Nextfile) {
@Override
public void actionPerformed(AnActionEvent e) {
final MyModuleNode node = (MyModuleNode)myTree.getSelectionPath().getLastPathComponent();
final TreePath[] selectedNodes = myTree.getSelectionPaths();
// Profile the selected module currently belongs to.
final ProcessorConfigProfile nodeProfile = ((ProfileNode)node.getParent()).myProfile;
// Candidate targets: every profile except the current one.
final List<ProcessorConfigProfile> profiles = new ArrayList<ProcessorConfigProfile>();
profiles.add(myDefaultProfile);
for (ProcessorConfigProfile profile : myModuleProfiles) {
profiles.add(profile);
}
profiles.remove(nodeProfile);
final JBList list = new JBList(profiles);
final JBPopup popup = JBPopupFactory.getInstance().createListPopupBuilder(list)
.setTitle("Move to")
.setItemChoosenCallback(new Runnable() {
@Override
public void run() {
final Object value = list.getSelectedValue();
if (value instanceof ProcessorConfigProfile) {
final ProcessorConfigProfile chosenProfile = (ProcessorConfigProfile)value;
final Module toSelect = (Module)node.getUserObject();
if (selectedNodes != null) {
for (TreePath selectedNode : selectedNodes) {
// NOTE(review): this local deliberately shadows the captured 'node' above.
final Object node = selectedNode.getLastPathComponent();
if (node instanceof MyModuleNode) {
final Module module = (Module)((MyModuleNode)node).getUserObject();
// Membership in the default profile is implicit (it owns every module not
// listed by a named profile), so only named profiles store module names.
if (nodeProfile != myDefaultProfile) {
nodeProfile.removeModuleName(module.getName());
}
if (chosenProfile != myDefaultProfile) {
chosenProfile.addModuleName(module.getName());
}
}
}
}
// Rebuild the tree from the updated profiles and restore the selection.
final RootNode root = (RootNode)myTree.getModel().getRoot();
root.sync();
final DefaultMutableTreeNode node = TreeUtil.findNodeWithObject(root, toSelect);
if (node != null) {
TreeUtil.selectNode(myTree, node);
}
}
}
})
.createPopup();
// Show the popup at the mouse position for mouse events, otherwise at the tree selection.
RelativePoint point =
e.getInputEvent() instanceof MouseEvent ? getPreferredPopupPoint() : TreeUtil.getPointForSelection(myTree);
popup.show(point);
}
@Override
public ShortcutSet getShortcut() {
return ActionManager.getInstance().getAction("Move").getShortcutSet();
}
@Override
public boolean isEnabled() {
// Only module nodes can be moved, and only when a named profile exists to move to.
return myTree.getSelectionPath() != null
&& myTree.getSelectionPath().getLastPathComponent() instanceof MyModuleNode
&& !myModuleProfiles.isEmpty();
}
}).createPanel();
splitter.setFirstComponent(treePanel);
myTree.setCellRenderer(new MyCellRenderer());
// Keep the right-hand editor in sync with the tree selection, flushing any pending
// edits of the previously selected profile before switching.
myTree.addTreeSelectionListener(new TreeSelectionListener() {
@Override
public void valueChanged(TreeSelectionEvent e) {
final TreePath path = myTree.getSelectionPath();
if (path != null) {
Object node = path.getLastPathComponent();
if (node instanceof MyModuleNode) {
// Selecting a module shows the profile it belongs to.
node = ((MyModuleNode)node).getParent();
}
if (node instanceof ProfileNode) {
final ProcessorConfigProfile nodeProfile = ((ProfileNode)node).myProfile;
final ProcessorConfigProfile selectedProfile = mySelectedProfile;
if (nodeProfile != selectedProfile) {
if (selectedProfile != null) {
myProfilePanel.saveTo(selectedProfile);
}
mySelectedProfile = nodeProfile;
myProfilePanel.setProfile(nodeProfile);
}
}
}
}
});
myProfilePanel = new ProcessorProfilePanel(project);
myProfilePanel.setBorder(IdeBorderFactory.createEmptyBorder(0, 6, 0, 0));
splitter.setSecondComponent(myProfilePanel);
}
// Loads the panel from the given settings, copying each profile so edits made here
// do not mutate the originals until the caller reads them back via the getters.
public void initProfiles(ProcessorConfigProfile defaultProfile, Collection<ProcessorConfigProfile> moduleProfiles) {
myDefaultProfile.initFrom(defaultProfile);
myModuleProfiles.clear();
for (ProcessorConfigProfile profile : moduleProfiles) {
ProcessorConfigProfile copy = new ProcessorConfigProfileImpl("");
copy.initFrom(profile);
myModuleProfiles.add(copy);
}
final RootNode root = (RootNode)myTree.getModel().getRoot();
root.sync();
final DefaultMutableTreeNode node = TreeUtil.findNodeWithObject(root, myDefaultProfile);
if (node != null) {
TreeUtil.selectNode(myTree, node);
}
}
// Returns the default profile, first flushing the editor if it is the one being edited.
public ProcessorConfigProfile getDefaultProfile() {
final ProcessorConfigProfile selectedProfile = mySelectedProfile;
if (myDefaultProfile == selectedProfile) {
myProfilePanel.saveTo(selectedProfile);
}
return myDefaultProfile;
}
// Returns the named profiles, first flushing the editor for the selected named profile.
// NOTE(review): if mySelectedProfile is null this calls saveTo(null) -- presumably a
// selection always exists by the time this is invoked; verify against callers.
public List<ProcessorConfigProfile> getModuleProfiles() {
final ProcessorConfigProfile selectedProfile = mySelectedProfile;
if (myDefaultProfile != selectedProfile) {
myProfilePanel.saveTo(selectedProfile);
}
return myModuleProfiles;
}
// Expands every row repeatedly until the tree stops growing (fixed point).
private static void expand(JTree tree) {
int oldRowCount = 0;
do {
int rowCount = tree.getRowCount();
if (rowCount == oldRowCount) break;
oldRowCount = rowCount;
for (int i = 0; i < rowCount; i++) {
tree.expandRow(i);
}
}
while (true);
}
// Tree model supporting add/remove of profiles through the toolbar decorator.
private class MyTreeModel extends DefaultTreeModel implements EditableTreeModel{
public MyTreeModel() {
super(new RootNode());
}
// Prompts for a unique, non-empty profile name and creates the profile.
// Always returns null; the new node is selected explicitly instead.
@Override
public TreePath addNode(TreePath parentOrNeighbour) {
final String newProfileName = Messages.showInputDialog(
myProject, "Profile name", "Create new profile", null, "",
new InputValidatorEx() {
@Override
public boolean checkInput(String inputString) {
// Reject empty names and names colliding with any existing profile.
if (StringUtil.isEmpty(inputString) ||
Comparing.equal(inputString, myDefaultProfile.getName())) {
return false;
}
for (ProcessorConfigProfile profile : myModuleProfiles) {
if (Comparing.equal(inputString, profile.getName())) {
return false;
}
}
return true;
}
@Override
public boolean canClose(String inputString) {
return checkInput(inputString);
}
@Override
public String getErrorText(String inputString) {
if (checkInput(inputString)) {
return null;
}
return StringUtil.isEmpty(inputString)
? "Profile name shouldn't be empty"
: "Profile " + inputString + " already exists";
}
});
if (newProfileName != null) {
final ProcessorConfigProfile profile = new ProcessorConfigProfileImpl(newProfileName);
myModuleProfiles.add(profile);
((DataSynchronizable)getRoot()).sync();
final DefaultMutableTreeNode object = TreeUtil.findNodeWithObject((DefaultMutableTreeNode)getRoot(), profile);
if (object != null) {
TreeUtil.selectNode(myTree, object);
}
}
return null;
}
// Deletes the selected named profile (the default profile cannot be removed)
// and re-selects the default profile.
@Override
public void removeNode(TreePath nodePath) {
Object node = nodePath.getLastPathComponent();
if (node instanceof ProfileNode) {
final ProcessorConfigProfile nodeProfile = ((ProfileNode)node).myProfile;
if (nodeProfile != myDefaultProfile) {
if (mySelectedProfile == nodeProfile) {
mySelectedProfile = null;
}
myModuleProfiles.remove(nodeProfile);
((DataSynchronizable)getRoot()).sync();
final DefaultMutableTreeNode object = TreeUtil.findNodeWithObject((DefaultMutableTreeNode)getRoot(), myDefaultProfile);
if (object != null) {
TreeUtil.selectNode(myTree, object);
}
}
}
}
// Reordering of profiles is not supported.
@Override
public void moveNodeTo(TreePath parentOrNeighbour) {
}
}
// Invisible root holding one ProfileNode per profile (default profile first).
private class RootNode extends DefaultMutableTreeNode implements DataSynchronizable {
@Override
public DataSynchronizable sync() {
// DefaultMutableTreeNode stores children in a Vector, hence the obsolete type
// (suppressed at class level).
final Vector newKids = new Vector();
newKids.add(new ProfileNode(myDefaultProfile, this, true).sync());
for (ProcessorConfigProfile profile : myModuleProfiles) {
newKids.add(new ProfileNode(profile, this, false).sync());
}
children = newKids;
((DefaultTreeModel)myTree.getModel()).reload();
expand(myTree);
return this;
}
}
// A node that can rebuild its children from the current profile data.
private interface DataSynchronizable {
DataSynchronizable sync();
}
// Tree node for a single profile; children are the modules it applies to.
private class ProfileNode extends DefaultMutableTreeNode implements DataSynchronizable {
private final ProcessorConfigProfile myProfile;
private final boolean myIsDefault;
public ProfileNode(ProcessorConfigProfile profile, RootNode parent, boolean isDefault) {
super(profile);
setParent(parent);
myIsDefault = isDefault;
myProfile = profile;
}
@Override
public DataSynchronizable sync() {
final List<Module> nodeModules = new ArrayList<Module>();
if (myIsDefault) {
// The default profile owns every module not claimed by a named profile.
final Set<String> nonDefaultProfileModules = new HashSet<String>();
for (ProcessorConfigProfile profile : myModuleProfiles) {
nonDefaultProfileModules.addAll(profile.getModuleNames());
}
for (Map.Entry<String, Module> entry : myAllModulesMap.entrySet()) {
if (!nonDefaultProfileModules.contains(entry.getKey())) {
nodeModules.add(entry.getValue());
}
}
}
else {
// Named profiles list their modules explicitly; module names that no longer
// exist in the project are silently skipped.
for (String moduleName : myProfile.getModuleNames()) {
final Module module = myAllModulesMap.get(moduleName);
if (module != null) {
nodeModules.add(module);
}
}
}
Collections.sort(nodeModules, ModuleComparator.INSTANCE);
final Vector vector = new Vector();
for (Module module : nodeModules) {
vector.add(new MyModuleNode(module, this));
}
children = vector;
return this;
}
}
// Leaf node representing a module assigned to a profile.
private static class MyModuleNode extends DefaultMutableTreeNode {
public MyModuleNode(Module module, ProfileNode parent) {
super(module);
setParent(parent);
setAllowsChildren(false);
}
}
// Renders profile nodes as plain text and module nodes with the module icon.
private static class MyCellRenderer extends ColoredTreeCellRenderer {
@Override
public void customizeCellRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
if (value instanceof ProfileNode) {
append(((ProfileNode)value).myProfile.getName());
}
else if (value instanceof MyModuleNode) {
final Module module = (Module)((MyModuleNode)value).getUserObject();
setIcon(AllIcons.Nodes.Module);
append(module.getName());
}
}
}
// Orders modules alphabetically by name for display.
private static class ModuleComparator implements Comparator<Module> {
static final ModuleComparator INSTANCE = new ModuleComparator();
@Override
public int compare(Module o1, Module o2) {
return o1.getName().compareTo(o2.getName());
}
}
}
| apache-2.0 |
UniquePassive/runelite | protocol/src/main/java/net/runelite/protocol/update/decoders/ArchiveRequestDecoder.java | 2366 | /*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.protocol.update.decoders;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import java.util.List;
import net.runelite.protocol.api.update.ArchiveRequestPacket;
public class ArchiveRequestDecoder extends ByteToMessageDecoder
{
	// An archive request is exactly 4 bytes: opcode (doubles as the
	// priority flag), index, and a 2-byte archive id.
	private static final int REQUEST_LENGTH = 4;

	@Override
	protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception
	{
		// Peek at the opcode without consuming it, so the buffer can be
		// passed through untouched when this packet is not for us.
		byte opcode = in.getByte(in.readerIndex());
		if (opcode != UpdateOpcodes.ARCHIVE_REQUEST_HIGH
			&& opcode != UpdateOpcodes.ARCHIVE_REQUEST_LOW)
		{
			ctx.fireChannelRead(in.retain());
			return;
		}

		// Wait for the complete request before consuming anything; the
		// original code read 4 bytes unconditionally and would throw on a
		// fragmented read. decode() is re-invoked once more bytes arrive.
		if (in.readableBytes() < REQUEST_LENGTH)
		{
			return;
		}

		byte priority = in.readByte(); // the opcode byte itself: 1 = high priority
		int index = in.readByte() & 0xFF;
		int archiveId = in.readShort() & 0xFFFF;

		ArchiveRequestPacket archiveRequest = new ArchiveRequestPacket();
		archiveRequest.setPriority(priority == 1);
		archiveRequest.setIndex(index);
		archiveRequest.setArchive(archiveId);
		out.add(archiveRequest);
	}
}
| bsd-2-clause |
strahanjen/strahanjen.github.io | elasticsearch-master/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java | 3012 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.transport;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
/**
*
*/
/**
 * A {@link TransportAddress} for the in-JVM local transport, identified by an
 * opaque string id rather than a network host/port.
 */
public final class LocalTransportAddress implements TransportAddress {

    public static final short TYPE_ID = 2;

    private static final AtomicLong transportAddressIdGenerator = new AtomicLong();

    /**
     * generates a new unique address
     */
    public static LocalTransportAddress buildUnique() {
        return new LocalTransportAddress(Long.toString(transportAddressIdGenerator.incrementAndGet()));
    }

    // final: the id is assigned exactly once in either constructor, making the
    // instance immutable and safe to share across threads.
    private final String id;

    public LocalTransportAddress(String id) {
        this.id = id;
    }

    /**
     * Read from a stream.
     */
    public LocalTransportAddress(StreamInput in) throws IOException {
        id = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(id);
    }

    public String id() {
        return this.id;
    }

    @Override
    public short uniqueAddressTypeId() {
        return TYPE_ID;
    }

    @Override
    public boolean sameHost(TransportAddress other) {
        return other instanceof LocalTransportAddress && id.equals(((LocalTransportAddress) other).id);
    }

    @Override
    public boolean isLoopbackOrLinkLocalAddress() {
        return false;
    }

    @Override
    public String getHost() {
        return "local";
    }

    @Override
    public String getAddress() {
        return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
    }

    @Override
    public int getPort() {
        return 0;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        LocalTransportAddress that = (LocalTransportAddress) o;
        // null-tolerant comparison, equivalent to the previous two-step check
        return id != null ? id.equals(that.id) : that.id == null;
    }

    @Override
    public int hashCode() {
        return id != null ? id.hashCode() : 0;
    }

    @Override
    public String toString() {
        return "local[" + id + "]";
    }
}
| bsd-3-clause |
mmohan01/ReFactory | data/xom/xom-1.2b2/nu/xom/samples/SQLToXML.java | 7012 | /* Copyright 2002-2004 Elliotte Rusty Harold
This library is free software; you can redistribute it and/or modify
it under the terms of version 2.1 of the GNU Lesser General Public
License as published by the Free Software Foundation.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
You can contact Elliotte Rusty Harold by sending e-mail to
elharo@metalab.unc.edu. Please include the word "XOM" in the
subject line. The XOM home page is located at http://www.xom.nu/
*/
package nu.xom.samples;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import nu.xom.Attribute;
import nu.xom.Document;
import nu.xom.Element;
import nu.xom.Serializer;
/**
* <p>
* Demonstrates building a structured XML document,
* from relational data using JDBC. A different version of this
* example was originally developed for Example 4.14 of Chapter 4 of
* <cite><a target="_top"
* href="http://www.cafeconleche.org/books/xmljava/">Processing
* XML with Java</a></cite>.
* </p>
*
* @author Elliotte Rusty Harold
* @version 1.0
*
*/
public class SQLToXML {
public static void main(String[] args ) {
// Load the ODBC driver
try {
Class.forName( "sun.jdbc.odbc.JdbcOdbcDriver" );
}
catch (ClassNotFoundException ex) {
System.err.println("Could not load the JDBC-ODBC Bridge");
return;
}
try {
Element budget = new Element("Budget");
writeAgencies(budget);
Document doc = new Document(budget);
Serializer sout = new Serializer(System.out, "UTF-8");
sout.write(doc);
sout.flush();
}
catch (IOException ex) {
System.err.println(ex);
}
}
private static void writeAgencies(Element parent) {
Connection conn = null;
Statement stmnt = null;
try {
conn = DriverManager.getConnection(
"jdbc:odbc:budauth", "", "");
stmnt = conn.createStatement();
String query = "SELECT DISTINCT AgencyName, AgencyCode"
+ " FROM BudgetAuthorizationTable;";
ResultSet agencies = stmnt.executeQuery( query );
while( agencies.next() ) {
String agencyName = agencies.getString("AgencyName");
String agencyCode = agencies.getString("AgencyCode");
Element agency = new Element("Agency");
Element name = new Element("Name");
Element code = new Element("Code");
name.appendChild(agencyName);
code.appendChild(agencyCode);
agency.appendChild(name);
agency.appendChild(code);
writeBureaus(agency, conn, agencyCode);
parent.appendChild(agency);
}
}
catch (SQLException e) {
System.err.println(e);
e.printStackTrace();
}
finally {
try {
stmnt.close();
conn.close();
}
catch(SQLException ex) {
System.err.println(ex);
}
}
}
private static void writeBureaus(Element parent, Connection conn,
String agencyCode) throws SQLException {
String query
= "SELECT DISTINCT BureauName, BureauCode "
+ "FROM BudgetAuthorizationTable WHERE AgencyCode='"
+ agencyCode + "';";
Statement stmnt = conn.createStatement();
ResultSet bureaus = stmnt.executeQuery(query);
while( bureaus.next() ) {
String bureauName = bureaus.getString("BureauName");
String bureauCode = bureaus.getString("BureauCode");
Element bureau = new Element("Bureau");
Element name = new Element("Name");
Element code = new Element("Code");
name.appendChild(bureauName);
code.appendChild(bureauCode);
bureau.appendChild(name);
bureau.appendChild(code);
writeAccounts(bureau, conn, agencyCode, bureauCode);
parent.appendChild(bureau);
}
}
private static void writeAccounts(Element parent, Connection conn,
String agencyCode, String bureauCode)
throws SQLException {
String query = "SELECT DISTINCT AccountName, AccountCode "
+ "FROM BudgetAuthorizationTable WHERE AgencyCode='"
+ agencyCode + "' AND BureauCode='" + bureauCode + "';";
Statement stmnt = conn.createStatement();
ResultSet accounts = stmnt.executeQuery(query);
while( accounts.next() ) {
String accountName = accounts.getString("AccountName");
String accountCode = accounts.getString("AccountCode");
Element account = new Element("Account");
Element name = new Element("Name");
Element code = new Element("Code");
name.appendChild(accountName);
code.appendChild(accountCode);
account.appendChild(name);
account.appendChild(code);
writeSubfunctions(
account, conn, agencyCode, bureauCode, accountCode
);
parent.appendChild(account);
}
}
private static void writeSubfunctions(Element parent,
Connection conn, String agencyCode, String bureauCode,
String accountCode) throws SQLException {
String query = "SELECT * FROM BudgetAuthorizationTable"
+ " WHERE AgencyCode='" + agencyCode + "' AND BureauCode='"
+ bureauCode + "' AND AccountCode='" + accountCode + "';";
Statement stmnt = conn.createStatement();
ResultSet subfunctions = stmnt.executeQuery(query);
while( subfunctions.next() ) {
String subfunctionTitle
= subfunctions.getString("SubfunctionTitle");
String subfunctionCode
= subfunctions.getString("SubfunctionCode");
Element subfunction = new Element("Subfunction");
Element name = new Element("Name");
Element code = new Element("Code");
name.appendChild(subfunctionTitle);
code.appendChild(subfunctionCode);
subfunction.appendChild(name);
subfunction.appendChild(code);
Element amount = new Element("Amount");
amount.addAttribute(new Attribute("year", "TransitionQuarter"));
amount.appendChild(
String.valueOf(subfunctions.getInt("TransitionQuarter") * 1000L));
subfunction.appendChild(amount);
for (int year = 1976; year <= 2006; year++) {
String fy = "FY" + year;
long amt = subfunctions.getInt(fy) * 1000L;
amount = new Element("Amount");
amount.addAttribute(new Attribute("year", String.valueOf(year)));
amount.appendChild(String.valueOf(amt));
subfunction.appendChild(amount);
}
parent.appendChild(subfunction);
}
}
} | mit |
rokn/Count_Words_2015 | testing/openjdk2/jdk/src/share/classes/sun/security/util/DerIndefLenConverter.java | 11809 | /*
* Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.util;
import java.io.IOException;
import java.util.ArrayList;
/**
* A package private utility class to convert indefinite length DER
* encoded byte arrays to definite length DER encoded byte arrays.
*
* This assumes that the basic data structure is "tag, length, value"
* triplet. In the case where the length is "indefinite", terminating
* end-of-contents bytes are expected.
*
* @author Hemma Prafullchandra
*/
class DerIndefLenConverter {
private static final int TAG_MASK = 0x1f; // bits 5-1
private static final int FORM_MASK = 0x20; // bits 6
private static final int CLASS_MASK = 0xC0; // bits 8 and 7
private static final int LEN_LONG = 0x80; // bit 8 set
private static final int LEN_MASK = 0x7f; // bits 7 - 1
private static final int SKIP_EOC_BYTES = 2;
private byte[] data, newData;
private int newDataPos, dataPos, dataSize, index;
private int unresolved = 0;
private ArrayList<Object> ndefsList = new ArrayList<Object>();
private int numOfTotalLenBytes = 0;
// An end-of-contents octet is universal class, primitive form, tag number
// zero -- i.e. all tag bits clear.
private boolean isEOC(int tag) {
    return (tag & (TAG_MASK | FORM_MASK | CLASS_MASK)) == 0x00;
}
// Bit 8 of the first length octet distinguishes the short form (clear)
// from the long/indefinite form (set).
static boolean isLongForm(int lengthByte) {
    return (lengthByte & LEN_LONG) != 0;
}
/*
 * Default package private constructor; all conversion state is set up
 * lazily by the conversion entry points, so there is nothing to do here.
 */
DerIndefLenConverter() { }
/**
 * Checks whether the given length octet is of the <em>indefinite</em>
 * form.
 *
 * @param lengthByte the length byte from a DER encoded object.
 * @return true if the byte is of Indefinite form otherwise
 *         returns false.
 */
static boolean isIndefinite(int lengthByte) {
    // Indefinite form is exactly 0x80: long-form flag set with all seven
    // length-count bits clear.
    return (lengthByte & (LEN_LONG | LEN_MASK)) == LEN_LONG;
}
/**
 * Parses the tag at the current position. If it is an end-of-contents
 * (EOC) tag, resolves the most recently seen, still-unresolved
 * indefinite-length marker in <code>ndefsList</code>: the marker's
 * recorded start offset is replaced by the definite-length bytes for the
 * section it opened. Always advances <code>dataPos</code> past the tag.
 */
private void parseTag() throws IOException {
if (dataPos == dataSize)
return;
// NOTE(review): reading data[dataPos + 1] assumes at least one byte
// follows the tag; input truncated after a lone 0x00 tag byte would
// raise ArrayIndexOutOfBoundsException here -- TODO confirm callers
// guarantee this cannot happen.
if (isEOC(data[dataPos]) && (data[dataPos + 1] == 0)) {
int numOfEncapsulatedLenBytes = 0;
Object elem = null;
int index;
// Walk backwards to the nearest entry that is still an Integer (an
// unresolved indefinite-length start offset). Entries already resolved
// to byte[] length encodings are skipped, accumulating how much longer
// each definite encoding is than the 3 bytes it replaces
// (1 indefinite-length octet + 2 EOC octets).
for (index = ndefsList.size()-1; index >= 0; index--) {
// Determine the first element in the vector that does not
// have a matching EOC
elem = ndefsList.get(index);
if (elem instanceof Integer) {
break;
} else {
numOfEncapsulatedLenBytes += ((byte[])elem).length - 3;
}
}
if (index < 0) {
throw new IOException("EOC does not have matching " +
"indefinite-length tag");
}
// Definite length of this section: bytes between the marker and this
// EOC, plus the growth contributed by already-resolved nested sections.
int sectionLen = dataPos - ((Integer)elem).intValue() +
numOfEncapsulatedLenBytes;
byte[] sectionLenBytes = getLengthBytes(sectionLen);
ndefsList.set(index, sectionLenBytes);
unresolved--;
// Add the number of bytes required to represent this section
// to the total number of length bytes,
// and subtract the indefinite-length tag (1 byte) and
// EOC bytes (2 bytes) for this section
numOfTotalLenBytes += (sectionLenBytes.length - 3);
}
dataPos++;
}
/**
 * Copies the tag at the read position into <code>newData</code>,
 * silently skipping over any end-of-contents pairs (tag byte plus its
 * single zero length octet) encountered first.
 */
private void writeTag() {
    while (dataPos < dataSize) {
        int tag = data[dataPos++];
        if (isEOC(tag) && (data[dataPos] == 0)) {
            dataPos++;   // skip the zero length octet of the EOC pair
            continue;    // and look at the next tag
        }
        newData[newDataPos++] = (byte)tag;
        return;
    }
}
/**
 * Parses the length octets at the current position and returns the
 * decoded definite length. For an indefinite length (0x80) the position
 * just after that octet is pushed onto <code>ndefsList</code> and
 * <code>unresolved</code> is incremented, so a later EOC (see
 * {@link #parseTag}) can resolve it; 0 is returned in that case.
 *
 * @return the definite length, or 0 for an indefinite length or when
 *         the end of the input has been reached
 * @throws IOException if the long form declares more than 4 length
 *         octets, or the input is truncated
 */
private int parseLength() throws IOException {
int curLen = 0;
if (dataPos == dataSize)
return curLen;
int lenByte = data[dataPos++] & 0xff;
if (isIndefinite(lenByte)) {
// Remember where this indefinite-length value's contents begin.
ndefsList.add(new Integer(dataPos));
unresolved++;
return curLen;
}
if (isLongForm(lenByte)) {
// Long form: the low 7 bits give the count of following length octets.
lenByte &= LEN_MASK;
if (lenByte > 4)
throw new IOException("Too much data");
// NOTE(review): requires lenByte + 1 bytes to remain, i.e. the length
// octets plus at least one further byte -- TODO confirm the intent of
// the extra +1.
if ((dataSize - dataPos) < (lenByte + 1))
throw new IOException("Too little data");
// Accumulate the big-endian length value.
for (int i = 0; i < lenByte; i++)
curLen = (curLen << 8) + (data[dataPos++] & 0xff);
} else {
// Short form: the single octet is the length itself.
curLen = (lenByte & LEN_MASK);
}
return curLen;
}
/**
* Write the length and if it is an indefinite length
* then calculate the definite length from the positions
* of the indefinite length and its matching EOC terminator.
* Then, write the value.
*/
private void writeLengthAndValue() throws IOException {
if (dataPos == dataSize)
return;
int curLen = 0;
int lenByte = data[dataPos++] & 0xff;
if (isIndefinite(lenByte)) {
byte[] lenBytes = (byte[])ndefsList.get(index++);
System.arraycopy(lenBytes, 0, newData, newDataPos,
lenBytes.length);
newDataPos += lenBytes.length;
return;
}
if (isLongForm(lenByte)) {
lenByte &= LEN_MASK;
for (int i = 0; i < lenByte; i++)
curLen = (curLen << 8) + (data[dataPos++] & 0xff);
} else
curLen = (lenByte & LEN_MASK);
writeLength(curLen);
writeValue(curLen);
}
private void writeLength(int curLen) {
if (curLen < 128) {
newData[newDataPos++] = (byte)curLen;
} else if (curLen < (1 << 8)) {
newData[newDataPos++] = (byte)0x81;
newData[newDataPos++] = (byte)curLen;
} else if (curLen < (1 << 16)) {
newData[newDataPos++] = (byte)0x82;
newData[newDataPos++] = (byte)(curLen >> 8);
newData[newDataPos++] = (byte)curLen;
} else if (curLen < (1 << 24)) {
newData[newDataPos++] = (byte)0x83;
newData[newDataPos++] = (byte)(curLen >> 16);
newData[newDataPos++] = (byte)(curLen >> 8);
newData[newDataPos++] = (byte)curLen;
} else {
newData[newDataPos++] = (byte)0x84;
newData[newDataPos++] = (byte)(curLen >> 24);
newData[newDataPos++] = (byte)(curLen >> 16);
newData[newDataPos++] = (byte)(curLen >> 8);
newData[newDataPos++] = (byte)curLen;
}
}
private byte[] getLengthBytes(int curLen) {
byte[] lenBytes;
int index = 0;
if (curLen < 128) {
lenBytes = new byte[1];
lenBytes[index++] = (byte)curLen;
} else if (curLen < (1 << 8)) {
lenBytes = new byte[2];
lenBytes[index++] = (byte)0x81;
lenBytes[index++] = (byte)curLen;
} else if (curLen < (1 << 16)) {
lenBytes = new byte[3];
lenBytes[index++] = (byte)0x82;
lenBytes[index++] = (byte)(curLen >> 8);
lenBytes[index++] = (byte)curLen;
} else if (curLen < (1 << 24)) {
lenBytes = new byte[4];
lenBytes[index++] = (byte)0x83;
lenBytes[index++] = (byte)(curLen >> 16);
lenBytes[index++] = (byte)(curLen >> 8);
lenBytes[index++] = (byte)curLen;
} else {
lenBytes = new byte[5];
lenBytes[index++] = (byte)0x84;
lenBytes[index++] = (byte)(curLen >> 24);
lenBytes[index++] = (byte)(curLen >> 16);
lenBytes[index++] = (byte)(curLen >> 8);
lenBytes[index++] = (byte)curLen;
}
return lenBytes;
}
// Returns the number of bytes needed to represent the given length
// in ASN.1 notation
private int getNumOfLenBytes(int len) {
int numOfLenBytes = 0;
if (len < 128) {
numOfLenBytes = 1;
} else if (len < (1 << 8)) {
numOfLenBytes = 2;
} else if (len < (1 << 16)) {
numOfLenBytes = 3;
} else if (len < (1 << 24)) {
numOfLenBytes = 4;
} else {
numOfLenBytes = 5;
}
return numOfLenBytes;
}
/**
* Parse the value;
*/
private void parseValue(int curLen) {
dataPos += curLen;
}
/**
* Write the value;
*/
private void writeValue(int curLen) {
for (int i=0; i < curLen; i++)
newData[newDataPos++] = data[dataPos++];
}
    /**
     * Converts an indefinite-length DER encoded byte array to
     * a definite-length DER encoding.
     *
     * @param indefData the byte array holding the indefinite
     * length encoding.
     * @return the byte array containing the definite length
     * DER encoding.
     * @exception IOException on parsing or re-writing errors.
     */
    byte[] convert(byte[] indefData) throws IOException {
        data = indefData;
        dataPos=0; index=0;
        dataSize = data.length;
        int len=0;
        int unused = 0;
        // Pass 1: parse and set up the vectors of all the
        // indefinite-lengths.  Each indefinite start is recorded in
        // ndefsList and replaced by its definite-length bytes once the
        // matching EOC is parsed (see parseTag / parseLength).
        while (dataPos < dataSize) {
            parseTag();
            len = parseLength();
            parseValue(len);
            if (unresolved == 0) {
                // All indefinite sections are resolved; anything after
                // this point is trailing data copied through unchanged.
                unused = dataSize - dataPos;
                dataSize = dataPos;
                break;
            }
        }
        if (unresolved != 0) {
            throw new IOException("not all indef len BER resolved");
        }
        // Output grows by the definite length octets added and shrinks by
        // the indefinite/EOC bytes removed (net: numOfTotalLenBytes).
        newData = new byte[dataSize + numOfTotalLenBytes + unused];
        dataPos=0; newDataPos=0; index=0;
        // Pass 2: write out the new byte array replacing all the
        // indefinite-lengths and EOCs.
        while (dataPos < dataSize) {
            writeTag();
            writeLengthAndValue();
        }
        // Append any trailing bytes that followed the converted content.
        System.arraycopy(indefData, dataSize,
                         newData, dataSize + numOfTotalLenBytes, unused);
        return newData;
    }
}
| mit |
plumer/codana | tomcat_files/7.0.61/Resource (2).java | 2673 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.naming.resources;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* Encapsulates the contents of a resource.
*
* @author <a href="mailto:remm@apache.org">Remy Maucherat</a>
*/
/**
 * Encapsulates the contents of a resource, supplied either as an
 * in-memory byte array or as an {@link InputStream}.  When both are
 * set, the binary content takes precedence.
 *
 * @author <a href="mailto:remm@apache.org">Remy Maucherat</a>
 */
public class Resource {

    // ----------------------------------------------------------- Constructors

    /**
     * Creates a resource with no content.
     */
    public Resource() {
        // NO-OP
    }

    /**
     * Creates a resource whose content is supplied by a stream.
     *
     * @param inputStream the stream providing the content
     */
    public Resource(InputStream inputStream) {
        setContent(inputStream);
    }

    /**
     * Creates a resource whose content is held in memory.
     *
     * @param binaryContent the bytes of the content
     */
    public Resource(byte[] binaryContent) {
        setContent(binaryContent);
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * Binary content.
     */
    protected byte[] binaryContent = null;

    /**
     * Input stream.
     */
    protected InputStream inputStream = null;

    // ------------------------------------------------------------- Properties

    /**
     * Returns the content as a stream.  Binary content, when present,
     * wins over a previously supplied input stream.
     *
     * @return a stream over the content, or {@code null} if none was set
     * @throws IOException declared for subclasses; not thrown here
     */
    public InputStream streamContent() throws IOException {
        return (binaryContent != null)
                ? new ByteArrayInputStream(binaryContent)
                : inputStream;
    }

    /**
     * Returns the in-memory content, if any.
     *
     * @return the binary content, or {@code null} when stream-backed
     */
    public byte[] getContent() {
        return binaryContent;
    }

    /**
     * Replaces the backing input stream.
     *
     * @param inputStream New input stream
     */
    public void setContent(InputStream inputStream) {
        this.inputStream = inputStream;
    }

    /**
     * Replaces the in-memory content.
     *
     * @param binaryContent New binary content
     */
    public void setContent(byte[] binaryContent) {
        this.binaryContent = binaryContent;
    }
}
| mit |
georgeyanev/takes | src/main/java/org/takes/facets/hamcrest/HmRsStatus.java | 2894 | /**
* The MIT License (MIT)
*
* Copyright (c) 2015 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.facets.hamcrest;
import java.io.IOException;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.takes.Response;
/**
* Response Status Matcher.
*
* <p>This "matcher" tests given response status code.
* <p>The class is immutable and thread-safe.
*
* @author Erim Erturk (erimerturk@gmail.com)
* @version $Id$
* @since 0.13
*/
public final class HmRsStatus extends TypeSafeMatcher<Response> {

    /**
     * Matcher applied to the numeric status code of the response.
     */
    private final transient Matcher<? extends Number> matcher;

    /**
     * Ctor.
     * @param val Expected status code value
     * @since 0.17
     */
    public HmRsStatus(final int val) {
        this(CoreMatchers.equalTo(val));
    }

    /**
     * Ctor.
     * @param mtchr Matcher the status code must satisfy
     */
    public HmRsStatus(final Matcher<? extends Number> mtchr) {
        super();
        this.matcher = mtchr;
    }

    /**
     * Describe the expectation.
     * @param description Description to append to.
     */
    @Override
    public void describeTo(final Description description) {
        this.matcher.describeTo(description);
    }

    /**
     * Check the response status line.
     * @param item Response under test
     * @return True when the second token of the first head line, parsed
     *  as an integer, satisfies the matcher.
     */
    @Override
    public boolean matchesSafely(final Response item) {
        try {
            final String[] parts = item.head().iterator().next().split(" ");
            final int status = Integer.parseInt(parts[1]);
            return this.matcher.matches(status);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }
}
| mit |
Pony7/cspoker | external/plcafe/generated-src/jp/ac/kobe_u/cs/prolog/builtin/PRED_cafeteria_0.java | 2983 | package jp.ac.kobe_u.cs.prolog.builtin;
import jp.ac.kobe_u.cs.prolog.lang.IntegerTerm;
import jp.ac.kobe_u.cs.prolog.lang.Predicate;
import jp.ac.kobe_u.cs.prolog.lang.Prolog;
import jp.ac.kobe_u.cs.prolog.lang.StructureTerm;
import jp.ac.kobe_u.cs.prolog.lang.SymbolTerm;
import jp.ac.kobe_u.cs.prolog.lang.Term;
import jp.ac.kobe_u.cs.prolog.lang.VariableTerm;
/*
This file is generated by Prolog Cafe.
PLEASE DO NOT EDIT!
*/
/**
<code>cafeteria/0</code> defined in builtins.pl<br>
@author Mutsunori Banbara (banbara@kobe-u.ac.jp)
@author Naoyuki Tamura (tamura@kobe-u.ac.jp)
@version 1.0
*/
public class PRED_cafeteria_0 extends Predicate {
    // NOTE: this class is emitted by the Prolog Cafe compiler from
    // builtins.pl ("PLEASE DO NOT EDIT"); regenerate rather than
    // hand-modify the translated clause below.
    // Interned symbol/functor constants used to build the clause body.
    static SymbolTerm s1 = SymbolTerm.makeSymbol(":", 2);
    static SymbolTerm s2 = SymbolTerm.makeSymbol("jp.ac.kobe_u.cs.prolog.builtin");
    static SymbolTerm s3 = SymbolTerm.makeSymbol("$cafeteria", 1);
    static SymbolTerm s4 = SymbolTerm.makeSymbol("print_message", 2);
    static SymbolTerm s5 = SymbolTerm.makeSymbol("error");
    static SymbolTerm s6 = SymbolTerm.makeSymbol("end_of_file");
    static SymbolTerm s7 = SymbolTerm.makeSymbol("bye");
    public PRED_cafeteria_0(Predicate cont) {
        this.cont = cont;
    }
    public PRED_cafeteria_0(){}
    public void setArgument(Term[] args, Predicate cont) {
        this.cont = cont;
    }
    public int arity() { return 0; }
    public String toString() {
        return "cafeteria";
    }
    public Predicate exec(Prolog engine) {
        // cafeteria:-'$cafeteria_init',repeat,'$toplvel_loop',on_exception(A,'$cafeteria'(B),print_message(error,A)),B==end_of_file,!,nl,'$fast_write'(bye),nl
        engine.setB0();
        Term a1, a2, a3, a4, a5, a6, a7;
        Predicate p1, p2, p3, p4, p5, p6, p7, p8;
        // cafeteria:-['$get_level'(A),'$cafeteria_init',repeat,'$toplvel_loop',on_exception(B,'jp.ac.kobe_u.cs.prolog.builtin':'$cafeteria'(C),'jp.ac.kobe_u.cs.prolog.builtin':print_message(error,B)),'$equality_of_term'(C,end_of_file),'$cut'(A),nl,'$fast_write'(bye),nl]
        a1 = new VariableTerm(engine);
        //START inline expansion of $get_level(a(1))
        if (! a1.unify(new IntegerTerm(engine.B0), engine.trail)) {
            return engine.fail();
        }
        //END inline expansion
        // Build the structure terms for the on_exception/3 goal and its
        // module-qualified recovery goal.
        a2 = new VariableTerm(engine);
        a3 = new VariableTerm(engine);
        Term[] y1 = {a3};
        a4 = new StructureTerm(s3, y1);
        Term[] y2 = {s2, a4};
        a5 = new StructureTerm(s1, y2);
        Term[] y3 = {s5, a2};
        a6 = new StructureTerm(s4, y3);
        Term[] y4 = {s2, a6};
        a7 = new StructureTerm(s1, y4);
        // Chain the continuation predicates in reverse execution order:
        // the returned predicate runs first, cont runs last.
        p1 = new PRED_nl_0(cont);
        p2 = new PRED_$fast_write_1(s7, p1);
        p3 = new PRED_nl_0(p2);
        p4 = new PRED_$cut_1(a1, p3);
        p5 = new PRED_$equality_of_term_2(a3, s6, p4);
        p6 = new PRED_on_exception_3(a2, a5, a7, p5);
        p7 = new PRED_$toplvel_loop_0(p6);
        p8 = new PRED_repeat_0(p7);
        return new PRED_$cafeteria_init_0(p8);
    }
}
| gpl-2.0 |
FauxFaux/jdk9-jdk | src/jdk.internal.opt/share/classes/jdk/internal/joptsimple/MultipleArgumentsForOptionException.java | 3248 | /*
* Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* The MIT License
*
* Copyright (c) 2004-2014 Paul R. Holser, Jr.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package jdk.internal.joptsimple;
import java.util.Collection;
/**
* Thrown when asking an {@link OptionSet} for a single argument of an option when many have been specified.
*
* @author <a href="mailto:pholser@alumni.rice.edu">Paul Holser</a>
*/
class MultipleArgumentsForOptionException extends OptionException {
    private static final long serialVersionUID = -1L;

    /**
     * Creates the exception for the given offending options.
     *
     * @param options the options that received multiple arguments
     */
    MultipleArgumentsForOptionException( Collection<String> options ) {
        super( options );
    }

    @Override
    public String getMessage() {
        return String.format(
            "Found multiple arguments for option %s, but you asked for only one",
            multipleOptionMessage() );
    }
}
| gpl-2.0 |
wangxiayang/soot-infoflow | test/soot/jimple/infoflow/config/ConfigSecuriBench.java | 1554 | /*******************************************************************************
* Copyright (c) 2012 Secure Software Engineering Group at EC SPRIDE.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors: Christian Fritz, Steven Arzt, Siegfried Rasthofer, Eric
* Bodden, and others.
******************************************************************************/
package soot.jimple.infoflow.config;
import java.util.LinkedList;
import java.util.List;
import soot.jimple.infoflow.config.IInfoflowConfig;
import soot.options.Options;
public class ConfigSecuriBench implements IInfoflowConfig {

    /**
     * Configures Soot so that only a fixed set of packages is explicitly
     * included, which keeps SecuriBench analysis runtimes short, and
     * disables output file generation.
     */
    @Override
    public void setSootOptions(Options options) {
        final String[] packages = {
            "java.lang.*",
            "java.util.*",
            "java.io.*",
            "sun.misc.*",
            "java.net.*",
            "org.apache.http.*",
            "de.test.*",
            "soot.*",
            "securibench.*",
            "javax.servlet.*",
            "com.oreilly.servlet.*",
            // Intentionally excluded (kept from the original for reference):
            // "java.security.", "javax.crypto."
        };
        final List<String> included = new LinkedList<String>();
        for (final String pkg : packages) {
            included.add(pkg);
        }
        options.set_include(included);
        options.set_output_format(Options.output_format_none);
    }
}
| lgpl-2.1 |
xasx/wildfly | ee/src/main/java/org/jboss/as/ee/concurrent/deployers/injection/ContextServiceResourceReferenceProcessor.java | 2269 | /*
* JBoss, Home of Professional Open Source
* Copyright 2013, Red Hat Inc., and individual contributors as indicated
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.ee.concurrent.deployers.injection;
import org.jboss.as.ee.component.InjectionSource;
import org.jboss.as.ee.component.LookupInjectionSource;
import org.jboss.as.ee.component.deployers.EEResourceReferenceProcessor;
import org.jboss.as.ee.concurrent.deployers.EEConcurrentDefaultBindingProcessor;
import org.jboss.as.server.deployment.DeploymentUnitProcessingException;
import javax.enterprise.concurrent.ContextService;
/**
* @author Eduardo Martins
*/
public class ContextServiceResourceReferenceProcessor implements EEResourceReferenceProcessor {
    // Fully-qualified type name this processor handles.
    private static final String TYPE = ContextService.class.getName();
    // Every ContextService resource reference is bound by looking up the
    // component's default context service JNDI name.
    private static final LookupInjectionSource injectionSource = new LookupInjectionSource(EEConcurrentDefaultBindingProcessor.COMP_DEFAULT_CONTEXT_SERVICE_JNDI_NAME);
    // Stateless singleton instance; the private constructor below prevents
    // other instances from being created.
    public static final ContextServiceResourceReferenceProcessor INSTANCE = new ContextServiceResourceReferenceProcessor();
    private ContextServiceResourceReferenceProcessor() {
    }
    @Override
    public String getResourceReferenceType() {
        return TYPE;
    }
    @Override
    public InjectionSource getResourceReferenceBindingSource() throws DeploymentUnitProcessingException {
        return injectionSource;
    }
}
| lgpl-2.1 |